query: stringlengths (9 to 3.4k)
document: stringlengths (9 to 87.4k)
metadata: dict
negatives: sequencelengths (4 to 101)
negative_scores: sequencelengths (4 to 101)
document_score: stringlengths (3 to 10)
document_rank: stringclasses (102 values)
Define ZMQ connection and return socket to work with
def connect_to_worker():
    socket = context.socket(zmq.REQ)
    socket.connect("tcp://localhost:5555")
    return socket
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def meta_trader_connector():\n context = zmq.Context()\n socket = context.socket(zmq.REQ)\n socket.connect(SOCKET_LOCAL_HOST)\n return socket", "def socket(self):\n if not hasattr(self, \"_socket\"):\n # create a new one\n self._socket = self.context.socket(zmq.REQ)\n if hasattr(zmq, \"RECONNECT_IVL_MAX\"):\n self._socket.setsockopt(zmq.RECONNECT_IVL_MAX, 5000)\n\n self._set_tcp_keepalive()\n if self.master.startswith(\"tcp://[\"):\n # Hint PF type if bracket enclosed IPv6 address\n if hasattr(zmq, \"IPV6\"):\n self._socket.setsockopt(zmq.IPV6, 1)\n elif hasattr(zmq, \"IPV4ONLY\"):\n self._socket.setsockopt(zmq.IPV4ONLY, 0)\n self._socket.linger = self.linger\n if self.id_:\n self._socket.setsockopt(zmq.IDENTITY, self.id_)\n self._socket.connect(self.master)\n return self._socket", "def get_socket():\n return socket.create_connection((HOST, PORT))", "def start(self):\n zmq_uri = (\n \"{protocol}://{address}:{port}\".format(\n protocol=self.protocol, address=self.address, port=self.port\n )\n if self.port\n else \"{protocol}://{address}\".format( # noqa\n protocol=self.protocol, address=self.address\n )\n )\n log.debug(\"ZMQ URI: %s\", zmq_uri)\n self.ctx = zmq.Context()\n if hasattr(zmq, self.type):\n skt_type = getattr(zmq, self.type)\n else:\n skt_type = zmq.PULL\n self.sub = self.ctx.socket(skt_type)\n self.sub.connect(zmq_uri)\n if self.hwm is not None:\n self.sub.setsockopt(zmq.RCVHWM, self.hwm)\n if self.recvtimeout is not None:\n log.debug(\"Setting RCVTIMEO to %d\", self.recvtimeout)\n self.sub.setsockopt(zmq.RCVTIMEO, self.recvtimeout)\n if self.keepalive is not None:\n log.debug(\"Setting TCP_KEEPALIVE to %d\", self.keepalive)\n self.sub.setsockopt(zmq.TCP_KEEPALIVE, self.keepalive)\n if self.keepalive_idle is not None:\n log.debug(\"Setting TCP_KEEPALIVE_IDLE to %d\", self.keepalive_idle)\n self.sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, self.keepalive_idle)\n if self.keepalive_interval is not None:\n log.debug(\"Setting TCP_KEEPALIVE_INTVL to %d\", self.keepalive_interval)\n self.sub.setsockopt(zmq.TCP_KEEPALIVE_INTVL, self.keepalive_interval)", "def __init__(self, port=1071):\n\n context = zmq.Context()\n\n self.socket = context.socket(zmq.REP)\n self.socket.bind('tcp://*:' + str(port))\n\n self.socket.recv()", "def __init__(self, ip='127.0.0.1', port='50020'):\n self.ip = ip \n self.port = port\n self.ctx = zmq.Context()\n self.socket = zmq.Socket(self.ctx, zmq.REQ) # this is pub socket", "def setup(self):\n self.context = zmq.Context()\n self.sub_socket = self.context.socket(zmq.SUB)\n if self.filter:\n self.sub_socket.setsockopt(zmq.SUBSCRIBE, self.filter)\n self.sub_socket.connect('tcp://'+self.host+':'+str(self.com_port))\n return self", "def _create_socket():\n sock = socket.socket()\n return sock", "def build_socket(self, paradigm, topic, url):\n\n socket = None\n if paradigm == \"sub\":\n socket = self.context.socket(zmq.SUB)\n socket.connect(url)\n socket.setsockopt_string(zmq.SUBSCRIBE, topic)\n elif paradigm == \"pub\":\n socket = self.context.socket(zmq.PUB)\n socket.bind(url)\n elif paradigm == \"req\":\n socket = self.context.socket(zmq.REQ)\n socket.connect(url)\n elif paradigm == \"rep\":\n socket == self.context.socket(zmq.REP)\n socket.bind(url)\n else:\n raise Exception(\"Please provide a valid paradigm\")\n\n return socket", "def init_connexion():\n connexion = socket(AF_INET, SOCK_STREAM)\n connexion.bind((hote, port))\n\n return connexion", "def getconnection(self):\n\n # If we were able to create the affix_tcpsocket, then we attempt to call\n # 
getconnection() on the affix tcp server socket first. If we were unable \n # to create it or get a SocketWouldBlockError, we default to the basic\n # repy getconnection() call. The reason for this is to ensure that even\n # if the affixstack breaks down, we are able to revert to the default repy\n # connection.\n if self.tcpserversocket_dict['affix_tcpsocket']:\n try:\n rip, rport, sockobj = self.tcpserversocket_dict['affix_tcpsocket'].getconnection()\n return (rip, rport, AffixSocket(sockobj, self.affix_object)) \n except SocketWouldBlockError:\n pass\n\n return self.tcpserversocket_dict['repy_tcpsocket'].getconnection()", "def __get_zmq_pub(self):\n print(\"Publishing to tcp://127.0.0.1:%d channel: tweets\" % self.port)\n context = zmq.Context()\n socket = context.socket(zmq.PUB)\n socket.bind(\"tcp://127.0.0.1:%d\" % self.port)\n return socket", "def create_connection(address):\n\n sock = socks.socksocket()\n sock.connect(address)\n return sock", "def _bind_zmq_sockets(config):\n workers_socket = context.socket(zmq.ROUTER)\n manager_socket = context.socket(zmq.DEALER)\n workers_port = config[\"zmq\"][\"ports\"][\"workers\"]\n workers_socket.bind(f\"tcp://*:{workers_port}\")\n logger.info(f\"worker socket bound to port {workers_port}\")\n manager_port = config[\"zmq\"][\"ports\"][\"manager\"]\n manager_socket.bind(f\"tcp://*:{manager_port}\")\n logger.info(f\"manager socket bound to port {manager_port}\")\n return workers_socket, manager_socket", "def getconnection(self):\n # If we were able to create the shim_tcpsocket, then we attempt to call\n # getconnection() on the shim tcp server socket first. If we were unable \n # to create it or get a SocketWouldBlockError, we default to the basic\n # repy getconnection() call. The reason for this is to ensure that even\n # if the shimstack breaks down, we are able to revert to the default repy\n # connection.\n if self.tcpserversocket_dict['shim_tcpsocket']:\n try:\n rip, rport, sockobj = self.tcpserversocket_dict['shim_tcpsocket'].getconnection()\n return (rip, rport, ShimSocket(sockobj, self.shim_object)) \n except SocketWouldBlockError:\n pass\n\n return self.tcpserversocket_dict['repy_tcpsocket'].getconnection()", "def __enter__(self):\n\n self.sock.connect(self.socket_path)\n return self.sock", "def __init__(self, factory, endpoint=None, identity=None):\n self.factory = factory\n self.endpoints = []\n self.identity = identity\n self.socket = Socket(factory.context, self.socketType)\n self.queue = deque()\n self.recv_parts = []\n self.read_scheduled = None\n\n self.fd = self.socket_get(constants.FD)\n self.socket_set(constants.LINGER, factory.lingerPeriod)\n\n if not ZMQ3:\n self.socket_set(\n constants.MCAST_LOOP, int(self.allowLoopbackMulticast))\n\n self.socket_set(constants.RATE, self.multicastRate)\n\n if not ZMQ3:\n self.socket_set(constants.HWM, self.highWaterMark)\n else:\n self.socket_set(constants.SNDHWM, self.highWaterMark)\n self.socket_set(constants.RCVHWM, self.highWaterMark)\n\n if ZMQ3 and self.tcpKeepalive:\n self.socket_set(\n constants.TCP_KEEPALIVE, self.tcpKeepalive)\n self.socket_set(\n constants.TCP_KEEPALIVE_CNT, self.tcpKeepaliveCount)\n self.socket_set(\n constants.TCP_KEEPALIVE_IDLE, self.tcpKeepaliveIdle)\n self.socket_set(\n constants.TCP_KEEPALIVE_INTVL, self.tcpKeepaliveInterval)\n\n if self.identity is not None:\n self.socket_set(constants.IDENTITY, self.identity)\n\n if endpoint:\n self.addEndpoints([endpoint])\n\n self.factory.connections.add(self)\n\n self.factory.reactor.addReader(self)\n self.doRead()", 
"def ConnectSocket(self) -> Socket:", "def make_connection( hostname, port = 4663 ):\n \tconnection = socket.socket();", "def connect(self):\n assert self.listening\n assert not self.connected\n ctx = zmq.Context.instance()\n port = NODE_INFOS[self.ID].port\n self._send_socket = ctx.socket(zmq.PUB)\n self._send_socket.bind(f\"tcp://*:{port}\")\n self.connected = True", "def __connect():\n # Create socket\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n # Connect socket to server\n sock.connect((SERVER_IP, SERVER_PORT))\n\n # Return connected socket\n return sock", "def init_connect_mq(self):\n try:\n mq_username = Configs.mq_username\n mq_pwd = Configs.mq_pwd\n mq_ip_addr = Configs.mq_ip_addr\n mq_port_num = Configs.mq_port_num\n mq_vhost = Configs.mq_vhost\n\n mq_credentials = pika.PlainCredentials(mq_username, mq_pwd)\n mq_connection = pika.BlockingConnection(\n pika.ConnectionParameters(host=mq_ip_addr, port=mq_port_num, virtual_host=mq_vhost,\n credentials=mq_credentials))\n # connect to mq channel\n self.mq_channel = mq_connection.channel()\n self.mq_channel.exchange_declare(exchange=Configs.mq_exchange_name, exchange_type='topic', durable='true')\n # self.mq_channel.queue_declare(queue='test', durable=False, arguments={'x-message-ttl': 10000})\n self.mq_conn_flag = True\n print(\" ************** MQ Connect Success ************** \")\n except Exception as e:\n print(e)", "def connect(self):\n if self._zerorpc:\n return\n try:\n self._zerorpc = _ZeroRPCClient(connect_to=self._address, timeout=self._timeout)\n self._zerorpc._events.setsockopt(zmq.LINGER, 0) # when we teardown, we want to discard all messages\n except:\n self._zerorpc = None\n raise", "def connect(self):\n self.socket.connect(f'tcp://{self.ip}:{self.port}')\n self.socket.send_string('PUB_PORT')\n self.pub_port = self.socket.recv_string()\n self.pub_socket = zmq.Socket(self.ctx, zmq.PUB)\n self.pub_socket.connect(f\"tcp://{self.ip}:{self.pub_port}\")", "def _create_socket_context(self):\n # Find upper bound on ACTIME from constants and set timeout to double\n # that\n timeout = int(2000 * self.p_constants[\"ACTIME_UPPER\"])\n\n context = zmq.Context() # Create Context\n socket = context.socket(zmq.REQ) # Create socket\n # Connect to dining philosophers\n socket.connect(self.p_constants[\"SERV_ADDR\"])\n socket.RCVTIMEO = timeout # Set timeout\n\n return context, socket", "def create_socket():\n sock = socket.socket()\n sock.bind(('0.0.0.0', 3000))\n print('Listening for connection...')\n sock.listen(1)\n conn, client_address = sock.accept()\n print('EV3 connected @ %s:%s\\n' % (client_address[0], client_address[1]))\n return conn", "def __init__(self, server_addr, server_port, local_port):\n\n if local_port is None:\n self.local_addr = ('localhost', 7700) \n else:\n self.local_addr = ('localhost', local_port)\n self.server_socket = (server_addr, server_port)\n self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.connection.bind(self.local_addr)\n self.message_q = []\n self.failed = False\n \n try:\n self.connection.create_connect(server_port)\n\n except:\n sys.stderr.write('failed to connect to server \\n')\n self.failed = True\n self.connection.close()\n return None", "def __init__(self, socket, info=None): \r\n # If we are given a socket, assume it is setup\r\n if socket != None:\r\n # Is everything setup?\r\n self.connectionInit = True \r\n\r\n # Default incoming and outgoing buffer size expansion value\r\n # Defaults to 128 kilobytes\r\n self.defaultBufSize = 128*1024\r\n\r\n # This 
is the main socket\r\n self.socket = socket \r\n\r\n # This dictionary contains information about this socket\r\n # This just has some junk default values, and is filled in during init\r\n self.socketInfo = {\"localip\":\"127.0.0.1\",\"localport\":0,\"remoteip\":\"127.0.0.1\",\"remoteport\":0}\r\n\r\n # Locks, this is to make sure only one thread is reading or writing at any time\r\n self.readLock = getlock()\r\n self.writeLock = getlock()\r\n\r\n # Callback function that is passed a socket object\r\n # Maps a host (e.g. 127.0.0.1) to a dictionary of ports -> functions\r\n # So callBackFunctions[\"127.0.0.1\"][50] returns the user function for host 127.0.0.1 port 50\r\n self.callbackFunction = {}\r\n\r\n # This dictionary keeps track of sockets we are waiting to open, e.g. openconn has been called\r\n # but the partner multiplexer has not responded yet\r\n self.pendingSockets = {}\r\n\r\n # If we want a new client, what number should we request?\r\n self.nextReferenceID = 0\r\n\r\n # A dictionary that associates reference ID's to their MultiplexerSocket objects\r\n self.virtualSockets = {}\r\n self.virtualSocketsLock = getlock() \r\n \r\n # Inject or override socket info given to use\r\n if info is not None:\r\n for key, value in info.items():\r\n self.socketInfo[key] = value\r\n \r\n # Set error if one occurs in socketReader\r\n self.error = None\r\n \r\n # Callback function in case of fatal error\r\n self.errorDelegate = None\r\n \r\n # Launch event to handle the multiplexing\r\n # Wait a few seconds so that the user has a chance to set waitforconn\r\n settimer(MULTIPLEXER_START_DELAY, self._socketReader, ())\r\n \r\n else:\r\n raise ValueError, \"Must pass in a valid socket!\"", "def _init_socket_tcp(self, worker_id):\n\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.connect((self.host, self.port))\n if len(self.sockets) - 1 < worker_id:\n self.sockets.append(MessageSocket(sock))\n else:\n # socket was already initialized, MessageSocket implements a try:catch\n self.sockets[worker_id].close()\n self.sockets[worker_id] = MessageSocket(sock)", "def build_connection(\r\n self,\r\n socket,\r\n address = None,\r\n datagram = False,\r\n ssl = False\r\n ):\r\n\r\n return Connection(\r\n owner = self,\r\n socket = socket,\r\n address = address,\r\n datagram = datagram,\r\n ssl = ssl\r\n )", "def connect(self, *args, **kw):\n\n return self.get_pool(*args, **kw).connect()", "def connect(self, out_q):\n # received message queue\n self.out_q = out_q\n # try to start the socket\n try:\n # create the socket\n self.sock = socket.create_connection(\n (self.srv_ip, self.srv_port), self.srv_timeout\n )\n # set socket to non blocking\n self.sock.setblocking(0)\n # send the connect frame\n self.send_q.put(Message.ConnectFrame().encode())\n\n # update connected flag\n self.connected = True\n except ConnectionRefusedError:\n # if connection failed, exit with error\n return 1\n\n # create the TCP thread\n self.tcp_thread = threading.Thread(\n target=tcp_client.client_thread,\n daemon=False,\n args=(lambda: self.stop_thread, self.sock, self.recv_q, self.send_q,),\n )\n\n # create the message processing thread\n self.process_thread = threading.Thread(target=self.process_inc, daemon=True,)\n\n # start the threads if they aren't started\n if not self.tcp_thread.isAlive():\n # reset quit flag before starting\n self.stop_thread = False\n self.tcp_thread.start()\n if not self.process_thread.isAlive():\n self.process_thread.start()\n return 0", "def new_socket():\n s = socket.socket(socket.AF_INET, 
socket.SOCK_STREAM)\n return s", "def get_connection(self):\n if self.instrument == Instruments.SERVER_LIDAR:\n mess_start = '[LIDAR] '\n elif self.instrument == Instruments.SERVER_LSP:\n mess_start = '[LSP] '\n else:\n mess_start = 'Unknown: '\n # Wait for connection\n if self.gui_message is not None:\n self.gui_message.message(mess_start + 'Listening on port %i...' % self.port)\n else:\n print(mess_start + 'Listening on port %i...' % self.port)\n self.sock.listen(1)\n\n # Accept connection\n self.conn, self.addr = self.sock.accept()\n if self.gui_message is not None:\n self.gui_message.message(mess_start + 'Got connection from %s' % self.addr[0])\n else:\n print(mess_start + 'Got connection from %s' % self.addr[0])\n self._queue.put(self.conn)", "def main(connection_file):\n\n ctx = zmq.Context.instance()\n\n with open(connection_file) as f:\n cfg = json.loads(f.read())\n\n reg_url = cfg['interface']\n iopub_port = cfg['iopub']\n iopub_url = f\"{reg_url}:{iopub_port}\"\n\n session = Session(key=cfg['key'].encode('ascii'))\n sub = ctx.socket(zmq.SUB)\n\n # This will subscribe to all messages:\n sub.SUBSCRIBE = b''\n # replace with b'' with b'engine.1.stdout' to subscribe only to engine 1's stdout\n # 0MQ subscriptions are simple 'foo*' matches, so 'engine.1.' subscribes\n # to everything from engine 1, but there is no way to subscribe to\n # just stdout from everyone.\n # multiple calls to subscribe will add subscriptions, e.g. to subscribe to\n # engine 1's stderr and engine 2's stdout:\n # sub.SUBSCRIBE = b'engine.1.stderr'\n # sub.SUBSCRIBE = b'engine.2.stdout'\n sub.connect(iopub_url)\n while True:\n try:\n idents, msg = session.recv(sub, mode=0)\n except KeyboardInterrupt:\n return\n # ident always length 1 here\n topic = idents[0].decode('utf8', 'replace')\n if msg['msg_type'] == 'stream':\n # stdout/stderr\n # stream names are in msg['content']['name'], if you want to handle\n # them differently\n print(\"{}: {}\".format(topic, msg['content']['text']))\n elif msg['msg_type'] == 'error':\n # Python traceback\n c = msg['content']\n print(topic + ':')\n for line in c['traceback']:\n # indent lines\n print(' ' + line)\n elif msg['msg_type'] == 'error':\n # Python traceback\n c = msg['content']\n print(topic + ':')\n for line in c['traceback']:\n # indent lines\n print(' ' + line)", "async def connection_factory(*args, **kwargs):\n return (transport, protocol)", "async def connection_factory(*args, **kwargs):\n return (transport, protocol)", "def init_connections(self):\n context = zmq.Context()\n self.sock_reply = context.socket(zmq.REQ)\n self.sock_reply.connect(self.sock_consumer_url)\n # Informs prev_stage that I am ready to work\n self.sock_reply.send_pyobj(\"READY\")\n # Create and register poller\n self.poll = zmq.Poller()\n self.poll.register(self.sock_reply, zmq.POLLIN)\n return True", "def __make_connection(self):\n return self.euca.make_connection()", "def _setup_communication(self):\n state = self.ui.checkBox_comm.checkState()\n if state:\n try:\n sys.path.append(\"..\")\n from zmq_interface.gui_interface import ZmqInterface\n except ImportError as e:\n self.write_text(\"ZMQ interface failed to import. No remote control for this session.\")\n self.disable_visualizer()\n return\n try:\n ##TODO: let user specify ports\n self.com = ZmqInterface(rep_port=REPLY_PORT,\n gui_handle=self)\n except Exception as e:\n #traceback.print_exc(file=sys.stdout)\n self.write_text(\"ZMQ interface failed to start. No remote control for this session. 
Reason: %s\" % e)\n self.disable_visualizer()\n return\n self.start = self._start_session\n self.stop = self._stop_session\n self.load_config = self._load_state\n self.save_config = self._save_state\n self.com_timer = QtCore.QTimer()\n self.com_timer.timeout.connect(self._check_coms)\n self.com_timer.start(200)\n self.write_text(\"ZMQ interface set up. Reply port on %s\" % self.com.rep_port)\n self.enable_visualizer()\n else:\n if self.com:\n self.com.close()\n if self.com_timer:\n self.com_timer.stop()\n self.com = None\n self.com_timer = None\n self.enable_visualizer()\n self.write_text(\"ZMQ interface closed.\")", "def setupTcp(self):\n \tself.tcpManager = QueuedConnectionManager()\n \tself.tcpReader = QueuedConnectionReader(self.tcpManager, 0)\n \tself.tcpWriter = ConnectionWriter(self.tcpManager, 0)", "def GetSocket(self):\n self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self._sock.settimeout(socket_common.SOCKET_TIMEOUT)\n self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF,\n socket_common.SOCKET_BUFFER_SIZE)\n try:\n self._sock.connect((self.args.hostname, self.args.port))\n # Send qing.\n self._sock.sendall(socket_common.QING)\n # Receive qong.\n received_char = self._sock.recv(1)\n self.debug('Received a char: %s', received_char)\n if not received_char == socket_common.QING_RESPONSE:\n self.debug('Invalid qong: %s', received_char)\n self._sock.shutdown(socket.SHUT_RDWR)\n self._sock.close()\n return False\n return True\n except Exception:\n return False", "def setup() -> socket:\n # Create a TCP/IP socket\n server = socket(AF_INET, SOCK_STREAM)\n server.setblocking(False)\n\n def sigint_handler(sig, frame):\n \"\"\"\n Catches a SIGINT and cleans up\n \"\"\"\n print(\"[i] Caught SIGINT, cleaning up...\")\n server.close()\n exit(0)\n\n signal(SIGINT, sigint_handler)\n\n # Parse arguments\n if len(argv) != 2:\n print(f\"Usage\\n\\t{argv[0]} <port>\")\n exit(1)\n\n try:\n server_address = ('', int(argv[1]))\n print(f'starting up on port {server_address[1]}', file=stderr)\n server.bind(server_address)\n except ValueError as e:\n print(f\"Error while trying to parse arguments {e}\")\n exit(1)\n except OSError as e:\n print(f\"Error while trying to bind to {argv[1]}: {e.strerror}\")\n exit(1)\n\n # Listen for incoming connections\n server.listen(5)\n\n register_functions()\n\n return server", "def create_connection():\n # REDIS_URL is defined in .env and loaded into the environment by Honcho\n redis_url = os.getenv('REDIS_URL')\n # If it's not defined, use the Redis default\n if not redis_url:\n redis_url = 'redis://localhost:6379'\n urlparse.uses_netloc.append('redis')\n url = urlparse.urlparse(redis_url)\n return redis.StrictRedis(\n host=url.hostname,\n port=url.port,\n db=0,\n password=url.password\n )", "def __init__(self,socket_=None):\n if socket_:\n self.socket = socket_\n else:\n socket_path = None\n try: socket_path = os.environ['SSH_AUTH_SOCK']\n except: raise KeyError('Missing environment variable SSH_AUTH_SOCK')\n\n sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n sock.connect(socket_path)\n self.socket = sock\n\n self.connection = MessageConnection(self.socket)", "def socket(self):\n s = socket.socket()\n self.openSockets.append(s)\n return s", "def setup_connection(args):\n\n event_loop_group = io.EventLoopGroup(1)\n host_resolver = io.DefaultHostResolver(event_loop_group)\n client_bootstrap = io.ClientBootstrap(event_loop_group, host_resolver)\n\n if args.use_websocket is True:\n proxy_options = None\n if args.proxy_host:\n proxy_options 
= http.HttpProxyOptions(\n host_name=args.proxy_host,\n port=args.proxy_port,\n )\n\n credentials_provider = auth.AwsCredentialsProvider.new_default_chain(\n client_bootstrap,\n )\n mqtt_connection = mqtt_connection_builder.websockets_with_default_aws_signing(\n endpoint=args.endpoint,\n client_bootstrap=client_bootstrap,\n region=args.signing_region,\n credentials_provider=credentials_provider,\n websocket_proxy_options=proxy_options,\n ca_filepath=args.root_ca,\n on_connection_interrupted=on_connection_interrupted,\n on_connection_resumed=on_connection_resumed,\n client_id=args.client_id,\n clean_session=False,\n keep_alive_secs=6,\n )\n\n else:\n mqtt_connection = mqtt_connection_builder.mtls_from_path(\n endpoint=args.endpoint,\n cert_filepath=args.cert,\n pri_key_filepath=args.key,\n client_bootstrap=client_bootstrap,\n ca_filepath=args.root_ca,\n on_connection_interrupted=on_connection_interrupted,\n on_connection_resumed=on_connection_resumed,\n client_id=args.client_id,\n clean_session=False,\n keep_alive_secs=6,\n )\n\n print(\n f\"Connecting to {args.endpoint} with client ID '{args.client_id}'...\",\n )\n\n return mqtt_connection", "def get_connection(self, params):\r\n return Redis(connection_pool=self.get_or_create_connection_pool(params))", "def initzmq(self):\n\n if \"topics\" not in self.configData:\n raise Exception(\"Topics not found in %s\" % self.configPath)\n\n for topic in self.configData['topics']:\n addr = self.gen_address(topic['protocol'], topic['address'],\n topic['port'])\n socket = self.build_socket(topic['paradigm'], topic['topic'], addr)\n self.topics[topic['name']] = socket", "def _setupSocket(self):\n oldUmask = None\n if type(self._bindAddress) is str:\n # Unix socket\n sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n try:\n os.unlink(self._bindAddress)\n except OSError:\n pass\n if self._umask is not None:\n oldUmask = os.umask(self._umask)\n else:\n # INET socket\n assert type(self._bindAddress) is tuple\n assert len(self._bindAddress) == 2\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n\n sock.bind(self._bindAddress)\n sock.listen(socket.SOMAXCONN)\n\n if oldUmask is not None:\n os.umask(oldUmask)\n\n return sock", "def build_socket(self):\n sock = socket(AF_UNIX, SOCK_SEQPACKET)\n sock.connect(self.my_id)\n\n return sock", "def device(front, end):\n try:\n context = SerializingContext(1)\n # Socket facing clients\n frontend = context.socket(zmq.XREP)\n frontend.bind(\"tcp://*:\" + str(front))\n # Socket facing services\n backend = context.socket(zmq.XREQ)\n backend.bind(\"tcp://*:\" + str(end))\n\n zmq.device(zmq.QUEUE, frontend, backend)\n except Exception as e:\n print(e)\n print(\"bringing down zmq device\")\n finally:\n pass\n frontend.close()\n backend.close()\n context.term()", "def get_socket(self, version, src_addr=None):\n socket = ProxyProtocolSocket(version, src_addr=src_addr)\n socket.sendall = Mock(name='mock-sendall')\n socket.send = Mock(name='mock-send')\n socket.getpeername = Mock(name='mock-getpeername')\n socket.getsockname = Mock(name='mock-getsockname')\n return socket", "def _create_socket(self, address=('', 0)):\n # AF_INET: IPv4, SOCK_STREAM: TCP socket\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n # SO_REUSEADDR: reuse the socket in TIME_WAIT state without\n # waiting for its natural timeout to expire\n # Allows local address reuse\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n # If no timeout is set then recv() 
will block forever if\n # the connection is kept alive with no data sent\n # SO_RCVTIMEO: the timeout on receive calls in seconds\n # set using a packed binary string containing two uint32s as\n # (seconds, microseconds)\n if self.assoc.network_timeout is not None:\n timeout_seconds = int(self.assoc.network_timeout)\n timeout_microsec = int(self.assoc.network_timeout % 1 * 1000)\n sock.setsockopt(\n socket.SOL_SOCKET,\n socket.SO_RCVTIMEO,\n pack('ll', timeout_seconds, timeout_microsec)\n )\n\n sock.bind(address)\n\n self._is_connected = False\n\n return sock", "def __init__(self):\n # Create a TCP/IP socket\n self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "def connect():", "def _create_connection(self, host, port):\n return pika.BlockingConnection(pika.ConnectionParameters(host=host,\n port=port))", "def _create_socket(self, socket_type, linger_value):\n socket = zmq.Context.instance().socket(socket_type)\n socket.setsockopt(zmq.LINGER, linger_value)\n socket.set_hwm(0)\n port_number = socket.bind_to_random_port(LOCAL_ADDR)\n self.poller.register(socket, zmq.POLLIN)\n self.logger.debug(\"bind to \" + LOCAL_ADDR + ':' + str(port_number))\n return (socket, port_number)", "def _create_new_socket(self):\n sock = socket()\n sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, True)\n return sock", "def open_socket(self):\n try:\n self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,1)\n self.server.bind((self.host,self.port))\n self.server.listen(5)\n self.server.setblocking(0)\n except socket.error, (value,message):\n if self.server:\n self.server.close()\n print \"Could not open socket: \" + message\n sys.exit(1)", "def _get_connection(self, conf):\n return get_session()", "def connect_to_redis():\n return Redis(host=redis_host, port=redis_port, db=0)", "def _connect(self):\n #print(\"Connecting...\")\n self._connection = reactor.connectTCP(self.host, self.port, self.factory) #@UndefinedVariable", "def get_connection(self):\n mess_start = '[LIDAR] '\n # Wait for connection\n if self.gui_message is not None:\n self.gui_message.message(mess_start + 'Listening on port %i...' % self.port)\n else:\n print(mess_start + 'Listening on port %i...' 
% self.port)\n self.sock.listen(1)\n\n # Accept connection\n self.conn, self.addr = self.sock.accept()\n if self.gui_message is not None:\n self.gui_message.message(mess_start + 'Got connection from %s' % self.addr[0])\n else:\n print(mess_start + 'Got connection from %s' % self.addr[0])\n self._queue.put(self.conn)", "def create_amqp_connection():\n # type: () -> amqp.Connection\n cget = partial(config.CFG.get, \"rabbitmq\")\n return amqp.Connection(\n host=cget(\"host\"),\n userid=cget(\"user\"),\n password=cget(\"password\"),\n virtual_host=cget(\"vhost\"),\n )", "def make_client_manager(ip,port,auth_key):\n \n ServerQueueManager.register('get_job_q')\n ServerQueueManager.register('get_result_q')\n \n manager = ServerQueueManager(address=(ip,port),authkey=auth_key)\n manager.connect()\n \n print 'Client connected to %s:%s' % (ip, port)\n return manager", "def initialize_socket(self):\n try:\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n self.sock.bind((self.host, self.port))\n self.sock.listen(10)\n except socket.error, (value, message):\n if self.sock:\n self.sock.close()\n # TODO: LOG and provide means for graceful failure\n print \"Unable to open socket: \" + message\n print \"Error value: \" + str(value)", "def socket(self):\n return self.__socket", "def initialize_socket(self):\n try:\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n self.sock.bind((self._host, self._port))\n self.sock.listen(10)\n except socket.error, (value, message):\n if self.sock:\n self.sock.close()\n # TODO: LOG and provide means for graceful failure\n print \"Unable to open socket: \" + message\n print \"Error value: \" + str(value)", "def start(self):\n\n address = (socket.gethostbyname(self.hostname), self.port)\n logger.info(\"Connecting to %r\" % (address,))\n self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self._socket.connect(address)\n self._start_processors()\n return self", "def create_tcp_client_socket(address, port):\n\n sock = s.socket(s.AF_INET, s.SOCK_STREAM)\n\n\n return sock", "def _setup_connection(self, parameters):\n logger.info('Connecting to %s', parameters)\n return pika.BlockingConnection(parameters = parameters)", "def set_work_socket(self):\n self.analysis_id = uuid.uuid4().hex\n\n def do_set_work_socket(aw):\n aw.work_socket = cellprofiler_core.constants.worker.the_zmq_context.socket(\n zmq.REQ\n )\n aw.work_socket.connect(self.work_addr)\n aw.work_request_address = self.work_addr\n aw.current_analysis_id = self.analysis_id\n\n self.awthread.execute(do_set_work_socket, self.awthread.aw)", "def create_connection(your_group, other_group=0, other_IP='127.0.0.1', verbose=False):\r\n \r\n # init verbose display\r\n if verbose:\r\n print('\\n[--- starts connection -----------------------------------------------------\\n')\r\n \r\n # check whether there is a referee\r\n if other_group == 0:\r\n if verbose:\r\n print('** group %d connecting to referee on %s **\\n' % (your_group, other_IP))\r\n \r\n # create one socket (client only)\r\n socket_out = create_client_socket(other_IP, 42000+your_group, verbose)\r\n \r\n connection = {'in':socket_out, 'out':socket_out}\r\n \r\n if verbose:\r\n print('** group %d successfully connected to referee on %s **\\n' % (your_group, other_IP))\r\n else:\r\n if verbose:\r\n print('** group %d connecting to group %d on %s **\\n' % (your_group, other_group, other_IP))\r\n\r\n 
# create two sockets (server and client)\r\n socket_in = create_server_socket(42000+your_group, verbose)\r\n socket_out = create_client_socket(other_IP, 42000+other_group, verbose)\r\n \r\n socket_in = wait_for_connection(socket_in, verbose)\r\n \r\n connection = {'in':socket_in, 'out':socket_out}\r\n\r\n if verbose:\r\n print('** group %d successfully connected to group %d on %s **\\n' % (your_group, other_group, other_IP))\r\n \r\n # end verbose display\r\n if verbose:\r\n print('----------------------------------------------------- connection started ---]\\n')\r\n\r\n return connection", "def connect(self):\n if self._sock is None:\n if not self.proxy_host:\n host = self.host\n port = self.port\n else:\n host = self.proxy_host\n port = self.proxy_port\n \n sock = socket.create_connection((host, port), 5)\n proto = None\n\n if self.secure:\n assert not self.proxy_host, \"Using a proxy with HTTPS not yet supported.\"\n sock, proto = wrap_socket(sock, host, self.ssl_context)\n\n log.debug(\"Selected protocol: %s\", proto)\n sock = BufferedSocket(sock, self.network_buffer_size)\n\n if proto not in ('http/1.1', None):\n raise TLSUpgrade(proto, sock)\n\n self._sock = sock\n\n return", "def Client(self) -> Socket:", "def Client(self) -> Socket:", "def setup_subscriber(publisher_address):\n print(\"Subscribing to server on {}\".format(publisher_address))\n context = zmq.Context()\n socket = context.socket(zmq.SUB)\n socket.connect(publisher_address)\n filter = \"\"\n # the following two lines are for Python2 compatability\n if isinstance(filter, bytes):\n filter = filter.decode(\"ascii\")\n socket.setsockopt_string(zmq.SUBSCRIBE, filter)\n return socket", "def opensock(ipaddr,port):\n s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)\n s.connect((ipaddr,port))\n \n return s", "def conectar(self):\r\n self.socket = socket.create_connection((self.host, self.puerto))", "def listenforconnection(self, localip, localport):\n\n # We build a new stack for the branch that accepts AFFIX connections.\n self.affix_stack_branch = AffixStack(self.affix_string, self.server_name)\n\n tcpserversocket_dict = {}\n\n tcpserversocket_dict['affix_tcpsocket'] = self.affix_stack_branch.peek().listenforconnection(localip, self.tcp_listenport)\n\n tcpserversocket_dict['repy_tcpsocket'] = listenforconnection(gethostbyname(localip), localport)\n\n if _DEBUG_MODE:\n log(\"Opened up two listening sockets with Legacy Affix on '%s:%d'\\n\" % (localip, localport))\n\n return LegacyAffixTCPServerSocket(tcpserversocket_dict, self)", "def CreateAClientSocket(self, address):\r\n requestSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\r\n requestSocket.setblocking(True)\r\n requestSocket.connect(address)\r\n return requestSocket", "def _connect(self):\r\n self.sock = socket.socket()\r\n host = \"pubsub.pubnub.com\"\r\n port = 80\r\n if self.use_ssl:\r\n self.sock = ssl.wrap_socket(self.sock)\r\n port = 443\r\n self.sock.connect((host, port))\r\n self.connected = True", "def server_socket(port):\r\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\r\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\r\n sock.bind((\"localhost\", port))\r\n sock.listen(LQUEUE_SIZE)\r\n return sock", "def _connectOrBind(self, endpoints):\n for endpoint in endpoints:\n if endpoint.type == ZmqEndpointType.connect:\n self.socket.connect(endpoint.address)\n elif endpoint.type == ZmqEndpointType.bind:\n self.socket.bind(endpoint.address)\n else:\n assert False, \"Unknown endpoint type %r\" % endpoint", "def init_socket(self, 
tunnel=False, connection_type=u\"unknown\"):\n assert isinstance(tunnel, bool), type(tunnel)\n assert isinstance(connection_type, unicode), type(connection_type)\n assert self._socket is None\n port = self._socket_range[0] + self._socket_counter % (self._socket_range[1] - self._socket_range[0])\n type(self)._socket_counter += 1\n\n if port in self._socket_pool:\n logger.warning(\"reuse socket %d\", port)\n\n else:\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 870400)\n s.setblocking(False)\n s.settimeout(0.0)\n while True:\n try:\n s.bind((\"localhost\", port))\n except socket.error:\n port = self._socket_range[0] + self._socket_counter % (self._socket_range[1] - self._socket_range[0])\n type(self)._socket_counter += 1\n continue\n break\n\n self._socket_pool[port] = s\n logger.debug(\"create socket %d\", port)\n\n self._socket = self._socket_pool[port]\n self._tunnel = tunnel\n self._connection_type = connection_type\n\n return self", "def create_chaussette(self):\n ch = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n ch.connect((self.address, self.port))\n\n return ch", "def establish_connection(self):\n conninfo = self.client\n for name, default_value in items(self.default_connection_params):\n if not getattr(conninfo, name, None):\n setattr(conninfo, name, default_value)\n if conninfo.hostname == 'localhost':\n conninfo.hostname = '127.0.0.1'\n conn = self.Connection(host=conninfo.host,\n userid=conninfo.userid,\n password=conninfo.password,\n login_method=conninfo.login_method,\n virtual_host=conninfo.virtual_host,\n insist=conninfo.insist,\n ssl=conninfo.ssl,\n connect_timeout=conninfo.connect_timeout)\n conn.client = self.client\n return conn", "def init_tcp_conn(target: str, port: int) -> socket.socket:\n conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n conn.settimeout(5)\n try:\n conn.connect((target, port))\n return conn\n except socket.timeout as e:\n print(e)\n return None", "def __init__(self):\n self.connection = pika.BlockingConnection(\n pika.ConnectionParameters(host='localhost'))\n self.channel = self.connection.channel()", "def connect(self):\n try:\n self.conn = amqp.Connection(insist=True, **self.config)\n self.chan = self.conn.channel()\n self.chan.queue_declare(queue=self.RQU, durable=False, exclusive=False, auto_delete=True)\n self.chan.exchange_declare(exchange=self.EXCH, type=\"topic\", durable=True, auto_delete=False,)\n self.chan.queue_bind(queue=self.RQU, exchange=self.EXCH, routing_key=self.RKEY)\n self.chan.basic_consume(queue=self.RQU, no_ack=True, callback=self.amqpCallback, consumer_tag=\"ctag\")\n except:\n self.closeConn()", "def get_conn(args):\n\n # connect this thing\n from pyVmomi import vim\n from pyVim.connect import SmartConnect, Disconnect\n import atexit\n try:\n si = SmartConnect(host=args.host, port=args.port, user=args.user, pwd=args.password)\n except Exception as exc:\n if isinstance(exc, vim.fault.HostConnectFault) and '[SSL: CERTIFICATE_VERIFY_FAILED]' in exc.msg:\n try:\n import ssl\n default_context = ssl._create_default_https_context\n ssl._create_default_https_context = ssl._create_unverified_context\n si = SmartConnect(\n host=args.host,\n port=args.port,\n user=args.user,\n pwd=args.password,\n )\n ssl._create_default_https_context = default_context\n except Exception as exc1:\n raise Exception(exc1)\n else:\n import ssl\n context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)\n context.verify_mode = ssl.CERT_NONE\n si = SmartConnect(\n host=args.host,\n 
port=args.port,\n user=args.user,\n pwd=args.password,\n sslContext=context)\n atexit.register(Disconnect, si)\n return si", "def __init__(self, creator_socket):\n self.__socket = creator_socket\n logger.info(BUNDY_SOCKCREATOR_INIT)", "def create_connection(host, port, local_machine, LOGGER):\n\n try:\n general_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n general_socket.settimeout(100)\n\n if local_machine == \"client\":\n general_socket.connect((host, port))\n elif local_machine == \"server\":\n general_socket.bind((host, port))\n general_socket.listen(5)\n except socket.error as soe:\n LOGGER.info(soe)\n sys.exit(1)\n except Exception as exp:\n LOGGER.unknown_error(exp)\n sys.exit(1)\n else:\n if local_machine == \"client\":\n LOGGER.info(f\"Successfully Connected To [{host}:{port}]\")\n elif local_machine == \"server\":\n LOGGER.info(\"Booting Server [...]\")\n LOGGER.info(\"Server Online!\")\n\n return general_socket", "def open_client_socket():\n\n client_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)\n client_socket.bind(CLIENT_SOCKET_FILE)\n\n return client_socket", "def __init__(self, host=HOST, port=PORT):\r\n self._socket = None\r\n\r\n if host is not None:\r\n self.connect(host, port)", "def connect_to(address):\n \n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.connect(address)\n sock.setblocking(0)\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n print 'client connected to {} '.format(address)\n return sock", "def socket_client_send() -> socket.socket:\n try:\n # Create a socket object \n s = socket.socket() \n \n # Define the port on which you want to connect \n port = 55624\n host = '127.0.0.1'\n # connect to the server on local computer \n s.connect((host, port))\n \n except:\n logging.debug(\"Unexpected error\")\n raise\n\n else:\n logging.debug(\"Else clause\")\n return s", "def socket(name=None, type=SOCK_STREAM, i2p_router=(\"127.0.0.1\",7657)):\n if type == SOCK_STREAM:\n # make a streaming socket\n return _StreamSocket(i2p_router)\n raise Exception(\"cannot make socket of unknown type {}\".format(type))", "def buildProtocol(self, addr):\n return ClientConnection()" ]
[ "0.7175116", "0.70785046", "0.6988766", "0.690476", "0.67760694", "0.673751", "0.664417", "0.65480554", "0.64834565", "0.6372554", "0.62621725", "0.6188751", "0.6151901", "0.614989", "0.6100526", "0.60442936", "0.6018518", "0.60160136", "0.59882885", "0.5986249", "0.5965816", "0.5951423", "0.5935449", "0.59229046", "0.5856678", "0.58501065", "0.5824026", "0.5816859", "0.5808708", "0.57883763", "0.57838756", "0.57785994", "0.5771634", "0.57703966", "0.5768608", "0.5735224", "0.5735224", "0.572881", "0.5726579", "0.5725858", "0.5673172", "0.56726116", "0.56648433", "0.56424254", "0.56367826", "0.5636433", "0.56313175", "0.5599981", "0.5596589", "0.5594136", "0.558829", "0.5566969", "0.5563481", "0.5562604", "0.5556699", "0.55500716", "0.55369097", "0.55363995", "0.55324906", "0.5527928", "0.55205774", "0.5516708", "0.5514795", "0.55057883", "0.55051637", "0.550036", "0.5497239", "0.54945034", "0.5476934", "0.5460952", "0.5454767", "0.5453865", "0.5452838", "0.54487234", "0.54427946", "0.5439883", "0.5439883", "0.54278684", "0.5425987", "0.542281", "0.5422109", "0.5415513", "0.5411457", "0.5410778", "0.5410448", "0.5410338", "0.5408506", "0.5405368", "0.54043984", "0.53994894", "0.5393352", "0.5393337", "0.5391859", "0.53916323", "0.53836226", "0.5378872", "0.5377149", "0.5371825", "0.5369131", "0.53674334" ]
0.77242374
0
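For context: the query/document pairs in this excerpt show only the REQ (client) side of the pattern. Below is a minimal sketch of the matching REP-side worker; it is not part of the dataset, and the port (5555) and the expression-evaluating behaviour are assumptions inferred from the surrounding snippets.

import zmq

def run_worker():
    # Hypothetical REP-side worker answering the REQ snippets above.
    context = zmq.Context()
    socket = context.socket(zmq.REP)
    socket.bind("tcp://*:5555")
    while True:
        # e.g. b"2+2" arrives from the REQ client and is decoded to "2+2"
        expression = socket.recv().decode()
        try:
            # toy evaluator with builtins disabled; a real worker would parse safely
            result = str(eval(expression, {"__builtins__": {}}))
        except Exception as exc:
            result = "error: {}".format(exc)
        socket.send(result.encode())

if __name__ == "__main__":
    run_worker()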
Make request to ZMQ worker for calculation, logs results and times and returns them
def calculate(expression, socket):
    start = time.time()
    socket.send(bytes(expression))
    result = socket.recv()
    total_time = (time.time() - start)*1000.0
    total_time_str = '{0:.3f}'.format(total_time)
    logging.info('{} = {} in {}ms'.format(expression, result, total_time_str))
    return {'result': result, 'time': total_time_str}
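Taken together with connect_to_worker() from the first record, this forms a small REQ client. A hedged usage sketch follows, assuming both snippets live in the same module alongside a module-level zmq.Context named context (neither snippet defines it):

import logging
import time

import zmq

logging.basicConfig(level=logging.INFO)
context = zmq.Context()  # assumed module-level context used by connect_to_worker()

if __name__ == "__main__":
    sock = connect_to_worker()         # REQ socket from the first record's document
    outcome = calculate(b"2+2", sock)  # blocks until the worker replies
    print(outcome)                     # e.g. {'result': b'4', 'time': '0.912'}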
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main_task_handler():\n context = zmq.Context()\n\n # socket to sending messages to save\n save_sender = context.socket(zmq.PUSH)\n save_sender.connect(SAVE_PUSH_QUEUE_ADDR)\n\n c = 0\n while (True):\n # simulate some very complex computation\n (x, y) = (random.gauss(0, 1), random.gauss(0, 1))\n result = { 'unit': computer_id, 'counter': c, 'x' : x, 'y': y}\n\n # send message to sender\n save_sender.send_json(result)\n\n # take it easy\n time.sleep(1)\n\n c += 1", "def dispatcher( port, cmd, files, allworkers, start ):\n # Only the host running as dispatcher should be calling this.\n\n host = ipaddrs( socket.gethostname() )\n\n # Initialize a 0mq context\n\n context = zmq.Context()\n\n # Set up a socket to receive task requests and send replies over.\n # The linger option is set to help make sure all comunication is\n # delivered when the thread ends. The time unit is milliseconds. A\n # rigorous receive request - send reply pattern must be followed as\n # the zmq.REP socket keeps track of who sent the request and thus\n # were the reply should go. Trying to do two receives or two sends\n # in a row will cause a fatal error or hang the program. Here we\n # set up the REP side of the socket pattern.\n\n dispatcher_socket = context.socket( zmq.REP )\n dispatcher_socket.setsockopt( zmq.LINGER, 5000 )\n dispatcher_socket.bind( \"tcp://%s:%s\" % ( host, port ) )\n\n maxtime = 0\n tasknum = 0\n workers = {}\n already_notified = 0\n\n sys.stderr.write ( \"Dispatcher:Start:%d\\n\" % ( start ) )\n sys.stderr.flush()\n\n # Adjust starting task for 0 offset:\n\n start = start - 1\n tasknum = start\n lasttask = 0\n\n for f in files[start:]:\n\n request = dispatcher_socket.recv_json()\n worker = request['worker']\n workers[worker] = 1\n\n # Interpret a negative maxtime value as the time up signal.\n\n if request['maxtime'] >= 0 :\n\n if request['maxtime'] > maxtime :\n\n maxtime = request['maxtime']\n sys.stderr.write( \"Dispatcher:Maxtime:%s:%.2f:%.2f\\n\"\n % ( worker, maxtime, time.time() ) )\n sys.stderr.flush()\n\n tasknum = tasknum + 1\n task_message = { 'cmd' : cmd, 'file' : f.strip(),\n 'maxtime' : maxtime, 'tasknum' : tasknum }\n\n else:\n\n maxtime = -1\n sys.stderr.write( \"Dispatcher:Timeup:%s:%.2f\\n\"\n % ( worker, time.time() ) )\n sys.stderr.flush()\n task_message = { 'cmd' : \"FINI\", 'file' : \"None\",\n 'maxtime' : -1, 'tasknum' : tasknum }\n already_notified += 1\n lasttask = request['lasttask']\n\n dispatcher_socket.send_json( task_message )\n if maxtime < 0 :\n break\n\n # Now make sure all workers have received the shutdown message.\n\n shutdown = allworkers - already_notified\n\n if lasttask == 0 :\n # All tasks handed out before any completions received.\n # Have to assume all will complete.\n lasttask = tasknum\n\n if shutdown > 0 :\n task_message = { 'cmd' : \"FINI\", 'file' : \"None\",\n 'maxtime' : -1, 'tasknum' : tasknum }\n sys.stderr.write( \"Dispatcher:Shutdown:%d\\n\" % ( shutdown ) )\n sys.stderr.flush()\n\n # There is always a chance multiple assignments went out before\n # a timeout was received. 
All should sense time out as well,\n # so check for that when handling their final requests.\n\n for w in range( shutdown ):\n\n request = dispatcher_socket.recv_json()\n\n if request['maxtime'] < 0 :\n if request['lasttask'] < lasttask :\n lasttask = request['lasttask']\n\n dispatcher_socket.send_json( task_message )\n\n sys.stderr.write( \"Dispatcher:Last:%d\\n\" % ( lasttask ) )\n sys.stderr.flush()", "def request(self, *args, **kwargs):\n self.work_request_queue.put((args, kwargs))\n return self.result_queue.get()", "def request(self, *args, **kwargs):\n self.work_request_queue.put((args, kwargs))\n return self.result_queue.get()", "def exec_worker_query(self, endpoint, args, request):\n\n queue = self.iden\n args['endpoint'] = endpoint\n args['headers'] = dict(request.headers)\n client = Producer(queue_host=Config.get('queue', 'host'),\n queue_port=Config.getint('queue', 'port'),\n queue_name=queue)\n client.send(args)\n gen = itertools.imap(json.dumps, client.receive())\n return Response(result_generator(gen, lambda: client.metadata),\n mimetype='application/json')", "def worker(self, q, return_dict):\n pid = os.getpid()\n while True:\n qqq = q.get()\n if qqq == 'DONE':\n # print('proc =', os.getpid())\n break\n\n (idx, d) = qqq\n mol_id = d[0]\n smi = d[1]\n # print screening processing in every pout step\n if self.pout != 0:\n if idx % self.pout == self.pout-1:\n print(\"processing: \", idx+1, flush=True)\n result_dict = self.simulation_process(idx, mol_id, smi, pid)\n return_dict[idx] = result_dict", "def get_results_from_message_queue():\n message_queue.get_result_length()\n logger.info(\"get task results from task queue\")", "def run(self):\r\n counter = 0\r\n counter_increment = 1000 # Reporting frequency\r\n\r\n last_time = 0\r\n \r\n if get_param(\"record_queue_state\"):\r\n # Add event to query queue state.\r\n query_interval = 1\r\n report_queue_state = RecordQueueState(self.servers,\r\n self.stats_manager,\r\n query_interval)\r\n self.event_queue.put((query_interval, report_queue_state))\r\n while len(self.stats_manager.completed_jobs) < self.total_jobs:\r\n assert(not self.event_queue.empty())\r\n current_time, event = self.event_queue.get()\r\n \r\n #if current_time >= 3.0 * get_param(\"total_time\") / 4.0:\r\n # set_param(\"relative_weights\", \"1,2\")\r\n #elif current_time >= 1.0 * get_param(\"total_time\") / 2.0:\r\n # set_param(\"relative_weights\", \"1,4\")\r\n\r\n assert(current_time >= last_time)\r\n last_time = current_time\r\n\r\n if current_time > counter:\r\n counter = counter + counter_increment\r\n new_events = event.run(current_time)\r\n if new_events:\r\n for new_event in new_events:\r\n self.event_queue.put(new_event)\r\n \r\n self.stats_manager.output_stats()\r\n \r\n output_params()", "def _report_results(self, data_start_time: float, data_end_time: float,\n data_elapsed_time: float, user_cpu: float, sys_cpu: float, extra: dict = None):\n if not self.__is_worker:\n raise Exception(\"_report_results must not be called outside of a worker\")\n answer = {\n 'application': 'clusterbuster-json',\n 'namespace': self._namespace(),\n 'pod': self._podname(),\n 'container': self._container(),\n 'process_id': os.getpid(),\n 'pod_create_time': self.__timing_parameters['controller_crtime'] - self.__timing_parameters['controller_basetime'],\n 'pod_start_time': self.__timing_parameters['start_time'],\n 'data_start_time': data_start_time,\n 'data_end_time': data_end_time,\n 'data_elapsed_time': data_elapsed_time,\n 'user_cpu_time': user_cpu,\n 'system_cpu_time': 
sys_cpu,\n 'cpu_time': user_cpu + sys_cpu,\n 'timing_parameters': self.__timing_parameters\n }\n if isinstance(extra, dict):\n for key, val in extra.items():\n answer[key] = val\n self._timestamp(f\"Report results: {self._namespace()}, {self._podname()}, {self._container()}, {os.getpid()}\")\n try:\n answer = json.dumps(self._clean_numbers(answer))\n except Exception as exc:\n self.__fail(f\"Cannot convert results to JSON: {exc}\")\n self.__do_sync_command('RSLT', answer)\n self.__reported_results = True", "def run(self):\n computation_times=[]\n response_times=[]\n\n s=Session(self.BASE_URL)\n request=s.prepare_execution_request(code,files=['test.txt'])\n sequence=0\n with timing(computation_times):\n with timing(response_times):\n s.send_execution_request(request)\n\n done=False\n while not done:\n sleep(self.POLL_INTERVAL)\n with timing(response_times):\n r=s.output_poll(sequence)\n if len(r)==0 or 'content' not in r:\n continue\n for m in r['content']:\n sequence+=1\n if (m['msg_type']==\"extension\"\n and m['content']['msg_type']==\"files\"):\n returned_file=m['content']['content']['files'][0]\n if returned_file!='test.txt':\n print \"RETURNED FILENAME NOT CORRECT\"\n raise ValueError(\"Returned filename not correct: %s\"%returned_file)\n with timing(response_times):\n f=s.get_file(returned_file)\n if f!=FILE_RESULT_CONTENTS:\n print \"RETURNED FILE CONTENTS NOT CORRECT\"\n raise ValueError(\"Returned file contents not correct: %s\"%f)\n # if we've made it this far, we're done\n done=True\n break\n\n self.custom_timers['Computation']=computation_times\n self.custom_timers['Response']=response_times", "def zmq_qry_pub(context):\n app.logger.info(\"zmq_qry_pub started\")\n socket = context.socket(zmq.PUB)\n socket.connect('tcp://127.0.0.1:7000')\n\n timestamps = ['0810', '0811', '0812']\n idx = EquityIndex('CAC')\n\n # for ts in cycle(timestamps):\n for ts in timestamps:\n price_data = idx.components_last_px(ts)\n\n for topic, msg_data in price_data.iteritems():\n if msg_data:\n # push the code/ticker into the dict\n msg_data['ticker'] = topic\n # reformat with a colon\n msg_data['ts'] = ts[:2] + ':' + ts[2:]\n # and jsonify....\n msg = json.dumps(msg_data)\n socket.send(msg)\n\n gevent.sleep(WAIT)\n\n app.logger.info(\"zmq_qry_pub closed\")", "def callback(ch, method, properties, body):\n requestParams = json.loads(body.decode('utf-8'))\n # print(\"inside the callback\")\n arg1 = int(requestParams[0])\n arg2 = int(requestParams[1])\n result = whaleClassifier.test(arg1, arg2)\n # what this does it publish the RESULT to the exchange (as producers of content \n # cannot send stuff directly to queues, they send to exchanges and then exchanges \n # send to queues. Note Exchange='' is default exchange which then sends to the\n # queue that is listed on the ROUTING_KEY argument.)\n ch.basic_publish(exchange='', \n routing_key=results_queue, \n body=json.dumps(result),\n properties=pika.BasicProperties(\n delivery_mode = 2, # make message persistent\n ))\n # ch.basic_ack(delivery_tag=method.delivery_tag) #need this line so that we don't resend this same message again the next time\n # we start up this script. 
Which eventually clogs up memory", "def rates_celery() -> Any:\n task = app.send_task(\"tasks.tasks.benchmark_rate\", args=[\"hello\"])\n result = task.get(propagate=False)\n print(result)\n return result", "def send_rpc_result(req, result):", "async def run_mpc(self) -> Dict[str, Dict[Metric, int]]:\n pass", "async def fetch_logs(self) -> bytes:\n host = \"127.0.0.1\"\n port = 42000\n dt = datetime.now(pytz.timezone(\"Europe/Amsterdam\"))\n request = {\"id\": 1, \"method\": \"getstat\"}\n\n point = TCP4ClientEndpoint(reactor, host, port)\n try:\n connected_p = await connectProtocol(\n point, EWBFProtocol()) # type: EWBFProtocol\n response = await connected_p.make_request(request)\n except Exception as e:\n print(\"couldn't connect. {}\".format(e))\n return b\"\"\n else:\n rl = []\n t = 0 # type: int\n power = speed = accept = reject = 0\n for idx, data in enumerate(response['result']):\n rl.append(\"GPU{0}_SPEED: {1} H/s\".format(\n idx, data['speed_sps']))\n rl.append(\"GPU{0}_POWER: {1}\".format(\n idx, data['gpu_power_usage']))\n t = data['start_time']\n power += data['gpu_power_usage']\n speed += data['speed_sps']\n accept += data['accepted_shares']\n reject += data['rejected_shares']\n\n rl.append(\"Power: {0}\".format(power))\n rl.append(\"Total speed: {0} Sol/s\".format(speed))\n rl.append(\"Accepted share: {0}\".format(accept))\n rl.append(\"Rejected share: {0}\".format(reject))\n rl.append(\"Total GPUs: {0}\".format(len(response['result'])))\n rl.append(\"START_TIME: {0}\".format(int(t)))\n rl.append(\"CURRENT_TIME: {0}\".format(int(dt.timestamp())))\n rl.append(\"UPTIME: {0}\".format(int(dt.timestamp() - t)))\n return \";\".join(rl).encode('utf-8') + b\";\"", "def measure(config, result, max_retries=10):\n url = get_api_path('measurement.json')\n data = {'config': config, 'result': result}\n retries = 0\n while(retries < max_retries):\n try:\n r = requests.post(url, data=json.dumps(data, cls=HCEncoder), headers=get_headers(), timeout=30)\n return r.text\n except requests.exceptions.RequestException:\n e = sys.exc_info()[0]\n print(\"Error while calling hyperchamber - retrying \", e)\n retries += 1", "def _worker(self, results):\n keys = {\n \"test-certificate-verify\": {\n \"MD5 forced\": 2,\n \"TLSv1.1 signature in TLSv1.2 Certificate Verify\": 1,\n \"MITIGATION\": \"SLOTH\",\n },\n \"test-sig-algs\": {\"MD5 first\": 2, \"MITIGATION\": \"SLOTH\"},\n \"test-clienthello-md5\": {\n \"only-md5-rsa-signature_algorithm\": 1,\n \"unknown-signature_algorithm-numbers\": 1,\n \"MITIGATION\": \"SLOTH\",\n },\n \"test-tls13-pkcs-signature\": {\n \"rsa_pkcs1_md5 signature\": 1,\n \"MITIGATION\": \"SLOTH_MD5_SIGNATURE_TLS_1_3\",\n },\n }\n return self._obtain_results(results, keys)", "def __call__(self):\n dv = None\n #Push as many queued calls as the self.max_batch_size and the max number of paralel HTTPS sessions allow for.\n while self.active_call_count < self.parallel and self.queue:\n #Get a chunk of entries from the command queue so we can make a batch.\n subqueue = self.queue[:self.max_batch_size]\n self.queue = self.queue[self.max_batch_size:]\n #Send a single batch to the currently selected RPC node.\n dv = self._process_batch(subqueue)\n #If there is nothing left to do, there is nothing left to do\n if not self.queue and self.active_call_count == 0:\n self.log.error(\"Queue is empty and no active HTTPS-POSTs remaining.\")\n if self.stop_when_empty:\n #On request, stop reactor when queue empty while no active queries remain.\n self.reactor.stop() \n return dv", "def 
output_message_eval(info_dict):\n time_dict = {'time' : str(datetime.now().strftime(\"%H:%M:%S\"))}\n result_dict = dict(time_dict, **info_dict)\n database.results_output_queue.put(result_dict)", "def send_announcement_get_work_request(self):\n self.analysis_id = uuid.uuid4().hex\n while True:\n self.announce_socket.send_json(((self.analysis_id, self.work_addr),))\n try:\n return self.awthread.recv(self.work_socket, 250)\n except six.moves.queue.Empty:\n continue", "def ProcessRequests(self, manager):\n self._CreateSpool()\n metrics_set = self._MetricsSet(\n *(constructor(self._METRIC_PREFIX + name)\n for name, constructor in self._METRICS_CONSTRUCTORS))\n pending_requests = []\n timestamps = {}\n tick_count = 0\n next_heartbeat = time.time()\n while True:\n tick_count += 1\n if time.time() >= next_heartbeat:\n next_heartbeat = time.time() + self._HEARTBEAT_INTERVAL\n logging.debug('Starting tick number %d', tick_count)\n manager.StartTick()\n\n num_completed = 0\n for request_id, result in manager.Reap():\n num_completed += 1\n metrics_set.total_completed.increment(fields={'status': 'normal'})\n time_running = time.time() - timestamps.pop(request_id)\n metrics_set.time_running.add(time_running)\n self._CompleteRequest(request_id, result)\n\n num_added = 0\n for request_id in self._GetNewRequests():\n num_added += 1\n metrics_set.total_received.increment()\n timestamps[request_id] = time.time()\n pending_requests.append(request_id)\n\n num_aborted = 0\n for abort_id in self._GetAbortRequests():\n num_aborted += 1\n metrics_set.total_completed.increment(fields={'status': 'abort'})\n if abort_id in timestamps:\n time_to_abort = time.time() - timestamps.pop(abort_id)\n metrics_set.time_to_abort.add(time_to_abort)\n self._ProcessAbort(abort_id, pending_requests, manager)\n\n num_started = 0\n while pending_requests and manager.HasCapacity():\n num_started += 1\n request_id = pending_requests.pop(0)\n time_now = time.time()\n time_waiting = time_now - timestamps[request_id]\n metrics_set.time_waiting.add(time_waiting)\n timestamps[request_id] = time_now\n self._StartRequest(request_id, manager)\n\n if num_completed or num_added or num_aborted or num_started:\n logging.info('new: %d, started: %d, aborted: %d, completed: %d',\n num_added, num_started, num_aborted, num_completed)\n num_pending = len(pending_requests)\n num_running = len(manager)\n logging.info('pending: %d, running: %d', num_pending, num_running)\n metrics_set.task_count.set(num_pending,\n fields={'state': 'pending'})\n metrics_set.task_count.set(num_running,\n fields={'state': 'running'})\n metrics_set.ticks.increment()\n time.sleep(manager.sample_interval)", "def getResults(workers):\n results = []\n for worker in workers:\n results += worker.getResults()\n \n return results", "def run(self):\n super().run()\n echo = self.echo\n local = self.local\n remote = self.remote\n transport = Transceiver(local)\n transport.set_timeout(0.5)\n self.__result: list[Entry] = []\n\n while True:\n try:\n packet = transport.recv(None)\n params = frame.deserialize(packet)\n seq = params[\"seq\"]\n total = params[\"total\"]\n t_master = params[\"t_master\"]\n infinite = params[\"infinite\"]\n payload = params[\"payload\"]\n\n t_slave = time.time()\n if echo:\n data_send = frame.serialize(infinite, seq, total, t_master, t_slave, payload)\n transport.send(remote, data_send)\n t_ul = (t_slave - t_master) * 1000\n self.add_result(Entry(seq, total, t_ul, 0))\n print(f\"seq = {seq}, ul = {t_ul:.2f} ms, payload: {hex_str(payload)}\")\n if 
frame.is_end(params):\n print(f\"receive last packet!\")\n break\n except socket.timeout:\n continue\n except KeyboardInterrupt:\n break", "def run_results(self):\n calculation_band = self.ctx.workchain_bands.get_outputs(link_type=LinkType.CALL)[0]\n\n self.report('workchain succesfully completed'.format())\n self.out('band_parameters', calculation_band.out.output_parameters)\n self.out('bandstructure', calculation_band.out.output_band)", "def callback(ch, method, properties, body):\n print(f\" [x] Received {str(body)} kW.\")\n\n try:\n timestamp = properties.timestamp\n current_time = datetime.utcfromtimestamp(timestamp).replace(\n tzinfo=timezone.utc\n )\n except AttributeError:\n # If we don't get a timestamp from the broker, add a timestamp here.\n current_time = datetime.now().replace(tzinfo=timezone.utc)\n\n pv_photovoltaic = generate_pv_output(current_time)\n\n report_item = PVMeterReportItem(\n timestamp=current_time.isoformat(),\n pv_meter=int(body),\n pv_photovoltaic=pv_photovoltaic,\n )\n generate_report(report_item)\n\n ch.basic_ack(delivery_tag=method.delivery_tag)", "def output_result_eval(info_dict):\n time_dict = {'time' : str(datetime.now().strftime(\"%H:%M:%S\"))}\n result_dict = dict(time_dict, **info_dict)\n database.results_output_queue.put(result_dict)", "def _recv(self):\n\n self.had_recv_error = []\n self.recv_exc = {}\n results = []\n import sys;\n #only listen on workers involved in calculation.\n for worker in self.workers[:self.Nsent]:\n if worker in self.had_send_error:\n results.append(None)\n else:\n try:\n sys.stdout.flush()\n results.append(worker.recv())\n except sync_cluster.RemoteError:\n import sys\n err = sys.exc_info()[1]\n # Force the err msg (err[1]) to be a string.\n # This dimishes info content, but makes sure\n # that the sames errors are hashed correctly\n # in the dictionary. (does it?)\n err_type,err_msg, err_traceback = err\n err = err_type,str(err_msg), err_traceback\n self.had_recv_error.append(worker)\n try: self.recv_exc[err].append(worker.id)\n except: self.recv_exc[err] = [worker.id]\n results.append(None)\n except sync_cluster.RemoteCrashError:\n # Gotta be more intelligent here...\n msg = 'Error! Remote worker %d appears to have crashed.' 
\\\n % worker.id\n raise sync_cluster.RemoteCrashError,msg\n # else handle other errors\n #print\n return tuple(results)", "def measure():\n print(\"alias, timestamp, current, total, power, voltage, err_code\")\n message_str = MeasurementRequest(None).to_json()\n socket_object = UdpSocket()\n s = UDPSendThread(message_str, socket_object)\n r = UDPRecvThread(socket_object, measurement_output_parser)\n s.start()\n r.start()\n\n wait((s, r))", "def rq_worker():\n setup_experiment(log)\n with Connection(db.redis_conn):\n # right now we care about low queue for bots\n worker = Worker(\"low\")\n worker.work()", "def query_weight(self):\n # open socket connection (TCP/IP)\n with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n\n # set time out time for connections (seconds)\n s.settimeout(1)\n\n # connect to the terminal\n try:\n s.connect((self.IP_scale, self.PORT_scale))\n except Exception as e:\n print(\"Couldn't connect to the load cell when quering weight\")\n print(f\"Exception: {e}\")\n\n\n # send stable weight or, if timeout (in ms), then send dynamic weight\n request = self._fomat_request(\"SC 420\")\n s.sendall(request)\n\n # keep calling receive until the end of line symbols are received\n response = []\n while True:\n part_response = s.recv(1024).decode()\n response.append(part_response)\n \n if (\"\\r\" in part_response) or (\"\\n\" in part_response):\n break\n\n # format the reponse\n response_str = str(response).strip('[]')\n parsed_response = re.findall(r'\\b\\d+\\b', response_str)\n weight = int(parsed_response[0]) + int(parsed_response[1])/100\n\n\n return weight", "def query_job_progress():\n pass", "def process_results(refresh_count, output_dir, ext_queue, result_queue,\n num_of_workers=8):\n workers_dict = {} # keep track of worker processes\n input_queue = Queue() # asynchronously feed workers task to do \n worker_output_queue = Queue() # output queue from workers\n ack_queue = Queue()\n bug_dict = {} # dict to keep track of how many duplicates of each bug, if\n # exists\n try:\n # separate the non-ads from the ads for ease of handchecking\n os.makedirs(output_dir)\n os.makedirs(os.path.join(output_dir, 'notad'))\n except OSError:\n # Directory is created, Okay to pass\n pass\n\n for i in range(num_of_workers):\n p = Process(target=curl_worker, args=(output_dir, input_queue,\\\n worker_output_queue, i, ack_queue))\n p.start()\n workers_dict[i] = p\n # uses a pool nodesurl' workers\n # curl_worker_pool = Pool(processes=8)\n # manager = Manager()\n # curl_result_queue = manager.Queue()\n \n dl_counter = 0 # keep track of how many bugs downloaded\n while True:\n try:\n found_bugs = json.loads(ext_queue.get(block=True, timeout=2))\n except Exception:\n LOG.debug('No more bugs found, break out of queue')\n break\n\n for entry in found_bugs:\n bug = parse_buginfo(entry)\n try:\n # matched an entry in the bugdict, incr count and continue\n bug_dict[bug] += 1\n continue\n except KeyError:\n bug_dict[bug] = 1 \n\n try:\n saved_location ='Visit%d_%s%d' % (refresh_count, bug.get_name(), dl_counter)\n dl_counter += 1\n save_to_path = os.path.join( output_dir, '%s' % saved_location)\n input_queue.put((saved_location, save_to_path, bug))\n except Exception as e:\n LOG.exception('%s' % e)\n\n for i in range(num_of_workers):\n # send stop signal\n input_queue.put((\"STOP\",))\n \n stopped = 0\n while stopped < len(workers_dict):\n ack = ack_queue.get()\n p = workers_dict[ack]\n p.join(timeout=1)\n if p.is_alive():\n p.terminate()\n LOG.debug('terminating process %d' % ack)\n 
stopped += 1\n \n while not worker_output_queue.empty():\n # receive results from the worker\n cbug = worker_output_queue.get()\n # ugly code here\n bugcount = bug_dict[cbug]\n del bug_dict[cbug]\n bug_dict[cbug] = bugcount\n\n with open( os.path.join(output_dir, 'bug_dict%d.pkl' % refresh_count), 'w') as fwtr:\n cPickle.dump(bug_dict, fwtr)\n result_queue.put(bug_dict)\n return", "def GetResult(jobid, g_params): # {{{\n # retrieving result from the remote server for this job\n gen_logfile = g_params['gen_logfile']\n gen_errfile = g_params['gen_errfile']\n\n webcom.loginfo(f\"GetResult for {jobid}.\\n\", gen_logfile)\n\n path_static = g_params['path_static']\n path_result = os.path.join(path_static, 'result')\n path_cache = g_params['path_cache']\n finished_date_db = g_params['finished_date_db']\n name_server = g_params['name_server']\n\n rstdir = os.path.join(path_result, jobid)\n runjob_logfile = os.path.join(rstdir, \"runjob.log\")\n runjob_errfile = os.path.join(rstdir, \"runjob.err\")\n outpath_result = os.path.join(rstdir, jobid)\n if not os.path.exists(outpath_result):\n os.mkdir(outpath_result)\n\n remotequeue_idx_file = os.path.join(rstdir, \"remotequeue_seqindex.txt\")\n\n torun_idx_file = os.path.join(rstdir, \"torun_seqindex.txt\")\n finished_idx_file = os.path.join(rstdir, \"finished_seqindex.txt\")\n query_parafile = os.path.join(rstdir, \"query.para.txt\")\n\n query_para = {}\n if os.path.exists(query_parafile):\n content = myfunc.ReadFile(query_parafile)\n if content != \"\":\n try:\n query_para = json.loads(content)\n except ValueError:\n query_para = {}\n failed_idx_file = os.path.join(rstdir, \"failed_seqindex.txt\")\n\n starttagfile = os.path.join(rstdir, \"runjob.start\")\n cnttry_idx_file = os.path.join(rstdir, \"cntsubmittry_seqindex.txt\") # index file to keep log of tries\n tmpdir = os.path.join(rstdir, \"tmpdir\")\n finished_seq_file = os.path.join(outpath_result, \"finished_seqs.txt\")\n\n if not os.path.exists(tmpdir):\n os.mkdir(tmpdir)\n\n finished_info_list = [] # [info for finished record]\n finished_idx_list = [] # [origIndex]\n failed_idx_list = [] # [origIndex]\n resubmit_idx_list = [] # [origIndex]\n keep_queueline_list = [] # [line] still in queue\n\n cntTryDict = {}\n if os.path.exists(cnttry_idx_file):\n with open(cnttry_idx_file, 'r') as fpin:\n try:\n cntTryDict = json.load(fpin)\n except Exception:\n cntTryDict = {}\n\n # in case of missing queries, if remotequeue_idx_file is empty but the job\n # is still not finished, force recreating torun_idx_file\n if 'DEBUG' in g_params and g_params['DEBUG']:\n try:\n webcom.loginfo(\"DEBUG: %s: remotequeue_idx_file=%s, size(remotequeue_idx_file)=%d, content=\\\"%s\\\"\\n\" %(jobid, remotequeue_idx_file, os.path.getsize(remotequeue_idx_file), myfunc.ReadFile(remotequeue_idx_file)), gen_logfile)\n except Exception:\n pass\n if ((not os.path.exists(remotequeue_idx_file) or # {{{\n os.path.getsize(remotequeue_idx_file) < 1)):\n idlist1 = []\n idlist2 = []\n if os.path.exists(finished_idx_file):\n idlist1 = myfunc.ReadIDList(finished_idx_file)\n if os.path.exists(failed_idx_file):\n idlist2 = myfunc.ReadIDList(failed_idx_file)\n\n completed_idx_set = set(idlist1 + idlist2)\n\n jobinfofile = os.path.join(rstdir, \"jobinfo\")\n jobinfo = myfunc.ReadFile(jobinfofile).strip()\n jobinfolist = jobinfo.split(\"\\t\")\n if len(jobinfolist) >= 8:\n numseq = int(jobinfolist[3])\n\n if 'DEBUG' in g_params and g_params['DEBUG']:\n webcom.loginfo(\"DEBUG: len(completed_idx_set)=%d+%d=%d, numseq=%d\\n\"%(len(idlist1), 
len(idlist2), len(completed_idx_set), numseq), gen_logfile)\n\n if len(completed_idx_set) < numseq:\n all_idx_list = [str(x) for x in range(numseq)]\n torun_idx_str_list = list(set(all_idx_list)-completed_idx_set)\n for idx in torun_idx_str_list:\n try:\n cntTryDict[int(idx)] += 1\n except (ValueError, IndexError, KeyError):\n cntTryDict[int(idx)] = 1\n myfunc.WriteFile(\"\\n\".join(torun_idx_str_list)+\"\\n\", torun_idx_file, \"w\", True)\n\n if 'DEBUG' in g_params and g_params['DEBUG']:\n webcom.loginfo(\"recreate torun_idx_file: jobid = %s, numseq=%d, len(completed_idx_set)=%d, len(torun_idx_str_list)=%d\\n\"%(jobid, numseq, len(completed_idx_set), len(torun_idx_str_list)), gen_logfile)\n else:\n myfunc.WriteFile(\"\", torun_idx_file, \"w\", True)\n else:\n if 'DEBUG' in g_params and g_params['DEBUG']:\n webcom.loginfo(\"DEBUG: %s: remotequeue_idx_file %s is not empty\\n\" %(jobid, remotequeue_idx_file), gen_logfile)\n# }}}\n\n text = \"\"\n if os.path.exists(remotequeue_idx_file):\n text = myfunc.ReadFile(remotequeue_idx_file)\n if text == \"\":\n return 1\n lines = text.split(\"\\n\")\n\n nodeSet = set([])\n for i in range(len(lines)):\n line = lines[i]\n if not line or line[0] == \"#\":\n continue\n strs = line.split(\"\\t\")\n if len(strs) != 6:\n continue\n node = strs[1]\n nodeSet.add(node)\n\n myclientDict = {}\n for node in nodeSet:\n wsdl_url = f\"http://{node}/pred/api_submitseq/?wsdl\"\n try:\n myclient = Client(wsdl_url, cache=None, timeout=30)\n myclientDict[node] = myclient\n except Exception as e:\n webcom.loginfo(f\"Failed to access {wsdl_url} with errmsg {e}\", gen_logfile)\n pass\n\n for i in range(len(lines)): # {{{\n line = lines[i]\n\n if 'DEBUG' in g_params and g_params['DEBUG']:\n myfunc.WriteFile(f\"Process {line}\\n\", gen_logfile, \"a\", True)\n if not line or line[0] == \"#\":\n if 'DEBUG' in g_params and g_params['DEBUG']:\n webcom.loginfo(\"DEBUG: line empty or line[0] = '#', ignore\", gen_logfile)\n continue\n strs = line.split(\"\\t\")\n if len(strs) != 6:\n if 'DEBUG' in g_params and g_params['DEBUG']:\n webcom.loginfo(\"DEBUG: len(strs)=%d (!=6), ignore\\n\"%(len(strs)), gen_logfile)\n continue\n origIndex = int(strs[0])\n node = strs[1]\n remote_jobid = strs[2]\n description = strs[3]\n seq = strs[4]\n submit_time_epoch = float(strs[5])\n subfoldername_this_seq = f\"seq_{origIndex}\"\n outpath_this_seq = os.path.join(outpath_result, subfoldername_this_seq)\n\n try:\n myclient = myclientDict[node]\n except KeyError:\n if 'DEBUG' in g_params and g_params['DEBUG']:\n webcom.loginfo(\"DEBUG: node (%s) not found in myclientDict, ignore\"%(node), gen_logfile)\n keep_queueline_list.append(line)\n continue\n try:\n rtValue = myclient.service.checkjob(remote_jobid)\n except Exception as e:\n msg = \"checkjob(%s) at node %s failed with errmsg %s\"%(remote_jobid, node, str(e))\n webcom.loginfo(msg, gen_logfile)\n rtValue = []\n pass\n isSuccess = False\n isFinish_remote = False\n status = \"\"\n if len(rtValue) >= 1:\n ss2 = rtValue[0]\n if len(ss2) >= 3:\n status = ss2[0]\n result_url = ss2[1]\n errinfo = ss2[2]\n\n if errinfo and errinfo.find(\"does not exist\") != -1:\n if 'DEBUG' in g_params and g_params['DEBUG']:\n msg = \"Failed for remote_jobid %s with errmsg %s\"%(remote_jobid, str(errinfo))\n webcom.loginfo(msg, gen_logfile)\n\n isFinish_remote = True\n\n if status == \"Finished\": # {{{\n isFinish_remote = True\n outfile_zip = f\"{tmpdir}/{remote_jobid}.zip\"\n isRetrieveSuccess = False\n myfunc.WriteFile(\"\\tFetching result for %s/seq_%d from %s \" % 
(\n jobid, origIndex, result_url), gen_logfile, \"a\", True)\n if myfunc.IsURLExist(result_url, timeout=5):\n try:\n myfunc.urlretrieve(result_url, outfile_zip, timeout=10)\n isRetrieveSuccess = True\n myfunc.WriteFile(f\" succeeded on node {node}\\n\", gen_logfile, \"a\", True)\n except Exception as e:\n myfunc.WriteFile(\" failed with %s\\n\"%(str(e)), gen_logfile, \"a\", True)\n pass\n if os.path.exists(outfile_zip) and isRetrieveSuccess:\n cmd = [\"unzip\", outfile_zip, \"-d\", tmpdir]\n webcom.RunCmd(cmd, gen_logfile, gen_errfile)\n rst_fetched = os.path.join(tmpdir, remote_jobid)\n if name_server.lower() == \"pconsc3\":\n rst_this_seq = rst_fetched\n elif name_server.lower() == \"boctopus2\":\n rst_this_seq = os.path.join(rst_fetched, \"seq_0\", \"seq_0\")\n rst_this_seq_parent = os.path.join(rst_fetched, \"seq_0\")\n else:\n rst_this_seq = os.path.join(rst_fetched, \"seq_0\")\n\n if os.path.islink(outpath_this_seq):\n os.unlink(outpath_this_seq)\n elif os.path.exists(outpath_this_seq):\n shutil.rmtree(outpath_this_seq)\n\n if os.path.exists(rst_this_seq) and not os.path.exists(outpath_this_seq):\n cmd = [\"mv\", \"-f\", rst_this_seq, outpath_this_seq]\n webcom.RunCmd(cmd, gen_logfile, gen_errfile)\n if name_server.lower() == \"boctopus2\":\n # move also seq.fa and time.txt for boctopus2\n file1 = os.path.join(rst_this_seq_parent, \"seq.fa\")\n file2 = os.path.join(rst_this_seq_parent, \"time.txt\")\n for f in [file1, file2]:\n if os.path.exists(f):\n try:\n shutil.move(f, outpath_this_seq)\n except:\n pass\n\n fafile_this_seq = os.path.join(outpath_this_seq, \"seq.fa\")\n if webcom.IsCheckPredictionPassed(outpath_this_seq, name_server):\n # relpace the seq.fa with original description\n myfunc.WriteFile('>%s\\n%s\\n'%(description, seq), fafile_this_seq, 'w', True)\n isSuccess = True\n\n if isSuccess:\n # delete the data on the remote server\n try:\n rtValue2 = myclient.service.deletejob(remote_jobid)\n except Exception as e:\n msg = \"Failed to deletejob(%s) on node %s with errmsg %s\"%(remote_jobid, node, str(e))\n webcom.loginfo(msg, gen_logfile)\n rtValue2 = []\n pass\n\n logmsg = \"\"\n if len(rtValue2) >= 1:\n ss2 = rtValue2[0]\n if len(ss2) >= 2:\n status = ss2[0]\n errmsg = ss2[1]\n if status == \"Succeeded\":\n logmsg = \"Successfully deleted data on %s \"\\\n \"for %s\"%(node, remote_jobid)\n else:\n logmsg = \"Failed to delete data on %s for \"\\\n \"%s\\nError message:\\n%s\\n\"%(node, remote_jobid, errmsg)\n else:\n logmsg = \"Failed to call deletejob %s via WSDL on %s\\n\"%(remote_jobid, node)\n\n # delete the downloaded temporary zip file and\n # extracted file\n if os.path.exists(outfile_zip):\n os.remove(outfile_zip)\n if os.path.exists(rst_fetched):\n shutil.rmtree(rst_fetched)\n\n # create or update the md5 cache\n if name_server.lower() == \"prodres\" and query_para != {}:\n md5_key = hashlib.md5((seq+str(query_para)).encode('utf-8')).hexdigest()\n else:\n md5_key = hashlib.md5(seq.encode('utf-8')).hexdigest()\n subfoldername = md5_key[:2]\n md5_subfolder = \"%s/%s\"%(path_cache, subfoldername)\n cachedir = \"%s/%s/%s\"%(path_cache, subfoldername, md5_key)\n\n # copy the zipped folder to the cache path\n origpath = os.getcwd()\n os.chdir(outpath_result)\n shutil.copytree(\"seq_%d\"%(origIndex), md5_key)\n cmd = [\"zip\", \"-rq\", \"%s.zip\"%(md5_key), md5_key]\n webcom.RunCmd(cmd, runjob_logfile, runjob_errfile)\n if not os.path.exists(md5_subfolder):\n os.makedirs(md5_subfolder)\n shutil.move(\"%s.zip\"%(md5_key), \"%s.zip\"%(cachedir))\n shutil.rmtree(md5_key) # 
delete the temp folder named as md5 hash\n os.chdir(origpath)\n\n # Add the finished date to the database\n date_str = time.strftime(g_params['FORMAT_DATETIME'])\n MAX_TRY_INSERT_DB = 3\n cnttry = 0\n while cnttry < MAX_TRY_INSERT_DB:\n t_rv = webcom.InsertFinishDateToDB(date_str, md5_key, seq, finished_date_db)\n if t_rv == 0:\n break\n cnttry += 1\n time.sleep(random.random()/1.0)\n\n# }}}\n elif status in [\"Failed\", \"None\"]:\n # the job is failed for this sequence, try to resubmit\n isFinish_remote = True\n if 'DEBUG' in g_params and g_params['DEBUG']:\n webcom.loginfo(\"DEBUG: %s, status = %s\\n\"%(remote_jobid, status), gen_logfile)\n\n if status != \"Wait\" and not os.path.exists(starttagfile):\n webcom.WriteDateTimeTagFile(starttagfile, runjob_logfile, runjob_errfile)\n\n if isSuccess: # {{{\n time_now = time.time()\n runtime1 = time_now - submit_time_epoch # in seconds\n timefile = os.path.join(outpath_this_seq, \"time.txt\")\n runtime = webcom.ReadRuntimeFromFile(timefile, default_runtime=runtime1)\n info_finish = webcom.GetInfoFinish(\n name_server, outpath_this_seq,\n origIndex, len(seq), description,\n source_result=\"newrun\", runtime=runtime)\n finished_info_list.append(\"\\t\".join(info_finish))\n finished_idx_list.append(str(origIndex))\n # }}}\n\n # if the job is finished on the remote but the prediction is failed,\n # try resubmit a few times and if all failed, add the origIndex to the\n # failed_idx_file\n if isFinish_remote and not isSuccess:\n cnttry = 1\n try:\n cnttry = cntTryDict[int(origIndex)]\n except KeyError:\n cnttry = 1\n if cnttry < g_params['MAX_RESUBMIT']:\n resubmit_idx_list.append(str(origIndex))\n cntTryDict[int(origIndex)] = cnttry+1\n else:\n failed_idx_list.append(str(origIndex))\n\n if not isFinish_remote:\n time_in_remote_queue = time.time() - submit_time_epoch\n # for jobs queued in the remote queue more than one day (but not\n # running) delete it and try to resubmit it. 
This solved the\n # problem of dead jobs in the remote server due to server\n # rebooting)\n if (\n status != \"Running\"\n and status != \"\"\n and time_in_remote_queue > g_params['MAX_TIME_IN_REMOTE_QUEUE']):\n # delete the remote job on the remote server\n try:\n rtValue2 = myclient.service.deletejob(remote_jobid)\n except Exception as e:\n webcom.loginfo(\"Failed to run myclient.service.deletejob(%s) on node %s with msg %s\"%(remote_jobid, node, str(e)), gen_logfile)\n rtValue2 = []\n pass\n else:\n keep_queueline_list.append(line)\n# }}}\n # Finally, write log files\n finished_idx_list = list(set(finished_idx_list))\n failed_idx_list = list(set(failed_idx_list))\n resubmit_idx_list = list(set(resubmit_idx_list))\n\n if len(finished_info_list) > 0:\n myfunc.WriteFile(\"\\n\".join(finished_info_list)+\"\\n\", finished_seq_file,\n \"a\", True)\n if len(finished_idx_list) > 0:\n myfunc.WriteFile(\"\\n\".join(finished_idx_list)+\"\\n\", finished_idx_file,\n \"a\", True)\n if len(failed_idx_list) > 0:\n myfunc.WriteFile(\"\\n\".join(failed_idx_list)+\"\\n\", failed_idx_file, \"a\",\n True)\n if len(resubmit_idx_list) > 0:\n myfunc.WriteFile(\"\\n\".join(resubmit_idx_list)+\"\\n\", torun_idx_file,\n \"a\", True)\n\n if len(keep_queueline_list) > 0:\n keep_queueline_list = list(set(keep_queueline_list))\n myfunc.WriteFile(\"\\n\".join(keep_queueline_list)+\"\\n\",\n remotequeue_idx_file, \"w\", True)\n else:\n myfunc.WriteFile(\"\", remotequeue_idx_file, \"w\", True)\n\n with open(cnttry_idx_file, 'w') as fpout:\n json.dump(cntTryDict, fpout)\n\n return 0", "def _send(self):\n dmpd_response_status = json.dumps(self.status)\n drs = sizeof_fmt(len(dmpd_response_status))\n\n status_sent = False\n output_query_count = 0\n\n queues = []\n executor_keys = self.job.executor_id.split('-')\n for k in range(int(len(executor_keys)/2)):\n qname = 'lithops-{}'.format('-'.join(executor_keys[0:k*3+2]))\n queues.append(qname)\n\n while not status_sent and output_query_count < 5:\n output_query_count = output_query_count + 1\n try:\n with self._create_channel() as ch:\n for queue in queues:\n ch.basic_publish(exchange='', routing_key=queue, body=dmpd_response_status)\n logger.info(\"Execution status sent to RabbitMQ - Size: {}\".format(drs))\n status_sent = True\n except Exception:\n time.sleep(0.2)\n\n if self.status['type'] == '__end__':\n super()._send()", "async def main():\n url = \"http://127.0.0.1:7424\"\n\n pql_bitcoin_price = {\n \"name\": \"Aggregate HTTP requests\",\n \"psql_version\": \"0.1\",\n \"sources\": [\n {\n \"name\": \"Bitcoin price CoinGecko\",\n \"pipeline\": [\n {\n \"step\": \"extract\",\n \"method\": \"http.get\",\n \"uri\": \"https://api.coingecko.com/api/v3/simple/price?ids=bitcoin&vs_currencies=usd\",\n }\n ],\n },\n {\n \"name\": \"Bitcoin price Bitfinex\",\n \"pipeline\": [\n {\n \"step\": \"extract\",\n \"method\": \"http.get\",\n \"uri\": \"https://api-pub.bitfinex.com/v2/ticker/tBTCUSD\",\n }\n ],\n },\n {\n \"name\": \"Bitcoin price CoinDesk\",\n \"pipeline\": [\n {\n \"step\": \"extract\",\n \"method\": \"http.get\",\n \"uri\": \"https://api.coindesk.com/v1/bpi/currentprice.json\",\n }\n ],\n },\n ],\n \"aggregate\": {\n \"method\": \"query.sql\",\n \"params\": [\"json\", \"list\", \"json\"],\n \"query\": \"SELECT AVG(price) FROM (SELECT `bitcoin.usd` AS price FROM result_0 UNION SELECT `6` AS price FROM result_1 UNION SELECT `bpi.USD.rate_float` AS price FROM result_2)\",\n \"result\": True,\n },\n }\n\n # Construct JSON RPC request\n request = {\n \"jsonrpc\": \"2.0\",\n 
\"method\": \"execute_pql\",\n \"params\": json.dumps(pql_bitcoin_price),\n \"id\": 1,\n }\n\n async with ClientSession() as session:\n async with session.post(url + \"/rpc\", json=request) as resp:\n response = await resp.json()\n print(response)", "def get_values(self, req):\n \n rospy.loginfo(\"Requesting values for \" + str(req.component) +\" \" + str(req.field) + \" with \" + str(req.hz) + \" Hz.\")\n \n values = []\n \n retries = 50\n \n while retries > 0:\n try:\n if req.component not in self.comps.keys():\n comp = mcf.create_component(req.component)\n self.rt_proxy.subscribe_status(comp)\n self.comps[req.component] = comp \n break\n except (CannotSendRequest,ResponseNotReady):\n rospy.logwarn_throttle(1, \"Response not ready for \"+ str(req.component) +\", retrying. Retries remaining: \" + str(retries))\n retries -= 1\n time.sleep(0.10)\n \n if retries == 0:\n print \"Retries exausted, returning..\"\n return\n \n while self.comps[req.component].status.base.timestamp == 0: #wait for status to be ready\n time.sleep(0.2)\n \n rt_field_vals = m3t.get_msg_field_value(self.comps[req.component].status, req.field)\n \n if hasattr(rt_field_vals, '__len__'):\n for val in rt_field_vals:\n values.append(str(val))\n else:\n values.append(str(rt_field_vals))\n \n resp = RequestValuesResponse()\n resp.values = values\n \n if (req.component, req.field, req.datatype) not in self.publishers.keys():\n rospy.loginfo(\"Adding \"+ str(req.hz)+ \" Hz publisher thread for \" + str((req.component, req.field)) + \"...\")\n t = PublisherThread(self.scope, self.comps[req.component], req.field, req.datatype, req.hz) \n t.start()\n timeout = 0\n while not t.running and timeout <=5:\n time.sleep(1) #waiting\n timeout += 1\n if t.running:\n with self.lock: \n self.publishers[req.component, req.field, req.datatype] = t\n self.set_max_rate() \n rospy.loginfo(\"..done!\")\n else:\n rospy.logerr(\"Something went wrong, publisher not created\")\n else:\n rospy.loginfo(\"publisher already exists\")\n if req.hz != self.publishers[req.component, req.field, req.datatype].rate:\n rospy.loginfo(\"adjusting rate...\")\n self.publishers[req.component, req.field, req.datatype].set_hz(req.hz)\n self.set_max_rate()\n \n return resp", "def thingspeak_job():\n try:\n # init thingspeak data dict\n data_d = dict()\n # populate it with valid redis values\n try:\n r_value = int(rdb.get('cvm16:good'))\n if r_value not in [0, 1]:\n raise ValueError\n data_d['field1'] = r_value\n except (TypeError, ValueError):\n logging.warning(f'unable to process redis key \"cvm16:good\" value must be 0 or 1')\n try:\n data_d['field2'] = round(float(rdb.get('cvm16:wobbe')), 2)\n except (TypeError, ValueError):\n logging.warning(f'unable to process redis key \"cvm16:wobbe\" value must be a valid float')\n # add API key\n data_d['api_key'] = API_KEY\n # do thingspeak request\n resp = urlopen(f'https://api.thingspeak.com/update?{urlencode(data_d)}', timeout=5.0)\n # print request status\n try:\n # HTTP request return current entry ID or 0 on error\n entry_id = int(resp.read())\n if entry_id < 1:\n raise ValueError\n logging.info(f'successful data update to entry ID: {entry_id}')\n except ValueError:\n logging.warning(f'unable to update data')\n except redis.RedisError as e:\n logging.error(f'redis error occur: {e!r}')\n except urllib.error.URLError as e:\n logging.error(f'network error occur: {e!r}')", "def poller():\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n\n output = loop.run_until_complete(get_results(loop))\n o = 
open('data.pkl', 'wb')\n\n pickle.dump(output, o)", "def cpu_monitor():\n\n architecture = uname()[4] # This will return 'x86_64', 'aarc64' (for 64 bit arm), etc.\n if(not architecture in SUPPORTED_ARCHITECTURES):\n logerr(\"This architecture doesn't appear to be one that is supported. Consider adding it and openning\" + \n \" a pull request on github!\")\n exit()\n\n init_node(\"bthere_cpu_monitor\", anonymous=False)\n pub = Publisher(\"/bthere/cpu_data\", CPUData, queue_size=10)\n loginfo(\"Outputting to /bthere/cpu_data\")\n \n #update period should to be somewhat small since the cpu load data is average since you last checked,\n #a slower update rate will be less accurate for bursty loads and may introduce more lag than expected\n #if a load is added later in the time between updates for example.\n update_period = get_param('~update_period', 1.0)\n rate = Rate(1/float(update_period))\n loginfo(\"Publishing rate: \" + str(1.0/update_period) + \" hz\")\n\n quiet = get_param(\"~quiet\", False)\n\n #since the temperature-getting seems likely to be failure prone, try it once to check.\n able_to_get_temps = True\n\n if(isnan(get_cpu_temps(architecture)[0])):\n logwarn(\"Unable to get CPU temperatures\")\n able_to_get_temps = False\n \n last_cpu_times = []\n while not is_shutdown():\n data = CPUData()\n gated_loginfo(quiet, \"------ CPU Data ------\")\n if(able_to_get_temps):\n # If temperature data can be collected, add it to the CPUData to be published and log\n package_temp, core_temps = get_cpu_temps(architecture)\n gated_loginfo(quiet, \"CPU Package temp. (C): \" + str(package_temp))\n data.package_temp = package_temp\n if(len(core_temps) > 0):\n for core in range(len(core_temps)):\n gated_loginfo(quiet, \"CPU Core \" + str(core) + \"temp. (C): \" + str(core_temps[core]))\n data.core_temps = core_temps\n else:\n # If the data is unavailable just publish NaN and log\n gated_loginfo(quiet, \"CPU temperatures unavailable\")\n data.package_temp = float(\"NaN\")\n data.core_temps = [float(\"NaN\")]\n if(len(last_cpu_times) == 0): \n # If this hasn't been initialized, we just won't publish this info yet and init.\n # last_cpu_times can't just be initialized before the loop because it should (for consistency) be the same\n # time between data collections and getting the initial data before the loop would make the time between\n # data collections small and potentially make the data misleading due to burst loads.\n last_cpu_times = get_load_data()\n gated_loginfo(quiet, \"CPU load not yet available\")\n else:\n overall_load, per_cores, last_cpu_times = get_cpu_load(last_cpu_times)\n gated_loginfo(quiet, \"Overall CPU load: \" + str(round(overall_load * 100, 1)) + \"%\")\n data.overall_cpu_load = overall_load\n if(len(per_cores) > 0):\n for core in range(len(per_cores)):\n gated_loginfo(quiet, \"CPU core \" + str(core) + \" load: \" + str(round(per_cores[core] * 100, 1)) + \n \"%\")\n data.core_loads = per_cores\n \n # Add the header information:\n header = Header(stamp=Time.now())\n # The frame_id property seems to be to do with tf frames of reference. That isn't useful for something like \n # this, so just leave it empty. 
(this might be the wrong way to do this, but I don't have any other info.)\n # The sequential id is apparently set by the publisher.\n data.header = header\n \n pub.publish(data)\n rate.sleep()", "async def run(self):\n\n result = {'start_timestamp': time()}\n\n ping = await create_subprocess_exec(\"/bin/ping\",\n self.device,\n \"-c \" + self.count,\n \"-l \" + self.preload,\n \"-W \" + self.timeout,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout = await ping.stdout.read()\n stderr = await ping.stderr.read()\n\n if stderr:\n result['error'] = stderr.decode('utf-8').strip()\n else:\n lines = stdout.splitlines()\n second_last_line = lines[len(lines)-2].decode('utf-8').split()\n last_line = lines[len(lines)-1].decode('utf-8')\n if not last_line:\n # if the last line is empty\n # none of the packets arrived\n result['error'] = 'Host unreachable'\n result['packets_sent'] = second_last_line[0]\n result['packets_recv'] = second_last_line[3]\n else:\n last_line = last_line.split()[3].split('/')\n result['min'] = last_line[0]\n result['avg'] = last_line[1]\n result['max'] = last_line[2]\n result['mdev'] = last_line[3]\n result['packets_sent'] = second_last_line[0]\n result['packets_recv'] = second_last_line[3]\n\n result['end_timestamp'] = time()\n self.results.append(result)\n return result", "def receive_workers_output(node_request_map, results_list, free_nodes, command, idle_nodes):\n\n if dist.get_backend() == \"nccl\": # Async\n for node, req in node_request_map:\n if req.is_completed():\n result = build_metrics_dict(node) if command == COMMAND_TESTVAL else build_grads_dict(node)\n results_list.append(result)\n free_nodes.append(node)\n node_request_map.remove((node,req))\n print_rank(f\"Finished releasing the nodes {free_nodes}\", loglevel=logging.DEBUG)\n else: # Sync\n print_rank(f\"Waiting for a workers\", loglevel=logging.DEBUG)\n gather_objects = [(None,None,None) for i in range(size())]\n output = [None for _ in gather_objects]\n dist.all_gather_object(output, gather_objects[rank()])\n print_rank(f\" All workers have finished ... 
taking the remaining clients {len(output)}\", loglevel=logging.DEBUG)\n output = [e for i,e in enumerate(output) if i not in idle_nodes ] # Cleanup for idle workers\n results_list = results_list + output[1:]\n free_nodes = list(range(1, size()))\n \n return node_request_map, results_list, free_nodes", "def _start_request_worker(\n q_in: Queue,\n q_out: Queue,\n data_type: str,\n sc: SeldonClient,\n method: str,\n retries: int,\n batch_id: str,\n payload_type: str,\n batch_interval: float,\n) -> None:\n while True:\n start_time = time.time()\n input_data = q_in.get()\n if method == \"predict\":\n # If we have a batch size > 1 then we wish to use the method for sending multiple predictions\n # as a single request and split the response into multiple responses.\n if len(input_data) > 1:\n str_outputs = _send_batch_predict_multi_request(\n input_data,\n data_type,\n sc,\n retries,\n batch_id,\n payload_type,\n )\n for str_output in str_outputs:\n q_out.put(str_output)\n else:\n batch_idx, batch_instance_id, input_raw = input_data[0]\n str_output = _send_batch_predict(\n batch_idx,\n batch_instance_id,\n input_raw,\n data_type,\n sc,\n retries,\n batch_id,\n )\n q_out.put(str_output)\n elif method == \"feedback\":\n batch_idx, batch_instance_id, input_raw = input_data[0]\n str_output = _send_batch_feedback(\n batch_idx,\n batch_instance_id,\n input_raw,\n data_type,\n sc,\n retries,\n batch_id,\n )\n q_out.put(str_output)\n\n # Setting time interval before the task is marked as done\n if batch_interval > 0:\n remaining_interval = batch_interval - (time.time() - start_time)\n if remaining_interval > 0:\n time.sleep(remaining_interval)\n\n # Mark task as done in the queue to add space for new tasks\n q_in.task_done()", "def main(waiting_time = seconds):\n key = 'YOUR-API-KEY-HERE'\n messages = []\n #my_client = WebSocketClient(STOCKS_CLUSTER, key, my_custom_process_message(messages))\n my_client = WebSocketClient(CRYPTO_CLUSTER, key, my_custom_process_message(messages))\n #my_client = WebSocketClient(FOREX_CLUSTER, key, my_custom_process_message(messages))\n my_client.run_async()\n\n #my_client.subscribe(\"T.MSFT\", \"T.AAPL\", \"T.AMD\", \"T.NVDA\") # Stock data\n my_client.subscribe(\"XA.BTC-USD\", \"XA.ETH-USD\", \"XA.LTC-USD\") # Crypto data\n #my_client.subscribe(\"C.USD/CNH\", \"C.USD/EUR\") # Forex data\n time.sleep(waiting_time)\n\n my_client.close_connection()\n\n df = pd.DataFrame(messages)\n\n df = df.iloc[5:, 0].to_frame()\n df.columns = [\"data\"]\n df[\"data\"] = df[\"data\"].astype(\"str\")\n\n df = pd.json_normalize(df[\"data\"].apply(lambda x : dict(eval(x))))\n\n # export data to sqlite\n with sqlite3.connect(\"realtime_crypto.sqlite\") as conn:\n df.to_sql(\"data\", con=conn, if_exists=\"append\", index=False)", "def execute( self ):\n\n # This allows dynamic changing of the throughput timescale\n self.throughputTimescale = self.am_getOption( 'ThroughputTimescale', 3600 )\n self.throughputTimescale = 60 * 60 * 1\n #print 'ThroughputTimescale:',self.throughputTimescale\n ######################################################################################\n #\n # Obtain information on the current state of the channel queues\n #\n\n res = self.TransferDB.getChannelQueues()\n if not res['OK']:\n errStr = \"ReplicationScheduler._execute: Failed to get channel queues from TransferDB.\"\n gLogger.error( errStr, res['Message'] )\n return S_OK()\n if not res['Value']:\n gLogger.info( \"ReplicationScheduler._execute: No active channels found for replication.\" )\n return S_OK()\n 
channels = res['Value']\n\n res = self.TransferDB.getChannelObservedThroughput( self.throughputTimescale )\n if not res['OK']:\n errStr = \"ReplicationScheduler._execute: Failed to get observed throughput from TransferDB.\"\n gLogger.error( errStr, res['Message'] )\n return S_OK()\n if not res['Value']:\n gLogger.info( \"ReplicationScheduler._execute: No active channels found for replication.\" )\n return S_OK()\n bandwidths = res['Value']\n\n self.strategyHandler = StrategyHandler( bandwidths, channels, self.section )\n\n processedRequests = []\n requestsPresent = True\n while requestsPresent:\n\n ######################################################################################\n #\n # The first step is to obtain a transfer request from the RequestDB which should be scheduled.\n #\n\n gLogger.info( \"ReplicationScheduler._execute: Contacting RequestDB for suitable requests.\" )\n res = self.RequestDB.getRequest( 'transfer' )\n if not res['OK']:\n gLogger.error( \"ReplicationScheduler._execute: Failed to get a request list from RequestDB.\", res['Message'] )\n continue\n if not res['Value']:\n gLogger.info( \"ReplicationScheduler._execute: No requests found in RequestDB.\" )\n requestsPresent = False\n return S_OK()\n requestString = res['Value']['RequestString']\n requestName = res['Value']['RequestName']\n gLogger.info( \"ReplicationScheduler._execute: Obtained Request %s from RequestDB.\" % ( requestName ) )\n\n ######################################################################################\n #\n # The request must then be parsed to obtain the sub-requests, their attributes and files.\n #\n\n logStr = 'ReplicationScheduler._execute: Parsing Request %s.' % ( requestName )\n gLogger.info( logStr )\n oRequest = RequestContainer( requestString )\n res = oRequest.getAttribute( 'RequestID' )\n if not res['OK']:\n gLogger.error( 'ReplicationScheduler._execute: Failed to get requestID.', res['Message'] )\n return S_ERROR( 'ReplicationScheduler._execute: Failed to get number of sub-requests.' )\n requestID = res['Value']\n if requestID in processedRequests:\n # Break the loop once we have iterated once over all requests\n res = self.RequestDB.updateRequest( requestName, requestString )\n if not res['OK']:\n gLogger.error( \"Failed to update request\", \"%s %s\" % ( requestName, res['Message'] ) )\n return S_OK()\n\n processedRequests.append( requestID )\n\n res = oRequest.getNumSubRequests( 'transfer' )\n if not res['OK']:\n gLogger.error( 'ReplicationScheduler._execute: Failed to get number of sub-requests.', res['Message'] )\n return S_ERROR( 'ReplicationScheduler._execute: Failed to get number of sub-requests.' 
)\n numberRequests = res['Value']\n gLogger.info( \"ReplicationScheduler._execute: '%s' found with %s sub-requests.\" % ( requestName, numberRequests ) )\n\n ######################################################################################\n #\n # The important request attributes are the source and target SEs.\n #\n\n for ind in range( numberRequests ):\n gLogger.info( \"ReplicationScheduler._execute: Treating sub-request %s from '%s'.\" % ( ind, requestName ) )\n attributes = oRequest.getSubRequestAttributes( ind, 'transfer' )['Value']\n if attributes['Status'] != 'Waiting':\n # If the sub-request is already in terminal state\n gLogger.info( \"ReplicationScheduler._execute: Sub-request %s is status '%s' and not to be executed.\" % ( ind, attributes['Status'] ) )\n continue\n\n sourceSE = attributes['SourceSE']\n targetSE = attributes['TargetSE']\n \"\"\" This section should go in the transfer request class \"\"\"\n if type( targetSE ) in types.StringTypes:\n if re.search( ',', targetSE ):\n targetSEs = targetSE.split( ',' )\n else:\n targetSEs = [targetSE]\n \"\"\"----------------------------------------------------- \"\"\"\n operation = attributes['Operation']\n reqRepStrategy = None\n if operation in self.strategyHandler.getSupportedStrategies():\n reqRepStrategy = operation\n\n ######################################################################################\n #\n # Then obtain the file attribute of interest are the LFN and FileID\n #\n\n res = oRequest.getSubRequestFiles( ind, 'transfer' )\n if not res['OK']:\n gLogger.error( 'ReplicationScheduler._execute: Failed to obtain sub-request files.' , res['Message'] )\n continue\n files = res['Value']\n gLogger.info( \"ReplicationScheduler._execute: Sub-request %s found with %s files.\" % ( ind, len( files ) ) )\n filesDict = {}\n for file in files:\n lfn = file['LFN']\n if file['Status'] != 'Waiting':\n gLogger.debug( \"ReplicationScheduler._execute: %s will not be scheduled because it is %s.\" % ( lfn, file['Status'] ) )\n else:\n fileID = file['FileID']\n filesDict[lfn] = fileID\n if not filesDict:\n gLogger.info( \"ReplicationScheduler._execute: No Waiting files found for request\" )\n continue\n notSched = len( files ) - len( filesDict )\n if notSched:\n gLogger.info( \"ReplicationScheduler._execute: %d files found not Waiting\" % notSched )\n\n ######################################################################################\n #\n # Now obtain replica information for the files associated to the sub-request.\n #\n\n lfns = filesDict.keys()\n gLogger.info( \"ReplicationScheduler._execute: Obtaining replica information for %d sub-request files.\" % len( lfns ) )\n res = self.rm.getCatalogReplicas( lfns )\n if not res['OK']:\n gLogger.error( \"ReplicationScheduler._execute: Failed to get replica information.\", res['Message'] )\n continue\n for lfn, failure in res['Value']['Failed'].items():\n gLogger.error( \"ReplicationScheduler._execute: Failed to get replicas.\", '%s: %s' % ( lfn, failure ) )\n replicas = res['Value']['Successful']\n if not replicas.keys():\n gLogger.error( \"ReplicationScheduler._execute: Failed to get replica information for all files.\" )\n continue\n\n ######################################################################################\n #\n # Now obtain the file sizes for the files associated to the sub-request.\n #\n\n lfns = replicas.keys()\n gLogger.info( \"ReplicationScheduler._execute: Obtaining file sizes for %d sub-request files.\" % len( lfns ) )\n res = self.rm.getCatalogFileMetadata( 
lfns )\n if not res['OK']:\n gLogger.error( \"ReplicationScheduler._execute: Failed to get file size information.\", res['Message'] )\n continue\n for lfn, failure in res['Value']['Failed'].items():\n gLogger.error( 'ReplicationScheduler._execute: Failed to get file size.', '%s: %s' % ( lfn, failure ) )\n metadata = res['Value']['Successful']\n if not metadata.keys():\n gLogger.error( \"ReplicationScheduler._execute: Failed to get metadata for all files.\" )\n continue\n\n ######################################################################################\n #\n # For each LFN determine the replication tree\n #\n\n for lfn in sortList( metadata.keys() ):\n fileSize = metadata[lfn]['Size']\n lfnReps = replicas[lfn]\n fileID = filesDict[lfn]\n\n targets = []\n for targetSE in targetSEs:\n if targetSE in lfnReps.keys():\n gLogger.debug( \"ReplicationScheduler.execute: %s already present at %s.\" % ( lfn, targetSE ) )\n else:\n targets.append( targetSE )\n if not targets:\n gLogger.info( \"ReplicationScheduler.execute: %s present at all targets.\" % lfn )\n oRequest.setSubRequestFileAttributeValue( ind, 'transfer', lfn, 'Status', 'Done' )\n continue\n if not lfnReps:\n gLogger.error( \"ReplicationScheduler.execute: The file has no replicas.\", lfn )\n continue\n res = self.strategyHandler.determineReplicationTree( sourceSE, targets, lfnReps, fileSize, strategy = reqRepStrategy )\n if not res['OK']:\n gLogger.error( \"ReplicationScheduler.execute: Failed to determine replication tree.\", res['Message'] )\n continue\n tree = res['Value']\n\n ######################################################################################\n #\n # For each item in the replication tree obtain the source and target SURLS\n #\n\n for channelID, dict in tree.items():\n gLogger.info( \"ReplicationScheduler.execute: processing for channel %d %s\" % ( channelID, str( dict ) ) )\n hopSourceSE = dict['SourceSE']\n hopDestSE = dict['DestSE']\n hopAncestor = dict['Ancestor']\n\n # Get the sourceSURL\n if hopAncestor:\n status = 'Waiting%s' % ( hopAncestor )\n res = self.obtainLFNSURL( hopSourceSE, lfn )\n if not res['OK']:\n errStr = res['Message']\n gLogger.error( errStr )\n return S_ERROR( errStr )\n sourceSURL = res['Value']\n else:\n status = 'Waiting'\n res = self.resolvePFNSURL( hopSourceSE, lfnReps[hopSourceSE] )\n if not res['OK']:\n sourceSURL = lfnReps[hopSourceSE]\n else:\n sourceSURL = res['Value']\n\n # Get the targetSURL\n res = self.obtainLFNSURL( hopDestSE, lfn )\n if not res['OK']:\n errStr = res['Message']\n gLogger.error( errStr )\n return S_ERROR( errStr )\n targetSURL = res['Value']\n\n ######################################################################################\n #\n # For each item in the replication tree add the file to the channel\n #\n res = self.TransferDB.addFileToChannel( channelID, fileID, hopSourceSE, sourceSURL, hopDestSE, targetSURL, fileSize, fileStatus = status )\n if not res['OK']:\n errStr = res['Message']\n gLogger.error( \"ReplicationScheduler._execute: Failed to add File to Channel.\" , \"%s %s\" % ( fileID, channelID ) )\n return S_ERROR( errStr )\n res = self.TransferDB.addFileRegistration( channelID, fileID, lfn, targetSURL, hopDestSE )\n if not res['OK']:\n errStr = res['Message']\n gLogger.error( \"ReplicationScheduler._execute: Failed to add File registration.\" , \"%s %s\" % ( fileID, channelID ) )\n result = self.TransferDB.removeFileFromChannel( channelID, fileID )\n if not result['OK']:\n errStr += result['Message']\n gLogger.error( 
\"ReplicationScheduler._execute: Failed to remove File.\" , \"%s %s\" % ( fileID, channelID ) )\n return S_ERROR( errStr )\n oRequest.setSubRequestFileAttributeValue( ind, 'transfer', lfn, 'Status', 'Scheduled' )\n res = self.TransferDB.addReplicationTree( fileID, tree )\n\n if oRequest.isSubRequestEmpty( ind, 'transfer' )['Value']:\n oRequest.setSubRequestStatus( ind, 'transfer', 'Scheduled' )\n\n ################################################\n # Generate the new request string after operation\n requestString = oRequest.toXML()['Value']\n res = self.RequestDB.updateRequest( requestName, requestString )\n if not res['OK']:\n gLogger.error( \"ReplicationScheduler._execute: Failed to update request\", \"%s %s\" % ( requestName, res['Message'] ) )", "def run(self):\n result = self.Take_Voltage_Measurement()\n self.result_queue.put(result)", "def run(self):\n result = self.Take_Voltage_Measurement()\n self.result_queue.put(result)", "def compute_metrics(self, results: list) -> dict:", "def after_download_results(msg, config, checklist):\n next_workers = {\n \"crash\": [],\n \"failure nowcast\": [],\n \"failure nowcast-green\": [],\n \"failure forecast\": [],\n \"failure forecast2\": [],\n \"failure hindcast\": [],\n \"failure nowcast-agrif\": [],\n \"success nowcast\": [],\n \"success nowcast-green\": [],\n \"success forecast\": [],\n \"success forecast2\": [],\n \"success hindcast\": [],\n \"success nowcast-agrif\": [],\n }\n if msg.type.startswith(\"success\"):\n run_type = msg.type.split()[1]\n run_date = msg.payload[run_type][\"run date\"]\n if run_type == \"hindcast\":\n next_workers[msg.type].append(\n NextWorker(\"nowcast.workers.split_results\", args=[run_type, run_date])\n )\n return next_workers[msg.type]\n if run_type.startswith(\"nowcast\"):\n next_workers[msg.type].append(\n NextWorker(\n \"nowcast.workers.make_plots\",\n args=[\"nemo\", run_type, \"research\", \"--run-date\", run_date],\n )\n )\n if run_type == \"nowcast\":\n compare_date = arrow.get(run_date).shift(days=-1).format(\"YYYY-MM-DD\")\n next_workers[msg.type].extend(\n [\n NextWorker(\n \"nowcast.workers.make_plots\",\n args=[\n \"nemo\",\n run_type,\n \"comparison\",\n \"--run-date\",\n compare_date,\n ],\n ),\n NextWorker(\n \"nowcast.workers.make_CHS_currents_file\",\n args=[run_type, \"--run-date\", run_date],\n ),\n ]\n )\n if run_type == \"nowcast-green\":\n next_workers[msg.type].append(\n NextWorker(\"nowcast.workers.ping_erddap\", args=[\"nowcast-green\"])\n )\n if arrow.get(run_date).shift(days=+1).day == 1:\n yyyymmm = arrow.get(run_date).format(\"YYYY-MMM\").lower()\n next_workers[msg.type].append(\n NextWorker(\n \"nowcast.workers.archive_tarball\",\n args=[\"nowcast-green\", yyyymmm, \"graham-dtn\"],\n )\n )\n return next_workers[msg.type]\n if run_type.startswith(\"forecast\"):\n next_workers[msg.type].append(\n NextWorker(\n \"nowcast.workers.make_CHS_currents_file\",\n args=[run_type, \"--run-date\", run_date],\n )\n )\n return next_workers[msg.type]", "def exec_worker(self, endpoint, args, request):\n raise NotImplementedError", "def remote_getResult(i=None):", "def do_work(self):", "def worker_func(worker_id, w2t_m_queue, events, t2w_d_manager):\n average_iteration_time = 0\n worker_nn = create_neural_network()\n iteration_time = time.time()\n for i in range(ITERATIONS):\n data_point = create_data_point(worker_nn)\n events[\"Workers_can_proceed\"].clear()\n w2t_m_queue.put(data_point)\n # Signal trainer that this worker has placed its data point this iteration\n events[worker_id].set()\n 
average_iteration_time += (time.time() - iteration_time)\n # Have worker wait until trainer is done processing this iteration\n events[\"Workers_can_proceed\"].wait()\n iteration_time = time.time()\n # Obtain data trainer has placed into shared manager (data is weights of network)\n shared_data = t2w_d_manager[0]\n worker_nn.set_weights(shared_data)\n\n average_iteration_time /= ITERATIONS\n print(\"Worker \" + str(worker_id) + \" average put time: \" + str.format('{0:.6f}', (average_iteration_time*1000)) + \"ms\")", "def summation_worker(group_name):\n proxy = Proxy(\n group_name=group_name,\n component_type=\"sum_worker\",\n expected_peers={\"master\": 1},\n )\n\n # Nonrecurring receive the message from the proxy.\n msg = proxy.receive_once()\n print(f\"{proxy.name} received message from {msg.source}. the payload is {msg.body}.\")\n\n if msg.tag == \"job\":\n replied_payload = sum(msg.body)\n proxy.reply(message=msg, tag=\"sum\", body=replied_payload)", "def watch_worker():\n global isFinished, ComputationTime, UsersOnline, N, CurrentIndex, Count\n received_data = request.json\n Count += received_data\n if CurrentIndex >= N:\n print 'Second text got ', Count, ' entries of given row.'\n print '--- %s seconds ---' % (time.time() - ComputationTime)\n isFinished = True\n return jsonify(current_row='', current_part='')\n else:\n print 'Current row in second text: ', CurrentIndex / 256\n part = SecondText[CurrentIndex:CurrentIndex+1023]\n CurrentIndex += 1024\n return jsonify(current_row=Row, current_part=part)", "def pull():\n context = zmq.Context()\n zmq_socket = context.socket(zmq.PULL)\n zmq_socket.connect('tcp://127.0.0.1:5560')\n\n cache = {}\n cache['trig'] = defaultdict(int)\n cache['trig:nhit'] = defaultdict(int)\n cache['trig:charge'] = defaultdict(int)\n cache['trig:fecd'] = defaultdict(int)\n cache['DISPATCH_ORPHANS'] = 0\n cache_set = {}\n cache_set['trig'] = {}\n cache_nhit = defaultdict(list)\n cache_pmt = defaultdict(int)\n\n then = None\n\n while True:\n try:\n now, record = zmq_socket.recv_pyobj(zmq.NOBLOCK)\n except zmq.ZMQError:\n record = None\n now = int(time.time())\n\n if then is None:\n then = now\n\n if now > then:\n # flush data to redis every second\n flush_cache(cache, cache_set, cache_nhit, cache_pmt, then)\n\n p = redis.pipeline()\n for interval in INTERVALS:\n key = 'ts:%i:%i:heartbeat' % (interval, then//interval)\n p.setex(key,1,interval*EXPIRE)\n p.execute()\n\n cache['trig'].clear()\n cache['trig:nhit'].clear()\n cache['trig:charge'].clear()\n cache['trig:fecd'].clear()\n cache['DISPATCH_ORPHANS'] = 0\n cache_set['trig'].clear()\n cache_nhit.clear()\n cache_pmt.clear()\n then = now\n\n if record is None:\n # nothing to process, take a break\n time.sleep(0.01)\n continue\n\n record_id, data = unpack_header(record)\n\n if record_id != RECORD_IDS['PMT_RECORD']:\n continue\n\n pmt_gen = unpack_pmt_record(data)\n\n pev = next(pmt_gen)\n\n run = pev.RunNumber\n gtid = pev.TriggerCardData.BcGT\n nhit = pev.NPmtHit\n subrun = pev.DaqStatus # seriously :)\n trig = unpack_trigger_type(pev)\n\n nhit = 0\n\n qhs_sum = 0\n for pmt in pmt_gen:\n id = 16*32*pmt.CrateID + 32*pmt.BoardID + pmt.ChannelID\n cache_pmt[id] += 1\n\n if pmt.CrateID == 17 and pmt.BoardID == 15:\n if pmt.ChannelID == 4:\n cache['trig:fecd']['N16'] += 1\n if pmt.ChannelID == 17:\n cache['trig:fecd']['20LB'] += 1\n elif pmt.ChannelID == 19:\n cache['trig:fecd']['20'] += 1\n elif pmt.ChannelID == 28:\n cache['trig:fecd']['100L'] += 1\n elif pmt.ChannelID == 29:\n cache['trig:fecd']['100M'] += 1\n elif 
pmt.ChannelID == 31:\n cache['trig:fecd']['100H'] += 1\n\n # don't include FEC/D in qhs sum and nhit\n continue\n\n nhit += 1\n\n qhs_sum += pmt.Qhs\n\n if trig == 0:\n # orphan\n cache['DISPATCH_ORPHANS'] += nhit\n continue\n\n cache_nhit['all'].append(nhit)\n\n cache['trig']['TOTAL'] += 1\n cache['trig:nhit']['TOTAL'] += nhit\n cache['trig:charge']['TOTAL'] += qhs_sum\n cache_set['trig']['run'] = run\n cache_set['trig']['subrun'] = subrun\n cache_set['trig']['gtid'] = gtid\n\n for i, name in enumerate(TRIGGER_NAMES):\n if trig & (1 << i):\n cache['trig'][i] += 1\n cache['trig:nhit'][i] += nhit\n cache['trig:charge'][i] += qhs_sum\n cache_nhit[name].append(nhit)", "def calculate():\n print 'AJAX getJSON request to get current data and begin compute on new client'\n global isBegin, CurrentIndex, isFinished, ComputationTime, N, Row, RowIndex\n if isBegin:\n generate_random_texts(N)\n ComputationTime = time.time()\n RowIndex = (RowN - 1) * 256\n Row = FirstText[RowIndex:RowIndex + 255]\n part = SecondText[CurrentIndex:CurrentIndex+1024]\n isBegin = False\n else:\n Row = FirstText[RowIndex:RowIndex + 255]\n part = SecondText[CurrentIndex:CurrentIndex+1024]\n if isFinished:\n Row = ''\n part = ''\n return jsonify(current_row=Row, current_part=part)", "def get_chartdata():\n callback = bottle.request.query.get('callback')\n y_axis = bottle.request.query.get('y_axis').strip()\n w_acts = [\"action='%s'\" % act for act in bottle.request.query.get('actions').strip().split(',')]\n w_acts = 'AND (%s)' % ' OR '.join(w_acts) if w_acts else ''\n f_value = 'AVG(latency)' if y_axis.startswith('avg') else 'COUNT(timestamp)'\n atomic = 1 if y_axis in ['aops', 'avgl'] else 0\n\n db_conn = tools.get_db_conn('%s.db' % bottle.request.query.test_run_id)\n sql = 'SELECT test_run_status, timestamp_started, timestamp_completed FROM info LIMIT 1'\n status, started, finished = tools.db_query(db_conn, sql)[1][0]\n progress = int(float(finished) - float(started)) if finished \\\n else int(tools.get_timestamp() - float(started))\n\n sql = 'SELECT substr(timestamp, 0, 11), code, %s FROM recs ' % f_value + \\\n 'WHERE atomic=%s %s GROUP BY code, substr(timestamp, 0, 11) ' % (atomic, w_acts) + \\\n 'ORDER BY id DESC LIMIT 3600' # last 1 hour activity\n\n result = tools.db_query(db_conn, sql)[1] if finished else tools.db_query(db_conn, sql)[1][:-1]\n result = list(reversed(result))\n results = {str(abs(int(item[0]) - int(float(started)))):\n {'failed': 0, 'passed': 0, 'incomplete': 0} for item in result}\n for item in result: # item[0] - timestamp, item[1] - code (None if incomplete), item[2] - value\n timestamp = str(int(item[0]) - int(float(started)))\n value = item[2] or 0\n results[timestamp]['failed'] += value if item[1] and item[1] != 200 else 0\n results[timestamp]['passed'] += value if item[1] == 200 else 0\n results[timestamp]['incomplete'] += value if item[1] == None else 0\n results = [{'timestamp': key, 'failed': value['failed'], 'passed': value['passed'],\n 'incomplete': value['incomplete']} for key, value in results.items()]\n result = {bottle.request.query.slave: results, 'status': status,\n 'started': started, 'finished': finished or '(not finished)', 'progress': progress}\n return '{0}({1})'.format(callback, result)", "def get(self):\n return dumps(AQ.queue()), 200", "def _send(self):\n executor_id = self.status['executor_id']\n job_id = self.status['job_id']\n call_id = self.status['call_id']\n act_id = self.status['activation_id']\n\n if self.status['type'] == '__init__':\n init_key = 
create_init_key(executor_id, job_id, call_id, act_id)\n self.internal_storage.put_data(init_key, '')\n\n elif self.status['type'] == '__end__':\n status_key = create_status_key(executor_id, job_id, call_id)\n dmpd_response_status = json.dumps(self.status)\n drs = sizeof_fmt(len(dmpd_response_status))\n logger.info(\"Storing execution stats - Size: {}\".format(drs))\n self.internal_storage.put_data(status_key, dmpd_response_status)", "def return_results(self):\n\n caching_info = f'INFO: cache_source of BS calc node: {self.ctx.BS_run.get_cache_source}'\n self.report(caching_info)\n\n if not self.ctx.BS_run.is_finished_ok:\n self.ctx.successful = False\n error = f'ERROR BS calculation failed somehow it is in state {self.ctx.BS_run.process_state}'\n self.report(error)\n self.ctx.errors.append(error)\n return self.exit_codes.ERROR_BS_CALC_FAILED # pylint: disable=no-member\n\n # create dict to store results of workflow output\n outputnode_dict = {}\n outputnode_dict['workflow_name'] = self.__class__.__name__\n outputnode_dict['workflow_version'] = self._wf_version\n outputnode_dict['withmpi'] = self.ctx.withmpi\n outputnode_dict['resources'] = self.ctx.resources\n outputnode_dict['max_wallclock_seconds'] = self.ctx.max_wallclock_seconds\n outputnode_dict['queue_name'] = self.ctx.queue\n outputnode_dict['custom_scheduler_commands'] = self.ctx.custom_scheduler_commands\n outputnode_dict['BS_params'] = self.ctx.BS_params_dict\n if 'kpoints' not in self.inputs:\n outputnode_dict['structure_type'] = self.ctx.structure_data\n outputnode_dict['BS_wf_description'] = self.ctx.description_wf\n outputnode_dict['BS_wf_label'] = self.ctx.label_wf\n try:\n outputnode_dict['nspin'] = self.ctx.BS_run.res.nspin\n except:\n error = 'ERROR: nspin not extracted'\n self.report(error)\n self.ctx.successful = False\n self.ctx.errors.append(error)\n outputnode_dict['successful'] = self.ctx.successful\n outputnode_dict['list_of_errors'] = self.ctx.errors\n\n # create output node with data-provenance\n outputnode = Dict(outputnode_dict)\n outputnode.label = 'kkr_BS_wc_results'\n outputnode.description = 'Contains the info of the WC'\n\n self.report('INFO: create Banstructure results nodes')\n try:\n self.report(\n f'INFO: create Bandstructure results nodes. 
BS calc retrieved node={self.ctx.BS_run.outputs.retrieved}'\n )\n has_BS_run = True\n except AttributeError as e:\n self.report('ERROR: No Bandstructure calc retrieved node found')\n self.report(f'Caught AttributeError {e}')\n return self.exit_codes.ERROR_BS_CALC_FAILED # pylint: disable=no-member\n\n if has_BS_run:\n BS_retrieved = self.ctx.BS_run.outputs.retrieved\n\n ef = self.ctx.fermi_energy # in Ry unit\n kpoints = self.ctx.BS_kpoints\n\n # Here outdict dictionary has been created to set the Dict result_wf, BS_data\n # to the output(spec.output) of the wf\n outdict = {}\n if has_BS_run:\n ArraData = parse_BS_data(BS_retrieved, Float(ef), kpoints)\n outdict['BS_Data'] = ArraData['BS_Data']\n\n # link to the BS output nodes\n link_nodes = outdict.copy()\n\n outdict['results_wf'] = create_out_dict_node(outputnode, **link_nodes)\n\n # create links to output nodes\n for link_name, node in outdict.items():\n self.out(link_name, node)\n\n self.report('INFO: done with BS_workflow!\\n')", "def main():\n # initialize zeromq subscriber\n context = zmq.Context()\n subscriber = context.socket(zmq.SUB)\n\n # subscribe to all events\n subscriber.setsockopt(zmq.SUBSCRIBE, b\"\")\n subscriber.setsockopt(zmq.RCVTIMEO, 600000)\n\n file = open(outputFile, \"a\")\n events = 0\n\n while True:\n try:\n subscriber.connect(relayURI)\n\n while True:\n msg = subscriber.recv()\n\n if not msg:\n subscriber.disconnect(relayURI)\n break\n\n msg = zlib.decompress(msg)\n json = simplejson.loads(msg)\n if should_save(json):\n file.write(msg.decode(\"utf8\") + \"\\n\")\n events += 1\n print(f\"Received {events} events\", end=\"\\r\")\n except zmq.ZMQError as e:\n print(f\"ZMQ error: {e}\")\n subscriber.disconnect(relayURI)\n time.sleep(5)", "def run(self):\n import pxp # want to have fresh instance ???!\n ans = {}\n pu.mdbg.log(\"PXPWORKER started ------>cmd:{} cookie:{}\".format(self.cmd, self.cookie))\n if (self.cmd=='tagset'):\n ans = pxp.tagset(self.param)\n elif (self.cmd=='tagmod'): \n ans = pxp.tagmod(self.param)\n elif (self.cmd=='teleset'): \n ans = pxp.teleset(self.param)\n elif (self.cmd=='sumset'): \n ans = pxp.sumset(self.param)\n elif (self.cmd=='sumget'): \n ans = pxp.sumget(self.param)\n elif (self.cmd=='rec_stat'):\n self.rec_stat = {}\n self.rec_stat = self.pxp_rec_stat()\n self.done = True\n ans['cookie'] = self.cookie\n pu.mdbg.log(\"PXPHeler finished ------>cmd:{} param:{}\".format(self.cmd, self.param))\n return\n \n ans['cookie'] = self.cookie\n #resp = pu.disk.sockSendWait(\"AUP|\"+json.dumps(ans), addnewline=True, timeout=1)\n pu.disk.sockSendWait(\"AUP|\"+json.dumps(ans), addnewline=True)\n self.done = True\n pu.mdbg.log(\"PXPHeler finished ------>cmd:{} cookie:{}\".format(self.cmd, self.cookie))", "def work(self):\n while(True):\n debug_print = False\n if debug_print == True:\n start = time.time()\n\n flow = self.gauge.read_flow_from_dp()\n self.flw_q.put([time.time(), flow])\n\n if debug_print == True:\n flow_time = time.time()\n print(f\"Runtime - calc_flow: {1000 * (flow_time - start):.0f} ms\")\n\n pressure = self.gauge.read_pressure()\n self.prs_q.put([time.time(), pressure])\n\n if debug_print == True:\n pressure_time = time.time()\n print(f\"Runtime - read_pressure: {1000 * (pressure_time - flow_time):.0f} ms\")\n \n if debug_print == True:\n runtime = time.time() - start\n print(f\"Runtime - total: {1000 * runtime:.1f} ms\")\n print(f\"Frequency: {1 / runtime:.1f} Hz\")", "def main():\n model = sys.argv[1]\n maxfun = int(sys.argv[2])\n n_threads = int(sys.argv[3])\n\n # Validate 
input.\n assert maxfun >= 0, \"Maximum number of function evaluations cannot be negative.\"\n assert n_threads >= 1 or n_threads == -1, (\n \"Use -1 to impose no restrictions on maximum number of threads or choose a \"\n \"number higher than zero.\"\n )\n\n # Set number of threads\n os.environ[\"NUMBA_NUM_THREADS\"] = f\"{n_threads}\"\n os.environ[\"MKL_NUM_THREADS\"] = f\"{n_threads}\"\n os.environ[\"OMP_NUM_THREADS\"] = f\"{n_threads}\"\n os.environ[\"NUMEXPR_NUM_THREADS\"] = f\"{n_threads}\"\n\n # Late import of respy to ensure that environment variables are read by Numpy, etc..\n import respy as rp\n\n # Get model\n params, options = rp.get_example_model(model, with_data=False)\n\n # Simulate the data\n simulate = rp.get_simulate_func(params, options)\n df = simulate(params)\n\n # Get the criterion function and the parameter vector.\n crit_func = rp.get_log_like_func(params, options, df)\n\n # Run the estimation\n start = dt.datetime.now()\n\n for _ in range(maxfun):\n crit_func(params)\n\n end = dt.datetime.now()\n\n # Aggregate information\n output = {\n \"model\": model,\n \"maxfun\": maxfun,\n \"n_threads\": n_threads,\n \"start\": str(start),\n \"end\": str(end),\n \"duration\": str(end - start),\n }\n\n # Save time to file\n with open(\"scalability_results.txt\", \"a+\") as file:\n file.write(json.dumps(output))\n file.write(\"\\n\")", "def write_reps():\n global maxcount\n\n # Process the next set.\n for count, req in enumerate(req_queue):\n\n rep = {}\n\n if req['type'] == 'check':\n\n if req['body']['type'] == 'standard':\n job_set = standard_job_set(req['body']['msg'])\n run_num = req['body']['msg']['run']\n rep['result'] = check_job_set(run_num, job_set)\n\n else:\n rep['result'] = False\n\n if req['type'] == 'var':\n\n if req['body'] == 'nworkers':\n rep['result'] = nworkers\n\n elif req['body'] == 'njobs':\n rep['result'] = len(job_queue)\n\n else:\n rep['result'] = None\n\n if req['type'] == 'status':\n\n status = ['gm2-nmr-crunchd is running as process %i' % os.getpid()]\n jobs = ' '.join(['(%s, %s)' % (w[1], w[2]['name']) for w in workers])\n status.append(' running jobs: %s' % jobs)\n status.append(' queue has %i jobs' % len(job_queue))\n\n req['result'] = '\\n'.join(status)\n\n try:\n status_sck.send_json(rep)\n\n except(zmq.error.ZMQError):\n pass\n\n req_queue.remove(req)\n\n if count > maxcount:\n break", "def _process_worker(call_queue, result_queue):\n while True:\n call_item = call_queue.get(block=True)\n if call_item is None:\n # Wake up queue management thread\n result_queue.put(os.getpid())\n return\n try:\n r = call_item.fn(*call_item.args, **call_item.kwargs)\n except BaseException as e:\n exc = _ExceptionWithTraceback(e, e.__traceback__)\n result_queue.put(_ResultItem(call_item.work_id, exception=exc))\n logger.exception(e) # 主要是直接显示错误。\n else:\n result_queue.put(_ResultItem(call_item.work_id,\n result=r))", "def ceilometer_callback(self, ch, method, properties, body):\n payload = json.loads(body)\n try:\n message_body = json.loads(payload['oslo.message'])\n samples = message_body['args']['data']\n #print \"--------------------------------------------------\"\n self.pool.spawn_n(self.zabbix_sender.consume_samples,samples)\n except Exception,e:\n log.warn(str(e))", "def populate_miner_results(self, miner, elapsed_secs, worker, algo, pool, chips,\n temps, fan_speeds, hashrate_ghs5s, hw_error_rate):\n\n self.set_result(miner, 'uptimes', timedelta(seconds=elapsed_secs))\n self.set_result(miner, 'temperatures', temps) # all temps\n self.set_result(miner, 
'temperature', math_functions.get_average_of_list(temps, 0))\n self.set_result(miner, 'hw_error_rates', hw_error_rate)\n\n self.populate_chip_results(miner, chips)\n self.populate_fan_results(miner, fan_speeds)\n\n call_pool_apis = True\n call_hashrate_calcs = True\n\n try:\n if sys._unit_tests_running:\n call_pool_apis = sys._unit_tests_MINERMEDIC_CALL_POOL_APIS\n call_hashrate_calcs = sys._unit_tests_MINERMEDIC_CALL_HASHRATE_CALCS\n except:\n pass\n\n if call_pool_apis:\n try:\n self.__process_miner_pool_apis(miner, worker, algo, pool)\n except Exception as ex:\n logger.error(\"Problem while processing POOL APIS, pool='{}', error='{}'\".format(pool, ex))\n\n if call_hashrate_calcs:\n try:\n self.__process_hashrate_calculations(miner, hashrate_ghs5s, algo)\n except Exception as ex:\n logger.error(\"Problem while processing Hashrate Calcs, \"\n \"algo='{}', hashrate='{}', error='{}'\".format(algo, hashrate_ghs5s, ex))", "def _fetch_daily_internal(delta, swarming, process, endpoint, start, end, state,\n tags, parallel):\n out = {}\n with threading_utils.ThreadPool(1, parallel, 0) as pool:\n while start < end:\n cmd = _get_cmd(swarming, endpoint, _get_epoch(start),\n _get_epoch(start + delta), state, tags)\n pool.add_task(0, _run_json, start.strftime('%Y-%m-%d'), process, cmd)\n start += delta\n for k, v in pool.iter_results():\n sys.stdout.write('.')\n sys.stdout.flush()\n out[k] = v\n print('')\n return out", "def main():\n output_queue = Queue()\n\n out_list = list()\n\n logging.info('Retrieving news...')\n download = DownloadNewsWorker(output_queue)\n download.retrieve_news()\n\n while not output_queue.empty():\n item = output_queue.get()\n out_list.append(item)\n\n return out_list", "def run_step_rpc_blackbox_optimizer(config,\n current_input,\n blackbox_optimizer,\n workers,\n iteration,\n best_input,\n best_core_hyperparameters,\n best_value,\n log_bool=False):\n requests, proposed_perturbations, proposed_dnas = propose_queries_blackbox_optimizer(\n config, current_input, blackbox_optimizer, iteration)\n\n finished_dnas = []\n\n results = []\n futures = []\n num_worker_failures = 0\n for stub, request in zip(workers, requests):\n future = stub.EvaluateBlackboxInput.future(request)\n futures.append(future)\n start = time.time()\n for w, future in enumerate(futures):\n try:\n results.append(future.result())\n finished_dnas.append(proposed_dnas[w])\n except: # pylint: disable=bare-except\n print('RPC error caught in collecting results !')\n num_worker_failures += 1\n logging.info('worker failed ID: ')\n logging.info(w)\n\n end = time.time()\n print('Responds received in time: [in sec].')\n print(end - start)\n sys.stdout.flush()\n if float(num_worker_failures) > config.critical * float(len(workers)):\n return [False, current_input]\n\n if log_bool:\n logging_data = {\n 'best_value': best_value,\n 'iteration': iteration,\n 'best_input': best_input,\n 'best_core_hyperparameters': best_core_hyperparameters\n }\n else:\n logging_data = None\n\n return run_step_blackbox_optimizer(config, current_input, blackbox_optimizer,\n proposed_perturbations, finished_dnas,\n results, logging_data)", "def main(config):\n all_procs = []\n result_q = mp.Queue()\n for seed in config[\"seeds\"]:\n config[\"seed\"] = seed\n p = mp.Process(target=run, args=(config, result_q))\n p.start()\n all_procs.append(p)\n\n for p in all_procs:\n p.join()\n\n all_returns = [result_q.get() for p in all_procs]\n mean_per_restart = np.mean(all_returns, axis=1)\n mean, std = np.mean(mean_per_restart), 
np.std(mean_per_restart)\n\n # Return the negative since we're minimizing the function\n # .. the metric minimized is suggested from Duan et al. (2016)\n return -(mean - std)", "def run(self):\n #use subprocess for your bindings when develop a new functionality\n fulldate = datetime.now().strftime(\"%A, %d. %B %Y %I:%M%p\")\n\n hours = datetime.now().strftime(\"%I\")\n minutes = datetime.now().strftime(\"%I\")\n\n if self.req_from == 'jabber':\n response = {'request': self.request\n ,'text' : fulldate\n ,'jmsg' : fulldate\n ,'continue' : 0\n ,'type':'response' }\n\n if self.req_from == 'julius':\n response = {'request': self.request\n ,'say': \"IT'S, %d O'CLOCK AND %d MINUTES\" % ( int(hours), int(minutes))\n ,'text' : fulldate\n ,'continue' : 0\n ,'type' : 'response' }\n\n return response\n #import subprocess\n #s = subprocess.Popen(['ffmpeg', '-i', speech, flac ] , stderr=subprocess.STDOUT, stdout=subprocess.PIPE).communicate()[0]", "def request_measures(self):\n question = jbus.jbus_generator_read(self.node, 0x1060, 48)\n answer = self.send_request(question)\n #print(\"Question: [\", question, \"]\")\n #print(\"Answer: [\",answer,\"] LEN: \",len(answer))\n result = self.verify_response(question, answer)\n if (result == \"OK\"):\n return answer\n else:\n self.error=result\n return False", "def cmd_calculation():", "def _process_results(self, *args, **kwargs): # noqa: E501\n # Lock before processing results to prevent conflicts\n if not self._acquire_pr_lock():\n return\n\n # Get the future instance\n future = self.future\n\n # Skip if no Future\n if not future:\n return\n\n # Skip processing results if forget\n if self.forget:\n # Clean up client\n self.client.close()\n return\n\n try:\n # Get results using the client\n result = self.client.gather(future)\n except Exception as e:\n # Tell scheduler to stop sending updates about this key\n self.client.set_metadata(self.key, False)\n # Clean up client\n self.client.close()\n result = e\n log.warning(\n 'Exception encountered when retrieving results: \"{}\"'.format(str(e))\n )\n\n # Tell scheduler to stop sending updates about this key\n self.client.set_metadata(self.key, False)\n\n # Handle custom process results function\n if self.process_results_function:\n # Get the process_results_function in TethysJob and call it with the result retrived\n try:\n result = self.process_results_function(result)\n except Exception as e:\n log.exception(\"Process Results Function Error\")\n self._status = \"ERR\"\n result = str(e)\n\n # Serialize the result\n try:\n self.result = result\n except Exception:\n log.exception(\"Results Serialization Error\")\n self._status = \"ERR\"\n else:\n self._status = \"COM\" if self._status != \"ERR\" else \"ERR\"\n\n # Erase the key to avoid problem with dask recycle key\n self.key = \"\"\n\n # save the results or status in the database\n self.save()\n\n # Clean up client\n self.client.close()\n\n if client_fire_forget:\n client_fire_forget.close()\n\n self._release_pr_lock()", "def get_data(self):\n# epoch_from = 1301641200\n# epoch_to = epoch_from+60*60*24\n \"\"\"\n letting runs finish for 2 more hours\n ideally, want to make this a function of time from schedule plus some\n variation, like 1 hour just in case\n \"\"\" \n# epoch_to_adjusted = epoch_to + 7200\n conn = self.connect_to_mongo()\n db = conn.muni\n \n# print \"==== Collecting starting runs from %s to %s ====\"\\\n# % (str(time.ctime(epoch_from)), str(time.ctime(epoch_to)))\n \"\"\"\n > db.location.find({loc:{$within:{$center:[[37.80241, -122.4364],\n 
0.01]}}})\n > db.location.find({loc:{$within:{$center:[[37.76048, -122.38895],\n 0.002]}}})\n \"\"\"\n bus_ids = db.location.find({'route':self.route_name}).distinct(\"bus_id\")\n for bus_id in bus_ids:\n c_start = db.location.find({\"bus_id\":bus_id,\n \"loc\":{\"$within\":{\"$center\":[[self.start_lat, self.start_lon],\n self.start_prec]}}\n }).sort(\"cur_time\", DESCENDING)\n self.massage_start_data(c_start)\n \"\"\"\n TODO: the end point seems to be too nice to Muni, need to tighten\n the circle a little\n \"\"\"\n c_end = db.location.find({\"bus_id\":bus_id,\n \"loc\":{\"$within\":{\"$center\":[[self.end_lat, self.end_lon],\n self.end_prec]}}\n }).sort(\"cur_time\", ASCENDING)\n self.massage_end_data(c_end)\n if self.to_log:\n print self.start_bus_ids_to_times\n print self.end_bus_ids_to_times\n \n return self.start_bus_ids_to_times, self.end_bus_ids_to_times", "def responsetime(conn):\n c = conn.cursor()\n results = c.execute(\"\"\"\n select finished.time, event.time, finished.time - event.time as responsetime\n from event\n left join (select time, task_id from event where type_id=\"\"\" + taskid(\"run_task\") + \"\"\") as finished\n on event.task_id = finished.task_id\n where event.type_id=\"\"\" + taskid(\"add_task\")).fetchall()\n\n results = np.matrix(results, dtype=float)\n runtimes = results[:,2]\n\n nones = runtimes == np.array(None)\n (finished, nofinish) = (runtimes[~np.isnan(runtimes).all(axis=1)], runtimes[np.isnan(runtimes).any(axis=1)])\n\n return {\n \"completion\":{\n \"finished\":finished.size,\n \"dnf\":nofinish.size,\n },\n \"response_times\":{\n \"min\":np.min(finished),\n \"mean\":np.mean(finished),\n \"max\":np.max(finished),\n \"std\":np.std(finished)\n }\n }", "def worker(nums, out_q):\n outdict = {}\n print(threading.current_thread().name)\n print (\"pid:\", os.getpid())\n print (\"data size:\", nums)\n for n in nums:\n outdict[n] = factorize_naive(n)\n out_q.put(outdict)", "def exec(list_req, wb,write,Total):\n ret = None\n\n if write==True:\n for tick in list_req:\n retrieve_score(wb,tick,increase=True,write = write)\n retrieve_score(wb,tick,increase=False,write = write) \n \n else:\n if Total == True:\n ret_inc = retrieve_score(wb,list_req[0],increase=True,write = write)\n ret_score = retrieve_score(wb,list_req[0],increase=False,write = write)\n for tick in list_req[1:]:\n ret_inc = ret_inc.append(retrieve_score(wb,tick,increase=True,write = write))\n ret_score = ret_score.append(retrieve_score(wb,tick,increase=False,write = write))\n \n else:\n ret_inc = []\n ret_score = []\n for tick in list_req[1:]:\n ret_inc.append(retrieve_score(wb,tick,increase=True,write = write))\n ret_score.append(retrieve_score(wb,tick,increase=False,write = write))\n\n\n ret = (ret_score,ret_inc)\n\n \n return ret", "def run(self):\n # Get data objects (in a dict) from the controller process \n dataDict = self.controller.recv()\n self.orderedStreams = dataDict['orderedStreams']\n\n ID = None\n data = None\n while self.clients:\n result = self.resultQ.get()\n if result is None:\n self.clients -= 1\n continue\n ID, data = result\n # Data sequence is unimportant, simply write it out and proceed\n self.writePairs(data)\n\n # Send updated data (stats mainly) via the pipe directly back to\n # the MPController object, close filehandles and finish up.\n self.updateObjectsToController()\n self.closeFileHandles()", "def main():\r\n mvip, user, user_pass, mvip_node = get_inputs()\r\n payload = build_payload()\r\n headers, url = build_auth(mvip, user, user_pass, mvip_node)\r\n response_json 
= connect_cluster(headers, url, payload)\r\n paired_vols = get_replication_status(response_json)\r\n payload = get_vol_stats(paired_vols)\r\n response_json = connect_cluster(headers, url, payload)\r\n parse_volume_stats(paired_vols, response_json)", "def main():\n\n args = parse_args()\n metric_sender = MetricSender(verbose=args.verbose, debug=args.debug)\n\n discovery_key_disk = 'disc.disk'\n interval = 3\n pcp_disk_dev_metrics = ['disk.dev.total', 'disk.dev.avactive']\n item_prototype_macro_disk = '#OSO_DISK'\n item_prototype_key_tps = 'disc.disk.tps'\n item_prototype_key_putil = 'disc.disk.putil'\n\n disk_metrics = pminfo.get_sampled_data(pcp_disk_dev_metrics, interval, 2)\n\n pcp_metrics_divided = {}\n for metric in pcp_disk_dev_metrics:\n pcp_metrics_divided[metric] = {k: v for k, v in disk_metrics.items() if metric in k}\n\n # do TPS checks; use disk.dev.total\n filtered_disk_totals = clean_up_metric_dict(pcp_metrics_divided[pcp_disk_dev_metrics[0]],\n pcp_disk_dev_metrics[0] + '.')\n\n # Add dynamic items\n metric_sender.add_dynamic_metric(discovery_key_disk, item_prototype_macro_disk, filtered_disk_totals.keys())\n\n # calculate the TPS and add them to the ZaggSender\n for disk, totals in filtered_disk_totals.iteritems():\n disk_tps = (totals[1] - totals[0]) / interval\n metric_sender.add_metric({'%s[%s]' % (item_prototype_key_tps, disk): disk_tps})\n\n # do % Util checks; use disk.dev.avactive\n filtered_disk_totals = clean_up_metric_dict(pcp_metrics_divided[pcp_disk_dev_metrics[1]],\n pcp_disk_dev_metrics[1] + '.')\n\n # calculate the % Util and add them to the ZaggSender\n for disk, totals in filtered_disk_totals.iteritems():\n total_active = (float)(totals[1] - totals[0]) / 1000.0\n putil = 100 * total_active / interval\n\n metric_sender.add_metric({'%s[%s]' % (item_prototype_key_putil, disk): putil})\n\n metric_sender.send_metrics()", "def processWork(self):\n while self.running == True:\n if len(self.work_queue) == 0:\n self.work_queue = [Instruction('Do Math'), Instruction('Send HUPD'), Instruction('Receive All HUPDs')]\n else:\n instruction = self.work_queue.pop(0)\n if instruction.type == 'Do Math':\n #start calculations\n self.updated = False\n #print('Doing Math')\n # run calculations\n elif instruction.type == 'Send HUPD':\n #echo host update to all other hosts on the network\n min_max = str(self.x_min) + ':' + str(self.x_max)\n payload = 'a' + '\\0' + 'b' + '\\0' + 'c' + '\\0' + 'd' + '\\0' + 'e' + '\\0' + 'f' + '\\0' + 'g' + '\\0' + min_max + '\\0'\n our_update = Message(\"HUPD\", self.ip, payload)\n #if there are no connections, send to myself\n for connection in self.connections:\n connection.host_sock.sendall(our_update.generateByteMessage())\n elif instruction.type == 'Receive All HUPDs':\n # make sure to receive all HUPDs from listening threads\n if len(self.connections) > 0:\n while len(self.updates_received) != len(self.connections):\n msg = 'wait'\n # only set to true once all updates have been received\n self.updated = True\n self.updates_received = []\n # Once all updates are recieved update ABoid locations\n self.all_alphas = []\n elif instruction.type == 'NHST':\n #New host tring to connect to network\n new_host_ip = instruction.message.origin\n payload_array = instruction.message.payload.split(':')\n\n #check if the new host is a neighbor\n if self.x_max == self.curr_x_max:\n self.r_neighbor = new_host_ip\n if self.x_min == self.curr_x_min:\n self.l_neighbor = new_host_ip\n self.host_ips.append(new_host_ip)\n #Start the thread that is listening to the 
socket connected to the new host\n new_thread = Thread(target=lambda: self.listenToHost(instruction.sock))\n new_thread.daemon = True\n new_thread.start()\n new_connection = Connection(new_host_ip, instruction.sock, new_thread)\n self.connections.append(new_connection)\n host_area = str(self.x_min) + ':' + str(self.x_max)\n #send current host area to the newly connected host\n area_message = Message('AREA', self.ip, host_area)\n instruction.sock.sendall(area_message.generateByteMessage())\n print('Sent AREA message to ' + new_host_ip)\n elif instruction.type == 'LHST':\n #Host has disconnected to the network\n for host_ip in self.host_ips:\n if host_ip == instruction.message.origin:\n #remove host from list of connected ips\n self.host_ips.remove(host_ip)\n for connection in self.connections:\n #remove the connection object from list of known connections\n if connection.ip == instruction.message.origin:\n #close the hosts socket and thread\n connection.close()\n self.connections.remove(connection)\n else:\n print('Invalid Instruction - skipping...')\n return", "def run(self):\n # client -> server\n self.client.send_message(\n new_order_message(self.client,\n symbol='abc',\n side='0',\n order_type='1',\n extra_tags=[(38, 100), # orderQty\n (44, 10), ])) # price\n\n # server <- client\n message = self.server.wait_for_message('waiting for new order')\n assert_is_not_none(message)\n\n # server -> client\n self.server.send_message(\n execution_report(self.server,\n message,\n exec_trans_type='0',\n exec_type='0',\n ord_status='0',\n symbol='abc',\n side='0',\n leaves_qty='100',\n cum_qty='0',\n avg_px='0'))\n\n # client <- server\n message = self.client.wait_for_message('waiting for new order ack')\n assert_is_not_none(message)", "def gather_qpt_function(self, func_name, *args, **kwargs):\n partial = self.gather_qpt_function_me(func_name, *args, **kwargs)\n\n if i_am_master:\n\n # Contruct an array with the shape of partial,\n # adding a dimension of length nqpt.\n total = np.zeros([self.nqpt] + list(partial.shape[1:]),\n dtype=partial.dtype)\n\n for i, arr in enumerate(partial):\n total[i,...] = arr[...]\n\n active_ranks = self.get_active_ranks()\n if len(active_ranks) > 1:\n for irank in active_ranks[1:]:\n partial = comm.recv(source=irank, tag=irank)\n for arr in partial:\n i += 1\n total[i,...] 
= arr[...]\n\n elif self.active_worker:\n comm.send(partial, dest=0, tag=rank)\n return\n\n else:\n return\n\n # Now I could broadcast the total result to all workers\n # but right now there is no need to.\n\n return total", "def master(group_name: str, sum_worker_number: int, multiply_worker_number: int, is_immediate: bool = False):\n proxy = Proxy(\n group_name=group_name,\n component_type=\"master\",\n expected_peers={\n \"sum_worker\": sum_worker_number,\n \"multiply_worker\": multiply_worker_number,\n },\n )\n\n sum_list = np.random.randint(0, 10, 100)\n multiple_list = np.random.randint(1, 10, 20)\n print(\"Generate random sum/multiple list with length 100.\")\n\n # Assign sum tasks for summation workers.\n destination_payload_list = []\n for idx, peer in enumerate(proxy.peers[\"sum_worker\"]):\n data_length_per_peer = int(len(sum_list) / len(proxy.peers[\"sum_worker\"]))\n destination_payload_list.append((peer, sum_list[idx * data_length_per_peer : (idx + 1) * data_length_per_peer]))\n\n # Assign multiply tasks for multiplication workers.\n for idx, peer in enumerate(proxy.peers[\"multiply_worker\"]):\n data_length_per_peer = int(len(multiple_list) / len(proxy.peers[\"multiply_worker\"]))\n destination_payload_list.append(\n (peer, multiple_list[idx * data_length_per_peer : (idx + 1) * data_length_per_peer]),\n )\n\n if is_immediate:\n session_ids = proxy.iscatter(\n tag=\"job\",\n session_type=SessionType.TASK,\n destination_payload_list=destination_payload_list,\n )\n # Do some tasks with higher priority here.\n replied_msgs = proxy.receive_by_id(session_ids, timeout=-1)\n else:\n replied_msgs = proxy.scatter(\n tag=\"job\",\n session_type=SessionType.TASK,\n destination_payload_list=destination_payload_list,\n timeout=-1,\n )\n\n sum_result, multiply_result = 0, 1\n for msg in replied_msgs:\n if msg.tag == \"sum\":\n print(f\"{proxy.name} receive message from {msg.source} with the sum result {msg.body}.\")\n sum_result += msg.body\n elif msg.tag == \"multiply\":\n print(f\"{proxy.name} receive message from {msg.source} with the multiply result {msg.body}.\")\n multiply_result *= msg.body\n\n # Check task result correction.\n assert sum(sum_list) == sum_result\n assert np.prod(multiple_list) == multiply_result", "def meta_trader_get_values(socket, data):\n try:\n socket.send_string(data)\n msg = socket.recv_string()\n return msg\n\n except zmq.Again as e:\n print(\"Something went wrong: \" + str(e))", "async def run(self):\n\n result = {'hops': [],\n 'start_timestamp': time()}\n\n if self.icmp:\n trace = await create_subprocess_exec(\"traceroute\",\n \"-n\",\n \"-I\",\n \"-w\" + self.wait_time,\n \"-m\" + self.max_hops,\n \"-q 1\",\n self.device,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n else:\n trace = await create_subprocess_exec(\"traceroute\",\n \"-n\",\n \"-w\" + self.wait_time,\n \"-m\" + self.max_hops,\n \"-q 1\",\n self.device,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n\n stdout = await trace.stdout.read()\n stderr = await trace.stderr.read()\n\n if stderr:\n result['error'] = stderr\n\n lines = stdout.splitlines()\n # remove first line \"traceroute to...\"\n del lines[0]\n\n for line in lines:\n line = line.decode('utf-8')\n ip_address = self.extract_ip_from_line(line)\n rtt = self.extract_rtt_from_line(line)\n if(ip_address):\n result['hops'].append({'ip_address': ip_address,\n 'rtt': rtt})\n elif '*' in line:\n result['hops'].append({'ip_address': '*',\n 'rtt': '*'})\n\n result['end_timestamp'] = time()\n self.results.append(result)", "def 
statsWorker():\n logger.info('STATS: Starting. Will report out every {0:.1g} hours'.format(\n config.STATS_HOURS))\n while True:\n gevent.sleep(timedelta(hours=config.STATS_HOURS).total_seconds())\n logger.info('STATS: {0}'.format(stats))\n stats.resetStats()\n\n return", "def getEnergyUsage():\n energy_data = asyncio.run(plug.get_emeter_realtime())\n\n return energy_data", "def get_stats():\r\n stats = {\r\n \"progress_precent\": 100.0*finished_work_units_amount/work_units_amount,\r\n \"results\": None if work_status == Db.WorkStatusNames.finished_work.value else Db.collect_results(),\r\n #If it's already finished, then all the results were already sent to the main server.\r\n }\r\n return stats", "def run(self):\n\n # Get data objects (in a dict) from the controller process \n dataDict = self.controller.recv()\n self.orderedStreams = dataDict['orderedStreams']\n orderedID = 0\n\n# TO DO - perhaps treat empty results differently? Place the ID in a \"discard\"\n# list and do the stats update immediately. Then when incrementing the \n# orderedID, if the updated value is in the discard list, increment again..\n # Begin procesing results queue\n ID = None\n data = None\n c = 0\n while self.clients:\n result = self.resultQ.get()\n if result is None:\n self.clients -= 1\n continue\n c += 1\n ID, data = result\n while self.clients and ID != orderedID:\n result = self.resultQ.get()\n if result is None:\n self.clients -= 1\n continue\n c += 1\n self.cacheIDX.append(ID)\n self.cache.append(data)\n ID, data = result\n\n # Data is next in sequence, write it out and proceed\n self.writePairs(data)\n orderedID += 1\n while orderedID in self.cacheIDX:\n idx = self.cacheIDX.index(orderedID)\n self.cacheIDX.pop(idx)\n data = self.cache.pop(idx)\n self.writePairs(data)\n orderedID += 1\n\n # Processing is completed but the cache may not be empty. 
Drain it\n # now (it should contain any missing objects at this point)\n if len(self.cacheIDX):\n while orderedID in self.cacheIDX:\n idx = self.cacheIDX.index(orderedID)\n self.cacheIDX.pop(idx)\n data = self.cache.pop(idx)\n self.writePairs(data)\n orderedID += 1\n\n # Send updated data (stats mainly) via the pipe directly back to\n # the MPController object, close filehandles and finish up.\n self.updateObjectsToController()\n self.closeFileHandles()", "def ProcessMeasurementRequestParallel(request, base_dir='.'):\n\n print \"Processing RequestMeasurement in Parallel:\"\n print request, request.form\n\n # Apply some sanity checks\n if request.method != 'POST':\n print \"FitMeasurement() - ERROR: Expected POST http request\"\n return jsonify(flag=\"error\")\n\n # Get the data to be fit from the JSON\n measurement_string_JSON = request.form['measurement']\n\n # Call the external script\n print \"Opening Subprocess\"\n script_location = base_dir + '/' + 'fitMeasurement.py'\n p = subprocess.Popen([script_location, measurement_string_JSON], stdout=subprocess.PIPE)\n out, err = p.communicate()\n print \"Subprocess successfully executed\"\n\n # Use the deliminator to determine where\n # the desired dict is in the output\n delim = 'BEGIN_HERE'\n out = out[out.find(delim)+len(delim):]\n json_string = out\n result_json = json.loads(json_string)\n\n fitted_params = result_json['fitted_params'] \n fitted_bins = result_json['fitted_bins'] \n profile_png = result_json['profile_png']\n\n print \"Returning result\"\n return jsonify(flag=\"success\", \n fitted_params=fitted_params, fitted_bins=fitted_bins,\n profile_png=profile_png)", "def mex_list_info():\n pub = rospy.Publisher('~mex_list_info', MexListInfo, queue_size=10)\n rate = rospy.Rate(1) # 1hz\n while not rospy.is_shutdown():\n # Loop here, publishing the MexListInfo message with a list of MexInfo objects at the specified rate.\n mexlistinfo = MexListInfo()\n mexlistinfo.stamp = rospy.Time.now()\n mexlistinfo.total_mex_number = len(mex_list)\n mexlistinfo.standby = 0\n mexlistinfo.charging = 0\n mexlistinfo.assigned = 0\n mexlistinfo.executing_task = 0\n mexlistinfo.error = 0\n\n for i in mex_list:\n mex_info = MexInfo()\n mex_info.status = i.status.name\n mex_info.id = i.id\n mex_info.job_id = str(i.job_id)\n mexlistinfo.mex_list_info_array.append(mex_info)\n if i.status.name == MExStatus.STANDBY.name:\n mexlistinfo.standby += 1\n elif i.status.name == MExStatus.CHARGING.name:\n mexlistinfo.charging += 1\n elif i.status.name == MExStatus.ASSIGNED.name:\n mexlistinfo.assigned += 1\n elif i.status.name == MExStatus.EXECUTING_TASK.name:\n mexlistinfo.executing_task += 1\n elif i.status.name == MExStatus.ERROR.name:\n mexlistinfo.error += 1\n\n pub.publish(mexlistinfo)\n rate.sleep()", "def _disp_times():\n fields = request.args.get('fields', type=str)\n format_type = request.args.get('format', type=str)\n top = request.args.get('top', type=int)\n token = request.args.get('token', type=str)\n results = {}\n\n result, length, code = retrieve(token, format_type, top, request_table[fields])\n return flask.jsonify(result=result, length=length, code=code)\n\n # elif code == 401: # Unauthorized\n # app.logger.debug(\"Token Expired! 
Let's log the user out.\")\n # return render_template('calc.html')", "def evaluate_data():\n try:\n # General system related info\n ram = psutil.virtual_memory()\n total_ram = round((ram.total / 1024 / 1024),2)\n free_ram = round((ram.available / 1024 / 1024),2)\n used_ram = round((ram.used / 1024 / 1024),2)\n cpu_total = psutil.cpu_count(logical=True)\n cpu_loadavg = round([x / cpu_total * 100 for x in psutil.getloadavg()][0],2)\n acs_8080 = sp.getoutput(\"netstat -an|grep -c 8080\")\n acs_8181 = sp.getoutput(\"netstat -an|grep -c 8181\")\n acs_8443 = sp.getoutput(\"netstat -an|grep -c 8443\")\n mysql = sp.getoutput(\"netstat -an|grep -c 3306\")\n oracle = sp.getoutput(\"netstat -an|grep -c 1521\")\n logging.info('General system info obtained')\n except Exception as e:\n logging.exception(f\"EXCEPTION: {e} \\n Full stack trace: \\n\", exc_info=1)\n # Process specific details\n try:\n iis_pid = SystemInformation.get_pid(\"w3wp.exe\")\n iis_ram = SystemInformation.get_ram_usage(iis_pid)\n iis_cpu = SystemInformation.get_cpu_usage(iis_pid)\n java_pid = SystemInformation.get_pid(\"java.exe\")\n java_ram = SystemInformation.get_ram_usage(java_pid)\n java_cpu = SystemInformation.get_cpu_usage(java_pid)\n mysqld_pid = SystemInformation.get_pid(\"mysqld.exe\")\n mysqld_ram = SystemInformation.get_ram_usage(mysqld_pid) \n mysqld_cpu = SystemInformation.get_cpu_usage(mysqld_pid)\n except Exception as e:\n logging.exception(f\"EXCEPTION: {e} \\n Full stack trace: \\n\", exc_info=1)\n\n try:\n dictionary = {}\n now = datetime.datetime.now()\n timestampt = now.strftime(\"%Y-%m-%d-%H:%M:%S\")\n fieldnames = ['timestampt','total_ram','free_ram','used_ram','cpu_total','cpu_loadavg','acs_8080','acs_8181','acs_8443','mysql','oracle','iis_ram','iis_cpu','java_ram','java_cpu','mysqld_ram','mysqld_cpu']\n for var in fieldnames:\n dictionary[var] = eval(var)\n \n logging.info('Data for report generated')\n return dictionary\n except Exception as e:\n logging.exception(f\"EXCEPTION: {e} \\n Full stack trace: \\n\", exc_info=1)", "def worker_function(taskQ, resultQ):\n \n while True:\n try: ivel = taskQ.get(block=True, timeout=10)# try to get the next task, allow some time for process clash (ivel number)\n except queue.Empty: break# kill process if no more tasks left\n example = generate_example(ivel)\n resultQ.put(example)# push the example to the results queue", "def handle(self):\n while True:\n try:\n chunk = self.connection.recv(4)\n if len(chunk) < 4:\n break\n slen = struct.unpack(\">L\", chunk)[0]\n chunk = self.connection.recv(slen)\n while len(chunk) < slen:\n chunk = chunk + self.connection.recv(slen - len(chunk))\n obj = self.unPickle(chunk)\n msg = obj['msg']\n if type(msg) is str:\n record = logging.makeLogRecord(obj)\n self.handleLogRecord(record)\n else:\n self.statsThread.addRecord(msg)\n timeDict = msg['time'] \n if timeDict['total'] > LOG_THRESHOLD: \n #obj['msg'] = 'Processed ' + msg['request'] + ' on ' + msg['file'] + ' in ' + ('%.3f' % msg['time']['total']) + ' seconds'\n logMsg = 'Processed ' + msg['request'] + ' on ' + msg['file'] + '. 
Timing entries in seconds: '\n addComma=False\n for SECTION in self.SECTION_KEYS:\n timeKey=SECTION.strip()\n if timeDict.has_key(timeKey):\n if addComma:\n logMsg += ','\n else:\n addComma = True\n logMsg += ' ' + timeKey + ' ' + ('%.3f' % timeDict[timeKey])\n \n obj['msg'] = logMsg\n record = logging.makeLogRecord(obj)\n self.handleLogRecord(record)\n except Exception, e:\n import sys, traceback, string\n t, v, tb = sys.exc_info()\n print string.join(traceback.format_exception(t, v, tb))" ]
[ "0.5938819", "0.59349483", "0.5909281", "0.5909281", "0.5895395", "0.5823286", "0.5821491", "0.57913584", "0.57686776", "0.5757286", "0.5728129", "0.57082814", "0.56994784", "0.56603485", "0.56561863", "0.565306", "0.5591266", "0.5583581", "0.5565303", "0.5563218", "0.54797685", "0.5465706", "0.5435589", "0.54318595", "0.541826", "0.5410478", "0.53953266", "0.53912157", "0.5365989", "0.530458", "0.5303408", "0.5302744", "0.52789134", "0.527428", "0.5272694", "0.52726835", "0.5270866", "0.52669954", "0.52669406", "0.52501106", "0.5247305", "0.524339", "0.52433664", "0.5234862", "0.5224209", "0.52099484", "0.52099484", "0.5186886", "0.5178175", "0.517076", "0.5169387", "0.5163943", "0.5161034", "0.5157081", "0.5157015", "0.5147692", "0.5144682", "0.51405096", "0.5136727", "0.5125427", "0.51242566", "0.5114945", "0.5106845", "0.5105372", "0.5104778", "0.51022124", "0.50995857", "0.5085155", "0.5079612", "0.50764316", "0.5073474", "0.5071099", "0.50583357", "0.5056233", "0.50483495", "0.5044515", "0.5043749", "0.50436896", "0.5041366", "0.503524", "0.5034931", "0.5028462", "0.5028354", "0.5014762", "0.50144374", "0.5009212", "0.4995677", "0.49891824", "0.49830964", "0.497812", "0.49762967", "0.49722588", "0.49705425", "0.49618313", "0.49511668", "0.49494585", "0.49473172", "0.4947306", "0.49442354", "0.49437997" ]
0.57413363
10
Used to handle a non-responding ZMQ server
def raise_timeout(*args, **kwargs): raise ZMQNotResponding('ZMQ server is not responding')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fix_zmq_exit():\n import zmq\n ctx = zmq.Context.instance()\n ctx.term()", "def test_recv_nomsg(self):\n flag, msg_recv = self.recv_instance.recv(timeout=self.sleeptime)\n assert(not flag)\n nt.assert_equal(msg_recv, self.recv_instance.eof_msg)", "def connectionLost(reason):", "def checkConnection(self,msg):\n if (len(msg) == 0):\n sleep(self.m_to/2)\n print >>sys.stderr, 'Closing due to possible server fault'\n self.close()", "def server_exit():\n return", "def test_keep_alive_cancelled(self):\n sleep(0.005) # Wait before a keep-alive message will be sent\n self.inverter.send(b\"\\x01\\x02\\x03\", b\"\") # Send something arbitrary\n self.sock.recv(4096) # Retrieve the sent message\n sleep(0.008) # Wait until just before the next keep-alive is supposed to happen\n # Check that no message was sent\n self.sock.setblocking(False)\n with self.assertRaises(BlockingIOError):\n self.sock.recv(4096)", "def connectionLost(self,reason):\n pass", "def recv(self):\n return None", "def time_server_not_responding(self):\n if not self.time_server_set:\n return False\n if self.am_leader:\n return False\n try:\n uid = self.global_time_server.get_id()\n except socket.error:\n self.global_time_server = None\n self.time_server_set = False\n print \"The time server is not responding.\"\n return True\n print \"The time server is responding!\"\n return False", "def broker_null(self, data):\n\n print(\"Heartbeat\")\n #TODO: Reset heartbeat timer or something like that", "def connectionLost(self, reason):\n print \"connection lost from\", self.addr\n reactor.stop()", "def connectionLost(self, reason):\n print \"lost connection to\", host, \"port\", port\n reactor.stop()", "def connection_lost(self, exc):\n logger.info('The server closed the connection')\n self.loop.stop()", "def start_server(self) -> None:\n with self.socket.bind(self.address):\n print(\"ZeroMQ Server listening at {}\".format(self.address))\n while True:\n payload_rx = self.socket.recv(flags=0)\n if payload_rx:\n self.decode_payload(payload_rx)\n self.socket.send_string(self.reply(), flags=0, copy=False)", "def main(_):\n context = zmq.Context()\n socket = context.socket(zmq.REQ)\n socket.connect(CORENLP_ADDRESS)\n socket.send(\"stop\")\n message = socket.recv()\n print(\"Received reply [%s]\" % message)", "def __connection_lost(self):\n print(\"Error: connection lost.\")\n try:\n # Try and send a message back to the server to notify connection\n # lost\n self.client_socket.send(\"q\".encode())\n except:\n pass\n # Raise an error to finish\n raise Exception", "def peer_server_host(self):\n try:\n while True:\n while not self.peer_server_listener_queue.empty():\n with futures.ThreadPoolExecutor(max_workers=8) as executor:\n conn, addr = self.peer_server_listener_queue.get()\n data_received = json.loads(conn.recv(1024))\n\n if data_received['command'] == 'obtain_active':\n fut = executor.submit(\n self.peer_server_upload, conn, data_received)\n except Exception as e:\n print \"Peer Server Hosting Error, %s\" % e", "def _receive_thread(self):\r\n while True:\r\n try:\r\n self.response, ip = self.socket.recvfrom(3000)\r\n except socket.error as exc:\r\n print (f\"Caught exception socket.error: {exc}\")", "def controls():\n\n context = zmq.Context()\n\n print(\"Transmitting commands to process.\")\n socket = context.socket(zmq.REQ)\n rc = socket.connect(\"ipc:///tmp/mail_queue_ipc\")\n #print(rc)\n\n\n for request in range(2):\n print(\"Sending request %s\" % request)\n socket.send(b\"insert\")\n\n message = socket.recv()\n print(\"Recieved 
reply %s [ %s ]\" % (request, message))\n time.sleep(1)", "def _shutdown(self):\n self.control_socket.send(zmqmessage.IPC_END)\n self.end_threads = True\n self.timeout = 1", "def exit(self):\n self._status = \"\"\n self._sock.settimeout(1.0)\n self._sock.sendto(bytes(\"bla\", \"utf-8\"), (self._cfg.host, self._cfg.port))", "def do_socket_logic():\n pass", "def run(self) -> None:\n\n while not self.stop_event.is_set():\n if self.my_queue:\n # if heartbeat received at '/heartbeat' route from the monitored peer,\n # sleep until next\n self.my_queue.clear()\n time.sleep(7)\n\n else:\n # else drop peer data from database and inform central server appending '0'\n # to my queue\n self.db_access.drop_peer(self.peer_id)\n self.my_queue.append(0)\n break", "def wait_for_termination(self):\n self.server.wait_for_termination()", "def ecute(self):\n msg = self.up_queue_recv_socket.recv()\n result, e = self.up_queue.get()\n if e is not None:\n raise e\n return result", "def keepAliveReceived(self):", "def _receive_thread(self):\r\n while True:\r\n try:\r\n self.response, ip = self.socket.recvfrom(2048)\r\n print(\"Response \", self.response)\r\n except socket.error as exc:\r\n print (\"Receive Thread caught exception socket.error : %s\" % exc)", "def test_solicitation_no_reply_resend(self):\n waittime = self.autoconflayer._solicitation_timeout * 4.0\n self.autoconflayer.start_process()\n interest = Interest(Name('/foo/bar'))\n self.queue_from_higher.put([None, interest])\n\n # Catch all data the autoconfig layer sends downwards for 3 seconds\n deadline = datetime.utcnow() + timedelta(seconds=waittime)\n tolower = []\n while datetime.utcnow() < deadline:\n try:\n data = self.queue_to_lower.get(timeout=waittime/10)\n tolower.append(data)\n except queue.Empty:\n pass\n # Make sure the broadcast face was actually created and get its face id\n bcfid = self.faceidtable.get_or_create_faceid(AddressInfo(('127.255.255.255', 4242), 0))\n self.assertIsNotNone(bcfid)\n # Make sure the forwarder solicitation was sent more than once\n solictiation = Interest(Name('/autoconfig/forwarders'))\n solictiation_count = len([1 for data in tolower if data == [bcfid, solictiation]])\n self.assertGreater(solictiation_count, 1)", "def slot_not_connected(self, addr):\n if self.next_connection(addr):\n self.connection_responded()\n else:\n print(addr, \": Reconnecting...\")", "def cmd_handler():\n context = zmq.Context()\n\n # socket to receive commands (a subscription to ELECTION_CODE channel)\n cmd_socket = context.socket(zmq.SUB)\n cmd_socket.connect (\"tcp://%s:5556\" % SERVER_HOST)\n topicfilter = \"politiche2013\"\n cmd_socket.setsockopt(zmq.SUBSCRIBE, topicfilter)\n\n # socket to send replies\n reply_sender = context.socket(zmq.PUSH)\n reply_sender.connect(\"tcp://%s:5557\" % SERVER_HOST)\n\n # main loop\n while True:\n print \"Aye sir, unit {0} ready for your commands ...\".format(computer_id)\n # wait for a command\n string = cmd_socket.recv()\n\n # action\n print \"Message received: '%s'\" % (string,)\n\n # send reply to server\n print \"Sending reply to server\"\n reply = { 'unit' : computer_id, 'status' : 'configured'}\n reply_sender.send_json(reply)", "def connectionLost(self, reason):\n self.setTimeout(None)\n if reason.check(ResponseDone, PotentialDataLoss):\n self.deferred.callback(None)\n else:\n self.deferred.errback(reason)", "def test_heartbeat_only(cls):\n # type: () -> None\n\n uniclient_thread = ErrorAssertUniClientThread(cls.TEST_ZMQ_ENDPOINT)\n uniclient_thread.start()\n\n uniworker_thread = 
ConsoleUniWorkerThread(cls.TEST_ZMQ_ENDPOINT)\n uniworker_thread.start()\n\n uniclient_thread.wait_for_worker(INITIAL_CONNECTION_TIME_SECS)\n # Wait for 60 seconds, make sure the client and worker remain connected.\n sleep(60)\n #sleep(30000)\n assert uniclient_thread.is_connected()\n assert uniworker_thread.is_connected()\n\n # Shut down the worker and client\n uniworker_thread.join()\n uniclient_thread.join()", "def server_activate(self):\n\t\tself.socket.listen(self.request_queue_size)", "def handle(self):\n try:\n # Wait for data\n data = json.loads(self.request.recv(1024).decode('UTF-8').strip())\n\n # Process data\n self.process_data(data)\n\n except Exception as e:\n print(\"Exception wile receiving message: \", e)\n self.request.sendall(\n bytes(json.dumps({'return': 'error'}), 'UTF-8'))", "def handle_connection_lost(self, exc: Optional[Exception]) -> None:", "def handle(self):\n try:\n while True:\n\n # Pop the message from the queue\n\n msg = self.queue.get_nowait()\n\n # Log anything if necesary\n\n self.log_message(msg)\n\n # Identify the src peer\n\n if 'src_id' in msg:\n\n if msg['src_id'] == -1:\n\n this_peer = None # Server message\n\n else:\n\n this_peer = self.peers[msg['src_id']]\n\n # If we are not up-to-date with server, only accept MSG_CONNECT and MSG_SET_ALL\n\n if isinstance(msg, MSG_CONNECT):\n\n if self.marker.id != msg['src_id']:\n\n print(\"Peer '{}' has joined the session\".format(msg['name']))\n\n elif type(msg) == MSG_SET_ALL:\n\n # Set the contents of the text box\n\n self.handle_setall(msg['data'])\n\n # Move the peers to their position\n\n for _, peer in self.peers.items():\n \n peer.move(peer.row, peer.col)\n\n # self.mark_set(peer.mark, peer.index())\n\n # Format the lines\n\n self.format_text()\n\n # Move the local peer to the start\n\n self.marker.move(1,0)\n\n # Flag that we've been update\n\n self.is_up_to_date = True\n\n elif self.is_up_to_date:\n\n # If the server responds with a console message\n\n if isinstance(msg, MSG_RESPONSE):\n\n if hasattr(self.root, \"console\"):\n\n self.root.console.write(msg['string']) \n\n # Stop running when server is manually killed \n\n elif isinstance(msg, MSG_KILL):\n\n if hasattr(self.root, \"console\"):\n\n self.root.console.write(msg['string']) \n\n self.root.push.kill()\n self.root.pull.kill()\n\n # Handles selection changes\n\n elif isinstance(msg, MSG_SELECT):\n\n sel1 = str(msg['start'])\n sel2 = str(msg['end'])\n \n this_peer.select(sel1, sel2)\n\n # Handles keypresses\n\n elif isinstance(msg, MSG_DELETE):\n\n self.handle_delete(this_peer, msg['row'], msg['col'])\n\n self.root.colour_line(msg['row'])\n\n elif type(msg) == MSG_BACKSPACE:\n\n self.handle_backspace(this_peer, msg['row'], msg['col'])\n\n self.root.colour_line(msg['row'])\n\n elif isinstance(msg, MSG_EVALUATE_BLOCK):\n\n lines = (int(msg['start_line']), int(msg['end_line']))\n\n this_peer.highlightBlock(lines)\n\n # Experimental -- evaluate code based on highlight\n\n string = self.get(\"{}.0\".format(lines[0]), \"{}.end\".format(lines[1]))\n \n self.root.lang.evaluate(string, name=str(this_peer), colour=this_peer.bg)\n\n elif isinstance(msg, MSG_EVALUATE_STRING):\n\n # Handles single lines of code evaluation, e.g. 
\"Clock.stop()\", that\n # might be evaluated but not within the text\n\n self.root.lang.evaluate(msg['string'], name=str(this_peer), colour=this_peer.bg)\n\n elif isinstance(msg, MSG_SET_MARK):\n\n row = msg['row']\n col = msg['col']\n\n this_peer.move(row, col)\n\n # If this is a local peer, make sure we can see the marker\n\n if this_peer == self.marker:\n\n self.mark_set(INSERT, \"{}.{}\".format(row, col))\n\n self.see(self.marker.mark)\n\n elif isinstance(msg, MSG_INSERT):\n\n self.handle_insert(this_peer, msg['char'], msg['row'], msg['col'])\n\n # Update IDE keywords\n\n self.root.colour_line(msg['row'])\n\n # If the msg is from the local peer, make sure they see their text AND marker\n\n if this_peer == self.marker:\n\n self.see(self.marker.mark)\n\n self.edit_separator()\n\n elif isinstance(msg, MSG_GET_ALL):\n\n # Return the contents of the text box\n\n data = self.handle_getall()\n\n reply = MSG_SET_ALL(-1, data, msg['src_id'])\n\n self.root.push_queue.put( reply ) \n\n elif isinstance(msg, MSG_REMOVE):\n\n # Remove a Peer\n this_peer.remove()\n \n del self.peers[msg['src_id']]\n \n print(\"Peer '{}' has disconnected\".format(this_peer)) \n\n elif isinstance(msg, MSG_BRACKET):\n\n # Highlight brackets on local client only\n\n if this_peer.id == self.marker.id:\n\n row1, col1 = msg['row1'], msg['col1']\n row2, col2 = msg['row2'], msg['col2']\n\n peer_col = int(self.index(this_peer.mark).split(\".\")[1])\n\n # If the *actual* mark is a ahead, adjust\n\n col2 = col2 + (peer_col - col2) - 1\n\n self.tag_add(\"tag_open_brackets\", \"{}.{}\".format(row1, col1), \"{}.{}\".format(row1, col1 + 1))\n self.tag_add(\"tag_open_brackets\", \"{}.{}\".format(row2, col2), \"{}.{}\".format(row2, col2 + 1))\n\n elif type(msg) == MSG_CONSTRAINT:\n\n new_name = msg['name']\n\n print(\"Changing to constraint to '{}'\".format(new_name))\n\n for name in self.root.creative_constraints:\n\n if name == new_name:\n\n self.root.creative_constraints[name].set(True)\n self.root.__constraint__ = constraints[name](msg['src_id'])\n\n else:\n\n self.root.creative_constraints[name].set(False)\n\n elif type(msg) == MSG_SYNC:\n\n # Set the contents of the text box\n\n self.handle_setall(msg['data'])\n\n # Move the peers to their position\n\n for _, peer in self.peers.items():\n \n peer.move(peer.row, peer.col)\n\n # Format the lines\n\n self.format_text()\n\n elif type(msg) == MSG_UNDO:\n\n self.handle_undo()\n\n # Give some useful information about what the message looked like if error\n\n else:\n\n print(\"Error in text box handling. 
Message was {}\".format(msg.info()))\n\n raise e\n\n # Update any other idle tasks\n\n self.update_idletasks()\n\n # This is possible out of date - TODO check\n\n if msg == self.root.wait_msg:\n\n self.root.waiting = False\n self.root.wait_msg = None\n self.root.reset_title()\n\n self.refreshPeerLabels()\n\n # Break when the queue is empty\n except queue.Empty:\n \n self.refreshPeerLabels()\n\n # Recursive call\n self.after(30, self.handle)\n return", "def recv_invalid_response(self, recv_data, invalid_type = \"\"):\t\n\tif (invalid_type == \"bit_signature\"):\n\t print(\"Error: Packet received from outside our network (wrong bit signature)\")\t \n\t recv_data = \"\"\t \n\telif (invalid_type == \"response_type\"):\n\t print(\"Error: Wrong response type in packet received.\")\t \n\t recv_data = \"\"\t \t\n\treturn", "def handshake(self):\n print(\"No: \"+str(len(self.threads)))\n indexes_to_del = []\n if len(self.threads)>2:\n raise IOError\n for i in range(0,len(self.threads)):\n if not self.threads[i].is_alive():\n indexes_to_del.append(i)\n \n for i in indexes_to_del:#do this otherwise if deleted above, out of index error occurs\n del self.threads[i]\n \n while True:\n data = self.s.recv(1024)\n if data ==\"O\":\n print(\"Hanshake Received\")\n return", "def run(self):\n self.stopped = False\n # receives incoming 'host up' requests\n serverSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)\n serverSocket.bind(('255.255.255.255', Globals.BROADCAST_PORT))\n \n # wait for UDP broadcast, send TCP ACK\n while 1:\n \n # open a socket and listen for a message\n value,address = serverSocket.recvfrom(256)\n host,port = address\n \n # this actually prevents a seg fault ;( for some reason\n if self.stopped:\n return\n \n if value == 'host up':\n \n sendSocket = socket.socket(socket.AF_INET, \n socket.SOCK_STREAM, 0)\n sendSocket.connect((host, Globals.ACK_PORT))\n sendSocket.send('host up ack')\n sendSocket.close()\n sendSocket = None\n self._addHost(host)\n \n elif value.find('host down') == 0:\n self._removeHost(host)\n \n elif value.find('add group') == 0:\n self._postEvent(value)\n \n elif value.find('remove group') == 0:\n self._postEvent(value)\n \n elif value.find('group beat') == 0:\n self._postEvent(value)\n \n serverSocket.close()", "def runNodesListener(self):\n \n socketNodes = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n socketNodes.bind((self.ip_address, 5003))\n\n while True:\n socketNodes.listen(5)\n try :\n conn, addr1 = socketNodes.accept()\n data = conn.recv(self.BUFFER_SIZE)\n if data:\n decriptedData = ast.literal_eval(data.decode('utf-8'))\n addr = decriptedData[0]\n try:\n \"\"\"\n We want to know what kind of message we received\n Here we consider it is a new block\n \"\"\"\n \n receivedBlock = decriptedData[1]['Block']\n if self.blockchain.chain == []:\n self.arrivingBlock(decriptedData[1], addr, receivedBlock) \n\n else:\n if receivedBlock['previous_hash'] == self.blockchain.last_block['hash']:\n self.arrivingBlock(decriptedData[1], addr, receivedBlock)\n else:\n self.message = self.setMessage((self.ip_address,{'Confirmation':'block rejected'}))\n nodesMessage = Thread(target = self.runNodesMessage) #Problem. 
We kill the last thread even if it didn't accomplished the task\n nodesMessage.setDaemon(True)\n nodesMessage.start()\n \n \n except KeyError:\n try:\n \"\"\"\n The message is not a new block but a response to a received block\n If the block is rejected we drop everything and broadcast a message of rejection\n If it is accepted we check if it is accepted by every neighbour if yes we ad it to the chain\n and broadcast the info\n \"\"\"\n if self.blockchain.waiting_blocks != []:\n receivedConfirmation = decriptedData[1]['Confirmation']\n \n if receivedConfirmation == 'block rejected':\n self.blockchain.waiting_blocks.clear()\n self.contactedIP.clear()\n self.message = self.setMessage((self.ip_address,decriptedData[1]))\n nodesMessage = Thread(target = self.runNodesMessage) #Problem. We kill the last thread even if it didn't accomplished the task\n nodesMessage.setDaemon(True)\n nodesMessage.start()\n elif receivedConfirmation == 'All my neighbours ok':\n if addr in self.neighboursOk:\n pass\n else:\n self.neighboursOk.append(addr)\n if self.verifyConfirmed(self.neighboursOk):\n if self.blockchain.waiting_blocks != []:\n self.blockchain.chain.append(self.blockchain.waiting_blocks[0])\n print(self.blockchain.chain)\n self.blockchain.waiting_blocks.clear()\n self.neighboursOk.clear()\n self.confirmed.clear() \n else:\n continue\n except KeyError:\n continue\n else:\n continue\n except socket.timeout:\n pass", "def test_process_packet_heartbeat(self):\n\n pkt = {'type': 'heartbeat',\n 'endpoint': ''\n }\n self.ns.process_packet(pkt)\n assert not self.environ['socketio'].error.called", "def handle_error(self):\n # perhaps do some nifty stuff here to\n # mark bad workers, try to restart, etc.\n msg = ''\n Nworkers = len(self.workers)\n Nsend_errors = len(self.had_send_error)\n Nsend_error_types = len(self.send_exc.keys())\n Nrecv_errors = len(self.had_recv_error)\n Nrecv_error_types = len(self.recv_exc.keys())\n if (Nsend_errors == Nworkers and\n Nsend_error_types == 1):\n sock_err_type,err_msg = self.send_exc.keys()[0]\n if sock_err_type == 111:\n # An attempt at helpful info for a common problem.\n msg = '\\n\\nConnection refused on all workers.\\n'\n msg = msg + ' Perhaps restarting the cluster would help.\\n'\n msg = msg + ' Use Your_Clusters_Name_Here.restart()'\n else:\n msg = 'A Socket error occured sending to all workers.\\n\\t'\n msg = msg + str(sock_err_type) + ': ' + str(err_msg)\n elif Nsend_errors:\n msg = '\\n\\nThe following errors occured when sending data:\\n\\t'\n for err,guilty_workers in self.send_exc.items():\n msg = msg + str(err) + '\\n\\t'\n msg = msg + 'Guilty workers: ' + str(guilty_workers) + '\\n'\n\n if (Nrecv_errors == Nworkers and\n Nrecv_error_types == 1):\n err,dummy = self.recv_exc.items()[0]\n err_type, err_msg, err_traceback = err\n msg = '\\n\\nThe same error occured on all workers:\\n\\t'\n msg = msg + str(err_type) + ': ' + str(err_msg)\n msg = msg + err_traceback\n elif Nrecv_errors:\n msg = '\\n\\nThe following errors occured on workers:\\n\\t'\n for err,guilty_workers in self.recv_exc.items():\n err_type, err_msg, err_traceback = err\n msg = msg + str(err_type) + ': ' + str(err_msg) + '\\n'\n msg = msg + 'Guilty workers: ' + str(guilty_workers) + '\\n'\n msg = msg + err_traceback\n\n\n raise ClusterError, msg", "def server():", "def server():", "def receive_msg(self):\n while True:\n try:\n msg = self.srvsock.recv(100)\n with self.t.location(0, self.k):\n print(msg.decode())\n self.k = self.k + 1\n except BaseException as e:\n print('Server Error! 
Connection shut down.')\n raise e", "async def connection_lost(self):\n logging.info('connection dropped')", "def on_shutdown(self, server):\n pass", "def handle_read(self):\n while True:\n try:\n content = self.recv(1024)\n if content:\n self.rbuf.write(content.decode('utf-8'))\n if len(content) < 1024:\n break\n except Exception as e:\n print(e)\n self.handle_rpc()", "def test_recv(self):\n Tout = self.instance.start_timeout()\n while ((not Tout.is_out)\n and (os.stat(self.tempfile).st_size == 0)): # pragma: debug\n self.instance.sleep()\n self.instance.stop_timeout()\n msg_flag, res = self.instance.recv(timeout=self.timeout)\n assert(msg_flag)\n assert(len(res) > 0)\n nt.assert_equal(res, self.ply_dict)", "def run(self):\n\n\t\t#Begin running the clientHandler\n\t\tself.running = True\n\t\tself.rxThread.start()\n\n\t\twhile self.running:\n\t\t\ttime.sleep(0.1)\n\t\n\t\t\t#Keep a count of the number of missing Hello requests, over 5 kill client\n\t\t\tif self.missingCount >= 5:\n\t\t\t\tself.running = False", "def on_server_shutdown(self):\n raise NotImplementedError", "def on_bindok(self, unused_frame):\n\n self.logger.info('queue bound')\n if self.acked:\n # if we wish to care about the servers replies, this is were we set up things\n self.logger.info('issuing confirm.select RPC')\n self._channel.confirm_delivery(self.on_delivery_confirmation)\n\n if self.sender:\n pass\n self.send()\n else:\n self.start_consuming()", "def check(self):\n if self.backend.poll():\n raise RuntimeError('Backend process died.')\n\n if self.esp.poll():\n raise RuntimeError('ESP process died.')", "def onSlaveLost(self):", "def found_terminator(self):\r\n self.msg = ''.join(self.msg_buffer)\r\n self.msg_split = self.msg.split(client_api[\"delimiter\"])\r\n cmd = self.msg_split[0]\r\n try:\r\n self.msg_handler[cmd]()\r\n except KeyError as e:\r\n server_log.info('Unhandled command received from client id {}: {}'.format(self.client_id, cmd))\r\n except Exception as e:\r\n server_log.info('Exception raised in server when receiving message from client: {!r}'.format(e))\r\n raise e\r\n finally:\r\n self.msg_buffer = []\r\n self.msg = ''\r\n self.msg_split = []", "def connectionLost(self, reason):\r\n _Protocol.remote_destroy(self)", "def on_disconnect(self, raw_msg, server, port, **kwargs):", "def _PQConEnd(self, m):\n self.server = True", "async def test_run_without_launching(self):\n\n port = get_first_available_port(7860, 7870)\n\n io = gr.Interface(lambda s: s, gr.Textbox(), gr.Textbox()).queue()\n\n config = uvicorn.Config(app=io.app, port=port, log_level=\"warning\")\n\n server = Server(config=config)\n server.run_in_thread()\n\n try:\n async with websockets.connect(f\"ws://localhost:{port}/queue/join\") as ws:\n completed = False\n while not completed:\n msg = json.loads(await ws.recv())\n if msg[\"msg\"] == \"send_data\":\n await ws.send(json.dumps({\"data\": [\"Victor\"], \"fn_index\": 0}))\n if msg[\"msg\"] == \"send_hash\":\n await ws.send(\n json.dumps({\"fn_index\": 0, \"session_hash\": \"shdce\"})\n )\n if msg[\"msg\"] == \"process_completed\":\n completed = True\n assert msg[\"output\"][\"data\"][0] == \"Victor\"\n finally:\n server.close()", "def dispatcher( port, cmd, files, allworkers, start ):\n # Only the host running as dispatcher should be calling this.\n\n host = ipaddrs( socket.gethostname() )\n\n # Initialize a 0mq context\n\n context = zmq.Context()\n\n # Set up a socket to receive task requests and send replies over.\n # The linger option is set to help make sure all comunication is\n # 
delivered when the thread ends. The time unit is milliseconds. A\n # rigorous receive request - send reply pattern must be followed as\n # the zmq.REP socket keeps track of who sent the request and thus\n # were the reply should go. Trying to do two receives or two sends\n # in a row will cause a fatal error or hang the program. Here we\n # set up the REP side of the socket pattern.\n\n dispatcher_socket = context.socket( zmq.REP )\n dispatcher_socket.setsockopt( zmq.LINGER, 5000 )\n dispatcher_socket.bind( \"tcp://%s:%s\" % ( host, port ) )\n\n maxtime = 0\n tasknum = 0\n workers = {}\n already_notified = 0\n\n sys.stderr.write ( \"Dispatcher:Start:%d\\n\" % ( start ) )\n sys.stderr.flush()\n\n # Adjust starting task for 0 offset:\n\n start = start - 1\n tasknum = start\n lasttask = 0\n\n for f in files[start:]:\n\n request = dispatcher_socket.recv_json()\n worker = request['worker']\n workers[worker] = 1\n\n # Interpret a negative maxtime value as the time up signal.\n\n if request['maxtime'] >= 0 :\n\n if request['maxtime'] > maxtime :\n\n maxtime = request['maxtime']\n sys.stderr.write( \"Dispatcher:Maxtime:%s:%.2f:%.2f\\n\"\n % ( worker, maxtime, time.time() ) )\n sys.stderr.flush()\n\n tasknum = tasknum + 1\n task_message = { 'cmd' : cmd, 'file' : f.strip(),\n 'maxtime' : maxtime, 'tasknum' : tasknum }\n\n else:\n\n maxtime = -1\n sys.stderr.write( \"Dispatcher:Timeup:%s:%.2f\\n\"\n % ( worker, time.time() ) )\n sys.stderr.flush()\n task_message = { 'cmd' : \"FINI\", 'file' : \"None\",\n 'maxtime' : -1, 'tasknum' : tasknum }\n already_notified += 1\n lasttask = request['lasttask']\n\n dispatcher_socket.send_json( task_message )\n if maxtime < 0 :\n break\n\n # Now make sure all workers have received the shutdown message.\n\n shutdown = allworkers - already_notified\n\n if lasttask == 0 :\n # All tasks handed out before any completions received.\n # Have to assume all will complete.\n lasttask = tasknum\n\n if shutdown > 0 :\n task_message = { 'cmd' : \"FINI\", 'file' : \"None\",\n 'maxtime' : -1, 'tasknum' : tasknum }\n sys.stderr.write( \"Dispatcher:Shutdown:%d\\n\" % ( shutdown ) )\n sys.stderr.flush()\n\n # There is always a chance multiple assignments went out before\n # a timeout was received. 
All should sense time out as well,\n # so check for that when handling their final requests.\n\n for w in range( shutdown ):\n\n request = dispatcher_socket.recv_json()\n\n if request['maxtime'] < 0 :\n if request['lasttask'] < lasttask :\n lasttask = request['lasttask']\n\n dispatcher_socket.send_json( task_message )\n\n sys.stderr.write( \"Dispatcher:Last:%d\\n\" % ( lasttask ) )\n sys.stderr.flush()", "def connection_lost(self, exc: Optional[Exception]) -> None:\n self.listener.handle_connection_lost(exc)", "def test_zmq_execution_error_no_status(self, pool):\n s, c = self._create_zmq_execution_mocks(\n pool, valid=False, response=ZMQ_NO_STATUS)\n\n command = stellr.SelectCommand(TEST_ZMQ)\n command.add_param('fq', 'field:filter')\n try:\n data = command.execute()\n except stellr.StellrError as e:\n self.assertFalse(e.timeout)\n self.assertEqual(e.status, -1)\n self.assertEqual(e.url, '/select?wt=json&fq=field%3Afilter')\n self.assertEqual(e.body, None)\n self.assertEqual(e.response, ZMQ_NO_STATUS)\n return\n\n self.assertFalse(True, 'Error should have been raised')", "def connection_lost(self, exc: Optional[Exception]) -> None:\n if exc:\n logger.critical(f\"udp bridge lost its connection {exc}\")\n else:\n logger.info(\"udp connection stopped\")", "def run(self):\n while True:\n socks = select.select(self.sockets.values(), [], [], 0.1)[0]\n for conn in socks:\n try:\n k = conn.recv(65535)\n except:\n # either died on a connection reset, or was SIGTERM's by parent\n return\n if k:\n for sock in self.sockets:\n if self.sockets[sock] == conn:\n srcip = sock\n msg = json.loads(k)\n # TODO commented out below code because handled by handle packet\n #self.update(srcip, msg)\n #print(msg[TYPE])\n #print(json.dumps(msg, sort_keys=True, indent=4))\n if not self.handle_packet(srcip, msg):\n self.send_error(conn, msg)\n else:\n return", "def\tconnectionLost(self, reason):\n\t\tprint \"[:)] Connectionlost con reason: \", reason, self.addr, self.port", "def cleanup():\n if pool_dst.get(key, None) == handler:\n logging.info(\"Timeout client connection %s, cannot find match key=%s\",\n handler.name(), key)\n pool_dst.pop(key)\n handler.send_data(struct.pack('@i', RPC_MAGIC + 2))\n handler.signal_close()", "def _process(connection, process):\n try:\n command = connection.recv()\n except IOError as e:\n return \"Connection receive error: %s\" %(str(e))\n\n if command == __quit_command:\n try:\n connection.send(\"Exited server.\")\n finally:\n connection.close()\n return __quit_command\n\n #print \"Processing command\", command\n data = process(command)\n\n try:\n connection.send(data)\n except IOError as e:\n return \"Connection send error: %s\" %(str(e))\n\n connection.close()", "def server_close(self):\n\t\tpass", "def test_process_packet_ack(self):\n pkt = {'type': 'ack',\n 'ackId': 140,\n 'endpoint': '',\n 'args': []}\n self.ns.process_packet(pkt)\n assert not self.environ['socketio'].error.called", "def test_shutdown(self):\n server = self._server(None)\n server.bio_shutdown()\n with pytest.raises(Error) as err:\n server.recv(1024)\n # We don't want WantReadError or ZeroReturnError or anything - it's a\n # handshake failure.\n assert type(err.value) in [Error, SysCallError]", "def test_zero_msgs(self):\n msg = []\n self.dead_letter.handle_messages(msg)", "def _listen(self):\n if not self.is_connected:\n self.connect()\n\n while True:\n data = self.recv()\n ping = PING_RE.match(data)\n if ping:\n self.handle_ping(ping.group(1))\n else:\n result = self.handle_message(data)\n\n if result:\n 
print(result)\n\n time.sleep(1)", "def ReceiveTimeout(self) -> int:", "def ReceiveTimeout(self) -> int:", "def _try_receive(connection):\n result = connection.recv()\n if result == -2:\n # An exception has occurred on the other end\n e, tb_str = connection.recv()\n # The other end does not send an actual traceback object because these are\n # not picklable, but a string representation.\n logger.debug(\"%s\", tb_str)\n for child in multiprocessing.active_children():\n child.terminate()\n raise e\n return result", "def main(self):\n while True:\n # check for polling demand\n\n read_sockets, _, exception_sockets = select.select([self.socket], [], [self.socket], 0.5)\n # Iterate over notified sockets\n for notified_socket in read_sockets:\n # If notified socket is a server socket - new connection, accept it\n if notified_socket == self.socket:\n # Receive message\n message = receive_file(notified_socket, header_length=HEADER_LENGTH)\n # If False, client disconnected, cleanup\n if message is False:\n print(\"Server Crashed!\")\n self.connect_to_backup()\n break\n message = message[\"data\"].decode()\n if message == \"poll\":\n print(\"A poll is received from the server...\")\n while not self.q.empty():\n word = self.q.get()\n send_msg(self.socket, word, HEADER_LENGTH)\n print(\"The word \\'{}\\' was retrieved by the server.\".format(word))\n send_msg(self.socket, \"poll_end\", HEADER_LENGTH)\n\n if self.send_file_to_server:\n response = self.send_file()\n # in case file requested does not exist\n if not response:\n self.send_file_to_server = False\n continue\n print(\"Successfully uploaded file to the server.\")\n read_sockets, _, exception_sockets = select.select([self.socket], [], [self.socket])\n # Iterate over notified sockets\n for notified_socket in read_sockets:\n # If notified socket is a server socket - new connection, accept it\n if notified_socket == self.socket:\n # Receive message\n message = receive_file(notified_socket, header_length=HEADER_LENGTH)\n\n # If False, client disconnected, cleanup\n if message is False:\n if message is False:\n print(\"Server Crashed!\")\n self.connect_to_backup()\n break\n message = message[\"data\"].decode()\n path = \"client_files/\"\n filename = \"annotated_{}_{}.txt\".format(self.filename, self.username)\n save_file(message, path, filename)\n print(\"The spell check sequence has been completed.\")\n self.send_file_to_server = False", "def _CheckForIdleQuit(self):\n timeout = time.time() + self.idle_timeout_secs\n while time.time() < timeout:\n if self._shutdown_requested_event.is_set():\n # An external source called shutdown()\n return\n elif self._rpc_received_event.is_set():\n logging.debug('Resetting the idle timeout')\n timeout = time.time() + self.idle_timeout_secs\n self._rpc_received_event.clear()\n time.sleep(1)\n # We timed out, kill the server\n logging.warning('Shutting down the server due to the idle timeout')\n self.shutdown()", "def test_recv(self):\n Tout = self.instance.start_timeout()\n while ((not Tout.is_out)\n and (os.stat(self.tempfile).st_size == 0)): # pragma: debug\n self.instance.sleep()\n self.instance.stop_timeout()\n msg_flag, res = self.instance.recv(timeout=self.timeout)\n assert(msg_flag)\n assert(len(res) > 0)\n self.assert_equal_data_dict(res)", "def _idle_worker(self) -> bool:\n try:\n while True:\n try:\n message = self._queue_to_frontend.get_nowait()\n assert isinstance(message, Message)\n if ((\n self._queue_to_frontend.qsize() > self.frontendqueue_warn_length) and self.ready and not 
self.frontendqueue_warn_state):\n logger.warning(\n 'Too many messages (exactly {}) are waiting in the front-end queue for device {}.'.format(\n self._queue_to_frontend.qsize(), self.name))\n self.frontendqueue_warn_state = True\n elif (\n self._queue_to_frontend.qsize() < 0.75 * self.frontendqueue_warn_length) and self.frontendqueue_warn_state:\n logger.info(\n 'Number of messages waiting in the front-end queue for device {} is now below the limit.'.format(\n self.name))\n self.frontendqueue_warn_state = False\n except queue.Empty:\n break\n if message['type'] == 'exited':\n if not message['normaltermination']:\n # backend process died abnormally\n logger.error(\n 'Communication error in device ' + self.name + ', disconnecting.')\n logger.debug('Joining background process for ' + self.name)\n self._background_process.join()\n self._background_process = None\n # this must be here, since a 'disconnect' signal handler can attempt\n # to reinitialize the connection, thus after the emission of the signal,\n # we can expect that self._background_process and self._idle_handler carry\n # the new handlers.\n self._idle_handler.stop()\n self._idle_handler = None\n logger.debug('Emitting disconnect signal')\n self.emit('disconnect', not message['normaltermination'])\n logger.debug('Exiting the previous idle handler.')\n return False # prevent re-scheduling this idle handler\n elif message['type'] == 'ready':\n self._ready = True\n self.emit('ready')\n elif message['type'] == 'telemetry':\n self.emit('telemetry', message['data'])\n elif message['type'] == 'log':\n logger.handle(message['logrecord'])\n elif message['type'] == 'error':\n self.emit('error', message['variablename'],\n message['exception'], message['traceback'])\n elif message['type'] == 'update':\n try:\n self.emit('variable-change', message['name'], message['value'])\n finally:\n self._properties[message['name']] = message['value']\n self._timestamps[message['name']] = message['timestamp']\n else:\n raise ValueError(message['type'])\n except Exception as exc:\n logger.error('Error in the idle function for device {}: {} {}'.format(\n self.name, exc, traceback.format_exc()))\n return True # this is an idle function, we want to be called again.", "def connection_lost(self, exc):\n self.node.notify(Task(TaskType.CONN_LOST, exc))", "def test_rsp_invalid(self):\n\n def handle(event):\n return 0x0000\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(BasicFilmSession)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_DELETE, handle)]\n )\n\n ae.add_requested_context(BasicFilmSession)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n class DummyResponse:\n is_valid_response = False\n\n class DummyDIMSE:\n msg_queue = queue.Queue()\n gotten = False\n\n def send_msg(*args, **kwargs):\n return\n\n def get_msg(self, *args, **kwargs):\n if not self.gotten:\n self.gotten = True\n return None, DummyResponse()\n return None, None\n\n assoc._reactor_checkpoint.clear()\n while not assoc._is_paused:\n time.sleep(0.01)\n assoc.dimse = DummyDIMSE()\n status = assoc.send_n_delete(BasicFilmSession, \"1.2.840.10008.5.1.1.40.1\")\n assert status == Dataset()\n assert assoc.is_aborted\n\n scp.shutdown()", "def serve(self):\r\n self.channel.wait()\r\n handler, seq, obj = self._recv()\r\n if handler == \"result\":\r\n self.dispatch_result(seq, obj)\r\n elif handler == \"exception\":\r\n self.dispatch_exception(seq, obj)\r\n 
else:\r\n self.dispatch_request(handler, seq, obj)", "def awaitVerification(self):\r\n method = moduleName + '.' + self.className + '.' + 'awaitVerification'\r\n while True:\r\n try:\r\n self._stopevent.wait(self._sleepperiod)\r\n verification = self.localCommQueue.get_nowait()\r\n if verification == terminationVerificationMsg.COMMIT:\r\n #The parent AE agrees that we can shutdown. Terminate\r\n break\r\n elif verification == terminationVerificationMsg.ROLLBACK:\r\n #Roll back the termination\r\n raise Exceptions.WorkerThreadTerminationRollback()\r\n elif verification == terminationVerificationMsg.ERROR:\r\n errorMsg = \"Worker thread for landmark %s is improperly indexed\" %self.queueID\r\n Graph.logQ.put( [logType , logLevel.ERROR , method , errorMsg])\r\n raise Exceptions.WorkerThreadIndexError(errorMsg)\r\n else:\r\n #Should not happen\r\n errorMsg = \"Unexpected shutdown verification response for worker thread on landmark %s\" %self.queueID\r\n Graph.logQ.put( [logType , logLevel.ERROR , method , errorMsg])\r\n raise Exceptions.WorkerThreadIndexError(errorMsg)\r\n break\r\n except queue.Empty:\r\n pass\r\n except Exceptions.WorkerThreadTerminationRollback:\r\n raise Exceptions.WorkerThreadTerminationRollback()\r\n except Exception as e:\r\n errorMsg = \"Unexpected error during shutdown verification process for worker thread on landmark %s. Traceback= %s\" %(self.queueID, e)\r\n Graph.logQ.put( [logType , logLevel.ERROR , method , errorMsg])\r\n raise e", "def _check_comm_reply(self):\n if len(self._pending_comms) == 0:\n return\n for comm in self._pending_comms.values():\n self._notify_comm_ready(comm)\n self.kernel.io_loop.call_later(1, self._check_comm_reply)", "def connection_lost(self, exc):\n pass", "def on_shutdown(self):\n while self._queue:\n self._get()", "def _send_and_response(self, addr, msg):\n self._namefixer(msg)\n return send_and_receive(addr, msg, 30) # manual timeout !!!!! 
fix it!", "def _read_responses(self):\n try:\n while not self.shutting_down:\n try:\n raw_message = self.kernel_socket.recv()\n response_message = json_decode(utf8(raw_message))\n\n msg_id = KernelClient._get_msg_id(response_message, self.log)\n\n if msg_id not in self.response_queues:\n # this will happen when the msg_id is generated by the server\n self.response_queues[msg_id] = queue.Queue()\n\n # insert into queue\n self.log.debug(\"Inserting response for msg_id: {}, msg_type: {}\".\n format(msg_id, response_message['msg_type']))\n self.response_queues.get(msg_id).put_nowait(response_message)\n except BaseException as be1:\n if self.restarting: # If restarting, wait until restart has completed - which includes new socket\n i = 1\n while self.restarting:\n if i >= 10 and i % 2 == 0:\n self.log.debug(\"Still restarting after {} secs...\".format(i))\n time.sleep(1)\n i += 1\n continue\n raise be1\n\n except websocket.WebSocketConnectionClosedException:\n pass # websocket closure most likely due to shutdown\n\n except BaseException as be2:\n if not self.shutting_down:\n self.log.warning('Unexpected exception encountered ({})'.format(be2))\n\n self.log.debug('Response reader thread exiting...')", "def do_QUIT(self):\r\n self.send_response(200)\r\n self.end_headers()\r\n self.server.stop = True", "def watch_for_heartbeat_messages(self):\n while True:\n message = self.socket_manager.get_heartbeat_message()\n self.ensure_sender_is_known(message)\n if message.direction == \"0\":\n self.respond_to_heartbeat_message(message)\n elif message.direction == \"1\":\n self.handle_heartbeat_response(message)", "def test_recv(self):\n Tout = self.instance.start_timeout()\n while ((not Tout.is_out)\n and (os.stat(self.tempfile).st_size == 0)): # pragma: debug\n self.instance.sleep()\n self.instance.stop_timeout()\n msg_flag, res = self.instance.recv(timeout=self.timeout)\n assert(msg_flag)\n assert(len(res) > 0)\n nt.assert_equal(res, self.obj_dict)", "def test_listen_error(self):\n listen_port = self.get_port()\n config = Qdrouterd.Config([\n ('router', {'mode': 'standalone', 'id': 'bad'}),\n ('listener', {'port': listen_port, 'maxFrameSize': '2048', 'stripAnnotations': 'no'}),\n ('listener', {'port': listen_port, 'http':True})])\n r = Qdrouterd(name=\"expect_fail\", config=config, wait=False);\n self.assertEqual(1, r.wait())", "def handler(self):\n\t\tself.exitClient()", "def _is_running(self, _):\n if self._shutdown_event.is_set():\n raise RequestProcessingError(\n \"Unable to process message - currently shutting down\"\n )", "def workerProcess(self):\r\n\r\n if self.postForkCallback:\r\n self.postForkCallback()\r\n\r\n while self.isRunning.value == True:\r\n try:\r\n client = self.serverTransport.accept()\r\n self.serveClient(client)\r\n except (KeyboardInterrupt, SystemExit):\r\n return 0\r\n except Exception, x:\r\n logging.exception(x)", "def receive_zmq_send(self, zmq_host, zmq_rcv_port):\n\n interrupted = False\n while not interrupted:\n print ('waiting')\n msg = self.socket.recv_json()\n key = msg.get(\"key\")\n if key == \"end\":\n print ('end of data, closing connection')\n interrupted = True\n self.destroy()\n elif key == \"dim\":\n print('initializing dims')\n self.dims = (msg[\"dim_x\"], msg[\"dim_y\"])\n if self.num_sinograms > 0:\n if (self.beg_sinogram < 0) or (self.beg_sinogram + self.num_sinograms > self.dims[0]):\n raise Exception(\"Exceeds the sinogram boundary: {} vs. 
{}\".format(\n self.beg_sinogram + self.num_sinograms, self.dims[0]))\n self.beg_index = self.beg_sinogram * self.dims[1]\n self.end_index = self.beg_sinogram * self.dims[1] + self.num_sinograms * self.dims[1]\n elif key == \"image\":\n print('got msg')\n msg[\"receiving_timestamp\"] = time.time()\n dtype = msg[\"dtype\"]\n uniqueId = msg['image_number']\n theta = msg['theta']\n ver_result = msg['ver']\n\n\n img = np.frombuffer(self.socket.recv(), dtype=dtype)\n\n if self.num_sinograms != 0:\n img = img[self.beg_index: self.end_index]\n img = img.reshape((self.num_sinograms, self.dims[1]))\n else:\n img = img.reshape(self.dims)\n\n self.builder.Reset()\n serializer = TraceSerializer.ImageSerializer(self.builder)\n serialized_data = serializer.serialize(image=img, uniqueId=uniqueId,\n rotation=theta, seq=self.seq)\n self.publisher_socket.send(serialized_data)\n self.seq += 1\n\n else:\n pass\n\n print(\"Connection ended\")", "def run(self):\n while True:\n msg = self.recv()", "def _receive_message_loop(self):\n\n while True:\n try:\n message = self.connection_socket.recv(4096)\n if len(message) > 0:\n self.add_message_to_chat(message.decode('utf-8'))\n sleep(0.2)\n\n except ConnectionResetError:\n # messagebox.showerror(\"Client dropped\", \"The other person has dropped from the connection.\")\n self.root.destroy()", "async def unhandled_response(self, pkt, source):\n if False:\n yield None", "def dispecher(self):\r\n while True:\r\n connection, address = self._sockobj.accept()\r\n print('server connected by', address)\r\n print('at', self.now())\r\n thread.start_new(self.handleClient, (connection, address,))" ]
[ "0.6562494", "0.62199134", "0.62131953", "0.61594874", "0.61404806", "0.6073438", "0.6002306", "0.59552497", "0.5933235", "0.59127086", "0.5870852", "0.58468467", "0.5773297", "0.5767091", "0.5763371", "0.57514524", "0.5717791", "0.57169217", "0.5679771", "0.56770015", "0.56764233", "0.56656206", "0.5659198", "0.56323355", "0.55985117", "0.559015", "0.5590063", "0.55804837", "0.5579394", "0.5575713", "0.55717963", "0.5558772", "0.55535793", "0.5552", "0.55512315", "0.5544985", "0.5524176", "0.55167156", "0.55106807", "0.5507977", "0.55030876", "0.5499172", "0.54968345", "0.54968345", "0.549204", "0.54893947", "0.548829", "0.5487155", "0.54792464", "0.5477053", "0.5472197", "0.5467934", "0.5463406", "0.5451859", "0.54405594", "0.5439972", "0.5436273", "0.5435598", "0.5432323", "0.5422539", "0.5422235", "0.5421848", "0.5421281", "0.53989136", "0.53957003", "0.53920513", "0.5383804", "0.5383035", "0.5382613", "0.53745085", "0.537318", "0.53707147", "0.53603107", "0.53603107", "0.5358724", "0.53475857", "0.53457224", "0.53388375", "0.53354", "0.53329223", "0.533153", "0.5328659", "0.53271335", "0.531978", "0.53194803", "0.5316596", "0.53158736", "0.53120756", "0.53103095", "0.52901304", "0.5287528", "0.5285972", "0.52851164", "0.5280208", "0.527851", "0.5276763", "0.5275422", "0.52745795", "0.5274067", "0.5273426" ]
0.71370596
0
Create a recurring subscription.
def create_braintree():
    #TODO: plan_id and user_id should be unique, I should be updating
    # old plans if they get reactivated. Implement logic to check this in future.
    # otherwise you can end up with a bunch of unactive plans when you get the user.
    # Which may cause slowness for that user.

    # Set the subscription details.
    self.user_id = user.id
    self.plan_id = plan_id

    # Create the user account if none exists.
    if user.customer_id is None:
        result = braintree.Customer.create()
        if result.is_success:
            user.customer_id = result.customer.id
        else:
            return result

    # Create the payment method
    result = braintree.PaymentMethod.create({
        'customer_id': user.customer_id,
        'payment_method_nonce': payment_method_nonce,
        'options': {
            'verify_card': True
        }
    })

    if result.is_success:
        self.payment_method_token = result.payment_method.token
    else:
        return result

    # Create subscription
    result = braintree.Subscription.create({
        'payment_method_token': self.payment_method_token,
        'plan_id': plan_id
    })

    if result.is_success:
        self.subscription_id = result.subscription.id
        self.active = True
    else:
        return result

    # Commit to database
    db.session.add(user)
    db.session.add(self)
    db.session.commit()

    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def recurring_charge_subscription(\n subscription_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = RecurringChargeSubscription.create(\n subscription_id=subscription_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def create_subscription(connection, project_id, body, fields=None, error_msg=None):\n return connection.post(\n url=f'{connection.base_url}/api/subscriptions',\n params={'fields': fields},\n headers={'X-MSTR-ProjectID': project_id},\n json=body,\n )", "async def recurring_charge_subscription_async(\n subscription_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = RecurringChargeSubscription.create(\n subscription_id=subscription_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def create_subscription(self,\n body):\n\n return super().new_api_call_builder.request(\n RequestBuilder().server('default')\n .path('/v2/subscriptions')\n .http_method(HttpMethodEnum.POST)\n .header_param(Parameter()\n .key('Content-Type')\n .value('application/json'))\n .body_param(Parameter()\n .value(body))\n .header_param(Parameter()\n .key('accept')\n .value('application/json'))\n .body_serializer(APIHelper.json_serialize)\n .auth(Single('global'))\n ).response(\n ResponseHandler()\n .deserializer(APIHelper.json_deserialize)\n .is_api_response(True)\n .convertor(ApiResponse.create)\n ).execute()", "def post_create_subscription(\n self, response: pubsub.Subscription\n ) -> pubsub.Subscription:\n return response", "async def create_subscription(user: int, redis: RedisDB):\n subscription_data = {\n \"subscriber_id\": user.id,\n \"cost\": str(os.getenv(\"AMOUNT\")),\n \"currency\": \"NANO\",\n \"period\": int(os.getenv(\"PERIOD\"))\n }\n json_data = json.dumps(subscription_data)\n r = requests.post(f\"{os.getenv('API_ENDPOINT')}create_subscription?token={os.getenv('NR_TOKEN')}\", json_data)\n rx = r.json()\n await redis.set(user.id, rx['subscription_id'])\n return r.json()", "def add_subscription(self):\n schema = schemas.load(schemas.Subscription, self.request)\n subscription = self.customer.add_subscription(**schema)\n self.request.db.flush()\n self.request.response.status_int = 201\n return {'abonnement': subscription}", "def test_create_subscription(self):\n pass", "def create_subscription(self, user, standard):\r\n\r\n subscription = self.create(\r\n user=user,\r\n standard=standard,\r\n )\r\n\r\n return subscription", "def post(self):\n data = request.json\n return new_subscription(data=data)", "def create_subscription(self, device_type):\n url = '{}/v2/subscriptions'.format(self.url)\n device_type = device_type.split('.')[0]\n device_pattern = \"urn:ngsi-ld:{}:*\".format(device_type)\n description = \"Notify QuantumLeap with {}\".format(device_type)\n data = {\n \"description\": description,\n \"subject\": {\n \"entities\": [\n {\n \"idPattern\": device_pattern\n }\n ]\n },\n \"notification\": {\n \"http\": {\n \"url\": \"http://quantumleap:8668/v2/notify\"\n },\n \"metadata\": [\"dateCreated\", \"dateModified\"]\n },\n \"throttling\": 1\n }\n return self.post(url, data=json.dumps(data), 
headers=self.headers_json)", "def _InsertSubscription(self,\n id='python.gcal.test%40gmail.com'):\n print 'Subscribing to the calendar with ID: %s' % id\n calendar = gdata.calendar.data.CalendarEntry()\n calendar.id = atom.data.Id(text=id)\n returned_calendar = self.cal_client.InsertCalendarSubscription(calendar)\n return returned_calendar", "def handle_create(self):\n subscription = self.client().subscription(\n self.properties[self.QUEUE_NAME],\n subscriber=self.properties[self.SUBSCRIBER],\n ttl=self.properties[self.TTL],\n options=self.properties[self.OPTIONS]\n )\n self.resource_id_set(subscription.id)", "def _create_subscription(self):\n try:\n self.client.create_subscription(\n name=self.subscription_path, topic=self.topic_path\n )\n except NotFound:\n # suitable topic does not exist in the Pitt-Google project\n raise ValueError(\n (\n f\"A subscription named {self.subscription_name} does not exist\"\n \"in the Google Cloud Platform project \"\n f\"{settings.GOOGLE_CLOUD_PROJECT}, \"\n \"and one cannot be automatically create because Pitt-Google \"\n \"does not publish a public topic with the same name.\"\n )\n )\n else:\n self._log_and_print(f\"Created subscription: {self.subscription_path}\")", "def subscribePost() -> object:\n log = logging.getLogger(__name__)\n db = Db()\n\n body = request.get_json()\n\n if body is None:\n return jsonify({\"error\": \"json body is required\"}), HTTPStatus.HTTPStatus.BAD_REQUEST\n\n if not('datasetId') in body:\n return jsonify({\"error\": \"datasetId is a required attribute\"}), HTTPStatus.HTTPStatus.BAD_REQUEST\n\n if not('notificationUrl') in body:\n return jsonify({\"error\": \"notificationUrl is a required attribute\"}), HTTPStatus.HTTPStatus.BAD_REQUEST\n\n\n subscription = db.Subscriptions(\n datasetId=body['datasetId'],\n notificationUrl=body['notificationUrl']\n )\n\n subscription.save()\n\n subscription = json.loads(subscription.to_json())\n subscription['id'] = subscription['_id'][\"$oid\"]\n subscription.pop(\"_id\")\n log.debug(\"subscription created\")\n\n return jsonify(subscription), HTTPStatus.CREATED", "def create_subscription(chid, use_time=False, use_ctrl=False,\n mask=None, callback=None):\n mask = mask or DEFAULT_SUBSCRIPTION_MASK\n\n ftype = promote_type(chid, use_ctrl=use_ctrl, use_time=use_time)\n\n uarg = ctypes.py_object(callback)\n evid = ctypes.c_void_p()\n poll()\n ret = libca.ca_create_subscription(ftype, 0, chid, mask,\n _CB_EVENT, uarg, ctypes.byref(evid))\n PySEVCHK('create_subscription', ret)\n\n poll()\n return (_CB_EVENT, uarg, evid)", "def test_create_subscription(self):\n try:\n self.arb.create_subscription(\n trial_occurrences=4,\n interval_length=1,\n interval_unit=arb.MONTHS_INTERVAL,\n start_date=u\"2008-09-09\",\n amount=39.99,\n card_number=u\"4222222222222\",\n expiration_date=u\"2009-10\",\n bill_first_name=u\"Michael\",\n bill_last_name=u\"Pool\"\n )\n except KeyError:\n pass\n self.arb.create_subscription(\n trial_amount=5.00,\n trial_occurrences=4,\n interval_length=1,\n interval_unit=arb.MONTHS_INTERVAL,\n start_date=u\"2008-09-09\",\n amount=39.99,\n card_number=u\"4222222222222\",\n expiration_date=u\"2009-10\",\n bill_first_name=u\"Michael\",\n bill_last_name=u\"Pool\"\n )\n self.arb.create_subscription(\n trial_amount=5.00,\n trial_occurrences=4,\n interval_length=1,\n interval_unit=arb.MONTHS_INTERVAL,\n start_date=u\"2008-09-09\",\n amount=39.99,\n card_number=u\"4222222222222\",\n expiration_date=u\"2009-10\",\n ship_first_name=u\"valentino\",\n first_name=u\"valentino\",\n 
bill_first_name=u\"valentino\",\n bill_last_name=u\"Pool\",\n driver_number=u\"55555\",\n driver_state=u\"CA\",\n driver_birth=u\"1990-09-09\"\n )", "def create_subscription(post, user, sub_type=None, update=False):\n subs = Subscription.objects.filter(post=post.root, user=user)\n sub = subs.first()\n\n default = Subscription.TYPE_MAP.get(user.profile.message_prefs,\n Subscription.LOCAL_MESSAGE)\n\n empty = sub_type is None\n # Get the current sub type from what's given or the existing sub\n sub_type = None if empty else sub_type\n # No type has been given so default\n sub_type = sub_type or default\n\n # Ensure the sub type is not set to something wrote\n if sub and update:\n # Update an existing subscription\n sub.type = sub_type\n sub.save()\n else:\n # Drop all existing subscriptions for the user by default.\n subs.delete()\n Subscription.objects.create(post=post.root, user=user, type=sub_type)\n\n # Recompute subscription count\n subs_count = Subscription.objects.filter(post=post.root).exclude(type=Subscription.NO_MESSAGES).count()\n\n # Update root subscription counts.\n Post.objects.filter(pk=post.root.pk).update(subs_count=subs_count)", "async def create_subscription(self, installed_app_id: str, data: dict) -> dict:\r\n return await self.post(\r\n API_SUBSCRIPTIONS.format(installed_app_id=installed_app_id), data\r\n )", "def test_add_recurring_schedule(self):\n pass", "def public_subscribe_subscription(\n user_id: str,\n body: Optional[SubscribeRequest] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicSubscribeSubscription.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def create_subscription_in_snuba(query_subscription_id, **kwargs):\n try:\n subscription = QuerySubscription.objects.get(id=query_subscription_id)\n except QuerySubscription.DoesNotExist:\n metrics.incr(\"snuba.subscriptions.create.subscription_does_not_exist\")\n return\n if subscription.status != QuerySubscription.Status.CREATING.value:\n metrics.incr(\"snuba.subscriptions.create.incorrect_status\")\n return\n if subscription.subscription_id is not None:\n metrics.incr(\"snuba.subscriptions.create.already_created_in_snuba\")\n # This mostly shouldn't happen, but it's possible that a subscription can get\n # into this state. 
Just attempt to delete the existing subscription and then\n # create a new one.\n try:\n _delete_from_snuba(\n QueryDatasets(subscription.snuba_query.dataset), subscription.subscription_id\n )\n except SnubaError:\n logger.exception(\"Failed to delete subscription\")\n\n subscription_id = _create_in_snuba(subscription)\n subscription.update(\n status=QuerySubscription.Status.ACTIVE.value, subscription_id=subscription_id\n )", "def _InsertRecurringEvent(self, title='Weekly Tennis with Beth',\n content='Meet for a quick lesson', where='On the courts',\n recurrence_data=None):\n\n if recurrence_data is None:\n recurrence_data = ('DTSTART;VALUE=DATE:20070501\\r\\n'\n + 'DTEND;VALUE=DATE:20070502\\r\\n'\n + 'RRULE:FREQ=WEEKLY;BYDAY=Tu;UNTIL=20070904\\r\\n')\n\n new_event = self._InsertEvent(title, content, where,\n recurrence_data=recurrence_data, start_time=None, end_time=None)\n\n print 'New recurring event inserted: %s' % (new_event.id.text,)\n print '\\tEvent edit URL: %s' % (new_event.GetEditLink().href,)\n print '\\tEvent HTML URL: %s' % (new_event.GetHtmlLink().href,)\n\n return new_event", "async def public_subscribe_subscription_async(\n user_id: str,\n body: Optional[SubscribeRequest] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicSubscribeSubscription.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def do_create_subscription(csp: CloudProviderInterface, environment_id=None):\n environment = Environments.get(environment_id)\n payload = build_subscription_payload(environment)\n try:\n csp.create_subscription(payload)\n except GeneralCSPException as e:\n app.logger.warning(\n \"Unable to create subscription for environment %s.\", environment.id,\n )\n raise e", "def create_subscription(self, organization, collaborations, contractors):\r\n\r\n subscription = self.create(\r\n organization=organization,\r\n collaborations=collaborations,\r\n contractors=contractors,\r\n partner_discovery=partner_discovery,\r\n )\r\n return subscription", "async def create_and_subscribe(user_id):\n client = gql(\n query=Query,\n mutation=Mutation,\n subscription=Subscription,\n consumer_attrs={\"strict_ordering\": True, \"confirm_subscriptions\": True},\n )\n await client.connect_and_init()\n\n sub_id = await client.send(\n msg_type=\"start\",\n payload={\n \"query\": textwrap.dedent(\n \"\"\"\n subscription op_name($user_id: UserId) {\n on_chat_message_sent(user_id: $user_id) { event }\n }\n \"\"\"\n ),\n \"variables\": {\"user_id\": user_id},\n \"operationName\": \"op_name\",\n },\n )\n\n # Receive the subscription confirmation message.\n resp = await client.receive(assert_id=sub_id, assert_type=\"data\")\n assert resp == {\"data\": None}\n\n return sub_id, client", "def test_issue_add_subscription(self):\n pass", "def subscribe(self, **subscription_request):\n return self.subscribe_impl(mode='subscribe', **subscription_request)", "def __call__(\n self,\n request: pubsub.Subscription,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Optional[float] = None,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> pubsub.Subscription:\n\n http_options: List[Dict[str, str]] = [\n {\n \"method\": \"put\",\n \"uri\": \"/v1/{name=projects/*/subscriptions/*}\",\n \"body\": \"*\",\n },\n ]\n 
request, metadata = self._interceptor.pre_create_subscription(\n request, metadata\n )\n pb_request = pubsub.Subscription.pb(request)\n transcoded_request = path_template.transcode(http_options, pb_request)\n\n # Jsonify the request body\n\n body = json_format.MessageToJson(\n transcoded_request[\"body\"],\n including_default_value_fields=False,\n use_integers_for_enums=True,\n )\n uri = transcoded_request[\"uri\"]\n method = transcoded_request[\"method\"]\n\n # Jsonify the query params\n query_params = json.loads(\n json_format.MessageToJson(\n transcoded_request[\"query_params\"],\n including_default_value_fields=False,\n use_integers_for_enums=True,\n )\n )\n query_params.update(self._get_unset_required_fields(query_params))\n\n query_params[\"$alt\"] = \"json;enum-encoding=int\"\n\n # Send the request\n headers = dict(metadata)\n headers[\"Content-Type\"] = \"application/json\"\n response = getattr(self._session, method)(\n \"{host}{uri}\".format(host=self._host, uri=uri),\n timeout=timeout,\n headers=headers,\n params=rest_helpers.flatten_query_params(query_params, strict=True),\n data=body,\n )\n\n # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception\n # subclass.\n if response.status_code >= 400:\n raise core_exceptions.from_http_response(response)\n\n # Return the response\n resp = pubsub.Subscription()\n pb_resp = pubsub.Subscription.pb(resp)\n\n json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)\n resp = self._interceptor.post_create_subscription(resp)\n return resp", "def test_create_subscription_template(self):\n pass", "def create_pubsub_subscription(client, project, topic, name):\n topic_name = pubsub.topic_name(project, topic)\n full_name = pubsub.subscription_name(project, name)\n if client.get_subscription(full_name):\n return\n\n client.create_subscription(full_name, topic_name)", "def subscription(self, uuid):\r\n return subs.Subscription(self, uuid)", "def subscribe(self, request):\n email = self.cleaned_data.get('email')\n\n email_name, domain_part = email.rsplit('@', 1)\n domain_name = '@' + domain_part\n email_domain, created = Domain.objects.get_or_create(name=domain_name)\n\n subscriber, created = Subscriber.objects.get_or_create(email=email, mailing_list=self.mailing_list, defaults={\n 'domain': email_domain\n })\n subscriber.status = Status.PENDING\n subscriber.optin_ip_address = get_client_ip(request)\n subscriber.optin_date = timezone.now()\n subscriber.save()\n\n if not created:\n subscriber.tokens.filter(description='confirm_subscription').delete()\n\n token = subscriber.tokens.create(description='confirm_subscription')\n current_site = get_current_site(request)\n protocol = 'https' if request.is_secure() else 'http'\n domain = current_site.domain\n path = reverse('subscribers:confirm_double_optin_token', kwargs={\n 'mailing_list_uuid': self.mailing_list.uuid,\n 'token': token.text\n })\n confirm_link = '%s://%s%s' % (protocol, domain, path)\n\n confirm_email = self.mailing_list.get_confirm_email_template()\n confirm_email.send(subscriber.get_email(), {\n 'confirm_link': confirm_link\n })\n\n return subscriber", "def setup_subscription(subscription, info: GraphQLResolveInfo, variables, complete_on_error=False):\n excluded_field_nodes = filter_selection_set(info)\n variables = frappe._dict(variables)\n subscription_id = frappe.generate_hash(f\"{subscription}-{frappe.session.user}\", length=8)\n\n subscription_data = frappe._dict(\n subscribed_at=now_datetime(),\n last_ping=now_datetime(),\n 
variables=variables,\n subscription_id=subscription_id,\n selection_set=excluded_field_nodes,\n user=frappe.session.user,\n complete_on_error=complete_on_error\n )\n\n frappe.cache().hset(\n get_subscription_redis_key(subscription), subscription_id, subscription_data)\n\n return frappe._dict(\n subscription_id=subscription_id\n )", "def subscribe():\n form = SubscribeForm()\n if form.validate_on_submit():\n subscription = Subscription(email=form.email.data)\n db.session.add(subscription)\n db.session.commit()\n return redirect(url_for('main.index'))\n\n return render_template('subscribes.html', form=form)", "def create(self, validated_data):\n subscription = super().create(validated_data)\n subscription.send_verification_email()\n return subscription", "def post_get_subscription(\n self, response: pubsub.Subscription\n ) -> pubsub.Subscription:\n return response", "def charge_recurring(self, grace_period=None):\n pass", "def subscribe(request):\n address = request.POST.get('address')\n\n new_sub = Subscription(**{\n \"address\": address\n })\n new_sub.save()\n\n return HttpResponse(json.dumps({\n \"status\": \"success\"\n }, default=helpers.json_custom_parser), content_type='application/json')", "def platform_subscribe_subscription(\n user_id: str,\n body: Optional[PlatformSubscribeRequest] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PlatformSubscribeSubscription.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def subscription(bot, update):\n chat_id = update.message.chat_id\n bot.sendMessage(chat_id=chat_id, text=SUBSCRIPTION_MSG, parse_mode='markdown', \n disable_web_page_preview=True)\n \n mp.track(get_user_info(chat_id)['PID'], 'Checked Subscription')", "def create_subscription_if_not_exists(self):\n create_subscription_if_not_exists(self.project_id, self.topic_name, self.subscription_name)", "def subscribe(self, request: Request) -> Response:\n ids = request.data.get(\"ids\", None)\n session_id = request.data.get(\"session_id\")\n content_type = ContentType.objects.get_for_model(self.get_queryset().model)\n user = request.user if request.user.is_authenticated else get_anonymous_user()\n subscription = Subscription.objects.create(user=user, session_id=session_id)\n\n if ids is None:\n # Subscribe to the whole table.\n subscription.subscribe(\n content_type, [Observer.ALL_IDS], (ChangeType.CREATE, ChangeType.DELETE)\n )\n else:\n # Verify all ids exists and user has permissions to view them.\n for id in ids:\n if not self.user_has_permission(id, request.user):\n raise NotFound(f\"Item {id} does not exist\")\n\n change_types = (ChangeType.UPDATE, ChangeType.DELETE, ChangeType.CREATE)\n subscription.subscribe(content_type, ids, change_types)\n\n resp = {\"subscription_id\": subscription.subscription_id}\n return Response(resp)", "def subscribe(self, feed, **args):\n args.update(feed=feed)\n return self.fetch(\"/subscribe\", post_args=args)", "async def grant_days_to_subscription_async(\n subscription_id: str,\n user_id: str,\n body: Optional[GrantSubscriptionDaysRequest] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = 
GrantDaysToSubscription.create(\n subscription_id=subscription_id,\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def test_create_response_descriptor_subscriptions_subscription_subscription_resource(self):\n pass", "def subscribe():\n if request.method == 'POST':\n surname = request.form['surname']\n name = request.form['name']\n birthdate = request.form['birthdate']\n username = request.form['username']\n password = request.form['password']\n retype_password = request.form['retype-password']\n is_new_user_valid = create_account(name, surname, birthdate, username, password, retype_password)\n if is_new_user_valid == \"OK\":\n session_id = generate_session(username)\n return render_template('home.html', cars_list=get_cars_preview(), news_list=get_news_list(), user=username,\n session_id=session_id, authjs=False, user_added_correctly=True, preview_length=get_cars_preview().__len__())\n else:\n return render_template('sign_up.html', subscription_error=is_new_user_valid)", "async def platform_subscribe_subscription_async(\n user_id: str,\n body: Optional[PlatformSubscribeRequest] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PlatformSubscribeSubscription.create(\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def subscribe(self, *, other_subreddits: list[praw.models.Subreddit] | None = None):\n data = {\n \"action\": \"sub\",\n \"skip_inital_defaults\": True,\n \"sr_name\": self._subreddit_list(\n other_subreddits=other_subreddits, subreddit=self\n ),\n }\n self._reddit.post(API_PATH[\"subscribe\"], data=data)", "def _subscribe(self):\n self.subscribed = True\n self.subscribe_date = now()\n self.unsubscribed = False", "def create_subscriptionrequests(self, registration):\n self.l.info(\"Starting subscriptionrequest creation\")\n\n self.l.info(\"Calculating weeks\")\n weeks = 1 # default week number\n\n # . calculate weeks along\n if (registration.reg_type == \"momconnect_prebirth\" and\n registration.source.authority not in [\"hw_partial\", \"patient\"]):\n weeks = utils.get_pregnancy_week(utils.get_today(),\n registration.data[\"edd\"])\n\n elif registration.reg_type == \"pmtct_prebirth\":\n weeks = utils.get_pregnancy_week(utils.get_today(),\n registration.data[\"edd\"])\n\n elif registration.reg_type == \"pmtct_postbirth\":\n weeks = utils.get_baby_age(utils.get_today(),\n registration.data[\"baby_dob\"])\n\n # . determine messageset shortname\n self.l.info(\"Determining messageset shortname\")\n short_name = utils.get_messageset_short_name(\n registration.reg_type, registration.source.authority, weeks)\n\n # . 
determine sbm details\n self.l.info(\"Determining SBM details\")\n msgset_id, msgset_schedule, next_sequence_number =\\\n utils.get_messageset_schedule_sequence(\n short_name, weeks)\n\n subscription = {\n \"identity\": registration.registrant_id,\n \"messageset\": msgset_id,\n \"next_sequence_number\": next_sequence_number,\n \"lang\": registration.data[\"language\"],\n \"schedule\": msgset_schedule\n }\n self.l.info(\"Creating SubscriptionRequest object\")\n SubscriptionRequest.objects.create(**subscription)\n self.l.info(\"SubscriptionRequest created\")\n\n return \"SubscriptionRequest created\"", "def PostSubscription(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def subscription(self):\r\n return SubscriptionResource(self)", "def grant_days_to_subscription(\n subscription_id: str,\n user_id: str,\n body: Optional[GrantSubscriptionDaysRequest] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = GrantDaysToSubscription.create(\n subscription_id=subscription_id,\n user_id=user_id,\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def do_create(service,summary,description,startday,\\\n starttime,endtime,username,email):\n event = {\n 'summary': 'Code Clinic: {}'.format(summary),\n 'description': '{}.'.format(description),\n 'start': {\n 'dateTime': '{}T{}:00'.format(startday, starttime),\n 'timeZone': 'GMT+02',\n },\n 'end': {\n 'dateTime': '{}T{}:00'.format(startday,endtime),\n 'timeZone': 'GMT+02',\n },\n 'recurrence': [\n 'RRULE:FREQ=DAILY;COUNT=1'\n ],\n 'attendees': [\n {\n 'displayName': username,\n 'email': email,\n 'optional': True,\n 'comment': 'Creator',\n 'responseStatus': 'accepted',\n },\n ],\n 'anyoneCanAddSelf': True,\n\n 'reminders': {\n 'useDefault': False,\n 'overrides': [\n {'method': 'email', 'minutes': 24 * 60},\n {'method': 'popup', 'minutes': 10},\n ],\n },\n }\n\n event = service.events().insert(calendarId='primary', body=event,\\\n sendUpdates='all').execute()\n\n return event", "def CreateSubscribeTransaction(self, dest, once=False):\n c = Subscribe(dest, self.node_id, once)\n self.connections.append((\"REACTIVE\", c))\n return c", "def test_subscription(self):\n self.token_login()\n cassette_name = self.cassette_name(\"subscription\")\n with self.recorder.use_cassette(cassette_name):\n repository = self.gh.repository(\"sigmavirus24\", \"github3.py\")\n threads = list(repository.notifications(all=True))\n assert len(threads) > 0\n thread = threads[0]\n assert isinstance(thread, github3.notifications.Thread)\n assert isinstance(\n thread.subscription(),\n github3.notifications.ThreadSubscription,\n )", "def to_subscription_instance_10(self):\n delivery_params = None # TODO: Implement this\n poll_instances = None # TODO: Implement this\n\n si = tm10.SubscriptionInstance(subscription_id=str(self.subscription_id),\n delivery_parameters=delivery_params,\n poll_instances=poll_instances)\n return si", "def send_subscription(\n connection, subscription_id, project_id, body, fields=None, error_msg=None\n):\n return connection.get(\n url=f'{connection.base_url}/api/subscriptions/{subscription_id}/send',\n params={'fields': fields},\n headers={'X-MSTR-ProjectID': project_id},\n json=body,\n 
)", "def create_subscription(self, client_URI_endpoint, event_destination_id,\n name, subscription_context):\n self.client_URI_endpoints[client_URI_endpoint] = \\\n Event(event_destination_id, name, subscription_context)\n self.write_subscriptions_to_tmp(self.client_URI_endpoints)", "def get_subscription(self):\n return self.request({\n 'path': '/' + UUID + '/subscription'})", "def create(self, request, *args, **kwargs):\n subreddit_title = kwargs[\"sub_title\"]\n if subreddit_title.lower() in Sub.pseudo_subreddits.keys():\n message = _((\n \"You can't create a post to the \"\n \"'{}' subreddit\".format(subreddit_title)\n ))\n return Response(\n {\"detail\": message},\n status=status.HTTP_400_BAD_REQUEST\n )\n else:\n subreddit = Sub.objects.get(title=subreddit_title)\n user = self.request.user\n data = request.data.copy()\n data[\"subreddit\"] = subreddit.title\n data[\"authorsender\"] = user.username\n serializer = self.get_serializer(data=data)\n serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n headers = self.get_success_headers(serializer.data)\n return Response(\n serializer.data,\n status=status.HTTP_201_CREATED,\n headers=headers\n )", "def create_pending_survey_account(registration_data):\n logger.info(\"Attempting to create new account against share survey\")\n\n url = f\"{app.config['PARTY_URL']}/party-api/v1/pending-survey-respondent\"\n registration_data[\"status\"] = \"ACTIVE\"\n response = requests.post(url, auth=app.config[\"BASIC_AUTH\"], json=registration_data)\n\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError:\n if response.status_code == 400:\n logger.info(\"Email has already been used\")\n else:\n logger.error(\"Failed to create account\")\n raise ApiError(logger, response)\n\n logger.info(\"Successfully created account\")", "def test_update_subscription(self):\n args = dict(trial_amount=5.00,\n trial_occurrences=4,\n interval_length=1,\n interval_unit=arb.MONTHS_INTERVAL,\n start_date=u\"2008-09-09\",\n amount=39.99,\n card_number=u\"4222222222222\",\n expiration_date=u\"2009-10\",\n ship_first_name=u\"valentino\",\n first_name=u\"valentino\",\n bill_first_name=u\"valentino\",\n bill_last_name=u\"pool\",\n driver_number=u\"55555\",\n driver_state=u\"CA\",\n driver_birth=u\"1990-09-09\"\n )\n\n try:\n self.arb.update_subscription(**args)\n except KeyError:\n self.arb.update_subscription(subscription_id=u\"1234\", **args)", "def subject_create(context, values):\n return _subject_update(context, values, None, purge_props=False)", "def _subscribe(self, sub_type: str, sub_version: str, condition: dict, callback) -> str:\n self.__logger.debug(f'subscribe to {sub_type} version {sub_version} with condition {condition}')\n data = {\n 'type': sub_type,\n 'version': sub_version,\n 'condition': condition,\n 'transport': {\n 'method': 'webhook',\n 'callback': f'{self.callback_url}/callback',\n 'secret': self.secret\n }\n }\n r_data = self.__api_post_request(TWITCH_API_BASE_URL + 'eventsub/subscriptions', data=data)\n result = r_data.json()\n error = result.get('error')\n if r_data.status_code == 500:\n raise TwitchBackendException(error)\n if error is not None:\n if error.lower() == 'conflict':\n raise EventSubSubscriptionConflict(result.get('message', ''))\n raise EventSubSubscriptionError(result.get('message'))\n sub_id = result['data'][0]['id']\n self.__add_callback(sub_id, callback)\n if self.wait_for_subscription_confirm:\n timeout = datetime.datetime.utcnow() + datetime.timedelta(\n 
seconds=self.wait_for_subscription_confirm_timeout)\n while timeout >= datetime.datetime.utcnow():\n if self.__callbacks[sub_id]['active']:\n return sub_id\n asyncio.get_event_loop().run_until_complete(asyncio.sleep(0.01))\n self.__callbacks.pop(sub_id, None)\n raise EventSubSubscriptionTimeout()\n return sub_id", "def MakeSubscription(self, observedUser, www = True, im = False):\n\n if (www == False and im == False):\n return\n\n data = { \"subscription\" : { \"im\" : im, \"www\" : www } }\n self.__PutJson(\"/subscriptions/\"+observedUser, data)", "def post(self, request):\n serializer = WorkflowCollectionSubscriptionSummarySerializer(\n data=request.data, context={\"request\": request}\n )\n\n try:\n workflow_subscriptions = request.data[\n \"workflowcollectionsubscriptionschedule_set\"\n ]\n except KeyError:\n # If a schedule isn't passed through, we'll skip this part\n pass\n else:\n for schedule_set in workflow_subscriptions:\n # This try/except is converting the time that is being passed in\n # from the mobile app into UTC time.\n try:\n schedule_set[\"time_of_day\"] = convert_to_utc_time(\n schedule_set[\"time_of_day\"], \"%H:%M:%S%z\"\n )\n except ValueError:\n # If there is no offset specified, we want to use the time\n # that was passed in\n pass\n\n try:\n serializer.is_valid(raise_exception=True)\n\n except ValidationError as e:\n logger.error(\n \"Error Validating Workflow Collection Subscription\",\n exc_info=e,\n extra=generate_extra(\n request=request,\n serializer_errors=serializer.errors,\n ),\n )\n\n # Handle uniqueness constraint violation\n if (\n \"non_field_errors\" in serializer.errors\n and serializer.errors[\"non_field_errors\"][0].code == \"unique\"\n ):\n\n return Response(\n data={\"detail\": serializer.errors[\"non_field_errors\"][0]},\n status=status.HTTP_409_CONFLICT,\n )\n raise e\n else:\n instance: WorkflowCollectionSubscription = serializer.save()\n logger.info(\n \"User '%s' subscribed to workflow collection '%s'\",\n request.user,\n instance.workflow_collection,\n extra=generate_extra(\n event_code=\"WORKFLOW_COLLECTION_SUBSCRIPTION_CREATED\",\n user=request.user,\n workflow_collection=instance.workflow_collection,\n workflow_collection_subscription__active=instance.active,\n ),\n )\n return Response(serializer.data, status=status.HTTP_201_CREATED)", "def test_existing_subscriptions_autosubscription(self) -> None:\n stream_name = \"new_public_stream\"\n cordelia = self.example_user(\"cordelia\")\n self.common_subscribe_to_streams(cordelia, [stream_name], invite_only=False)\n result = self.client_post(\n \"/json/subscriptions/exists\", {\"stream\": stream_name, \"autosubscribe\": \"false\"}\n )\n response_dict = self.assert_json_success(result)\n self.assertIn(\"subscribed\", response_dict)\n self.assertFalse(response_dict[\"subscribed\"])\n\n result = self.client_post(\n \"/json/subscriptions/exists\", {\"stream\": stream_name, \"autosubscribe\": \"true\"}\n )\n response_dict = self.assert_json_success(result)\n self.assertIn(\"subscribed\", response_dict)\n self.assertTrue(response_dict)", "def _create_sub(name, rostype, topic_callback, *args, **kwargs):\n # counting subscriber instance per topic name\n if name in TopicBack.sub_instance_count.keys():\n TopicBack.sub_instance_count[name] += 1\n else:\n TopicBack.sub_instance_count[name] = 1\n\n return rospy.Subscriber(name, rostype, topic_callback, *args, **kwargs)", "def create_hosted_office(sub, pw): \r\n s1 = ims.hostedOfficeSubscriber(sub)\r\n session = {}\r\n session['emaSession'] = 
ema.emaLogin()\r\n session['sub_pw'] = pw # Get password from xls sheet and put here\r\n\r\n result = s1.subscriberCreate(session)\r\n ema.ema_logout(session['emaSession'])\r\n return result", "def test_get_subscription(self):\n pass", "def subscribe(self, group, user, reason=GroupSubscriptionReason.unknown):\n try:\n with transaction.atomic():\n self.create(\n user=user,\n group=group,\n project=group.project,\n is_active=True,\n reason=reason,\n )\n except IntegrityError:\n pass", "def getSubscription(uniq):\n return Subscription(Cuebot.getStub('subscription').Get(\n subscription_pb2.SubscriptionGetRequest(id=uniq), timeout=Cuebot.Timeout).subscription)", "def post_update_subscription(\n self, response: pubsub.Subscription\n ) -> pubsub.Subscription:\n return response", "def activate_subscription(**kwargs):\n sub, created = Subscription.objects.get_or_create(**kwargs)\n # check if it already existed and was deactivated\n if not created and not sub.active:\n sub.active = True\n sub.save()\n created = True\n return sub, created", "def add_subscription(self, query, price):\n session = Session()\n sub = self.cart.add_subscription(query, price)\n offers = session.search(query, self.lat, self.lon, self.radius)\n list(sub.handle_offers(offers))\n sub.check_offers()\n self.config_updated()", "def test_issue_subscriptions(self):\n pass", "async def subscribe(self, subscription: Subscription, reqid: int) -> SubStreamPrivate:\n # a simple request response API, unblocking.\n\n # Because subscribe is callable multiple times with the same subdata,\n # but this would trigger \"already subscribed\" error on kraken side\n\n chanpriv = private_subscribe(channel_name=subscription.name,\n loop=asyncio.get_running_loop())\n\n subdata = Subscribe(subscription=subscription, reqid=reqid)\n\n strdata = self.subscribe_schema.dumps(subdata)\n await self.connect(strdata)\n\n # retrieving all channel_ids for this subscription:\n\n self._streams[subdata] = SubStreamPrivate(channelprivate=chanpriv)\n\n # await subscription to be set before returning\n return await self._streams[subdata]\n # TODO : maybe context manager to cleanup the queue when we dont use it or unsubscribe ?", "def test_remove_recurring_schedule(self):\n pass", "def add_recurrences(session, event):\n start_time = strptime(event.start_time)\n end_time = strptime(event.end_time)\n\n if event.recurrence_rule == 'NORMAL':\n session.add(\n Recurrence(user_id=event.user_id, calendar_id=event.calendar_id, event_id=event.id, id=1, start_time=start_time,\n end_time=end_time))\n else:\n rec_rule = RRULE[event.recurrence_rule]['name']\n until = strptime(event.until)\n\n start_occurrences, end_occurrences = generate_occurrences(rec_rule, start_time, end_time, until)\n\n for i, (s_time, e_time) in enumerate(zip(start_occurrences, end_occurrences), 1):\n session.add(\n Recurrence(user_id=event.user_id, calendar_id=event.calendar_id, event_id=event.id, id=i, start_time=s_time,\n end_time=e_time))\n session.flush()", "def retrieve_or_create_recurring_donation_plan(voter_we_vote_id, donation_amount):\n recurring_donation_plan_id = voter_we_vote_id + \"-monthly-\" + str(donation_amount)\n # plan_name = donation_plan_id + \" Plan\"\n billing_interval = \"monthly\"\n currency = \"usd\"\n donation_plan_is_active = True\n exception_multiple_object_returned = False\n status = ''\n stripe_plan_id = ''\n success = False\n\n try:\n # the donation plan needs to exist in two places: our stripe account and our database\n # plans can be created here or in our stripe account 
dashboard\n donation_plan_query, is_new = DonationPlanDefinition.objects.get_or_create(\n donation_plan_id=recurring_donation_plan_id,\n plan_name=recurring_donation_plan_id,\n base_cost=donation_amount,\n billing_interval=billing_interval,\n currency=currency,\n donation_plan_is_active=donation_plan_is_active)\n if is_new:\n # if a donation plan is not found, we've added it to our database\n success = True\n status += 'SUBSCRIPTION_PLAN_CREATED_IN_DATABASE '\n else:\n # if it is found, do nothing - no need to update\n success = True\n status += 'DONATION_PLAN_ALREADY_EXISTS_IN_DATABASE '\n\n plan_id_query = stripe.Plan.retrieve(recurring_donation_plan_id)\n if positive_value_exists(plan_id_query.id):\n stripe_plan_id = plan_id_query.id\n logger.debug(\"Stripe, plan_id_query.id \" + plan_id_query.id)\n except DonationManager.MultipleObjectsReturned as e:\n handle_record_found_more_than_one_exception(e, logger=logger)\n success = False\n status += 'MULTIPLE_MATCHING_SUBSCRIPTION_PLANS_FOUND '\n exception_multiple_object_returned = True\n\n except stripe.error.StripeError:\n pass\n\n if not positive_value_exists(stripe_plan_id):\n # if plan doesn't exist in stripe, we need to create it (note it's already been created in database)\n plan = stripe.Plan.create(\n amount=donation_amount,\n interval=\"month\",\n currency=\"usd\",\n name=recurring_donation_plan_id,\n id=recurring_donation_plan_id,\n )\n if plan.id:\n success = True\n status += 'SUBSCRIPTION_PLAN_CREATED_IN_STRIPE '\n else:\n success = False\n status += 'SUBSCRIPTION_PLAN_NOT_CREATED_IN_STRIPE '\n results = {\n 'success': success,\n 'status': status,\n 'MultipleObjectsReturned': exception_multiple_object_returned,\n 'recurring_donation_plan_id': recurring_donation_plan_id,\n }\n return results", "def subscribe(self, req: SubscribeRequest):\n tick = TickData(\n symbol=req.symbol,\n exchange=req.exchange,\n name=req.symbol,\n datetime=datetime.now(),\n gateway_name=self.gateway_name,\n )\n self.ticks[req.symbol] = tick", "def test_register_subscription_existing_type(self):\n mock_type = Mock()\n bus = event_bus._event_bus\n bus._subscriptions[mock_type] = [\n EventSubscription(mock_type, lambda _: None)]\n new_subscription = EventSubscription(mock_type, lambda _: True)\n\n reg_id = event_bus.register_subscription(new_subscription)\n\n self.assertTrue(new_subscription in bus._subscriptions[mock_type])\n self.assertTrue(reg_id in bus._registration_id_map.keys())", "def make_instance(self, include_optional):\n # model = ICA_SDK.models.create_subscription_request.CreateSubscriptionRequest() # noqa: E501\n if include_optional :\n return CreateSubscriptionRequest(\n type = 'a', \n actions = [\n '0'\n ], \n name = 'a', \n description = '0', \n filter_expression = '0', \n delivery_target = ICA_SDK.models.delivery_target.DeliveryTarget(\n aws_sns_topic = ICA_SDK.models.delivery_target_aws_sns_topic.DeliveryTargetAwsSnsTopic(\n topic_arn = '0', ), \n aws_sqs_queue = ICA_SDK.models.delivery_target_aws_sqs_queue.DeliveryTargetAwsSqsQueue(\n queue_url = '0', ), \n workflow_run_launch = ICA_SDK.models.delivery_target_workflow_run_launch.DeliveryTargetWorkflowRunLaunch(\n id = '0', \n version = '0', \n name = '0', \n input = ICA_SDK.models.input.input(), ), )\n )\n else :\n return CreateSubscriptionRequest(\n type = 'a',\n name = 'a',\n delivery_target = ICA_SDK.models.delivery_target.DeliveryTarget(\n aws_sns_topic = ICA_SDK.models.delivery_target_aws_sns_topic.DeliveryTargetAwsSnsTopic(\n topic_arn = '0', ), \n aws_sqs_queue = 
ICA_SDK.models.delivery_target_aws_sqs_queue.DeliveryTargetAwsSqsQueue(\n queue_url = '0', ), \n workflow_run_launch = ICA_SDK.models.delivery_target_workflow_run_launch.DeliveryTargetWorkflowRunLaunch(\n id = '0', \n version = '0', \n name = '0', \n input = ICA_SDK.models.input.input(), ), ),\n )", "def subscribe(self, user_id):\n added_subscription = self.data_source.subscribe(user_id)\n\n return added_subscription", "def subscription(request):\n\n if request.method == 'POST':\n args = request.data\n\n try:\n user = User.objects.get(username=args.get('username'))\n return Response({'field': 'username', 'errorMsg': 'This user already exist'}, status=400)\n except User.DoesNotExist:\n # Create User\n user = User.objects.create_user(args.get('username'), args.get('email'), args.get('password'))\n user.save()\n # Create Account\n currency = Currency.objects.get(id=args.get('currency'))\n account = Account.objects.create(user=user, name=args.get('name'), currency=currency)\n\n token = Token.objects.get_or_create(user=user)\n # Log user with Token\n return Response({'code': 200, 'token': token[0].key}, status=200)", "def __call__(\n self,\n request: pubsub.GetSubscriptionRequest,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Optional[float] = None,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> pubsub.Subscription:\n\n http_options: List[Dict[str, str]] = [\n {\n \"method\": \"get\",\n \"uri\": \"/v1/{subscription=projects/*/subscriptions/*}\",\n },\n ]\n request, metadata = self._interceptor.pre_get_subscription(\n request, metadata\n )\n pb_request = pubsub.GetSubscriptionRequest.pb(request)\n transcoded_request = path_template.transcode(http_options, pb_request)\n\n uri = transcoded_request[\"uri\"]\n method = transcoded_request[\"method\"]\n\n # Jsonify the query params\n query_params = json.loads(\n json_format.MessageToJson(\n transcoded_request[\"query_params\"],\n including_default_value_fields=False,\n use_integers_for_enums=True,\n )\n )\n query_params.update(self._get_unset_required_fields(query_params))\n\n query_params[\"$alt\"] = \"json;enum-encoding=int\"\n\n # Send the request\n headers = dict(metadata)\n headers[\"Content-Type\"] = \"application/json\"\n response = getattr(self._session, method)(\n \"{host}{uri}\".format(host=self._host, uri=uri),\n timeout=timeout,\n headers=headers,\n params=rest_helpers.flatten_query_params(query_params, strict=True),\n )\n\n # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception\n # subclass.\n if response.status_code >= 400:\n raise core_exceptions.from_http_response(response)\n\n # Return the response\n resp = pubsub.Subscription()\n pb_resp = pubsub.Subscription.pb(resp)\n\n json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)\n resp = self._interceptor.post_get_subscription(resp)\n return resp", "def add_subscription(self, query):\n key = query.key()\n if key not in self.subscriptions:\n self.subscriptions += [key]\n self.put()", "def create_recurring_run(\n self,\n experiment_id: str,\n job_name: str,\n description: Optional[str] = None,\n start_time: Optional[str] = None,\n end_time: Optional[str] = None,\n interval_second: Optional[int] = None,\n cron_expression: Optional[str] = None,\n max_concurrency: Optional[int] = 1,\n no_catchup: Optional[bool] = None,\n params: Optional[dict] = None,\n pipeline_package_path: Optional[str] = None,\n pipeline_id: Optional[str] = None,\n version_id: Optional[str] = None,\n enabled: bool = True,\n enable_caching: 
Optional[bool] = None,\n service_account: Optional[str] = None,\n ) -> kfp_server_api.V1Job:\n\n job_config = self._create_job_config(\n experiment_id=experiment_id,\n params=params,\n pipeline_package_path=pipeline_package_path,\n pipeline_id=pipeline_id,\n version_id=version_id,\n enable_caching=enable_caching,\n )\n\n if all([interval_second, cron_expression\n ]) or not any([interval_second, cron_expression]):\n raise ValueError(\n 'Either interval_second or cron_expression is required')\n if interval_second is not None:\n trigger = kfp_server_api.models.V1Trigger(\n periodic_schedule=kfp_server_api.models.V1PeriodicSchedule(\n start_time=start_time,\n end_time=end_time,\n interval_second=interval_second))\n if cron_expression is not None:\n trigger = kfp_server_api.models.V1Trigger(\n cron_schedule=kfp_server_api.models.V1CronSchedule(\n start_time=start_time,\n end_time=end_time,\n cron=cron_expression))\n\n job_body = kfp_server_api.models.V1Job(\n enabled=enabled,\n pipeline_spec=job_config.spec,\n resource_references=job_config.resource_references,\n name=job_name,\n description=description,\n no_catchup=no_catchup,\n trigger=trigger,\n max_concurrency=max_concurrency,\n service_account=service_account)\n return self._job_api.create_job(body=job_body)", "def subscription_factory_fixture():\n def _factory(capability):\n sub = Subscription()\n sub.capability = capability\n return sub\n return _factory", "def create(self, request, *args, **kwargs):\n subreddit_title = kwargs[\"sub_title\"]\n # username = kwargs[\"username\"]\n username = self.kwargs.get('username', None)\n\n # if username.lower() in Sub.pseudo_subreddits:\n # qs = getattr(\n # self,\n # \"get_{}_queryset\".format(subreddit_title.lower())\n # )()\n\n # make sure the subreddit exists\n try:\n user = User.objects.get(username=username)\n except User.DoesNotExist:\n message = _(\"The '{}' user does not exist\".format(\n username\n ))\n raise exceptions.NotFound(message)\n\n if subreddit_title.lower() in Sub.pseudo_subreddits.keys():\n message = _((\n \"You can't create a post to the \"\n \"'{}' subreddit\".format(subreddit_title)\n ))\n return Response(\n {\"detail\": message},\n status=status.HTTP_400_BAD_REQUEST\n )\n else:\n subreddit = Sub.objects.get(title=subreddit_title)\n\n # user = self.request.user\n data = request.data.copy()\n data[\"subreddit\"] = subreddit.title\n data[\"authorsender\"] = user.username\n serializer = self.get_serializer(data=data)\n serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n headers = self.get_success_headers(serializer.data)\n return Response(\n serializer.data,\n status=status.HTTP_201_CREATED,\n headers=headers\n )", "def create_region_subscription(self, create_region_subscription_details, tenancy_id, **kwargs):\n resource_path = \"/tenancies/{tenancyId}/regionSubscriptions\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"opc_retry_token\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"create_region_subscription got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"tenancyId\": tenancy_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n 
header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"opc-retry-token\": kwargs.get(\"opc_retry_token\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n if not isinstance(retry_strategy, retry.NoneRetryStrategy):\n self.base_client.add_opc_retry_token_if_needed(header_params)\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=create_region_subscription_details,\n response_type=\"RegionSubscription\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=create_region_subscription_details,\n response_type=\"RegionSubscription\")", "def add_reminder():\n body = request.json\n try:\n contact_num = str(body['contact_number'])\n appt_dt = arrow.get(body['appointment_time'], \"YYYY-MM-DDTHH:mmZ\")\n notify_win = int(body['notify_window'])\n location = body.get('location', None)\n participant = body.get('participant', None)\n except (KeyError, ParserError):\n raise InvalidAPIUsage(\n (\"Required arguments: 'contact_number' (str), \"\n \"'appointment_time' (str) eg. '2016-01-01T13:00+02:00', \"\n \"'notify_window' (int)\"))\n else:\n Reminder.clean_expired()\n reminder = Reminder(contact_num, appt_dt, notify_win,\n location, participant)\n db_session.add(reminder)\n try:\n db_session.commit()\n except IntegrityError:\n msg = (\"Unable to create a new reminder. Duplicate \"\n \"contact_number {}.\".format(contact_num))\n log.error({\"message\": msg})\n return Response(json.dumps({\"message\": msg}), status=400,\n content_type=\"application/json\")\n else:\n try:\n send_reminder.apply_async(args=[reminder.id],\n eta=reminder.notify_sys_dt)\n except ConnectionError as e:\n log.critical({\"message\": \"unable to connect to redis\",\n \"exc\": type(e)})\n db_session.delete(reminder)\n db_session.commit()\n return Response(json.dumps(\n {\"message\": (\"Unable to create a new reminder.\"\n \" Redis is unreachable.\"),\n \"exc\": \"RedisConnectionError\"}),\n status=500, content_type=\"application/json\")\n\n msg = \"Successfully created a reminder with id {}.\".format(reminder.id)\n log.info({\"message\": msg})\n content = json.dumps({\"message\": msg, \"reminder_id\": reminder.id})\n return Response(content, status=200,\n content_type=\"application/json\")", "def do_charge_purchase_create(cs, args):\n purchase = cs.charge_purchases.create(args.purchase_name)\n utils.print_dict(purchase._info)", "def test_subscribe(mocker, api: API, account: Account, order, sku_id, activation):\n api.regnum.order.return_value = order\n api.activation.activate.return_value = activation\n assert account.subscribe(sku_id) == activation[\"id\"]\n api.regnum.order.assert_called_with(\n \"USERNAME\", sku_id, 1, date.today(), timedelta(days=365)\n )\n api.activation.activate.assert_called_with(\"USERNAME\", 5678, mocker.ANY, date.today())\n assert order in account.orders\n assert activation in account.activations", "def create_subject(name=\"Basket Weaving\"):\n subj = Subject(name=name)\n subj.save()\n return subj", "def test_update_subscription(self):\n pass", "def test_aws_service_api_validate_subscription_post(self):\n pass", 
"def add_tasmota_subscription(self, prefix, topic, detail, payload_type, bool_values=None, item=None, callback=None):\n tpc = self.full_topic.replace(\"%prefix%\", prefix)\n tpc = tpc.replace(\"%topic%\", topic)\n tpc += detail\n self.add_subscription(tpc, payload_type, bool_values=bool_values, callback=callback)" ]
[ "0.7562298", "0.72675866", "0.7255067", "0.6888839", "0.67131966", "0.66710687", "0.663895", "0.66039276", "0.6548797", "0.6534004", "0.6509307", "0.6500344", "0.64609605", "0.6345497", "0.6329807", "0.6320571", "0.629122", "0.627429", "0.62693095", "0.6210127", "0.6126169", "0.6094807", "0.6066244", "0.6058192", "0.60420936", "0.5905028", "0.58791494", "0.58694696", "0.5851868", "0.5833925", "0.58271146", "0.5763124", "0.57550997", "0.57465565", "0.56975925", "0.56970227", "0.5690568", "0.5659167", "0.5635145", "0.5596897", "0.5579235", "0.5563794", "0.55577946", "0.55571824", "0.5536101", "0.5524258", "0.55161846", "0.5484791", "0.54800314", "0.54707134", "0.54616284", "0.54557806", "0.54481226", "0.54470384", "0.54464775", "0.5443979", "0.54133505", "0.54126745", "0.5410286", "0.53780395", "0.5363074", "0.5340725", "0.53387094", "0.53356135", "0.5300754", "0.5283013", "0.5276517", "0.52671146", "0.5262717", "0.526111", "0.5257484", "0.52517223", "0.5232912", "0.52281386", "0.5192253", "0.5173698", "0.5160618", "0.51478016", "0.51476556", "0.5145459", "0.51380867", "0.51282305", "0.51226115", "0.51127124", "0.50853956", "0.50815666", "0.50694907", "0.5055116", "0.50533694", "0.5052904", "0.50510055", "0.5050555", "0.5038201", "0.5037489", "0.5025394", "0.50232506", "0.50167507", "0.5007314", "0.5005505", "0.49992126", "0.49937484" ]
0.0
-1
DEPRECATED FUNCTION PLEASE USE active field of subscription
def is_active(self): return self.active
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_subscription(self):\n pass", "def list_subscriptions(self):\n return {'abonnementen': self.customer.abonnementen}", "def activate_subscription(**kwargs):\n sub, created = Subscription.objects.get_or_create(**kwargs)\n # check if it already existed and was deactivated\n if not created and not sub.active:\n sub.active = True\n sub.save()\n created = True\n return sub, created", "def test_get_subscriptions(self):\n pass", "def verifysubscriptionstatusinaccounttab():\n pass", "def _subscribed(self, account_id):\n sql = \"\"\"SELECT 1 FROM hive_subscriptions\n WHERE community_id = :community_id\n AND account_id = :account_id\"\"\"\n return bool(DB.query_one(\n sql, community_id=self.community_id, account_id=account_id))", "def _update_on_active(self):\n pass", "def test_update_subscription(self):\n pass", "def verifysubscriptioninhomedevicestatus(sub):\n try:\n if \"Subscription Active\" in sub:\n print \" Hi chetan You have Active subscription\"\n else:\n print \" your subscription is not active \"\n except Exception as er:\n print(\"not able to get subscription details\")\n return False", "def test_process_subscriptions(self):\n pass", "def _subscribe(self):\n self.subscribed = True\n self.subscribe_date = now()\n self.unsubscribed = False", "def subscription(bot, update):\n chat_id = update.message.chat_id\n bot.sendMessage(chat_id=chat_id, text=SUBSCRIPTION_MSG, parse_mode='markdown', \n disable_web_page_preview=True)\n \n mp.track(get_user_info(chat_id)['PID'], 'Checked Subscription')", "def test_create_subscription(self):\n pass", "def retrieveAlcaSubscription():\n if GlobalValues._alcaSubscription == None:\n # This method will set subscription name from config\n alcaNewSelection()\n GlobalValues._alcaSubscription = \\\n _getSubscription(Workflow(spec = \"FileAlcaSkim\", \n owner = \"CMSTier0\",\n name = \"FileAlcaSkim\"),\n Fileset( name = GlobalValues._alcaSubName )\n ) \n \n return GlobalValues._alcaSubscription", "def _callback_active(self, chat_id, user_id, args, update):\n msg_id = update[\"callback_query\"][\"message\"][\"message_id\"]\n \n if len(args) == 3 and args[1] == str(user_id):\n if args[2] == 'none':\n self.db.set_active_spruch(user_id)\n self.tclient.edit_message_text('Es ist nun kein Spruch mehr aktiv.', chat_id, msg_id)\n else:\n self.db.set_active_spruch(args[1], args[2])\n self.tclient.edit_message_text('Aktiver Spruch wurde geändert.', chat_id, msg_id)", "def test_issue_subscriptions(self):\n pass", "def get_first_active_subscription(self):\n if self.has_active_subscription():\n return self.subscriptions.filter(active=True)[0]\n else:\n return None", "def _update_on_active(self):\n if self.user:\n self.set_user(self.user.name, self.user.balance, self.user.credit)\n else:\n self.set_unknown_user()\n\n for product in self.owner.products:\n self.on_scan(product)", "def getActiveCurrency():", "def get_subscription(self):\n if not hasattr(self, '_subscription'):\n self._subscription = self.admin.subscriptions.select_related('plan').get_overlapping(\n self.admin_id, DateRange(self.period, self.period_end, bounds='[]'))\n return self._subscription", "def get_subscription(self, article: BeautifulSoup):\n if self.parsing_template.subscription and article.select_one(self.parsing_template.subscription):\n return True\n return False", "def _subscribe(self):\n if self.subscribed:\n return False\n return {}", "def get_subscription(self):\n return self.request({\n 'path': '/' + UUID + '/subscription'})", "def test_request_cancel_active_subscription(self):\n 
self.braintree_customer.subscription_id = \"1234\"\n self.braintree_customer.pending_cancel = False\n self.braintree_customer.save()\n self.assertTrue(SubscriptionManager.request_cancel(self.braintree_customer))\n self.assertTrue(self.braintree_customer.pending_cancel)", "def confirm_subscription(self, sub_code):\n\t\tresult = {}\n\t\tconnection = DbHelper.connect()\n\t\tsub_id = 0\n\t\t# print(\"Subscription Code: \" + sub_code)\n\n\t\ttry:\n\t\t\twith connection.cursor() as cursor:\n\t\t\t\tsql = \"SELECT * FROM mail_list \\\n\t\t\t\t\t WHERE email_hash=%s;\"\n\t\t\t\tcursor.execute(sql, [sub_code])\n\t\t\t\tresult = cursor.fetchone()\n\t\t\t\t\n\t\t\t\tif not result:\n\t\t\t\t\tconnection.close()\n\t\t\t\t\treturn \"CODE_DOES_NOT_EXIST\"\n\t\t\t\t\n\t\t\t\telif result['is_activated']:\n\t\t\t\t\tconnection.close()\n\t\t\t\t\treturn \"CODE_ALREADY_ACTIVATED\"\n\n\t\t\t\tsub_id = result['sub_id']\n\n\t\t\t\tsql = \"UPDATE mail_list \\\n\t\t\t\t\t SET is_activated=is_activated+1 \\\n\t\t\t\t\t WHERE sub_id=%s;\"\n\t\t\t\tcursor.execute(sql, [sub_id])\n\t\t\t\tconnection.commit()\n\t\t\t\tconnection.close()\n\t\t\t\treturn result\n\t\texcept pymysql.MySQLError as e:\n\t\t\tconnection.close()\n\t\t\treturn \"DATABASE_ERROR\"", "def test_get_subscription_template(self):\n pass", "def subscription(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"subscription\")", "def subscription(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"subscription\")", "def no_save_individual_subscription(sender, instance, **kwargs):\n try:\n Subscription.objects.get(pk=instance.pk) # looking if the subscription exist, if the case, we assume here is updating active status or email status\n except:\n if instance.user is not None:\n subs_ids = instance.user.groups.values_list('subscription')\n for sub in subs_ids:\n if None not in sub:\n alarm = Subscription.objects.get(id=sub[0]).alarm\n if alarm == instance.alarm:\n raise ValidationError('The user is subscribed to the same alarm for a group')\n\n subs = Subscription.objects.filter(user=instance.user)\n for sub in subs:\n if sub.alarm == instance.alarm:\n raise ValidationError('The user is subscribed to this alarm')", "def add_subscription(self):\n schema = schemas.load(schemas.Subscription, self.request)\n subscription = self.customer.add_subscription(**schema)\n self.request.db.flush()\n self.request.response.status_int = 201\n return {'abonnement': subscription}", "def change_plan(request):\n\n data = request.data\n\n start_date = datetime.datetime.now().strftime(\"%c\")\n end_date = end_date = (datetime.datetime.now() + datetime.timedelta(30)).strftime(\"%x\")\n \n # print(data[\"subscription_plan\"])\n \n try: \n user = User.objects.get(email=request.user) \n customer = Customer.objects.get(user=user)\n subscription_plan = SubscriptionPlan.objects.get(subscription_plan_name=data[\"subscription_plan\"])\n\n if customer.is_subscribe:\n stripe.Subscription.delete(\n customer.subscription_id,\n ) \n\n plan_id = \"price_1JsHMxSDkRo5FXlkOsq2QHSV\"\n\n if data[\"subscription_plan\"]== \"Globalnet Silver\":\n plan_id = \"price_1JsHOJSDkRo5FXlkQmfEQzhN\"\n \n if data[\"subscription_plan\"]== \"Globalnet Gold\":\n plan_id = \"price_1JsHPFSDkRo5FXlk9VSl41rV\"\n\n # Create new stripe subscription\n subscription = stripe.Subscription.create(\n customer = customer.stripe_id,\n items = [{'plan':plan_id}]\n ) \n \n # Update SubscriptionData \n subscription_user_data = SubscriptionData.objects.filter(subscriber=customer.primary_number) \n for data_subscriber in 
subscription_user_data:\n if(data_subscriber.subscription_start == customer.start_date):\n data_subscriber.subscription_end = start_date \n data_subscriber.save() \n break \n \n \n # Change subscription plan info\n customer.subscription_plan = subscription_plan\n customer.start_date = start_date\n customer.end_date = end_date\n customer.subscription_id = subscription.id\n customer.is_subscribe = True\n customer.save()\n \n # Create new subscription data \n SubscriptionData.objects.create(\n subscriber = customer.primary_number,\n subscription = subscription_plan.subscription_plan_name,\n subscription_start = start_date,\n subscription_end = end_date \n \n )\n \n serializer= CustomerSerializer(customer,many=False)\n \n return Response(serializer.data)\n \n except Exception as e: \n message = {\"Error\":str(e)}\n return Response(message)", "def test_subscribe_no_previous_subscription(self):\n self.braintree_customer.payment_method_token = 'valid_payment_token'\n SubscriptionManager.subscribe(self.braintree_customer)\n\n # fetch customer again\n self.braintree_customer = BraintreeUser.objects.get(user=self.user)\n self.assertTrue(self.braintree_customer.active)\n self.assertFalse(self.braintree_customer.pending_cancel)\n self.assertEqual(self.braintree_customer.expiry_date, timezone.make_aware(datetime.datetime(2017,7,25,0,0),pytz.utc))", "def subscription(self, uuid):\r\n return subs.Subscription(self, uuid)", "def subscription(self):\n return self._subscription", "def subscription(self):\n return self._subscription", "def clean_subscriptions(self):\n cleaned_data = super(SignupSubscriptionForm, self).clean() or self.cleaned_data\n checked = 0\n for key, value in cleaned_data.items():\n if key.startswith('subscription') and value:\n checked += 1\n if checked > 0:\n return cleaned_data\n else:\n raise ValidationError(self.unchecked_error)", "def test_change_payment_method_active_subscription_all_success(self):\n self.braintree_customer.active = True\n self.braintree_customer.subscription_id = \"SubscriptionID\"\n\n # Declare server response for change on subscription\n when(braintree.Subscription).update(\"SubscriptionID\",{\n 'payment_method_token' : self.create_payment_success_response.payment_method.token\n }).thenReturn(self.change_payment_method_success)\n\n # Call method under test\n self.assertTrue(SubscriptionManager.change_payment_method(self.braintree_customer,self.valid_payment_nonce))\n\n # Check customers payment method changed\n self.assertEqual(self.create_payment_success_response.payment_method.token,\n self.braintree_customer.payment_method_token)\n\n # Verifications\n verify(braintree.PaymentMethod).create({\n 'customer_id' : \"12345\",\n 'payment_method_nonce' : self.valid_payment_nonce,\n 'options': {\n 'make_default' : True\n }\n })\n\n verify(braintree.Subscription).update(\"SubscriptionID\",{\n 'payment_method_token' : self.create_payment_success_response.payment_method.token\n })", "def on_subscribe( client, userdata, mid, granted_qos ):\n logging.info( \"Topic successfully subcribed with QoS: %s\" %granted_qos )", "def _get_active(self):\n return self.__active", "def _get_active(self):\n return self.__active", "def _get_active(self):\n return self.__active", "def _get_active(self):\n return self.__active", "def test_subscribe(mocker, api: API, account: Account, order, sku_id, activation):\n api.regnum.order.return_value = order\n api.activation.activate.return_value = activation\n assert account.subscribe(sku_id) == activation[\"id\"]\n 
api.regnum.order.assert_called_with(\n \"USERNAME\", sku_id, 1, date.today(), timedelta(days=365)\n )\n api.activation.activate.assert_called_with(\"USERNAME\", 5678, mocker.ANY, date.today())\n assert order in account.orders\n assert activation in account.activations", "def test_issue_add_subscription(self):\n pass", "def test_get_template_subscription(self):\n pass", "async def status(ctx):\n redis = await RedisDB.create()\n user = ctx.message.author\n try:\n subscription_id = await get_subscription_id(user, redis)\n\n if subscription_id is None:\n subscription_json = await create_subscription(user, redis)\n # There is no active indicator returned on a create user call - add it here to prevent issues.\n subscription_json['active'] = False\n else:\n subscription_json = verify_subscription(subscription_id)\n\n await send_status_message(user, subscription_json)\n\n except Exception as e:\n await user.send(\n \"There was an unexpected error during checking the status of your subscription.\\n\"\n \"Please contact the Nano Center Ambassadors for more information.\"\n )\n raise e\n finally:\n await redis.close()", "def test_update_subscription_template(self):\n pass", "def subscription_attributes_tag(self) :\n self._curattributes = self._subscription_attributes\n return self.parseTag()", "def subscribe(self, request):\n email = self.cleaned_data.get('email')\n\n email_name, domain_part = email.rsplit('@', 1)\n domain_name = '@' + domain_part\n email_domain, created = Domain.objects.get_or_create(name=domain_name)\n\n subscriber, created = Subscriber.objects.get_or_create(email=email, mailing_list=self.mailing_list, defaults={\n 'domain': email_domain\n })\n subscriber.status = Status.PENDING\n subscriber.optin_ip_address = get_client_ip(request)\n subscriber.optin_date = timezone.now()\n subscriber.save()\n\n if not created:\n subscriber.tokens.filter(description='confirm_subscription').delete()\n\n token = subscriber.tokens.create(description='confirm_subscription')\n current_site = get_current_site(request)\n protocol = 'https' if request.is_secure() else 'http'\n domain = current_site.domain\n path = reverse('subscribers:confirm_double_optin_token', kwargs={\n 'mailing_list_uuid': self.mailing_list.uuid,\n 'token': token.text\n })\n confirm_link = '%s://%s%s' % (protocol, domain, path)\n\n confirm_email = self.mailing_list.get_confirm_email_template()\n confirm_email.send(subscriber.get_email(), {\n 'confirm_link': confirm_link\n })\n\n return subscriber", "def test_list_pending_template_subscriptions(self):\n pass", "def has_active_subscription(self, count=False):\n subs = self.subscriptions.filter(active=True)\n return subs.exists() if count is False else subs.count()", "def subscribed(cls, team):\n return cls.query(\n cls.status == 'subscribe',\n cls.team == team.lower()\n ).fetch(100)", "def is_subscribed(user_id, profile_user_id):\n\n subscription = Subscription.query.filter(\n Subscription.user_id == user_id,\n Subscription.subscribe_to_id == profile_user_id\n ).first()\n print(\"IS SUBSCRIBED\")\n print(subscription)\n print(subscription is not None)\n return subscription is not None", "def subscriber_pending(args):\n\n\tclass ActiveMqSubscriberPendingContext(np.ScalarContext):\n\t\tdef evaluate(self, metric, resource):\n\t\t\tif metric.value < 0:\n\t\t\t\treturn self.result_cls(np.Critical, metric=metric)\n\t\t\treturn super(ActiveMqSubscriberPendingContext, self).evaluate(metric, resource)\n\t\tdef describe(self, metric):\n\t\t\tif metric.value < 0:\n\t\t\t\treturn 'ERROR: ' + 
metric.name\n\t\t\treturn super(ActiveMqSubscriberPendingContext, self).describe(metric)\n\n\tclass ActiveMqSubscriberPending(np.Resource):\n\t\tdef probe(self):\n\t\t\ttry:\n\t\t\t\tresp = loadJson(query_url(args))\n\t\t\t\tsubs = (resp['value']['TopicSubscribers'] +\n\t\t\t\t resp['value']['InactiveDurableTopicSubscribers'])\n\t\t\t\tfor sub in subs:\n\t\t\t\t\tqJ = loadJson(make_url(args, sub['objectName']))['value']\n\t\t\t\t\tif not qJ['SubscriptionName'] == args.subscription:\n\t\t\t\t\t\tcontinue # skip subscriber\n\t\t\t\t\tif not qJ['ClientId'] == args.clientId:\n\t\t\t\t\t\t# When this if is entered, we have found the correct\n\t\t\t\t\t\t# subscription, but the clientId doesn't match\n\t\t\t\t\t\treturn np.Metric('ClientId error: Expected: %s. Got: %s'\n\t\t\t\t\t\t % (args.clientId, qJ['ClientId']),\n\t\t\t\t\t\t -1, context='subscriber_pending')\n\t\t\t\t\treturn np.Metric('Pending Messages for %s' % qJ['SubscriptionName'],\n\t\t\t\t\t qJ['PendingQueueSize'], min=0,\n\t\t\t\t\t context='subscriber_pending')\n\t\t\texcept IOError as e:\n\t\t\t\treturn np.Metric('Fetching network FAILED: ' + str(e), -1, context='subscriber_pending')\n\t\t\texcept ValueError as e:\n\t\t\t\treturn np.Metric('Decoding Json FAILED: ' + str(e), -1, context='subscriber_pending')\n\t\t\texcept KeyError as e:\n\t\t\t\treturn np.Metric('Getting Subscriber FAILED: ' + str(e), -1, context='subscriber_pending')\n\n\tnp.Check(\n\t\tActiveMqSubscriberPending(),\n\t\tActiveMqSubscriberPendingContext('subscriber_pending', args.warn, args.crit),\n\t).main(timeout=get_timeout())", "def test_subscribe_already_subscribed(self):\n self.braintree_customer.active = True\n self.braintree_customer.save()\n with self.assertRaises(BraintreeError):\n SubscriptionManager.subscribe(self.braintree_customer)\n\n # Check state not altered\n self.assertTrue(self.braintree_customer.active)\n self.assertFalse(self.braintree_customer.pending_cancel)\n self.assertIsNone(self.braintree_customer.expiry_date)", "def isActive(self):\n pass", "def get_subscriptions(self):\n return {}", "def test_no_credit_change_for_same_subscription(self):\n credits = 20\n\n current_plan = Subscription.get_plan_by_id('pro')\n new_plan = Subscription.get_plan_by_id('pro')\n\n may_29_2015 = datetime.datetime(2015, 5, 29, 0, 0, 0)\n may_29_2015 = pytz.utc.localize(may_29_2015)\n\n credits = add_subscription_credits(credits, current_plan, new_plan,\n may_29_2015)\n\n assert credits == 20", "def _is_active_subscription(self, topic: str) -> bool:\n return topic in self._simple_subscriptions or any(\n other.topic == topic for other in self._wildcard_subscriptions\n )", "def is_active(self) -> bool:", "def active_tu(active):\n active = list(active)\n t_units = list({tu for gene in active for tu in gene.transcription_units})\n return t_units", "def test_list_template_subscriptions(self):\n pass", "def getActiveCurrencies():", "def read_sdpLinkActive(self):\n # PROTECTED REGION ID(CspSubElementSubarray.sdpLinkActive_read) ENABLED START #\n return (False,)\n # PROTECTED REGION END # // CspSubElementSubarray.sdpLinkActive_read", "def test_index_response_descriptor_subscriptions_subscription_subscription_resource(self):\n pass", "def post_get_subscription(\n self, response: pubsub.Subscription\n ) -> pubsub.Subscription:\n return response", "def test_update_subscription(self):\n args = dict(trial_amount=5.00,\n trial_occurrences=4,\n interval_length=1,\n interval_unit=arb.MONTHS_INTERVAL,\n start_date=u\"2008-09-09\",\n amount=39.99,\n 
card_number=u\"4222222222222\",\n expiration_date=u\"2009-10\",\n ship_first_name=u\"valentino\",\n first_name=u\"valentino\",\n bill_first_name=u\"valentino\",\n bill_last_name=u\"pool\",\n driver_number=u\"55555\",\n driver_state=u\"CA\",\n driver_birth=u\"1990-09-09\"\n )\n\n try:\n self.arb.update_subscription(**args)\n except KeyError:\n self.arb.update_subscription(subscription_id=u\"1234\", **args)", "def _pause_subscription(self):\n return {}", "def retrieveExpressDBSSubscription():\n if GlobalValues._expressDBSSubscription == None:\n GlobalValues._expressDBSSubscription = \\\n _getSubscription(Workflow(spec = \"ExpressDBSUpload\", \n owner = \"CMSTier0\",\n name = \"ExpressDBSUpload\"),\n Fileset(name = \"ExpressDBSUploadable\")\n )\n \n return GlobalValues._expressDBSSubscription", "def purchased_callback(self):\r\n raise NotImplementedError", "def setup_subscription(subscription, info: GraphQLResolveInfo, variables, complete_on_error=False):\n excluded_field_nodes = filter_selection_set(info)\n variables = frappe._dict(variables)\n subscription_id = frappe.generate_hash(f\"{subscription}-{frappe.session.user}\", length=8)\n\n subscription_data = frappe._dict(\n subscribed_at=now_datetime(),\n last_ping=now_datetime(),\n variables=variables,\n subscription_id=subscription_id,\n selection_set=excluded_field_nodes,\n user=frappe.session.user,\n complete_on_error=complete_on_error\n )\n\n frappe.cache().hset(\n get_subscription_redis_key(subscription), subscription_id, subscription_data)\n\n return frappe._dict(\n subscription_id=subscription_id\n )", "def toggle_subscription(self):\n user = self.context['request'].user\n # pylint: disable=no-member\n profile = UserProfile.objects.get(\n user=user)\n club = self.context['club']\n\n if club in profile.subscriptions.all():\n club.subscribed_users.remove(profile)\n else:\n club.subscribed_users.add(profile)", "def test_renew_attempt_on_active_subscription(self):\n self.braintree_customer.pending_cancel = False\n self.braintree_customer.active = True\n\n with self.assertRaises(BraintreeError):\n SubscriptionManager.renew(self.braintree_customer)", "def subscribe_user_trades(self, update_handler):\n pass", "def subscriptions(id='None'):\n\trows = mongo_data({}, [\"publisher_id\",\"dt_hour\", \"new_subs\"],\"subscribers\")\n\t#returns [{_id:...,field1:...,field2:...}]\n\n\n\tCOLS = [\"publisher_id\", \"dt_hour\", \"new subs\"]\n\tROWS = [[y[\"publisher_id\"],y[\"dt_hour\"],y[\"new_subs\"]] for y in rows]\n\n\tTITLE = 'SUBSCRIPTIONS'\n\n\treturn render_template(\"simple_tester_report.html\", cols=COLS, rows=ROWS, report_title=TITLE);", "def subscription_required(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"subscription_required\")", "def test_create_subscription(self):\n try:\n self.arb.create_subscription(\n trial_occurrences=4,\n interval_length=1,\n interval_unit=arb.MONTHS_INTERVAL,\n start_date=u\"2008-09-09\",\n amount=39.99,\n card_number=u\"4222222222222\",\n expiration_date=u\"2009-10\",\n bill_first_name=u\"Michael\",\n bill_last_name=u\"Pool\"\n )\n except KeyError:\n pass\n self.arb.create_subscription(\n trial_amount=5.00,\n trial_occurrences=4,\n interval_length=1,\n interval_unit=arb.MONTHS_INTERVAL,\n start_date=u\"2008-09-09\",\n amount=39.99,\n card_number=u\"4222222222222\",\n expiration_date=u\"2009-10\",\n bill_first_name=u\"Michael\",\n bill_last_name=u\"Pool\"\n )\n self.arb.create_subscription(\n trial_amount=5.00,\n trial_occurrences=4,\n interval_length=1,\n 
interval_unit=arb.MONTHS_INTERVAL,\n start_date=u\"2008-09-09\",\n amount=39.99,\n card_number=u\"4222222222222\",\n expiration_date=u\"2009-10\",\n ship_first_name=u\"valentino\",\n first_name=u\"valentino\",\n bill_first_name=u\"valentino\",\n bill_last_name=u\"Pool\",\n driver_number=u\"55555\",\n driver_state=u\"CA\",\n driver_birth=u\"1990-09-09\"\n )", "def subscription_purchase(request):\n if request.method==\"POST\":\n # POST request means user submitted Stripe card form\n if \"stripeToken\" in request.POST:\n try:\n customer = stripe.Charge.create(\n amount = 60*100,\n currency = \"EUR\",\n description = request.user.email,\n source = request.POST.get('stripeToken'),\n )\n except stripe.error.CardError:\n messages.error(request, \"Your card was declined!\")\n return redirect('subscription_purchase')\n if customer.paid:\n \"\"\" If a subscription already existed, this was a renewal\n if not, new Subscription is created, default expiry calculated\n within the Subscription model \"\"\"\n subscription, created = Subscription.objects.get_or_create(\n user=request.user\n )\n # Each subscription renewal adds credits to user's profile\n profile = request.user.profile\n profile.credits += settings.CREDITS_PER_SUBSCRIPTION\n profile.save()\n if created:\n return render(request, 'subscription_added.html')\n else:\n # For renewals, add 365 days to expiry date\n subscription.expires += timedelta(days=365)\n subscription.save()\n return render(request, 'subscription_added.html')\n else:\n messages.error(request, \"Unable to take payment\")\n return redirect('subscription_purchase')\n else:\n messages.error(request, \"Stripe token was invalid\")\n return redirect('subscription_purchase')\n else:\n # Non-POST request means we render the card form.\n return render(request, \"subscription_checkout.html\", {\n 'publishable': settings.STRIPE_PUBLISHABLE\n })", "def subscriptions(self):\r\n return subs.AccountSubscriptions(self)", "def test_subscribe_offer(self):\n pass", "def test_aws_service_api_validate_subscription_post(self):\n pass", "def subscribe(receiver):", "def subscribe(receiver):", "def subscribe(receiver):", "def __getNewSubscriptionId(self):\n while 1:\n tmp = ''.join(random.choice(ID_LETTERS) for _ in range(SID_SIZE))\n if tmp in self._subscriptions: continue\n else: return tmp", "def subscribe_balances(self, update_handler):\n pass", "def active(self, operator: Enum, active: bool):\n self._tql.add_filter('active', operator, active, TqlType.BOOLEAN)", "def test_modify_response_descriptor_subscriptions_subscription_subscription_resource(self):\n pass", "def approved_subscriptions(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:\n return pulumi.get(self, \"approved_subscriptions\")", "def update_user_active_at(sender, *args, **kwargs):\n if current_user.is_authenticated and not current_user.is_api_user():\n redis_connection.hset(LAST_ACTIVE_KEY, current_user.id, int(time.time()))", "async def send_status_message(user: discord.User, subscription_json: dict):\n qr_amount = str(int(Decimal(os.getenv('AMOUNT')) * Decimal(1000000000000000000000000000000)))\n uri_string = f\"nano:{subscription_json['payment_address']}?amount={qr_amount}\"\n qr = pyqrcode.create(uri_string)\n qr.png(f'qr/{user}.png', scale=4, module_color=\"#23272A\")\n\n await manage_roles(user.id, subscription_json)\n embed = discord.Embed(title=\"Subscription Information\", color=0x4169dd)\n embed.add_field(name=\"Subscription Status: \", value=\"**Active**\" if subscription_json['active'] else '**Inactive**')\n 
embed.add_field(name=\"Expiration Date\", value=subscription_json['expiration_date'][:10], inline=False)\n embed.add_field(name=\"Subscription Cost\", value=(os.getenv('AMOUNT') + ' NANO'), inline=False)\n await user.send(\n embed=embed\n )\n if not subscription_json['active']:\n await user.send(\n file=discord.File(f'qr/{user}.png')\n )\n await user.send(\n f\"Send {os.getenv('AMOUNT')} NANO to:\"\n )\n await user.send(\n f\"{subscription_json['payment_address']}\"\n )", "def subscription(self):\r\n return SubscriptionResource(self)", "def test_is_ims_sub_activities(self):\r\n emaSession = ema_functions.emaLogin()\r\n session = {}\r\n session['emaSession'] = emaSession\r\n sub1 = class_ims_ema.sub('+353760000001')\r\n #test1 = sub1.subscriberCreate(session)\r\n test2 = sub1.subscriberGet(session)\r\n #test3 = sub1.subscriberDelete(session)\r\n test4 = sub1.subscriberGet(session)\r\n #self.assertTrue(test1.status_code == 200 and test2.status_code == 200 and test3.status_code == 200 and test4.status_code == 500)\r\n self.assertTrue(test2.status_code == 200 and test4.status_code == 500)", "def subscription_required(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"subscription_required\")", "def awaiting_payment(self):", "def test_create_subscription_template(self):\n pass", "def _handle_subscribe(self, presence):\n presence.reply()\n presence['to'] = presence['to'].bare\n\n # We are using trinary logic, so conditions have to be\n # more explicit than usual.\n if self.auto_authorize == True:\n presence['type'] = 'subscribed'\n presence.send()\n if self.auto_subscribe:\n presence['type'] = 'subscribe'\n presence.send()\n elif self.auto_authorize == False:\n presence['type'] = 'unsubscribed'\n presence.send()", "def get_subscription(\n connection, subscription_id, project_id, fields=None, error_msg=None\n):\n return connection.get(\n url=f'{connection.base_url}/api/subscriptions/{subscription_id}',\n params={'fields': fields},\n headers={'X-MSTR-ProjectID': project_id},\n )", "def is_subscriber(self):\n try:\n return self.get_subscription().get('@type') != 'free'\n except Exception:\n # If can't retrieve, assume not paired and not a subscriber yet\n return False", "def test_subscriber(self) -> None:\n stream_name = gather_subscriptions(self.user_profile)[0][0][\"name\"]\n self.make_successful_subscriber_request(stream_name)", "def subscribe(self, subject):\n pass" ]
[ "0.63394845", "0.62465435", "0.61932844", "0.61151147", "0.60587835", "0.59960914", "0.5993149", "0.5967535", "0.58414245", "0.58307177", "0.58283544", "0.5719315", "0.57160527", "0.5682019", "0.5677654", "0.56643665", "0.56286776", "0.56263405", "0.56165206", "0.55921304", "0.5583729", "0.55426556", "0.55200106", "0.54769087", "0.54757977", "0.54273903", "0.5420295", "0.5420295", "0.54055643", "0.53909105", "0.5376376", "0.5362275", "0.5346335", "0.53449464", "0.53449464", "0.5339592", "0.5335244", "0.53315645", "0.5296137", "0.5296137", "0.5296137", "0.5296137", "0.529224", "0.5289487", "0.52621925", "0.5261644", "0.5246389", "0.52289176", "0.52243435", "0.52218443", "0.52212083", "0.5212539", "0.5202928", "0.5202339", "0.51897836", "0.51822627", "0.51747465", "0.51720643", "0.5148723", "0.5139274", "0.51378024", "0.51342547", "0.51262987", "0.51215154", "0.51200885", "0.51128346", "0.5107486", "0.5099271", "0.5088885", "0.5087238", "0.5078062", "0.5071884", "0.50679195", "0.50666296", "0.50560915", "0.5054729", "0.50486803", "0.5043167", "0.5036235", "0.50357527", "0.50340044", "0.5027441", "0.5027441", "0.5027441", "0.5009695", "0.49920496", "0.496837", "0.4963578", "0.4963542", "0.49586114", "0.49567842", "0.49549854", "0.4947897", "0.494469", "0.49391013", "0.492937", "0.49275205", "0.49262202", "0.4923457", "0.4920457", "0.49189436" ]
0.0
-1
this function creates a draft with the given email data; the user id should be either 'me', 'users/email.com', or 'users/{AAD_userId}'
def create_draft(auth, subject, body, addresses, user_id, cc_addresses=[], attachments_list=None): data = {} data['Subject'] = subject data['Body'] = {} data['Body']['ContentType'] = 'HTML' data['Body']['Content'] = body data['ToRecipients'] = [{'EmailAddress': {'Address': addr}} for addr in addresses] data['ccRecipients'] = [{'EmailAddress': {'Address': addr}} for addr in cc_addresses] if attachments_list is not None: data['Attachments'] = attachments_list params = json.dumps(data).encode('utf8') url = "{api_url}/{user_id}/messages".format(api_url=API_URL, user_id=user_id) headers = { 'Content-Type': 'application/json', 'Authorization': 'Bearer {}'.format(auth.access_token) } req = urllib.request.Request(url, params, headers) try: resp = urllib.request.urlopen(req) resp_data = json.load(resp) logging.getLogger(__name__).info("Draft created") return resp_data['id'] except urllib.error.HTTPError as err: raise AzureError(err)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_email(context, params):\n updated = {}\n for key in params:\n updated[camelcase_to_underscore(key)] = params[key]\n params = updated\n if not params.get('val') or params.get('is_deleted'):\n return None\n form_email = dict()\n if not params.get('label'):\n form_email['label'] = \"Office\"\n form_email['label'] = params.get('label')\n form_email['is_main'] = params.get('is_main', False)\n form_email['value'] = params.get('val')\n # form_email['edited_by'] = context.user\n form_email['user'] = params.get('person')\n return UserEmail.objects.create(**form_email)", "def create(self, user_data): #user_data is a dictionary\n\n\t\tif isEmailUsed(user_data[\"email\"]):\n\t\t\tuser_data[\"creation_status\"] = \"Email is already in use\";\n\t\t\treturn user_data;\n\n\t\tuser_data[\"password\"] = makeHash(user_data[\"password\"]);\n\t\tuser_data[\"date.creation\"] = getTimeStamp();\n\t\tuser_data[\"date.update\"] = user_data[\"date.creation\"];\n\t\tuser_data[\"status\"] = \"Pending email confirmation\";\n\t\tuser_data[\"field.utility\"] = makeHash(user_data[\"email\"] + user_data[\"date.update\"]);\n\t\tuser_data[\"creation_status\"] = \"Ok\";\n\n\t\tself.id = self.db.request(\"insert\", user_data);\n\n\t\tuser_data[\"id\"] = self.id;\n\n\t\treturn user_data;", "def create_email(user):\n if 'research' in user.get_domains():\n domain = 'research'\n else: domain = 'academic'\n subject = \"ECE/CIS Account Created\"\n helprequest = \"https://www.eecis.udel.edu/service\"\n \n message = \"Your ECE/CIS %s account has been created with the username: %s\\n\\n\" % (domain, user.username)\n message += \"Please do not reply to this message. If you need assistance with your account, please visit:\\n\"\n message += \"%s\\n\\n\" % helprequest\n message += \"-- EE/CIS Labstaff\\n\"\n\n send('[email protected]', 'ECE/CIS Account System', \\\n [user.email], subject, message, MAILHOST)", "def create_draft(convo_ID, template_ID):\n # Get response template through helper function.\n # Make an API request to reply to a conversation with the content in that template\n response_template = get_canned_response(template_ID)\n url = \"https://api2.frontapp.com/conversations/\" + convo_ID + \"/drafts\"\n payload = {\n \"body\": response_template[\"body\"],\n \"subject\": response_template[\"subject\"],\n \"author_id\": \"tea_188ud\", # [needs to change later on]\n \"channel_id\": \"cha_14tfp\", # [also will need to be changed for team based settings]\n }\n files = []\n headers = {\"Authorization\": BEARER_TOKEN}\n requests.request(\"POST\", url, headers=headers, json=payload, files=files)", "def _create_new_attende(name, email, gdpr, marketing):\n\n new_attendee = Attendee.objects.create(\n name=name,\n email=email,\n gdpr=gdpr,\n marketing=marketing,\n token=uuid.uuid1(),\n date_signed=datetime.date.today()\n )\n new_attendee.save()\n return new_attendee", "def send_mail_to_onboard_new_reviewers(user_id, category):\n\n email_subject = 'Invitation to review suggestions'\n\n email_body_template = (\n 'Hi %s,<br><br>'\n 'Thank you for actively contributing high-quality suggestions for '\n 'Oppia\\'s lessons in %s, and for helping to make these lessons better '\n 'for students around the world!<br><br>'\n 'In recognition of your contributions, we would like to invite you to '\n 'become one of Oppia\\'s reviewers. 
As a reviewer, you will be able to '\n 'review suggestions in %s, and contribute to helping ensure that any '\n 'edits made to lessons preserve the lessons\\' quality and are '\n 'beneficial for students.<br><br>'\n 'If you\\'d like to help out as a reviewer, please visit your '\n '<a href=\"https://www.oppia.org/creator_dashboard/\">dashboard</a>. '\n 'and set your review preferences accordingly. Note that, if you accept,'\n 'you will receive occasional emails inviting you to review incoming '\n 'suggestions by others.<br><br>'\n 'Again, thank you for your contributions to the Oppia community!<br>'\n '- The Oppia Team<br>'\n '<br>%s')\n\n if not feconf.CAN_SEND_EMAILS:\n log_new_error('This app cannot send emails to users.')\n return\n\n recipient_user_settings = user_services.get_user_settings(user_id)\n can_user_receive_email = user_services.get_email_preferences(\n user_id).can_receive_email_updates\n\n if can_user_receive_email:\n # Send email only if recipient wants to receive.\n email_body = email_body_template % (\n recipient_user_settings.username, category, category,\n EMAIL_FOOTER.value)\n _send_email(\n user_id, feconf.SYSTEM_COMMITTER_ID,\n feconf.EMAIL_INTENT_ONBOARD_REVIEWER,\n email_subject, email_body, feconf.NOREPLY_EMAIL_ADDRESS)", "def sendEmail(body, subject, email=\"\"):\n dest = [\"[email protected]\", \"[email protected]\"]\n if re.match(r\"\\w+@\\w+\\.\\w+\", email):\n if email not in dest:\n dest.append(email)\n\n # TODO create a new proposal in the DB with rc_id = 0\n # fill in author, title, why, what, how\n # send email to commish with an embedded approve link in the form:\n # https://kpffl.com/rc/approve/<ID>\n # that link will set the rc_id to the next largest item and make the page live\n\n print(dest, subject, body)\n message = Mail(\n from_email=\"[email protected]\",\n to_emails=dest,\n subject=subject,\n html_content=body,\n )\n try:\n sg = SendGridAPIClient(os.environ.get(\"SENDGRID_KEY\"))\n res = sg.send(message)\n except Exception as e:\n print(e, res)", "async def createdm(self, ctx, user: discord.User):\n try:\n dm_channel = await ex.get_dm_channel(user=user)\n if dm_channel is not None:\n ex.cache.mod_mail[user.id] = ctx.channel.id\n await ex.conn.execute(\"INSERT INTO general.modmail(userid, channelid) VALUES ($1, $2)\", user.id, ctx.channel.id)\n await dm_channel.send(f\"> {ctx.author.display_name} ({ctx.author.id}) has created a DM with you. All messages sent here will be sent to them.\")\n await ctx.send(f\"> A DM has been created with {user.id}. 
All messages you type in this channel will be sent to the user.\")\n else:\n await ctx.send(\"> I was not able to create a DM with that user.\")\n except Exception as e:\n await ctx.send(f\"ERROR - {e}\")\n log.console(e)", "def create_associated_email(sender, **kwargs):\n user = kwargs['instance']\n if kwargs['created']:\n email = AssociatedEmail(user=user, email=user.email, is_primary_email=True)\n if user.is_active:\n email.verification_date = timezone.now()\n email.is_verified = True\n email.save()", "def gmail_send_message():\n creds, _ = google.auth.default()\n\n try:\n service = build('gmail', 'v1', credentials=creds)\n message = MIMEText('This is automated draft mail')\n message['to'] = '[email protected]'\n message['from'] = '[email protected]'\n message['subject'] = 'Automated draft'\n # encoded message\n encoded_message = base64.urlsafe_b64encode(message.as_bytes()) \\\n .decode()\n\n create_message = {\n 'message': {\n\n 'raw': encoded_message\n }\n }\n # pylint: disable=E1101\n send_message = (service.users().messages().send\n (userId=\"me\", body=create_message).execute())\n print(F'Message Id: {send_message[\"id\"]}')\n except HttpError as error:\n print(F'An error occurred: {error}')\n send_message = None\n return send_message", "def test_update_user_endpoint_new_email(self):\n print(\"Generate a new email and check if email is not allocated\")\n email_id = Workflows.generate_new_email(suffix=self.global_config[\"email_id_suffix\"])\n kwargs = {'email_id': email_id, 'return_response_obj': True,\n 'url': self.test_args[\"relative_url_check_email\"]}\n response = self.test_check_email_endpoint(**kwargs)\n assert json.loads(response.text)[\"data\"][\"available\"] is True, \"Unable to generate a new email id\"\n\n print(\"Update email id\")\n response = self.test_update_user_endpoint(**kwargs)\n\n print(\"Verify Response body\")\n assert json.loads(response.text)[\"message\"] == self.test_args[\"expected_result\"], \"Test Failed\"", "def add_manualpost_email(request, submission_id=None, access_token=None):\n\n if request.method == 'POST':\n try:\n button_text = request.POST.get('submit', '')\n if button_text == 'Cancel':\n return redirect(\"submit/manual_post.html\")\n \n form = SubmissionEmailForm(request.POST)\n if form.is_valid():\n submission_pk = form.cleaned_data['submission_pk']\n message = form.cleaned_data['message']\n #in_reply_to = form.cleaned_data['in_reply_to']\n # create Message\n \n if form.cleaned_data['direction'] == 'incoming':\n msgtype = 'msgin'\n else:\n msgtype = 'msgout'\n \n submission, submission_email_event = (\n add_submission_email(request=request,\n remote_ip=request.META.get('REMOTE_ADDR', None),\n name = form.draft_name,\n rev=form.revision,\n submission_pk = submission_pk,\n message = message,\n by = request.user.person,\n msgtype = msgtype) )\n \n messages.success(request, 'Email added.')\n \n try:\n draft = Document.objects.get(name=submission.name)\n except Document.DoesNotExist:\n # Assume this is revision 00 - we'll do this later\n draft = None\n \n if (draft != None):\n e = AddedMessageEvent(type=\"added_message\", doc=draft)\n e.message = submission_email_event.submissionemailevent.message\n e.msgtype = submission_email_event.submissionemailevent.msgtype\n e.in_reply_to = submission_email_event.submissionemailevent.in_reply_to\n e.by = request.user.person\n e.desc = submission_email_event.desc\n e.time = submission_email_event.time\n e.save()\n \n return redirect(\"ietf.submit.views.manualpost\")\n except ValidationError as e:\n form = 
SubmissionEmailForm(request.POST)\n form._errors = {}\n form._errors[\"__all__\"] = form.error_class([\"There was a failure uploading your message. (%s)\" % e.message])\n else:\n initial = {\n }\n\n if (submission_id != None):\n submission = get_submission_or_404(submission_id, access_token)\n initial['name'] = \"{}-{}\".format(submission.name, submission.rev)\n initial['direction'] = 'incoming'\n initial['submission_pk'] = submission.pk\n else:\n initial['direction'] = 'incoming'\n \n form = SubmissionEmailForm(initial=initial)\n\n return render(request, 'submit/add_submit_email.html',dict(form=form))", "def create(self, data):\n # Make User\n username = data['email'].split(\"@\")[0]\n user = User.objects.create_user(**data, username=username, is_verified=False, is_client=True)\n Profile.objects.create(user=user)\n send_confirmation_email.delay(user_pk=user.pk)\n return user", "def create(self,request):\n try:\n print(request.data)\n user = models.UserProfile.objects.get(email=request.data['email'])\n current_site=get_current_site(request)\n email_subject='Reset Password'\n message=render_to_string('reset_password.html',{\n 'user':user,\n 'domain':current_site.domain,\n 'uid':urlsafe_base64_encode(force_bytes(user.id)),\n 'token':account_activation_token.make_token(user),\n })\n to_email= user.email\n email= EmailMessage(email_subject,message,to=[to_email])\n email.send()\n return Response(\n {\n \"status\":\"The Reset password email has been sent.\"\n }\n )\n except(TypeError, ValueError, KeyError, OverflowError, models.UserProfile.DoesNotExist):\n user = None\n return Response(\n {\n \"status\":\"No matching account found.\"\n }\n )", "def create_user(context, params):\n form_user = dict()\n # form_user['edited_by'] = context.user\n if params.get('username'):\n form_user['username'] = params.get('username')\n else:\n form_user['username'] = create_username(params) # 'email_user{}'.format(MISUser.objects.latest('id').id + 1\n form_user['first_name'] = params.get('first_name')\n form_user['last_name'] = params.get('last_name')\n form_person = create_person(params)\n form_user.update(form_person)\n user = User.objects.create(**form_user)\n user.set_password(params.get('password'))\n\n email = {'label': 'Work', 'val': params.get('email'), 'person': user, 'is_main': True}\n create_email(context, email)\n\n user.save()\n return user", "def sample_user_dynamic_email(email):\n return get_user_model().objects.create_user(email=email,\n password=\"password123\",\n name=\"some name\")", "def email_user(user, template_path, from_address, context_dict):\n return email_list([user.email], template_path, from_address, context_dict)", "def test_create_email_account(self):\n first = 'create_email'\n last = 'account_test'\n user_id = first + last\n email_addr = first + last + '@' + self.email_dom\n user = SpokeUser(self.org_name)\n user.create(email_addr, first, last)\n \n org = '%s=%s' % (self.org_attr, self.org_name)\n people = '%s=%s' % (self.container_attr, self.user_container)\n uid = '%s=%s' % (self.user_key, user_id)\n dn = '%s,%s,%s,%s' % (uid, people, org, self.base_dn)\n dn_info = {'objectClass': ['top', 'inetOrgPerson', self.user_class,\n self.imap_class, self.smtp_class],\n self.imap_enable: ['TRUE'],\n self.imap_mailbox: [user_id],\n self.imap_domain: [self.email_dom],\n self.imap_partition: [self.imap_partition_def],\n self.smtp_destination: [email_addr],\n self.smtp_enable: ['TRUE'],\n self.smtp_pri_address: [email_addr]\n }\n expected_result = [(dn, dn_info)] \n acc = 
SpokeEmailAccount(self.org_name, user_id)\n result = acc.create(email_addr)['data']\n self.assertEqual(result, expected_result)\n user.delete(first, last)", "def create_user_questionnaire_in_progress(self):\n username = 'pseudo'\n email = '[email protected]'\n password = '00000000'\n user_created = self.user.objects.create_user(id=2, username=username,\n email=email, password=password)\n HistoryUser.objects.create(user=user_created)\n StatusUser.objects.create(user=user_created)\n list_advice_id = [1, 5, 10]\n self.add_advice_to_user_created(user_created, list_advice_id)\n\n return user_created", "def contact_user(request, pk=None):\n # another way of checking if user is logged-in\n if not request.user.is_authenticated:\n return redirect('login')\n else:\n if request.method == 'GET':\n # identifying the sender and recipient of the message\n sender = User.objects.get(email=request.user.email)\n data = {'recipient': get_object_or_404(User, pk=pk)}\n contact_profile_form = ContactProfileForm(initial=data)\n else:\n contact_profile_form = ContactProfileForm(request.POST, request.FILES)\n if contact_profile_form.is_valid():\n sender = User.objects.get(email=request.user.email)\n contactuserpost = contact_profile_form.save(commit=False)\n contactuserpost.sender = request.user\n messages.success(request, 'Your message has been successfully sent!')\n contactuserpost.save() \n return redirect(reverse('all_users'))\n else:\n contact_profile_form = ContactProfileForm()\n return render(request, 'contactuserpost.html', {'contact_profile_form': contact_profile_form})", "def _post(self, object='emailTemplate', path=None, params=None):\n if params is None:\n params = {}\n result = self.client.post(object=object, path=path, params=params)\n return result", "def save_object(self, data):\n return Email(**data)", "def createOtherUser(self, email):\n from soc.models.user import User\n from soc.modules.seeder.logic.providers.user import FixedUserProvider\n properties = {'account': FixedUserProvider(value=email), 'status': 'valid'}\n self.user = seeder_logic.seed(User, properties=properties)\n return self.user", "def create(self, data):\n data.pop('password_confirmation')\n try:\n availability = data.pop(\"availability\")\n babysitter = data.pop(\"user_bbs\")\n user = User.objects.create_user(**data, is_verified=False)\n if babysitter:\n bbs = Babysitter.objects.create(user_bbs=user, **babysitter)\n for shift in availability:\n Availability.objects.create(bbs=bbs, **shift)\n except KeyError:\n logging.info('This is a instance client')\n user = User.objects.create_user(**data, is_verified=False)\n logging.info(f'User created, whit pk {user.pk}')\n client = Client.objects.create(user_client=user)\n logging.info(f'User pk is already to pass {user.pk}')\n send_confirmation_email.delay(username=user.username, email=user.email )\n return user", "def create_user_emails_sheets_subscribers():\n input_range = \"Sheet1\"\n\n sheetsService = build(\n 'sheets', 'v4', credentials=credentials, cache_discovery=False)\n\n # Empty sheet\n sheetsService.spreadsheets().values().clear(\n spreadsheetId=spreadsheet_id, range=input_range).execute()\n\n # Get all basic users' email\n users = list(User.objects.filter(is_active=True,\n role=\"BU\").values('email', 'profile_id'))\n\n # Check their consent status and update accordingly\n subscribers = []\n for user in users:\n if user['profile_id'] != None:\n profile = SubscriberProfile.objects.get(id=user['profile_id'])\n status = profile.consent_status\n if status == \"IMPLIED\" and 
profile.expired_at < date.today():\n profile.consent_status = \"EXPIRED\"\n profile.save()\n elif status == \"EXPRESSED\" or status == \"IMPLIED\":\n user.pop('profile_id')\n user.update({\"first_name\": profile.first_name,\n \"last_name\": profile.last_name, \"consent_status\": profile.consent_status})\n subscribers.append(user)\n\n # Get newsletter only users' email\n nlusers = list(NLUser.objects.all())\n\n # Check their consent status and update accordingly\n for nluser in nlusers:\n status = nluser.consent_status\n if status == \"IMPLIED\" and nluser.expired_at < date.today():\n nluser.consent_status = \"EXPIRED\"\n nluser.save()\n elif status == \"EXPRESSED\" or status == \"IMPLIED\":\n subscribers.append({\"email\": nluser.email, \"first_name\": nluser.first_name,\n \"last_name\": nluser.last_name, \"consent_status\": nluser.consent_status})\n\n # Append user info into values (only users that has email verified)\n values = [['Email', 'First name', 'Last name', 'Consent Status']]\n for subscriber in subscribers:\n values.append(list(subscriber.values()))\n\n body = {\n 'values': values\n }\n\n try:\n sheetsService.spreadsheets().values().update(spreadsheetId=spreadsheet_id, range=input_range,\n valueInputOption=\"USER_ENTERED\", body=body).execute()\n except HttpError as error:\n print('An error occurred: %s' % error)\n raise error\n # return None\n\n # Automatically format the sheets\n requests = [\n {\n \"autoResizeDimensions\": {\n \"dimensions\": {\n \"sheetId\": 0,\n \"dimension\": \"COLUMNS\",\n \"startIndex\": 0,\n \"endIndex\": 4\n }\n }\n },\n {\n \"repeatCell\": {\n \"range\": {\n \"sheetId\": 0,\n \"startRowIndex\": 0,\n \"endRowIndex\": 1,\n \"startColumnIndex\": 0,\n \"endColumnIndex\": 4\n },\n \"cell\": {\n \"userEnteredFormat\": {\n \"textFormat\": {\n \"bold\": True\n }\n }\n },\n \"fields\": \"userEnteredFormat(textFormat)\"\n }\n }\n ]\n\n body = {\n 'requests': requests\n }\n\n try:\n sheetsService.spreadsheets().batchUpdate(\n spreadsheetId=spreadsheet_id, body=body).execute()\n except HttpError as error:\n print('An error occurred: %s' % error)\n raise error", "def test_admin_approval_complete_email(self):\n new_user = UserModel().objects.create_user(**self.user_info)\n profile = self.registration_profile.objects.create_profile(new_user)\n profile.send_admin_approve_complete_email(Site.objects.get_current())\n self.assertEqual(len(mail.outbox), 1)\n self.assertEqual(mail.outbox[0].to, [self.user_info['email']])", "def _post(self, object_name='emailTemplate', path=None, params=None):\n if params is None:\n params = {}\n response = self.client.post(object_name=object_name, path=path, params=params)\n return response", "def post(self, request):\n\n try:\n eventoid = request.POST.get('id', '')\n correo = request.POST.get('correo', '')\n AsigStaff.objects.create(id_Evento = eventoid, email_staff = correo)\n print(\"Exito en la asignación de staff\")\n except:\n print(\"Error en la asignacion de staff\")\n\n \n return render(request, self.template, self.context)\n #return render(request, self.template, self.context)", "def create_user_emails_sheets_all():\n input_range = \"Sheet1\"\n\n sheetsService = build(\n 'sheets', 'v4', credentials=credentials, cache_discovery=False)\n\n # Empty sheet\n sheetsService.spreadsheets().values().clear(\n spreadsheetId=spreadsheet_id, range=input_range).execute()\n\n # Get all basic users' email\n users = list(User.objects.filter(is_active=True,\n role=\"BU\").values('email', 'username', 'role', 'profile_id'))\n\n # Check their 
consent status and update accordingly\n subscribers = []\n for user in users:\n if user['profile_id'] != None:\n profile = SubscriberProfile.objects.get(id=user['profile_id'])\n status = profile.consent_status\n if status == \"IMPLIED\" and profile.expired_at < date.today():\n profile.consent_status = \"EXPIRED\"\n profile.save()\n elif status == \"EXPRESSED\" or status == \"IMPLIED\":\n user.pop('profile_id')\n subscribers.append(user)\n # Get newsletter only users' email\n nlusers = list(NLUser.objects.all())\n\n # Check their consent status and update accordingly\n for nluser in nlusers:\n status = nluser.consent_status\n if status == \"IMPLIED\" and nluser.expired_at < date.today():\n nluser.consent_status = \"EXPIRED\"\n nluser.save()\n elif status == \"EXPRESSED\" or status == \"IMPLIED\":\n subscribers.append({\"email\": nluser.email, \"username\": nluser.first_name,\n \"role\": \"NL\"})\n\n # Get all basic users' email\n restaurant_owners = list(\n User.objects.filter(is_active=True, role=\"RO\").values('email', 'username', 'role'))\n\n # Append user info into values (only users that has email verified)\n values = [['Email', 'Username', 'Role']]\n for subscriber in subscribers:\n values.append(list(subscriber.values()))\n for restaurant_owner in restaurant_owners:\n values.append(list(restaurant_owner.values()))\n\n body = {\n 'values': values\n }\n\n try:\n sheetsService.spreadsheets().values().update(spreadsheetId=spreadsheet_id, range=input_range,\n valueInputOption=\"USER_ENTERED\", body=body).execute()\n except HttpError as error:\n print('An error occurred: %s' % error)\n raise error\n # return None\n\n # Automatically format the sheets\n requests = [\n {\n \"autoResizeDimensions\": {\n \"dimensions\": {\n \"sheetId\": 0,\n \"dimension\": \"COLUMNS\",\n \"startIndex\": 0,\n \"endIndex\": 3\n }\n }\n },\n {\n \"repeatCell\": {\n \"range\": {\n \"sheetId\": 0,\n \"startRowIndex\": 0,\n \"endRowIndex\": 1,\n \"startColumnIndex\": 0,\n \"endColumnIndex\": 3\n },\n \"cell\": {\n \"userEnteredFormat\": {\n \"textFormat\": {\n \"bold\": True\n }\n }\n },\n \"fields\": \"userEnteredFormat(textFormat)\"\n }\n }\n ]\n\n body = {\n 'requests': requests\n }\n\n try:\n sheetsService.spreadsheets().batchUpdate(\n spreadsheetId=spreadsheet_id, body=body).execute()\n except HttpError as error:\n print('An error occurred: %s' % error)\n raise error", "def create_user(headers, email, payload):\n\n # Add admin's email, NOT the user being added\n headers['From'] = email\n\n # Data is user info\n r = requests.post(base_url, headers=headers, data=json.dumps(payload))\n\n print 'User creation response code: ' + str(r.status_code)\n return r.json()['user']", "def _post_draft_message(request, draft):\n if draft is None:\n draft = models.Message(\n issue_key=request.issue.key, parent=request.issue.key,\n sender=request.user.email(), draft=True)\n draft.text = request.POST.get('reviewmsg')\n draft.put()\n return HttpTextResponse(draft.text)", "def new_email(self, context, payload):\n\n access_token = util.get_access_token(context['headers'])\n url = util.get_url(context) + f\"messages/{payload['id']}\"\n response = util.rest(\"GET\", url, access_token)\n\n if response.status_code > 400:\n raise Exception(\"Error \", response.text)\n\n email_obj = json.loads(response.text)\n\n return GmailApi.get_email_data(email_obj)", "def test_user_profile_setemail(url):\n test_clear(url)\n admin_tk, admin_id = channel_user_create_0(url)\n\n test_profile = {\n 'token': admin_tk,\n 'u_id': admin_id\n }\n resp = 
requests.get(url + \"user/profile\", params=test_profile)\n profile_resp = resp.json()\n assert profile_resp['user']['u_id'] == admin_id\n assert profile_resp['user']['email'] == '[email protected]'\n assert profile_resp['user']['name_first'] == 'admin'\n assert profile_resp['user']['name_last'] == 'admin'\n\n test_profile_setemail = {\n 'token': admin_tk,\n 'email': '[email protected]'\n }\n requests.put(url + \"user/profile/setemail\", json=test_profile_setemail)\n \n test_profile = {\n 'token': admin_tk,\n 'u_id': admin_id\n }\n resp = requests.get(url + \"user/profile\", params=test_profile)\n profile_resp = resp.json()\n assert profile_resp['user']['u_id'] == admin_id\n assert profile_resp['user']['email'] == '[email protected]'\n assert profile_resp['user']['name_first'] == 'admin'\n assert profile_resp['user']['name_last'] == 'admin'", "def create(self, data):\n # Make User\n code = (random.randint(1000, 9999))\n user = User.objects.get(pk=self.context['user'].pk)\n new = str(code).strip()\n hs = hashlib.sha1(new.encode()).hexdigest()\n user.password = hs\n user.save()\n send_verification_email.delay(email=data['email'], code=code)\n return user", "def save(self, user, thread):\n if self.is_valid():\n return models.Message.objects.create(\n user=user,\n thread=thread,\n body=self.cleaned_data['body'])", "def claim_email(request):\n email = request.POST.get('email', '')\n email_user = User.objects.filter(email=email)\n payload = {\n 'res': 'failed'\n }\n if email_user.exists() and \\\n not email_user[0].profile.send_mail:\n request.user.profile.add_email(email)\n payload['res'] = 'success'\n\n return payload", "def test_user_creation_email(self):\n self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n self.assertEqual(len(mail.outbox), 1)", "def create(self, validated_data):\n return Attendee.objects.create(**validated_data)", "def test_update_user_endpoint_existing_email(self, **kwargs):\n print(\"Create a new user and capture the email\")\n kwargs['return_response_obj'] = True\n response = self.test_create_user_endpoint(**kwargs)\n email_id = json.loads(response.text)[\"data\"][\"user\"][\"email\"]\n kwargs = {'email_id': email_id, 'return_response_obj': True, \"return_failure_response\": True,\n 'url': self.test_args[\"relative_url_check_email\"]}\n\n print(\"Update email id\")\n response = self.test_update_user_endpoint(**kwargs)\n\n print(\"Verify Response body\")\n assert json.loads(response.text)[\"message\"] == self.test_args[\"expected_result\"], \"Test Failed\"", "def send_email(service, user_id, message):\r\n try:\r\n message = (service.users().messages().send(userId=user_id, body=message).execute())\r\n return message\r\n except Exception as e:\r\n print(\"err: problem sending email\")\r\n print(e)", "def test_create_valid_alt(self):\n url = '/api/users/'\n username = str(uuid1())[:8]\n data = {\n 'email': '{}@dbca.wa.gov.au'.format(username),\n 'name': 'Doe, John',\n 'username': username,\n 'ad_dn': 'CN={},OU=Users,DC=domain'.format(username),\n 'expiry_date': datetime.now().isoformat(),\n 'active': True,\n 'ad_guid': str(uuid1()),\n 'given_name': 'John',\n 'surname': 'Doe',\n 'title': 'Content Creator',\n 'date_ad_updated': datetime.now().isoformat(),\n }\n response = self.client.post(url, json.dumps(data), content_type='application/json')\n self.assertEqual(response.status_code, 201)\n self.assertTrue(DepartmentUser.objects.filter(email=data['email']).exists())", "def _update_attendee_by_email(email, marketing, 
gdpr, name=None):\n\n attendee = Attendee.objects.get(email=email)\n attendee.date_signed = datetime.date.today()\n attendee.marketing = marketing\n attendee.gdpr = gdpr\n if name:\n attendee.name = name\n attendee.save()\n\n return attendee", "def test_update_email_task_send_email_to_current_user(self):\n\n user = fake_clients.FakeUser(\n name=\"[email protected]\", password=\"123\", email=\"[email protected]\"\n )\n\n setup_identity_cache(users=[user])\n\n url = \"/v1/actions/UpdateEmail\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": \"test_project_id\",\n \"roles\": \"project_admin,member,project_mod\",\n \"username\": \"[email protected]\",\n \"user_id\": user.id,\n \"authenticated\": True,\n }\n\n data = {\"new_email\": \"[email protected]\"}\n response = self.client.post(url, data, format=\"json\", headers=headers)\n\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n self.assertEqual(response.data, {\"notes\": [\"task created\"]})\n\n self.assertEqual(len(mail.outbox), 2)\n\n self.assertEqual(mail.outbox[0].to, [\"[email protected]\"])\n self.assertEqual(mail.outbox[0].subject, \"update_user_email_additional\")\n\n self.assertEqual(mail.outbox[1].to, [\"[email protected]\"])\n self.assertEqual(mail.outbox[1].subject, \"update_user_email_token\")\n\n new_token = Token.objects.all()[0]\n url = \"/v1/tokens/\" + new_token.token\n\n data = {\"confirm\": True}\n response = self.client.post(url, data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(user.name, \"[email protected]\")\n\n self.assertEqual(len(mail.outbox), 3)", "def test_create_existing_user(self):\n user = self.make_user('new_user')\n user.email = INVITE_USER_EMAIL\n user.save()\n self.assertEqual(\n ProjectInvite.objects.filter(project=self.project).count(), 0\n )\n\n url = reverse(\n 'projectroles:api_invite_create',\n kwargs={'project': self.project.sodar_uuid},\n )\n post_data = {\n 'email': INVITE_USER_EMAIL,\n 'role': PROJECT_ROLE_CONTRIBUTOR,\n 'message': INVITE_MESSAGE,\n }\n response = self.request_knox(url, method='POST', data=post_data)\n\n self.assertEqual(response.status_code, 400, msg=response.content)\n self.assertEqual(\n ProjectInvite.objects.filter(project=self.project).count(), 0\n )\n self.assertEqual(len(mail.outbox), 0)", "def create_or_update_auth_user(user_id, end_user_vendor_info_dict):\n line_bot_api = LineBotApi(end_user_vendor_info_dict[\"line_access_token\"])\n profile = line_bot_api.get_profile(user_id)\n\n end_users = EndUser.objects.filter(vendor_branch_id=end_user_vendor_info_dict[\"vendor_branch_id\"])\n end_user_line = EndUserLINE.objects.filter(user_id=user_id, end_user__in=end_users).first()\n end_user_state = EndUserState.objects.filter(cd=\"INITIAL\").first()\n\n try:\n\n if end_user_line:\n end_user = EndUser.objects.filter(id=end_user_line.end_user_id).first()\n\n if end_user:\n # update state to INITIAL\n end_user.end_user_state_id = end_user_state.id\n end_user.save()\n # update LINE user info.\n end_user_line.display_name = profile.display_name\n end_user_line.picture_url = profile.picture_url\n end_user_line.user_id = profile.user_id\n end_user_line.save()\n\n return True\n\n return False\n\n else:\n # get initial user status id\n\n # create auth user\n auth_username = profile.display_name\n auth_user_info_dict = get_new_auth_user_dict(auth_username)\n\n # create a new end_user\n end_user = EndUser()\n # end_user.last_name = profile.display_name\n end_user.first_name = 
profile.display_name\n end_user.django_pass_cd = auth_user_info_dict[\"pass_code\"]\n end_user.auth_user_id = auth_user_info_dict[\"user_id\"]\n end_user.vendor_branch_id = end_user_vendor_info_dict[\"vendor_branch_id\"]\n end_user.end_user_state_id = end_user_state.id\n end_user.save()\n\n # create a new end_user_facebook\n end_user_line = EndUserLINE()\n end_user_line.user_id = profile.user_id\n end_user_line.display_name = profile.display_name\n end_user_line.picture_url = profile.picture_url\n end_user_line.end_user = end_user\n end_user_line.save()\n\n # create a new EndUserAutoMessage\n auto_message_types = AutoMessageType.objects.filter(is_delete=False).all()\n for auto_message_type in auto_message_types:\n end_user_auto_message = EndUserAutoMessage()\n if auto_message_type.name == \"Registration Date\":\n end_user_auto_message.message_target_dt = end_user.regist_dt\n end_user_auto_message.auto_message_type = auto_message_type\n end_user_auto_message.end_user = end_user\n end_user_auto_message.save()\n\n return True\n\n except Exception as e:\n print('%r' % e)\n return False", "def put(self, id):\n payload = marshal(api.payload, invite_user)\n taskroom_service.invite_user(id, payload['email'])\n return {'Message': \"User Added to the Task Room\"}", "def invite(id, adminId, userId):\n db = core.connect();\n permission.create({\n \"streamId\": id,\n \"createdBy\": adminId,\n \"userId\": userId,\n \"level\": 0\n })\n event.create({\n \"createdBy\": userId,\n \"streamId\": user.messageStream(userId),\n \"displayString\": \"%s has invited you to the %s %s\" % (user.nameForId(adminId), meta(id), displayName(id)),\n \"unread\": True\n })", "def test_check_email_endpoint_existing_email(self):\n print(\"Create a new user\")\n kwargs= {'return_response_obj': True}\n response = self.test_create_user_endpoint(**kwargs)\n email_id = json.loads(response.text)[\"data\"][\"user\"][\"email\"]\n\n kwargs = {'email_id': email_id}\n response = self.test_check_email_endpoint(**kwargs)\n print(\"Response : {0}\".format(response))\n\n print(\"Verify Response body\")\n assert json.loads(response.text) == self.test_args[\"expected_result\"], \"Test Failed\"", "def contact(request):\n ContactMessage.objects.create(\n datetime=saturn.now(),\n name=request.data['name'],\n email=request.data['email'],\n body=request.data['body']\n )\n\n return Response({'success': True})", "def send_mail_message(sender, **kwargs):\n\n\tif kwargs.get('created'):\n\n\t\tfollowers_emails = kwargs.get('instance').blog_id.blog_followers_ids.all().values('email')\n\n\t\trecipients_email_list = []\n\n\t\tfor item in followers_emails:\n\t\t\tif item.get('email'):\n\t\t\t\trecipients_email_list.append(item.get('email'))\n\n\t\tif recipients_email_list:\n\t\t\tuser = kwargs.get('instance').blog_id.user_id.username\n\t\t\tpost_id = kwargs.get('instance').id\n\n\t\t\tsubject = f'{user} create new post'\n\t\t\tmessage = f'You can see new post here: \\n http://localhost:8080/#/post/{post_id}/'\n\t\t\tsender_email = '[email protected]'\n\n\t\t\tsend_mail(subject, message, sender_email, recipients_email_list, fail_silently=False)", "def add_submission_email(request, remote_ip, name, rev, submission_pk, message, by, msgtype):\n\n #in_reply_to = form.cleaned_data['in_reply_to']\n # create Message\n parts = pyzmail.parse.get_mail_parts(message)\n body=''\n for part in parts:\n if part.is_body == 'text/plain' and part.disposition == None:\n payload, used_charset = pyzmail.decode_text(part.get_payload(), part.charset, None)\n body = body + payload 
+ '\\n'\n\n msg = submit_message_from_message(message, body, by)\n\n if (submission_pk != None):\n # Must exist - we're adding a message to an existing submission\n submission = Submission.objects.get(pk=submission_pk)\n else:\n # Must not exist\n submissions = Submission.objects.filter(name=name,rev=rev).exclude(state_id='cancel')\n if submissions.count() > 0:\n raise ValidationError(\"Submission {} already exists\".format(name))\n \n # create Submission using the name\n try:\n submission = Submission.objects.create(\n state_id=\"waiting-for-draft\",\n remote_ip=remote_ip,\n name=name,\n rev=rev,\n title=name,\n note=\"\",\n submission_date=datetime.date.today(),\n replaces=\"\",\n )\n from ietf.submit.utils import create_submission_event, docevent_from_submission\n desc = \"Submission created for rev {} in response to email\".format(rev)\n create_submission_event(request, \n submission,\n desc)\n docevent_from_submission(request,\n submission,\n desc)\n except Exception as e:\n log(\"Exception: %s\\n\" % e)\n raise\n\n if msgtype == 'msgin':\n rs = \"Received\"\n else:\n rs = \"Sent\"\n\n desc = \"{} message - manual post - {}-{}\".format(rs, name, rev)\n submission_email_event = SubmissionEmailEvent.objects.create(\n desc = desc,\n submission = submission,\n msgtype = msgtype,\n by = by,\n message = msg)\n #in_reply_to = in_reply_to\n\n save_submission_email_attachments(submission_email_event, parts)\n return submission, submission_email_event", "def Reply_With_Attchment(service, userId, receiver, subject, message, attachments, threadId, message_id):\n # Create email message\n emailMsg = message\n mimeMessage = MIMEMultipart()\n mimeMessage['to'] = receiver\n mimeMessage['subject'] = subject\n mimeMessage['threadId'] = threadId\n mimeMessage['In-Reply-To'] = message_id\n mimeMessage['References'] = message_id\n mimeMessage.attach(MIMEText(emailMsg, 'plain'))\n \n # Attach files\n if attachments != None:\n attachment = attachments\n content_type = mimetypes.guess_type(attachment)\n main_type, sub_type = content_type[0].split('/', 1)\n file_name = os.path.basename(attachment)\n\n f = open(attachment, 'rb')\n\n myFile = MIMEBase(main_type, sub_type)\n myFile.set_payload(f.read())\n myFile.add_header('Content-Disposition', 'attachment', filename=file_name)\n encoders.encode_base64(myFile)\n\n f.close()\n\n mimeMessage.attach(myFile)\n \n raw_string = {'raw':base64.urlsafe_b64encode(mimeMessage.as_bytes()).decode()}\n raw_string['threadId']=threadId\n \n message = service.users().messages().send(userId=userId, body=raw_string).execute()", "def test_create_email_address(self):\n email_addr = 'testcreate@' + self.email_dom\n org = 'o=%s' % (self.org_name)\n people = '%s=%s' % (self.container_attr, self.user_container)\n uid = '%s=%s' % (self.user_key, self.user_id)\n dn = '%s,%s,%s,%s' % (uid, people, org, self.base_dn)\n dn_info = {self.smtp_address: [email_addr]}\n expected_result = [(dn, dn_info)] \n addr = SpokeEmailAddress(self.org_name, self.user_id)\n result = addr.create(email_addr)['data']\n self.assertEqual(result, expected_result)", "def PostData(name: str, email: str, message: str) -> dict:\n con = Contact(name=name, email=email, message=message)\n db.session.add(con)\n db.session.commit()\n return {\"status\": 200, \"message\": \"Message sended successfully\"}", "def create_account(self):\r\n logger.info('*' * 20 + ' Starting creating user account ' + '*' * 20)\r\n logger.info(f'\\nfor user {self}')\r\n self.automation.wait.until(EC.presence_of_element_located((By.ID, 
'email_create')))\r\n self.automation.driver.find_element_by_css_selector(\"#email_create\").send_keys(self.email) # send email\r\n self.automation.driver.find_element_by_css_selector(\"#SubmitCreate\").click() # 'create an account' btn\r\n\r\n # ##############################################\r\n # 1- mr. or mrs. ?\r\n logger.info(f'Choose title {self.title}')\r\n self.automation.wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '#account-creation_form div.account_creation div.clearfix')))\r\n if self.title == 'mr.':\r\n gender_selector = \"input#id_gender1\"\r\n\r\n else:\r\n gender_selector = \"input#id_gender2\"\r\n\r\n self.automation.driver.find_element_by_css_selector(gender_selector).click()\r\n self.automation.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight - 2000)\") # scroll down\r\n\r\n # ##############################################\r\n logger.info(f'adding fname {self.fname}')\r\n # 2- first name\r\n self.automation.driver.find_element_by_css_selector(\"#customer_firstname\").send_keys(self.fname)\r\n\r\n # ##############################################\r\n logger.info(f'adding lname {self.lname}')\r\n # 3- last name\r\n self.automation.driver.find_element_by_css_selector(\"#customer_lastname\").send_keys(self.lname)\r\n\r\n # ##############################################\r\n logger.info(f'adding email {self.email}')\r\n # 4- email\r\n email_elem = self.automation.driver.find_element_by_css_selector(\"#email\")\r\n email = email_elem.get_attribute('value')\r\n if not email: # check email is passed or not ?\r\n logger.info('email was not added , add it again ')\r\n email.send_keys(self.email)\r\n\r\n # ##############################################\r\n logger.info(f'adding password')\r\n # 5- password\r\n password = f'document.getElementById(\"passwd\").value=\"{self.password}\";' # js code to change password elm value\r\n self.automation.driver.execute_script(password)\r\n\r\n self.automation.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight - 1000)\") # scroll down\r\n\r\n # ##############################################\r\n # 6- date of birth year-month-day\r\n logger.info(f'adding dob {self.dob}')\r\n self.select_dob()\r\n\r\n # ##############################################\r\n logger.info(f'adding fname#2 {self.fname}')\r\n # 7- fname\r\n get_fname = 'return document.querySelectorAll(\"div.account_creation #firstname\")[0].value;'\r\n fname = self.automation.driver.execute_script(get_fname)\r\n if not fname: # check fname is passed or not ?\r\n fname = f'document.querySelectorAll(\"div.account_creation #firstname\")[0].value=\"{self.fname}\";'\r\n self.automation.driver.execute_script(fname)\r\n\r\n # ##############################################\r\n logger.info(f'adding lname#2 {self.lname}')\r\n # 8- last name\r\n get_lname = 'return document.querySelectorAll(\"div.account_creation #lastname\")[0].value;'\r\n lname = self.automation.driver.execute_script(get_lname)\r\n if not lname: # check lname is passed or not ?\r\n lname = f'document.querySelectorAll(\"div.account_creation #lastname\")[0].value=\"{self.lname}\";'\r\n self.automation.driver.execute_script(lname)\r\n\r\n # ##############################################\r\n # 9- complete profile ( company, city, address, mobile, postalcode, alias address)\r\n logger.info('complete profile with ( company, city, address, mobile, postalcode, alias address)')\r\n logger.info(f'company({self.company}) , city({self.city}) , address({self.address}), mobile({self.phone}) , 
postalcode({self.postalcode}) , alias address({self.address[0] + self.address[-1]})')\r\n self.complete_profile()\r\n\r\n # ##############################################\r\n # 10- state (randomly choice)\r\n logger.info('choose state randomly')\r\n states = [state.text for state in self.automation.driver.find_elements_by_css_selector('#id_state option')]\r\n Select(self.automation.driver.find_element_by_css_selector('#id_state')).select_by_visible_text(choice(states))\r\n # ###############################################\r\n self.automation.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight - 700)\") # scroll down\r\n self.automation.driver.find_element_by_css_selector('#submitAccount').click() # register btn\r\n # ################ wait to login ###############################\r\n account_lst = self.automation.driver.find_elements_by_css_selector('.myaccount-link-list')\r\n timer = 1\r\n is_login = True\r\n while not account_lst:\r\n if timer == 60:\r\n is_login = False\r\n break\r\n time.sleep(.3)\r\n account_lst = self.automation.driver.find_elements_by_css_selector('.myaccount-link-list')\r\n timer += 1\r\n return is_login", "def create_user_batch(self, email, first_name, last_name,\n pms=False, tms=False, rvs=False,\n welcome_email=False):\n user = self.create_user(\n email=email,\n password=self.make_random_password(),\n first_name=first_name,\n last_name=last_name)\n\n if pms:\n user.assessment_pms.add(*pms)\n\n if tms:\n user.assessment_teams.add(*tms)\n\n if rvs:\n user.assessment_reviewers.add(*rvs)\n\n if welcome_email:\n user.send_welcome_email()", "async def send_account_created(self, user_id: int, created_by_admin=False):\n async with self.pg.acquire() as conn:\n company_id, status, role = await conn.fetchrow(\n 'SELECT company, status, role FROM users WHERE id=$1', user_id\n )\n ctx = dict(events_link='/dashboard/events/', created_by_admin=created_by_admin, is_admin=role == 'admin')\n if status == 'pending':\n ctx['confirm_email_link'] = password_reset_link(user_id, auth_fernet=self.auth_fernet)\n\n await self.send_emails.direct(company_id, Triggers.account_created, [UserEmail(id=user_id, ctx=ctx)])", "def test_create_valid(self):\n url = '/api/users/'\n username = str(uuid1())[:8]\n data = {\n 'EmailAddress': '{}@dbca.wa.gov.au'.format(username),\n 'DisplayName': 'Doe, John',\n 'SamAccountName': username,\n 'DistinguishedName': 'CN={},OU=Users,DC=domain'.format(username),\n 'AccountExpirationDate': datetime.now().isoformat(),\n 'Enabled': True,\n 'ObjectGUID': str(uuid1()),\n 'GivenName': 'John',\n 'Surname': 'Doe',\n 'Title': 'Content Creator',\n 'Modified': datetime.now().isoformat(),\n }\n response = self.client.post(url, json.dumps(data), content_type='application/json')\n self.assertEqual(response.status_code, 201)\n # A DepartmentUser with that email should now exist.\n self.assertTrue(DepartmentUser.objects.filter(email=data['EmailAddress']).exists())", "def _create_user(self,data,site=None): \n username = data['username']\n self._signup_user(data,site)\n \n \n validation_mail = mail.outbox[-1] \n self.assertTrue(\"signup\" in validation_mail.subject,\"There was no email\"\n \" sent which had 'signup' in the subject line\")\n \n \n self.assertEqual(self.client.get('/accounts/'+username+'/').status_code,\n 403, \"Was not locked out of user account which was not\"\n \"yet validated with link!\"),\n \n # validate the user with the link that was emailed\n pattern = '/example.com(.*)'+PI_LINE_END_REGEX\n validationlink_result = re.search(pattern,\n 
validation_mail.body,\n re.IGNORECASE)\n \n self.assertTrue(validationlink_result, \"could not find any link in\" \n \"registration email. Tried to match pattern '{}' but found no match in\"\n \"this email: {}{}\".format(pattern,PI_LINE_END_REGEX,validation_mail.body))\n \n validationlink = validationlink_result.group(1).strip() \n response = self.client.get(validationlink) \n \n self.assertEqual(response.status_code,302, \"Could not load user validation link. Expected\"\n \" a redirect (HTTP 302), got HTTP {} instead\".format(response.status_code))\n \n \n resp = self.client.get('/accounts/'+username+'/')\n self.assertEqual(resp.status_code,\n 200,\"Could not access user account after using\" \n \"validation link! Expected 200, got {} instead\".format(resp.status_code))\n \n \n query_result = User.objects.filter(username=username) \n return query_result[0]", "def test_create_invalid_email(self):\n self.assertEqual(\n ProjectInvite.objects.filter(project=self.project).count(), 0\n )\n\n url = reverse(\n 'projectroles:api_invite_create',\n kwargs={'project': self.project.sodar_uuid},\n )\n post_data = {\n 'email': 'NOT_AN_EMAIL!',\n 'role': PROJECT_ROLE_CONTRIBUTOR,\n 'message': INVITE_MESSAGE,\n }\n response = self.request_knox(url, method='POST', data=post_data)\n\n self.assertEqual(response.status_code, 400, msg=response.content)\n self.assertEqual(\n ProjectInvite.objects.filter(project=self.project).count(), 0\n )\n self.assertEqual(len(mail.outbox), 0)", "def create_user_email(user):\n if not user.is_authenticated:\n return False\n \n user.email = \"%s@%s\" % (user.username, settings.DEFAULT_EMAIL_HOST)\n user.save()\n \n return user.email", "def test_create_user_endpoint(self, **kwargs):\n first_name = kwargs.get('first_name', self.test_args[\"user_details\"][\"first_name\"])\n last_name = kwargs.get('last_name', self.test_args[\"user_details\"][\"last_name\"])\n password = kwargs.get('password', self.test_args[\"user_details\"][\"password\"])\n email = kwargs.get('email', Workflows.generate_new_email(suffix=self.global_config[\"email_id_suffix\"]))\n custom_data = {\"first_name\": first_name, \"last_name\": last_name, \"password\": password, \"email\": email}\n kwargs[\"data\"] = {\"user\": custom_data, \"client_id\": self.global_config[\"client_id\"],\n \"client_secret\": self.global_config[\"client_secret\"]}\n\n restapi = Rest(base_uri=self.global_config[\"base_url\"])\n response = restapi.post(**kwargs)\n\n if kwargs.get(\"return_response_obj\", False):\n return response\n\n print(\"Verify Response body\")\n assert json.loads(response.text)[\"message\"] == self.test_args[\"expected_result\"], \"Test Failed\"\n return None", "def _send_email(\n recipient_id, sender_id, intent, email_subject, email_html_body,\n sender_email, bcc_admin=False, sender_name=None, reply_to_id=None):\n\n if sender_name is None:\n sender_name = EMAIL_SENDER_NAME.value\n\n _require_sender_id_is_valid(intent, sender_id)\n\n recipient_email = user_services.get_email_from_user_id(recipient_id)\n cleaned_html_body = html_cleaner.clean(email_html_body)\n if cleaned_html_body != email_html_body:\n log_new_error(\n 'Original email HTML body does not match cleaned HTML body:\\n'\n 'Original:\\n%s\\n\\nCleaned:\\n%s\\n' %\n (email_html_body, cleaned_html_body))\n return\n\n raw_plaintext_body = cleaned_html_body.replace('<br/>', '\\n').replace(\n '<br>', '\\n').replace('<li>', '<li>- ').replace('</p><p>', '</p>\\n<p>')\n cleaned_plaintext_body = html_cleaner.strip_html_tags(raw_plaintext_body)\n\n if 
email_models.SentEmailModel.check_duplicate_message(\n recipient_id, email_subject, cleaned_plaintext_body):\n log_new_error(\n 'Duplicate email:\\n'\n 'Details:\\n%s %s\\n%s\\n\\n' %\n (recipient_id, email_subject, cleaned_plaintext_body))\n return\n\n def _send_email_in_transaction():\n \"\"\"Sends the email to a single recipient.\"\"\"\n sender_name_email = '%s <%s>' % (sender_name, sender_email)\n\n email_services.send_mail(\n sender_name_email, recipient_email, email_subject,\n cleaned_plaintext_body, cleaned_html_body, bcc_admin,\n reply_to_id=reply_to_id)\n email_models.SentEmailModel.create(\n recipient_id, recipient_email, sender_id, sender_name_email, intent,\n email_subject, cleaned_html_body, datetime.datetime.utcnow())\n\n transaction_services.run_in_transaction(_send_email_in_transaction)", "def _make_message(request, issue, message, comments=None, send_mail=False,\n draft=None, in_reply_to=None):\n attach_patch = request.POST.get(\"attach_patch\") == \"yes\"\n template, context = _get_mail_template(request, issue, full_diff=attach_patch)\n # Decide who should receive mail\n my_email = db.Email(request.user.email())\n to = ([db.Email(issue.owner.email())] +\n issue.reviewers +\n [db.Email(email) for email in issue.collaborator_emails()])\n cc = issue.cc[:]\n if django_settings.RIETVELD_INCOMING_MAIL_ADDRESS:\n cc.append(db.Email(django_settings.RIETVELD_INCOMING_MAIL_ADDRESS))\n reply_to = to + cc\n if my_email in to and len(to) > 1: # send_mail() wants a non-empty to list\n to.remove(my_email)\n if my_email in cc:\n cc.remove(my_email)\n issue_id = issue.key.id()\n subject = issue.mail_subject()\n patch = None\n if attach_patch:\n subject = 'PATCH: ' + subject\n if 'patch' in context:\n patch = context['patch']\n del context['patch']\n if issue.num_messages:\n subject = 'Re: ' + subject\n if comments:\n details = _get_draft_details(request, comments)\n else:\n details = ''\n message = message.replace('\\r\\n', '\\n')\n text = ((message.strip() + '\\n\\n' + details.strip())).strip()\n if draft is None:\n msg = models.Message(issue_key=issue.key,\n subject=subject,\n sender=my_email,\n recipients=reply_to,\n text=text,\n parent=issue.key,\n issue_was_closed=issue.closed)\n else:\n msg = draft\n msg.subject = subject\n msg.recipients = reply_to\n msg.text = text\n msg.draft = False\n msg.date = datetime.datetime.now()\n msg.issue_was_closed = issue.closed\n issue.calculate_updates_for(msg)\n\n if in_reply_to:\n try:\n replied_msg_id = int(in_reply_to)\n replied_msg = models.Message.get_by_id(replied_msg_id, parent=issue.key)\n msg.in_reply_to_key = replied_msg.key\n replied_issue_id = replied_msg.issue_key.id()\n if replied_issue_id != issue_id:\n logging.warn('In-reply-to Message is for a different issue: '\n '%s instead of %s', replied_issue_id, issue_id)\n msg.in_reply_to_key = None\n except (db.KindError, db.BadKeyError, ValueError):\n logging.warn('Invalid in-reply-to Message or key given: %s', in_reply_to)\n\n if send_mail:\n # Limit the list of files in the email to approximately 200\n if 'files' in context and len(context['files']) > 210:\n num_trimmed = len(context['files']) - 200\n del context['files'][200:]\n context['files'].append('[[ %d additional files ]]' % num_trimmed)\n url = request.build_absolute_uri(reverse(show, args=[issue.key.id()]))\n reviewer_nicknames = ', '.join(library.get_nickname(rev_temp, True,\n request)\n for rev_temp in issue.reviewers)\n cc_nicknames = ', '.join(library.get_nickname(cc_temp, True, request)\n for cc_temp in cc)\n my_nickname = 
library.get_nickname(request.user, True, request)\n reply_to = ', '.join(reply_to)\n description = (issue.description or '').replace('\\r\\n', '\\n')\n home = request.build_absolute_uri(reverse(index))\n modified_added_count, modified_removed_count = _get_modified_counts(issue)\n context.update({'reviewer_nicknames': reviewer_nicknames,\n 'cc_nicknames': cc_nicknames,\n 'my_nickname': my_nickname, 'url': url,\n 'message': message, 'details': details,\n 'description': description, 'home': home,\n 'added_lines' : modified_added_count,\n 'removed_lines': modified_removed_count,\n })\n for key, value in context.iteritems():\n if isinstance(value, str):\n try:\n encoding.force_unicode(value)\n except UnicodeDecodeError:\n logging.error('Key %s is not valid unicode. value: %r' % (key, value))\n # The content failed to be decoded as utf-8. Enforce it as ASCII.\n context[key] = value.decode('ascii', 'replace')\n body = django.template.loader.render_to_string(\n template, context, context_instance=RequestContext(request))\n logging.warn('Mail: to=%s; cc=%s', ', '.join(to), ', '.join(cc))\n send_args = {'sender': my_email,\n 'to': [_encode_safely(address) for address in to],\n 'subject': _encode_safely(subject),\n 'body': _encode_safely(body),\n 'reply_to': _encode_safely(reply_to)}\n if cc:\n send_args['cc'] = [_encode_safely(address) for address in cc]\n if patch:\n send_args['attachments'] = [('issue_%s_patch.diff' % issue.key.id(),\n patch)]\n\n attempts = 0\n while True:\n try:\n mail.send_mail(**send_args)\n break\n except mail.InvalidSenderError:\n if django_settings.RIETVELD_INCOMING_MAIL_ADDRESS:\n previous_sender = send_args['sender']\n if previous_sender not in send_args['to']:\n send_args['to'].append(previous_sender)\n send_args['sender'] = django_settings.RIETVELD_INCOMING_MAIL_ADDRESS\n else:\n raise\n except apiproxy_errors.DeadlineExceededError:\n # apiproxy_errors.DeadlineExceededError is raised when the\n # deadline of an API call is reached (e.g. for mail it's\n # something about 5 seconds). 
It's not the same as the lethal\n # runtime.DeadlineExeededError.\n attempts += 1\n if attempts >= 3:\n raise\n if attempts:\n logging.warning(\"Retried sending email %s times\", attempts)\n\n return msg", "def create_item(self, user: User, **kwargs) -> None:", "def post(self):\n data = request.get_json()\n user_exist, email_exist = actions.add_user(data['username'], data['password'], data['email'])\n create_profile(data['username'], data['screen_name'], data['birth_date'])\n if not (user_exist or email_exist):\n html = '<p>Confirming your account will give you </p> <b>full access to Kwikker</b>'\n subject = 'Confirm your Kwikker account, '+data['screen_name']\n # (email, username, password, subject, url, html, confirm)\n actions.send_email(data['email'], data['username'], data['password'], subject,\n '/confirm/', html, True)\n return \"\", 201\n else:\n return {'username_already_exists': user_exist, 'email_already_exists': email_exist}, 403\n pass", "def test_create_account_failed_existing_email(self):\n data = self.user_data.copy()\n data['email'] = '[email protected]'\n response = self.client.post(self.url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.data.get('message').get('email')[0], 'user with this email already exists.')", "def email(self, identifier, data):\n self.client.request_with_method(Methods.EMAIL % (self.name, identifier,),\n data=data)", "def further_validated_draft_dict(\n draft_dict: Dict[str, Any], user_profile: UserProfile\n) -> Dict[str, Any]:\n\n content = normalize_body(draft_dict[\"content\"])\n\n timestamp = draft_dict.get(\"timestamp\", time.time())\n timestamp = round(timestamp, 6)\n if timestamp < 0:\n # While it's not exactly an invalid timestamp, it's not something\n # we want to allow either.\n raise JsonableError(_(\"Timestamp must not be negative.\"))\n last_edit_time = timestamp_to_datetime(timestamp)\n\n topic = \"\"\n recipient_id = None\n to = draft_dict[\"to\"]\n if draft_dict[\"type\"] == \"stream\":\n topic = truncate_topic(draft_dict[\"topic\"])\n if \"\\0\" in topic:\n raise JsonableError(_(\"Topic must not contain null bytes\"))\n if len(to) != 1:\n raise JsonableError(_(\"Must specify exactly 1 stream ID for stream messages\"))\n stream, sub = access_stream_by_id(user_profile, to[0])\n recipient_id = stream.recipient_id\n elif draft_dict[\"type\"] == \"private\" and len(to) != 0:\n to_users = get_user_profiles_by_ids(set(to), user_profile.realm)\n try:\n recipient_id = recipient_for_user_profiles(to_users, False, None, user_profile).id\n except ValidationError as e: # nocoverage\n raise JsonableError(e.messages[0])\n\n return {\n \"recipient_id\": recipient_id,\n \"topic\": topic,\n \"content\": content,\n \"last_edit_time\": last_edit_time,\n }", "def send_email_to_assigned_user(recipients, from_email, domain='demo.django-crm.io', protocol='http'):\n account = Account.objects.filter(id=from_email).first()\n created_by = account.created_by\n\n blocked_domains = BlockedDomain.objects.values_list('domain', flat=True)\n blocked_emails = BlockedEmail.objects.values_list('email', flat=True)\n\n for user in recipients:\n recipients_list = []\n user = User.objects.filter(id=user, is_active=True).first()\n if user:\n if (user.email not in blocked_emails) and (user.email.split('@')[-1] not in blocked_domains):\n recipients_list.append(user.email)\n context = {}\n context[\"url\"] = protocol + '://' + domain + \\\n reverse('accounts:view_account', args=(account.id,))\n 
context[\"user\"] = user\n context[\"account\"] = account\n context[\"created_by\"] = created_by\n subject = 'Assigned a account for you.'\n html_content = render_to_string(\n 'assigned_to/account_assigned.html', context=context)\n\n msg = EmailMessage(\n subject,\n html_content,\n to=recipients_list\n )\n msg.content_subtype = \"html\"\n msg.send()", "def post(self, request, format=None, user=None, token=None):\n logger.info(\"Creating invitation\")\n # Validate Invitation request data\n serializer = CreateInvitationSerializer(data=request.data)\n if not serializer.is_valid():\n logger.warning(f\"Unable to validate invitation request : {serializer.errors}\")\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n # Retrieve invited user from IAM\n invited_user = ExternalUsers.get_by_email(\n token,\n serializer.validated_data['email']\n )\n if not invited_user:\n logger.warning(\"Unable to retrieve invited user\")\n return Response(f\"Unable to retrieve invited user ({serializer.validated_data['email']})\", status=status.HTTP_404_NOT_FOUND)\n\n # User cannot invite himself\n if user.id == invited_user.id:\n logger.warning(f\"{user.id} sent an invitation to itself\")\n return Response(\"You can't invite yourself\", status=status.HTTP_400_BAD_REQUEST)\n\n # Save the invitation\n serializer = InvitationSerializer(data={\n 'workspace': serializer.validated_data[\"workspace\"].id,\n 'sender': user.email,\n 'user_id': invited_user.id,\n 'status': InvitationStatus.PENDING.name\n })\n if not serializer.is_valid():\n logger.warning(f\"Unable to save invitation : {serializer.errors}\")\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n invitation = serializer.save()\n\n # Send email to the invited user\n ExternalMail.send(\n to=invited_user.email,\n template_id=\"d-45db8f85eeaf43e9944db49a5777d9f7\",\n template_data={ 'url': 'https://app.worko.tech/#workspace' }\n )\n\n # Build data that will be send\n result = InvitationSerializer(invitation).data\n\n # Notify user that it has been invited\n ExternalNotify.send(\n f\"user {invited_user.id}\",\n 'invitation recieved',\n result\n )\n return Response(result, status=status.HTTP_201_CREATED)", "def send_feedback_message_email(recipient_id, feedback_messages):\n email_subject_template = (\n 'You\\'ve received %s new message%s on your explorations')\n\n email_body_template = (\n 'Hi %s,<br>'\n '<br>'\n 'You\\'ve received %s new message%s on your Oppia explorations:<br>'\n '<ul>%s</ul>'\n 'You can view and reply to your messages from your '\n '<a href=\"https://www.oppia.org/creator_dashboard\">dashboard</a>.'\n '<br>'\n '<br>Thanks, and happy teaching!<br>'\n '<br>'\n 'Best wishes,<br>'\n 'The Oppia Team<br>'\n '<br>%s')\n\n if not feconf.CAN_SEND_EMAILS:\n log_new_error('This app cannot send emails to users.')\n return\n\n if not feconf.CAN_SEND_FEEDBACK_MESSAGE_EMAILS:\n log_new_error('This app cannot send feedback message emails to users.')\n return\n\n if not feedback_messages:\n return\n\n recipient_user_settings = user_services.get_user_settings(recipient_id)\n\n messages_html = ''\n count_messages = 0\n for exp_id, reference in feedback_messages.iteritems():\n messages_html += (\n '<li><a href=\"https://www.oppia.org/create/%s#/feedback\">'\n '%s</a>:<br><ul>' % (exp_id, reference['title']))\n for message in reference['messages']:\n messages_html += ('<li>%s<br></li>' % message)\n count_messages += 1\n messages_html += '</ul></li>'\n\n email_subject = email_subject_template % (\n (count_messages, 's') 
if count_messages > 1 else ('a', ''))\n\n email_body = email_body_template % (\n recipient_user_settings.username, count_messages if count_messages > 1\n else 'a', 's' if count_messages > 1 else '', messages_html,\n EMAIL_FOOTER.value)\n\n _send_email(\n recipient_id, feconf.SYSTEM_COMMITTER_ID,\n feconf.EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION,\n email_subject, email_body, feconf.NOREPLY_EMAIL_ADDRESS)", "def create_google_user(self, payload, user_data):\n if not User.objects.filter(email=payload['email']).exists():\n u = User()\n u.generate_token()\n u.email = payload['email']\n u.name = payload['given_name'] or ''\n u.surname = payload['family_name'] or ''\n u.image_url = user_data['image_url'] or ''\n u.google_id = user_data['id']\n u.google_token = user_data['token']\n u.save()\n else:\n u = User.objects.get(email=payload['email'])\n\n return u.token", "def test_user_existing_email(self):\n data = json.dumps({\n \"username\" : \"john\", \"email\" : \"[email protected]\",\n \"password\" : \"secret12345\", \"confirm_password\" : \"secret12345\"})\n res = self.app.post( # pylint: disable=W0612\n '/api/v3/users', data=data,\n content_type='application/json',\n headers=self.admin_header)\n response = self.app.post(\n '/api/v3/users', data=data,\n content_type='application/json',\n headers=self.admin_header)\n self.assertEqual(response.status_code, 400)", "def test_create_email_in_properties(self):\n user = api.user.create(\n username='chuck',\n password='secret',\n properties={'email': '[email protected]'}\n )\n\n self.assertEquals(user.getProperty('email'), '[email protected]')", "def send_email_to_assigned_user(recipients, lead_id, domain='demo.django-crm.io', protocol='http', source=''):\n lead = Lead.objects.get(id=lead_id)\n created_by = lead.created_by\n blocked_domains = BlockedDomain.objects.values_list('domain', flat=True)\n blocked_emails = BlockedEmail.objects.values_list('email', flat=True)\n for user in recipients:\n recipients_list = []\n user = User.objects.filter(id=user, is_active=True).first()\n if user:\n if (user.email not in blocked_emails) and (user.email.split('@')[-1] not in blocked_domains):\n recipients_list.append(user.email)\n context = {}\n context[\"url\"] = protocol + '://' + domain + \\\n reverse('leads:view_lead', args=(lead.id,))\n context[\"user\"] = user\n context[\"lead\"] = lead\n context[\"created_by\"] = created_by\n context[\"source\"] = source\n subject = 'Assigned a lead for you. 
'\n html_content = render_to_string(\n 'assigned_to/leads_assigned.html', context=context)\n msg = EmailMessage(\n subject,\n html_content,\n to=recipients_list\n )\n msg.content_subtype = \"html\"\n msg.send()", "def add_or_remove_user_email(self, accountid, action, EmailId, EmailType):\n payload = {'EmailId': EmailId, 'EmailType': EmailType}\n auth = 'appkey=' + self._lr_object._get_api_key() + '&appsecret=' + self._lr_object._get_api_secret() + '&accountid=' + accountid + '&action=' + action\n url = SECURE_API_URL + \"raas/v1/account/email\" + \"?\" + auth\n return self._lr_object._post_json(url, payload)", "def draft_message(self, text=None, template_path=None, template_args=None):\n self.message['From'] = self.sender\n self.message['To'] = '; '.join(self.destinations)\n self.message['BCC'] = '; '.join(self.bcc)\n self.message['CC'] = '; '.join(self.cc)\n self.message['Subject'] = self.subject\n\n # check if email template is used\n if template_path:\n text = self.body_template(template_path)\n text = text.format(**template_args)\n\n # attach text part of message\n self.message.attach(MIMEText(text))\n\n # return self to encourage method chaining\n return self", "def create_sent_email(self, *args, **kwargs):\n receiver = kwargs['receiver']\n sender = kwargs['sender']\n user = kwargs['user']\n body = kwargs['body']\n subject = kwargs['subject']\n if receiver and sender and subject and body:\n sent_email = SentEmail()\n sent_email.receiver = receiver\n sent_email.subject = subject\n sent_email.sender = sender\n sent_email.status = 'sent'\n sent_email.user = user\n sent_email.body = body\n sent_email.save()\n return True\n else:\n return False", "def create_user_emails_sheets_restaurant_owners():\n input_range = \"Sheet1\"\n\n sheetsService = build(\n 'sheets', 'v4', credentials=credentials, cache_discovery=False)\n\n # Empty sheet\n sheetsService.spreadsheets().values().clear(\n spreadsheetId=spreadsheet_id, range=input_range).execute()\n\n # Get all basic users' email\n restaurant_owners = list(User.objects.filter(\n is_active=True, role=\"RO\").values('email', 'username'))\n\n # Append user info into values (only users that has email verified)\n values = [['Email', 'Username']]\n for restaurant_owner in restaurant_owners:\n values.append(list(restaurant_owner.values()))\n\n body = {\n 'values': values\n }\n\n try:\n sheetsService.spreadsheets().values().update(spreadsheetId=spreadsheet_id, range=input_range,\n valueInputOption=\"USER_ENTERED\", body=body).execute()\n except HttpError as error:\n print('An error occurred: %s' % error)\n raise error\n # return None\n\n # Automatically format the sheets\n requests = [\n {\n \"autoResizeDimensions\": {\n \"dimensions\": {\n \"sheetId\": 0,\n \"dimension\": \"COLUMNS\",\n \"startIndex\": 0,\n \"endIndex\": 2\n }\n }\n },\n {\n \"repeatCell\": {\n \"range\": {\n \"sheetId\": 0,\n \"startRowIndex\": 0,\n \"endRowIndex\": 1,\n \"startColumnIndex\": 0,\n \"endColumnIndex\": 2\n },\n \"cell\": {\n \"userEnteredFormat\": {\n \"textFormat\": {\n \"bold\": True\n }\n }\n },\n \"fields\": \"userEnteredFormat(textFormat)\"\n }\n }\n ]\n\n body = {\n 'requests': requests\n }\n\n try:\n sheetsService.spreadsheets().batchUpdate(\n spreadsheetId=spreadsheet_id, body=body).execute()\n except HttpError as error:\n print('An error occurred: %s' % error)\n raise error", "def test_create_new_user_duplicate_email(self):\n\n data = {\n 'username': 'John',\n 'email': '[email protected]',\n 'password': 'test123!',\n 'phone': '1234567890',\n 'first_name': 
'Chuck',\n 'last_name': 'Norris',\n 'university': {\n \"name\": \"random_university\"\n },\n 'academic_field': {'name': \"random_field\"},\n 'academic_level': {'name': \"random_level\"},\n 'gender': \"M\",\n 'birthdate': \"1999-11-11\",\n }\n\n user = UserFactory()\n user.email = '[email protected]'\n user.save()\n\n response = self.client.post(\n reverse('user-list'),\n data,\n format='json',\n )\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n content = {\n 'email': [\n \"An account for the specified email address already exists.\"\n ]\n }\n self.assertEqual(json.loads(response.content), content)", "def commit_draft(draft_uuid):\n api_request('post', api_url('drafts', str(draft_uuid), 'commit'))", "def test_create_account_failed_invalid_email(self):\n data = self.user_data.copy()\n data['email'] = 'test'\n response = self.client.post(self.url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.data.get('message').get('email')[0], 'Enter a valid email address.')", "def send_approval_mail(event_id, user_id):\n event = Event.objects.get(id=event_id)\n user = User.objects.get(id=user_id)\n\n sender = getattr(settings, 'EMAIL_HOST_USER', [])\n subject = \"Your attending request approved\"\n recipients = [user.email]\n context = Context({'name': user.username,\n 'event_title': event.title,\n 'event_id': event.id,\n 'site': Site.objects.get_current()})\n send_html_email(subject,\n sender,\n recipients,\n context,\n \"event/mail/approval\")\n return True", "def test_valid_account_create_is_a_developer(self):\n ident_choice = UserIdentificationLabel.objects.get(slug=\"ident1\")\n form_data = {\n 'invitation_code': '1234',\n 'email': '[email protected]',\n 'organization_name': 'transhealth',\n 'password1': 'BEDrocks@123',\n 'password2': 'BEDrocks@123',\n 'first_name': 'Hank',\n 'last_name': 'Flinstone',\n 'identification_choice': str(ident_choice.pk),\n }\n self.client.post(self.url, form_data, follow=True)\n up = UserProfile.objects.get(user__email='[email protected]')\n self.assertEqual(up.user_type, 'DEV')", "def new_user(global_config, timestamped_email, id_api):\n yield id_api.create_user_if_not_exists(timestamped_email, global_config.users.default.password)", "def test_admin_approval_email_falls_back_to_django_default_from_email(self):\n new_user = UserModel().objects.create_user(**self.user_info)\n profile = self.registration_profile.objects.create_profile(new_user)\n profile.activated = True\n self.registration_profile.objects.send_admin_approve_email(\n new_user, Site.objects.get_current())\n self.assertEqual(mail.outbox[0].from_email, '[email protected]')", "def perform_create(self, serializer):\n user = serializer.save()\n signals.user_registered.send(\n sender=self.__class__, user=user, request=self.request\n )\n\n context = get_email_context(user)\n to = [get_user_email(user)]\n if djconf.SEND_ACTIVATION_EMAIL:\n djconf.EMAIL.activation(self.request, context).send(to)\n elif djconf.SEND_CONFIRMATION_EMAIL:\n djconf.EMAIL.confirmation(self.request, context).send(to)", "def create(self, vals):\n if not vals.get(\"attendee_dob\"):\n vals.update(attendee_dob=None)\n return super(Attendee, self).create(vals)", "def create_user(open_ldap, smtp, entries):\n try:\n if open_ldap.ldap_insert(entries):\n smtp.send_email(entries)\n return True\n else:\n return False\n except Exception as e:\n print('ERROR - ', e)\n return", "def new_reply(cls, thread, user, content):\n msg = 
cls.objects.create(thread=thread, sender=user, content=content)\n thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)\n thread.userthread_set.filter(user=user).update(deleted=False, unread=False)\n message_sent.send(sender=cls, message=msg, thread=thread, reply=True)\n #for recip in thread.userthread_set.exclude(user=user):\n # send_newmessage_mail(msg, recip.user)\n return msg", "def create(\n cls,\n recipient_id: str,\n recipient_email: str,\n sender_id: str,\n sender_email: str,\n intent: str,\n subject: str,\n html_body: str,\n sent_datetime: datetime.datetime\n ) -> None:\n instance_id = cls._generate_id(intent)\n email_model_instance = cls(\n id=instance_id, recipient_id=recipient_id,\n recipient_email=recipient_email, sender_id=sender_id,\n sender_email=sender_email, intent=intent, subject=subject,\n html_body=html_body, sent_datetime=sent_datetime)\n\n email_model_instance.update_timestamps()\n email_model_instance.put()", "def do_create(service,summary,description,startday,\\\n starttime,endtime,username,email):\n event = {\n 'summary': 'Code Clinic: {}'.format(summary),\n 'description': '{}.'.format(description),\n 'start': {\n 'dateTime': '{}T{}:00'.format(startday, starttime),\n 'timeZone': 'GMT+02',\n },\n 'end': {\n 'dateTime': '{}T{}:00'.format(startday,endtime),\n 'timeZone': 'GMT+02',\n },\n 'recurrence': [\n 'RRULE:FREQ=DAILY;COUNT=1'\n ],\n 'attendees': [\n {\n 'displayName': username,\n 'email': email,\n 'optional': True,\n 'comment': 'Creator',\n 'responseStatus': 'accepted',\n },\n ],\n 'anyoneCanAddSelf': True,\n\n 'reminders': {\n 'useDefault': False,\n 'overrides': [\n {'method': 'email', 'minutes': 24 * 60},\n {'method': 'popup', 'minutes': 10},\n ],\n },\n }\n\n event = service.events().insert(calendarId='primary', body=event,\\\n sendUpdates='all').execute()\n\n return event", "def do_create_drafts(draft_dicts: List[Dict[str, Any]], user_profile: UserProfile) -> List[Draft]:\n draft_objects = []\n for draft_dict in draft_dicts:\n valid_draft_dict = further_validated_draft_dict(draft_dict, user_profile)\n draft_objects.append(\n Draft(\n user_profile=user_profile,\n recipient_id=valid_draft_dict[\"recipient_id\"],\n topic=valid_draft_dict[\"topic\"],\n content=valid_draft_dict[\"content\"],\n last_edit_time=valid_draft_dict[\"last_edit_time\"],\n )\n )\n\n created_draft_objects = Draft.objects.bulk_create(draft_objects)\n\n event = {\n \"type\": \"drafts\",\n \"op\": \"add\",\n \"drafts\": [draft.to_dict() for draft in created_draft_objects],\n }\n send_event(user_profile.realm, event, [user_profile.id])\n\n return created_draft_objects", "def add_contact_to_db(name, email, module_db_id):\n success = False\n if name is not None:\n try:\n done_email = email.lower().strip()\n validate_email(done_email)\n\n contact, created = Contact.objects.get_or_create(list_owner_id=module_db_id, email=email)\n if created and contact:\n contact.name_and_last_name = name\n contact.email = email\n contact.status = 1\n contact.save()\n success = True\n else:\n success = False\n except Exception as e:\n print(e.args)\n contact, created = Contact.objects.get_or_create(list_owner_id=module_db_id, email=email)\n if created and contact:\n contact.name_and_last_name = name\n contact.email = email\n contact.status = 0\n contact.save()\n success = True\n else:\n success = False\n\n return success, name, email", "def get_user_and_created(cls, update, context):\n data = utils.extract_user_data_from_update(update)\n u, created = 
cls.objects.update_or_create(user_id=data[\"user_id\"], defaults=data)\n\n if created:\n if context is not None and context.args is not None and len(context.args) > 0:\n payload = context.args[0]\n if str(payload).strip() != str(data[\"user_id\"]).strip(): # you can't invite yourself\n u.deep_link = payload\n u.save()\n\n return u, created", "def test_update_email_task_send_email_current_name_not_email(self):\n\n user = fake_clients.FakeUser(\n name=\"nkdfslnkls\", password=\"123\", email=\"[email protected]\"\n )\n\n setup_identity_cache(users=[user])\n\n url = \"/v1/actions/UpdateEmail\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": \"test_project_id\",\n \"roles\": \"project_admin,member,project_mod\",\n \"username\": \"nkdfslnkls\",\n \"user_id\": user.id,\n \"authenticated\": True,\n \"email\": \"[email protected]\",\n }\n\n data = {\"new_email\": \"[email protected]\"}\n response = self.client.post(url, data, format=\"json\", headers=headers)\n\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n self.assertEqual(response.data, {\"notes\": [\"task created\"]})\n\n self.assertEqual(len(mail.outbox), 2)\n self.assertEqual(mail.outbox[0].to, [\"[email protected]\"])\n self.assertEqual(mail.outbox[0].subject, \"update_user_email_additional\")\n\n self.assertEqual(mail.outbox[1].to, [\"[email protected]\"])\n self.assertEqual(mail.outbox[1].subject, \"update_user_email_token\")\n\n new_token = Token.objects.all()[0]\n url = \"/v1/tokens/\" + new_token.token\n\n data = {\"confirm\": True}\n response = self.client.post(url, data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n self.assertEqual(len(mail.outbox), 3)", "def test_for_email_attribut_by_uid(self):\n name = u\"__TestUser2__\"\n password = u\"ekERErwerwerh\"\n email = \"__TestUser2__@moinhost\"\n self.createUser(name, password, email=email)\n uid = user.getUserId(self.request, name)\n theuser = user.User(self.request, uid)\n assert theuser.email == email", "def draft_message(request):\n query = models.Message.query(\n models.Message.issue_key == request.issue.key,\n models.Message.sender == request.user.email(),\n models.Message.draft == True)\n if query.count() == 0:\n draft_message = None\n else:\n draft_message = query.get()\n if request.method == 'GET':\n return _get_draft_message(draft_message)\n elif request.method == 'POST':\n return _post_draft_message(request, draft_message)\n elif request.method == 'DELETE':\n return _delete_draft_message(draft_message)\n return HttpTextResponse('An error occurred.', status=500)", "def create_message(id_user: int, message: str):\n # Create datetime of today at midnight\n today = datetime.date.today()\n today = today.strftime(\"%Y-%m-%d %H:%M:%S\")\n # Query to check if there is already a message for this user today\n mycursor.execute(f\"\"\"SELECT id_message\n FROM Daily_message\n WHERE date_message >= '{today}'\n AND id_user = {id_user}\n \"\"\")\n rowcount = mycursor.rowcount\n if rowcount == 1:\n # If there is already a message today, user can't add a new one\n return 'Impossible d\\'ajouter ce message. 
Il y a déjà un message aujourd\\'hui, veuillez le modifier.'\n else:\n # Create datetime of today at current time\n date_message = datetime.datetime.today()\n date_message = date_message.strftime(\"%Y/%m/%d %H:%M:%S\")\n columns_mess = 'id_user, text, date_message'\n # Add the message infos to the Daily_message table\n add_in_database((id_user, message, date_message), 'Daily_message', columns_mess)\n mydb.commit()\n # Get the id_message\n mycursor.execute(f\"\"\"SELECT id_message\n FROM Daily_message\n WHERE (id_user = {id_user})\n AND (date_message >= '{today}')\n \"\"\")\n infos = mycursor.fetchall()\n id_message = infos[0][0]\n # Fill emotion table for the new message\n create_emotion(id_message, message)\n return {'id_user': id_user, 'message': message}" ]
[ "0.6185358", "0.5889587", "0.56886303", "0.563527", "0.5423166", "0.5389278", "0.53863585", "0.5356391", "0.5334495", "0.5330233", "0.53246087", "0.53019273", "0.5286498", "0.5276959", "0.5267437", "0.5240132", "0.5237234", "0.5231543", "0.52308434", "0.5228887", "0.5216414", "0.5204619", "0.518094", "0.5180281", "0.5179719", "0.5177352", "0.5169962", "0.5169251", "0.5163908", "0.51629907", "0.5141072", "0.5140649", "0.51283056", "0.51211363", "0.5120887", "0.5118071", "0.5117624", "0.5112251", "0.5099561", "0.5095478", "0.5061376", "0.5051435", "0.50501233", "0.50287694", "0.5012671", "0.5002051", "0.50018096", "0.49841133", "0.49734405", "0.49675235", "0.49649948", "0.49643576", "0.4960395", "0.49469087", "0.49383324", "0.49373287", "0.49347386", "0.49332684", "0.49320036", "0.49291316", "0.49214837", "0.4918819", "0.49182647", "0.49056786", "0.48995233", "0.48990878", "0.48976794", "0.48928875", "0.48866498", "0.48837417", "0.48808536", "0.48787385", "0.48780966", "0.48759377", "0.4873623", "0.48732615", "0.4869245", "0.48650315", "0.4861826", "0.486163", "0.48593515", "0.48548198", "0.4850518", "0.48436594", "0.48401958", "0.48367348", "0.48337993", "0.48322725", "0.4830197", "0.48299873", "0.48267302", "0.4825555", "0.4822729", "0.48203456", "0.48186588", "0.48111933", "0.4809703", "0.4809062", "0.4808818", "0.48077247" ]
0.73411775
0
iterator which goes through all the pages to find all the emails
def get_all_emails_it(auth, user_id, folder_id='AllItems', pages_limit=None, pages_size=50, **kwargs):
    i = 0
    args_dict = dict(kwargs, top=pages_size, skip=pages_size * i)
    curr_emails = get_emails(auth, user_id, folder_id, **args_dict)
    while len(curr_emails) != 0:
        yield curr_emails
        if pages_limit is not None and i >= pages_limit:
            break
        i += 1
        args_dict = dict(kwargs, top=pages_size, skip=pages_size * i)
        curr_emails = get_emails(auth, user_id, folder_id, **args_dict)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_email_addresses(startdate, enddate, user, password):\n emails = []\n page = 1\n more_pages = True\n\n while more_pages:\n response = requests.get(\n 'https://restapi.surveygizmo.com/v2/survey/{survey}'\n '/surveyresponse?'\n 'filter[field][0]=datesubmitted'\n '&filter[operator][0]=>=&filter[value][0]={start}+0:0:0'\n '&filter[operator][1]=<&filter[value][1]={end}+0:0:0'\n '&filter[field][1]=status&filter[operator][1]=='\n '&filter[value][1]=Complete'\n '&resultsperpage=500'\n '&page={page}'\n '&user:pass={user}:{password}'.format(\n survey=EMAIL_COLLECTION_SURVEY_ID, start=startdate,\n end=enddate, page=page, user=user, password=password))\n\n results = json.loads(response.content)\n total_pages = results['total_pages']\n more_pages = page < total_pages\n emails = emails + [r['[question(13)]'] for r in results['data']]\n\n return emails", "def iter_page_links(self) -> Iterable[str]:\n base_url = 'https://www.med.navy.mil'\n r = requests.get(self.starting_url, verify=CERTIFICATE_DIR + '/cat3.pem')\n soup = bs4.BeautifulSoup(r.content, features=\"html.parser\")\n\n # get target column of list items\n issuance_list = soup.find('div', attrs={'class': 'noindex ms-wpContentDivSpace'})\n matches = [\"Publications\", \"BUMEDNotes\", \"BUMEDInstructions\"]\n # extract links\n links = [link for link in issuance_list.find_all('a')]\n for link in links[2:-1]:\n if any(x in str(link) for x in matches):\n if not link['href'].startswith('http'):\n url = base_url + link['href']\n else:\n url = link['href']\n yield url", "def extract_emails_from_category(initial_url, first_page=int(1)):\r\n\tresult_emails = set() #we will return this\r\n\t#last page regex\r\n\tlp_regex = re.compile('[0-9]+/;')\r\n\t#Open URL\r\n\tsoup = bs4.BeautifulSoup(urlopen(initial_url), \"html5lib\")\r\n\t#extract the link to the last page. It is inside div.paging-bottom > ul > li with text \">>\"\r\n\tnavigation = soup.find_all(\"div\",id=\"paging-bottom\")\r\n\tif not navigation:\r\n\t\tprint(\"This page is weird. It has no navigation. Aborting\\n\")\r\n\t\treturn result_emails\r\n\r\n\ttxt_elem = navigation[0].ul.find_all(text=\">>\")[0]\r\n\t#link to last page\r\n\tlink = txt_elem.parent\r\n\t#Get its url.. smthg like /ourivesarias-joalharias/134/;jsessionid=67E1932531B84B3E77AAF47A29B263CE\r\n\turl = link['href']\r\n\t#Pick the number of the last page\r\n\tmatch = lp_regex.search(url)\r\n\tif match:\r\n\t\tlast_page = match.group()[0:-2]\r\n\t\tlast_page_i = int(last_page)\r\n\telse:\r\n\t\tprint(\"This category has no navigation to the last page\\n\")\r\n\t\tlast_page_i = first_page\r\n\t\t\r\n\t#Sanity Check\r\n\tif last_page_i < first_page:\r\n\t\tlast_page_i = first_page\r\n\t\t\r\n\tprint(\"Working on category %s\" % initial_url)\r\n\t#Now that we have the last page. Time to iterate on each one and get the emails\r\n\tfor page in xrange( first_page, last_page_i ):\r\n\t\tpage_url = initial_url + str(page) + '/' #This is fragile\r\n\t\tprint(\"Scanning page %d of %d (%s).\" % (page, last_page_i, page_url))\r\n\t\ttry:\r\n\t\t\temails = extract_emails_from_page(bs4.BeautifulSoup( unicode(urlopen(page_url).read(),'utf-8','ignore'), \"html5lib\"))\r\n\t\t\twrite_emails_to_set(emails, result_emails)\r\n\t\t\ttime.sleep(5)\r\n\t\texcept IOError:\r\n\t\t\tprint(\"Coult not fetch url %s. 
Skipped\\n\" % page_url)\r\n\treturn result_emails", "def extract_emails_from_page(soup):\r\n\temail_pattern = re.compile('([\\w\\-\\.+]+@(\\w[\\w\\-]+\\.)+[\\w\\-]+)')\r\n\ttry:\r\n\t\tpage_content = str(soup)\r\n\texcept:\r\n\t\tprint('Error parsing page. Skipped\\n')\r\n\t\treturn []\r\n\tmatches = email_pattern.findall(page_content)\r\n\tif matches:\r\n\t\treturn [ match[0] for match in matches ]\r\n\treturn []", "def __iter__(self):\n while self.has_next_page():\n response = self.get_next_page_response()\n for item in self.get_items_from_response(response):\n yield item", "def scrape_emails(webpage):\n emails = []\n html = requests.get(webpage)\n email_regex = re.compile(r'[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\.[a-zA-Z]+')\n emails = email_regex.findall(html.text)\n return emails", "def get_emails(self):\n email_ids = self.get_email_ids()\n Email = get_email_class()\n return [email for email in Email.objects.filter(pk__in=email_ids)]", "def fetch_all(self):\n emails = []\n res, messages = self._mailconn.search(None, 'ALL')\n if res == 'OK':\n for msg in messages[0].split():\n try:\n res, data = self._mailconn.fetch(msg.decode('utf-8'), '(RFC822)')\n except Exception as error:\n self.close_mail_connection()\n print('No email to read: '+error)\n exit()\n \n msg = email.message_from_string((data[0][1]).decode('utf-8'))\n if not isinstance(msg, str):\n if self.is_sender_in_whitelist(msg['From']):\n emails.append(msg)\n\n return emails", "def getIdeaUrlsFromEmail():\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('gmail', 'v1', http=http)\n\n\n lists,nextPageToken = ListMessages(service,user_id = 'me',q='from:[email protected]')\n # print (lists)\n mes,mes_str = GetMimeMessage(service,user_id = 'me',msg_id = lists[0]['id'])\n # print (mes)\n j = 0\n urls = []\n for part in mes.walk(): \n j = j + 1 \n fileName = part.get_filename() \n contentType = part.get_content_type() \n mycode=part.get_content_charset(); \n # 保存附件 \n if fileName:\n print ('保存邮件附件……TODO?')\n elif contentType == 'text/html': #or contentType == 'text/plain' \n #保存正文 \n data = part.get_payload(decode=True) \n content=str(data); \n # if mycode=='gb2312': \n # content= mbs_to_utf8(content) \n #end if \n # nPos = content.find('降息') \n # print(\"nPos is %d\"%(nPos)) \n # print >> f, data \n # 正则替换掉所有非 <a></a>的标签 <[^>|a]+>\n # reg = re.compile('<[^>|a]+>')\n # print (content)\n url,title = findIdeaUrlInHtml(content)\n urls.append((url,title))\n # print (url,title)\n # contentTxt = re.compile('<[^>|a]+>').sub('',content)\n # print (reg.sub('',content))\n # #end if \n\n return urls", "def _get_iter(self, url, params):\n for current_page_index in itertools.count():\n result_dict = self._get_page(url, params, current_page_index)\n for document in result_dict['entries']:\n yield document\n if not result_dict['isNextPageAvailable']:\n break", "def _all_pages(self, page_function, **kwargs) -> Iterator[Iterable]:\n\n next_token = None\n is_truncated = True\n while is_truncated:\n page = page_function(token=next_token, **kwargs)\n next_token = page.next_token\n is_truncated = page.is_truncated and next_token is not None\n for task in page.page_data:\n yield task", "def get_emails():\n\n # generate the gmail api service\n service = build_gmail_api_v1()\n\n # compute date for one year ago\n today = date.today()\n one_year_ago = today - timedelta(days=365.25)\n start = one_year_ago - timedelta(days=1)\n end = one_year_ago + timedelta(days=1)\n start_string = 
start.strftime(\"%Y/%m/%d\")\n end_string = end.strftime(\"%Y/%m/%d\")\n query_string = f'after:{start_string} before:{end_string}'\n\n # generate the gmail api request (get list of messages from one year ago)\n request = service.users().messages().list(userId='me', q=query_string)\n\n # try to get the api response\n try:\n response = request.execute()\n except HTTPError as e:\n print('Error response status code : {0}, reason : {1}'.format(\n e.resp.status, e.error_details))\n return []\n\n # get list of message ids from the api response\n messages = list(response[\"messages\"])\n ids = [message[\"id\"] for message in messages]\n\n # store all emails in a list\n data_to_display = []\n\n # loop through each message id\n for id in ids:\n\n try:\n # store email data in a dict\n email = {}\n\n # get message data by querying gmail api using message id\n request = service.users().messages().get(userId='me', id=id)\n response = request.execute()\n\n # get date, subject, from, to, etc from message header\n headers = list(response[\"payload\"][\"headers\"])\n looking_for = [\"Date\", \"Subject\", \"From\", \"To\"]\n for header in headers:\n if header[\"name\"] in looking_for:\n email[header[\"name\"]] = header[\"value\"]\n\n # try to get message body (base64) from response\n # the json structure varies a lot so that is why there are no many try/except\n try:\n base64_message = response[\"payload\"][\"parts\"][0][\"parts\"][0][\"body\"][\"data\"]\n except (KeyError, TypeError) as e:\n try:\n base64_message = response[\"payload\"][\"parts\"][1][\"body\"][\"data\"]\n except (KeyError, TypeError, IndexError) as e:\n try:\n base64_message = response[\"payload\"][\"parts\"][0][\"body\"][\"data\"]\n except (KeyError, TypeError, IndexError) as e:\n try:\n base64_message = response[\"payload\"][\"body\"][\"data\"]\n except (KeyError, TypeError, IndexError) as e:\n base64_message = \"Ti9B\"\n\n # decode the email body\n email[\"body\"] = base64.urlsafe_b64decode(\n base64_message).decode('utf-8')\n\n # populate list with email\n data_to_display.append(email)\n\n except HTTPError as e:\n print('Error response status code : {0}, reason : {1}'.format(\n e.resp.status, e.error_details))\n\n return data_to_display", "def extract_page_urls(self, _):\n url = \"https://mossadams.taleo.net/careersection/rest/jobboard/searchjobs?lang=en&portal=4160751617\"\n page_num = 1\n last_count = 0\n this_count = 0\n\n while True:\n last_count = len(self.urls_to_scrape)\n payload = PAYLOAD + '\"pageNo\":' + str(page_num) + \"}\"\n json_data = self.post_request(url, out_format='json', headers=HEADERS, data=payload)\n\n for job in json_data['requisitionList']:\n job_url = \"https://mossadams.taleo.net/careersection/6/jobdetail.ftl?job=\" + job['contestNo']\n self.urls_to_scrape.add(job_url)\n\n # check to see if any new records were scraped; if not, I've reach the end\n this_count = len(self.urls_to_scrape)\n if last_count == this_count:\n break\n else:\n last_count = this_count\n page_num += 1", "def find_emails(url):\n\thtml = retrieve_html(url)\n\temail_set = find_emails_in_html(html)\n\n\tif len(email_set) > 0:\n\t\t# If there is a email, we stop at level 1.\n\t\treturn email_set\n\n\telse:\n\t\t# No email at level 1. Crawl level 2\n\t\tlogger.info('No email at level 1.. 
proceeding to crawl level 2')\n\n\t\tlink_set = find_links(url, html)\n\t\tfor link in link_set:\n\t\t\t# Crawl them right away!\n\t\t\t# Enqueue them too\n\t\t\thtml = retrieve_html(link)\n\t\t\tif html is None:\n\t\t\t\tcontinue\n\t\t\temail_set = find_emails_in_html(html)\n\t\t\tdb.enqueue(link, list(email_set))\n\n\t\t# We return an empty set\n\t\treturn set()", "def emails(self):\r\n return emails.Emails(self)", "def extract_linked_items(pages):\n for page in pages:\n for iterate in iterate_on_items(page):\n yield((iterate[1:])[:-1])", "def get_email_addresses(survey, startdatetime, enddatetime):\n token = settings.SURVEYGIZMO_API_TOKEN\n secret = settings.SURVEYGIZMO_API_TOKEN_SECRET\n emails = []\n page = 1\n more_pages = True\n survey_id = SURVEYS[survey][\"email_collection_survey_id\"]\n dtfmt = \"%Y-%m-%d+%H:%M:%S\"\n\n # Can't do anything without credentials.\n if token is None or secret is None:\n return emails\n\n while more_pages:\n response = requests.get(\n \"https://restapi.surveygizmo.com/v2/survey/{survey}\"\n \"/surveyresponse?\"\n \"filter[field][0]=datesubmitted\"\n \"&filter[operator][0]=>=&filter[value][0]={start}\"\n \"filter[field][1]=datesubmitted\"\n \"&filter[operator][1]=<&filter[value][1]={end}\"\n \"&filter[field][2]=status&filter[operator][2]==\"\n \"&filter[value][2]=Complete\"\n \"&resultsperpage=500\"\n \"&page={page}\"\n \"&api_token={token}\"\n \"&api_token_secret={secret}\".format(\n survey=survey_id,\n start=startdatetime.strftime(dtfmt),\n end=enddatetime.strftime(dtfmt),\n page=page,\n token=token,\n secret=secret,\n ),\n timeout=300,\n )\n\n results = json.loads(response.content)\n total_pages = results.get(\"total_pages\", 1)\n more_pages = page < total_pages\n emails = emails + [r[\"[question(13)]\"] for r in results[\"data\"]]\n page += 1\n\n valid_emails = []\n for email in emails:\n try:\n validate_email(email)\n except ValidationError:\n pass\n else:\n valid_emails.append(email)\n\n return valid_emails", "def emails(self):\r\n url = api_base + 'emails/'\r\n return json.loads(self.load_url(url))", "def email_all():\n\tSubscribtion = session.query(email).all()\n\treturn subscribtion_object", "def __iter__(self):\n return self.paged()", "def iter_pages(self):\n for num in range(1, self.pages + 1):\n yield Page(num)", "def extract(self, response):\n # print response.url,\"extract response url\"\n sel = response.selector\n pages = []\n try:\n # print \"pages work\"\n pages = sel.xpath(\"//div[contains(@class,'fen_ye_nav')]//td/text()\").re(u\"共([\\d]{1,3})页\")\n # print pages\n except Exception, e:\n print e,\"error pages\"\n log.msg(e, level=log.ERROR)\n log.msg(response.url, level=log.ERROR)\n\n if len(pages) == 0:\n self.getUserName(response) #only one page\n else:\n for page in range(int(pages[0])+1)[1:]: #fro test\n url = response.url+\"_m0_p\"+str(page)\n yield Request(url, callback=self.getUserName,dont_filter=True)", "def get_group_of_emails(M):\n print \"Try to access group of emails\"\n data = search_email_advanced(M)\n if data is None:\n return\n # print \"Got data as \", data\n ids = data[0]\n id_list = ids.split()\n for id_num in id_list:\n rv, data = M.uid('fetch', id_num, \"(RFC822)\")\n if rv != \"OK\":\n print \"Error getting message\"\n return\n # get raw text of the whole email\n raw_email = data[0][1]\n content = email.message_from_string(raw_email)\n # print raw_email\n p = EmailParser()\n # print sender and receivers\n print \"To: \", content['To'], \"\\n\"\n print \"From: \", email.utils.parseaddr(content['From']), 
\"\\n\"\n print \"Date: \", content['Date'], \"\\n\"\n print \"Subject: \", p.parsestr(raw_email).get('Subject'), \\\n \"\\n\"\n result = parse_content(content)\n # print results\n printData(result)", "def find(self):\n self.paths.add(self.url)\n while len(self.visited_paths) < self.num_pages_limit and \\\n len(self.paths) > 0:\n self.find_emails_and_paths(path=self.paths.pop())", "def iter_feed(gd_client):\n feed = gd_client.GetContactsFeed()\n while feed:\n for entry in feed.entry:\n yield entry\n # Check whether there is another page and if yes\n next_link = feed.GetNextLink()\n feed = None\n if next_link:\n feed = gd_client.GetContactsFeed(uri=next_link.href)", "def get_pages(search_url):\n page_number = 1\n page = fetch_page(search_url.format(page_number))\n while (page_exists(page)) & (page_number <= 100):\n print (page_number, end=', ')\n yield page, page_number\n page_number += 1\n page = fetch_page(search_url.format(page_number))", "def iterate_on_items(pagecode):\n parser = etree.HTMLParser()\n \n tree = etree.parse(StringIO(pagecode), parser)\n\n # xpath = \"/html/body/div[3]/div[3]/div[3]/ul/li[83]/a/span/span[2]\"\n span_class = \"wb-itemlink-id\"\n request = tree.xpath('//span[@class=\"{}\"]'.format(span_class))\n for span in request:\n yield span.text", "def find_emails(site):\n regex=re.compile(r\"[\\w,\\.,\\_,\\%,\\+,\\-]+@[\\w,\\.]*\")\n emails=[]\n for a in site:\n emails.extend(regex.findall(str(a.decode('utf-8'))))\n all_emails.extend(emails)\n return set(emails)", "def __iter__(self):\n self.__iter_page = 1\n return self", "def _paginate(self) -> Iterable[List[str]]:\n req = self.html\n videos_lens = self._extractor(req)\n yield videos_lens # yielding doesn't mean that is the end\n\n # The above only returns 100 or fewer links\n # as Youtube loads 100 videos at a time\n # Simulating a browser request for the load more link\n load_more_url = self._find_load_more_url(req)\n\n while load_more_url: # there is an url found\n req = get(load_more_url)\n load_more = json.loads(req)\n try:\n html = load_more[\"content_html\"]\n except KeyError:\n return # if there is no content_html there is no chanch to find_load_more_url\n videos_lens = self._extractor(html)\n yield videos_lens\n\n load_more_url = self._find_load_more_url(\n load_more[\"load_more_widget_html\"],\n )\n\n return", "def parse_mail(self, m):\n addrs = []\n if isinstance(m, email.message.Message):\n get_header = m.get\n else:\n get_header = m.get_header\n for h in ('to', 'from', 'cc', 'bcc'):\n v = get_header(h)\n if v:\n addrs.append(v)\n for addr in email.utils.getaddresses(addrs):\n name = addr[0].strip('; ')\n address = addr[1].lower().strip(';\\'\" ')\n if (address and address not in self.addresses):\n self.addresses[address] = name\n yield (name, address)", "def yield_messages(self, *, mailbox=DEFAULT_MAILBOX):\n with imaplib.IMAP4_SSL(\n host=self.host, port=self.port,\n ssl_context=ssl.create_default_context()) as ic:\n logger.debug(f'Logging in as {self.user}')\n restype, _ = ic.login(user=self.user, password=self.password)\n check_restype(restype, f'failed to LOGIN as {self.user}')\n\n logger.debug(f'Selecting mailbox {mailbox}')\n restype, _ = ic.select(mailbox=mailbox)\n check_restype(restype, f'failed to SELECT mailbox {mailbox}')\n\n # First, query for *all* message UIDs in the selected mailbox.\n # Interpret the response as a list of integers.\n logger.debug('Getting UID list')\n restype, [uids] = ic.uid('SEARCH', 'ALL')\n check_restype(restype, 'failed to execute SEARCH')\n uids = [*map(int, 
filter(None, uids.split(b' ')))]\n\n # In the event that the mailbox is empty, there will be no UIDs and\n # nothing more to do.\n if not uids:\n logger.debug('No messages in this mailbox')\n raise NoMessages\n\n # Fetch a range of message headers, from the smallest UID to the\n # largest UID. Build a structure that links each UID to the `Date`\n # header on the message it refers to.\n message_set = f'{min(uids)}:{max(uids)}'\n logger.debug(f'Fetching headers in UID range {message_set}')\n restype, resdata = ic.uid('FETCH', message_set, '(BODY.PEEK[HEADER])')\n check_restype(restype, f'failed to execute FETCH UIDs {message_set} (peek)')\n uids_dated = self.date_uid_map(data=resdata)\n\n # Iterate over all UIDs in date order, oldest to newest.\n for _, uid in sorted(uids_dated, key=itemgetter(0)):\n # Fetch the full message content, wrap it in an IMAPMessage and\n # yield it to the caller.\n logger.debug(f'Fetching message UID {int(uid)}')\n restype, resdata = ic.uid('FETCH', uid, '(RFC822)')\n check_restype(restype, f'failed to execute FETCH UID {int(uid)} (full)')\n\n yield IMAPMessage(data=resdata, imap_connection=ic, uid=uid)", "def search_email_by_all(M):\n print \"basic search mode\\n\"\n rv, data = M.uid('search', None, 'All')\n if check_response(rv):\n return data\n else:\n return None", "def iter_links(self):", "def iter_page_links(self) -> Iterable[str]:\n base_url = 'https://health.mil/About-MHS/OASDHA/Defense-Health-Agency/Resources-and-Management/DHA-Publications'\n yield base_url", "def parse_view_page(self):\n for row in self.driver.find_elements_by_css_selector(\"table\"):\n cells = row.find_elements_by_tag_name(\"td\")\n for cell in cells:\n yield cell.text", "def test_pagination(self):\n for page in range(1, 5):\n self._test_one_page(page=page)", "def _print_basic_email_information(emails, conn):\n for mid in emails:\n (res, data) = conn.fetch(mid, '(ENVELOPE)')\n headers = pattern.match(data[0])\n print 'Date: %s' % headers.group(1)\n print 'Subject: %s' % headers.group(2)\n print 'From: %s <%s@%s>' % (headers.group(3), headers.group(4), headers.group(5))\n print", "def parse(self):\n\t\tfor part in self.mail.walk():\n\t\t\tself.process_part(part)", "def iter_pages(self) -> Generator[Tuple[Optional[List[dict]], int], None, None]:\n # retrieves the data for the given url\n data_list, response, result = self.retrieve_data(self.url)\n\n if result != GithubApiResult.SUCCESS:\n self.logger.debug(\"Failed to retrieve the data even though 10 attempts were given\")\n yield None, None\n return\n\n # this retrieves the page for the given url\n page_number = get_url_page_number(self.url)\n\n # yields the first page of data and its page number\n yield data_list, page_number\n\n while 'next' in response.links.keys():\n\n # gets the next page from the last responses header\n next_page = response.links['next']['url']\n\n # Here we don't need to pass in params with the page, or the default params because the url from the headers already has those values\n data_list, response, result = self.retrieve_data(next_page)\n\n if result != GithubApiResult.SUCCESS:\n self.logger.debug(f\"Failed to retrieve the data for even though 10 attempts were given. 
Url: {next_page}\")\n return\n\n page_number = get_url_page_number(next_page)\n\n # if either the data or response is None then yield None and return\n if data_list is None or response is None:\n return\n\n # yield the data from the page and its number\n yield data_list, page_number", "def _paginated_generator(self, request_args):\n while request_args:\n resp = self._api._session.request(**request_args)\n if not resp.ok:\n raise Basecamp3Error(response=resp)\n link_header = resp.headers.get(\"Link\")\n if link_header:\n next_page_url = self._LINK_HEADER_URL_REGEX.findall(link_header)[0]\n request_args = {'url': next_page_url, 'method': 'GET'} # get ready to call the next page\n else:\n request_args = None # clear it so we break the loop\n items_json = resp.json()\n for jdict in items_json:\n item = self.OBJECT_CLASS(jdict, self) # convert JSON dict into a BasecampObject\n yield item", "def test_get_sms_messages_paginated(self):\n pass", "def main():\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('gmail', 'v1', http=http)\n\n messageIds = []\n i = 0\n nextPageToken = None\n while (i <= 15):\n try:\n response = service.users().messages().list(userId='me', q='after:2016/09/01', maxResults=10000, pageToken=nextPageToken).execute()\n messages = response.get('messages')\n nextPageToken = response['nextPageToken']\n\n for m in messages:\n messageIds.append(m['id'])\n\n i+=1 \n except KeyError:\n break\n\n senders = []\n counter = 0\n for i in messageIds:\n data = service.users().messages().get(userId='me', id=i).execute()\n for d in data['payload']['headers']:\n if d['name'] == 'Received':\n print(d['value'][d['value'].find('; ')+1:d['value'].find('(PST)')])\n if d['name'] == 'From' and 'bounce' not in d['value']:\n senders.append(d['value'])\n print(counter, ' ', d['value'])\n counter += 1\n break\n\n emails = []\n with open('out.csv', 'wb') as f:\n writer = csv.writer(f, delimiter=',')\n for person in set(senders):\n cleaned = clean_data(person)\n name = cleaned[0]\n email = cleaned[1]\n if email not in emails:\n emails.append(email)\n if name != None and email != None:\n writer.writerow([name, email])", "def process_messages(imap, messages):\n for i in messages:\n # fetch the email message by ID\n res, msg = imap.fetch(str(i), \"(RFC822)\")\n for response in msg:\n if isinstance(response, tuple):\n # parse bytes email into a message object\n msg = email.message_from_bytes(response[1])\n #print(msg.keys())\n\n # decode the email subject\n subject = decode_header(msg[\"Subject\"])[0][0]\n if isinstance(subject, bytes):\n # if it's a bytes, decode to str\n subject = subject.decode()\n\n # decode email sender\n From, encoding = decode_header(msg.get(\"From\"))[0]\n if isinstance(From, bytes):\n From = From.decode(encoding)\n\n # decode email Date\n Date, encoding = decode_header(msg.get(\"Date\"))[0]\n if isinstance(From, bytes):\n Date = Date.decode(encoding)\n\n print(\"Subject: \", subject)\n print(\"From: \", From)\n print(\"Date: \", Date)\n\n print(\"=\"*100)", "def fetchmail(self):\n mails = []\n\n if self.security == 'SSL/TLS':\n imap = IMAP4_SSL(self.host, self.port)\n else:\n imap = IMAP4(self.host, self.port)\n if self.security == 'STARTTLS':\n imap.starttls()\n imap.login(self.username, self.passwd)\n imap.select(readonly=True)\n\n status, uids = imap.uid('SEARCH', 'UNSEEN')\n\n for uid in uids[0].split():\n status, data = imap.uid('FETCH', uid, '(BODY[HEADER.FIELDS (DATE SUBJECT FROM)])')\n message = 
self._message_from_data(data)\n mail = Mail(uid, message['FROM'], message['SUBJECT'], message['DATE'])\n mails.append(mail)\n\n imap.close()\n imap.logout()\n\n return mails", "def get_emails(params, start_response):\n custodian = params.getfirst('custodian')\n date = params.getfirst('date')\n tfidf = params.getfirst('tfidf')\n out = json.dumps(documents_out(custodian, date, tfidf))\n status = '200 OK'\n response_headers = [('Content-type', 'application/json'),\n ('Access-Control-Allow-Origin', '*'),\n ('Content-Length', str(len(out)))]\n start_response(status, response_headers)\n return [out]", "def by_page(self) -> global___Snippet.PaginatedResponseHandling.ByPage:", "def by_page(self) -> global___Snippet.PaginatedResponseHandling.ByPage:", "def fetch_pages(query_val, page_num):\n \n for page_id in range(1 + page_num + 1):\n try:\n output = fetch_data(query_val, page_id)\n for j in output:\n print(str(j))\n \n except Exception as e:\n print(e)", "def get_email_info_from_all_addressbooks(self, email):\n logger.info(\"Function call: get_email_info_from_all_addressbooks for '{}'\".format(email, ))\n return self.__handle_error('Empty email') if not email else self.__handle_result(self.__send_request('emails/{}'.format(email, )))", "def _all_offset_pages(self, page_function, **kwargs) -> Iterator[Iterable]:\n\n next_offset = 0\n is_truncated = True\n while is_truncated:\n page = page_function(offset=next_offset, **kwargs)\n next_offset = page.offset + page.limit\n is_truncated = page.total > next_offset\n for data in page.page_data:\n yield data", "def get_emails(parsed_data):\n result = []\n known_values = []\n contacts = {'registrant_contact': [], 'administrative_contact': [], 'technical_contact': [],\n 'domain_registrar': []}\n if 'registrant_contact' in parsed_data:\n contacts['registrant_contact'].append(parsed_data['registrant_contact'])\n if 'administrative_contact' in parsed_data:\n contacts['administrative_contact'].append(parsed_data['administrative_contact'])\n if 'technical_contact' in parsed_data:\n contacts['technical_contact'].append(parsed_data['technical_contact'])\n if 'domain_registrar' in parsed_data:\n contacts['domain_registrar'].append(parsed_data['domain_registrar'])\n # parsing email address from contact block\n\n for contact, info in contacts.items():\n if info is not None:\n d = {'type': 2, 'data': '', 'properties': {}, 'special_properties': {}, 'is_valid': False, 'ref': {}}\n # properties dictionary\n is_valid = {}\n owner = {'owner': '', 'type': 11}\n organization = {'organization': '', 'type': 11}\n local_address = {'local_address': '', 'type': 5}\n domain_name = {'domain_name': '', 'type': 12}\n properties_list = []\n special_properties_list = []\n d.update({'ref': {'task': 'whois', 'whois_for': '', 'whois_from': ''}})\n if 'domain_name' in parsed_data and len(parsed_data['domain_name']) > 0:\n d['ref']['whois_for'] = parsed_data['domain_name']\n if 'whois_server' in parsed_data:\n d['ref']['whois_from'] = parsed_data['whois_server']\n\n for name in info:\n if \"email_address\" in name:\n if name['email_address'] in known_values:\n break\n for feature in name.keys():\n if feature == \"email_address\":\n d['data'] = name['email_address']\n known_values.append(name['email_address'])\n\n if feature == \"full_name\":\n owner['owner'] = name['full_name']\n properties_list.append(owner)\n\n if feature == \"city_name\":\n organization['organization'] = name['city_name']\n properties_list.append(organization)\n\n d['is_valid'] = ''\n is_valid = {'isvalid': '', 'type': 
0}\n\n # prevent from create result if phone number of contact is not available\n if d['data'] == '':\n continue\n try:\n domain_name['domain_name'] = d['data'].split('@')[1]\n local_address['local_address'] = d['data'].split('@')[0]\n properties_list.append(domain_name)\n properties_list.append(local_address)\n except:\n\n domain_name['domain_name'] = ''\n local_address['local_address'] = d['data']\n properties_list.append(domain_name)\n properties_list.append(local_address)\n\n d.update({'ref': {'task': 'whois', 'whois_for': '', 'whois_from': '', 'label': ''}})\n d['ref']['label'] = \"%s_name\" % contact\n if 'domain_name' in parsed_data and len(parsed_data['domain_name']) > 0:\n d['ref']['whois_for'] = parsed_data['domain_name']\n if 'whois_server' in parsed_data:\n d['ref']['whois_from'] = parsed_data['whois_server']\n d['properties'] = properties_list\n special_properties_list.append(is_valid)\n d['special_properties'] = special_properties_list\n result.append(d)\n\n return result", "def group_pages(pages: List[str]) -> List[str]:\n results = []\n buffer = \"\"\n\n for p in pages:\n if _service_account_pattern.search(p) and buffer:\n results.append(buffer)\n buffer = p\n else:\n buffer += p\n\n # Ensure we capture the final contents of the buffer.\n if buffer:\n results.append(buffer)\n\n return results", "def collect_webpages(self, keyword: str) -> Dict[str, List[req.Response]]:\n collected = {\n keyword: list(islice(\n takewhile(lambda x: x is not None, self.get_links(keyword)), \n 100\n ))\n }\n print(f\"Found {len(collected[keyword])} articles for the keyword \"\n f\"'{keyword}'.\")\n return collected", "def _get_message_groups(\n self, messages: Iterator[AirbyteMessage], schema_inferrer: SchemaInferrer, limit: int\n ) -> Iterable[Union[StreamReadPages, AirbyteLogMessage]]:\n records_count = 0\n at_least_one_page_in_group = False\n current_page_records = []\n current_slice_pages = []\n current_page_request: Optional[HttpRequest] = None\n current_page_response: Optional[HttpResponse] = None\n\n while records_count < limit and (message := next(messages, None)):\n if self._need_to_close_page(at_least_one_page_in_group, message):\n self._close_page(current_page_request, current_page_response, current_slice_pages, current_page_records)\n current_page_request = None\n current_page_response = None\n\n if at_least_one_page_in_group and message.type == Type.LOG and message.log.message.startswith(\"slice:\"):\n yield StreamReadSlices(pages=current_slice_pages)\n current_slice_pages = []\n at_least_one_page_in_group = False\n elif message.type == Type.LOG and message.log.message.startswith(\"request:\"):\n if not at_least_one_page_in_group:\n at_least_one_page_in_group = True\n current_page_request = self._create_request_from_log_message(message.log)\n elif message.type == Type.LOG and message.log.message.startswith(\"response:\"):\n current_page_response = self._create_response_from_log_message(message.log)\n elif message.type == Type.LOG:\n yield message.log\n elif message.type == Type.RECORD:\n current_page_records.append(message.record.data)\n records_count += 1\n schema_inferrer.accumulate(message.record)\n else:\n self._close_page(current_page_request, current_page_response, current_slice_pages, current_page_records)\n yield StreamReadSlices(pages=current_slice_pages)", "def attachments(self):\n for part in self.email.walk():\n filename = part.get_filename()\n if filename:\n yield {\n 'type': part.get_content_type(),\n 'name': filename,\n 'content': part.get_payload()\n }", "def 
get_emails(print_list, email_dict):\n\n email_list = []\n again = True\n contact_table = PrettyTable()\n contact_table.field_names = [\"Command\", \"Advisor Name\", \"Email\"]\n\n for row in print_list:\n contact_table.add_row(row)\n\n while again:\n print(contact_table)\n pretty_print(email_list, \":\")\n pretty_print(\"To Add Receiving Emails Enter the corresponding command number\", \"-\")\n pretty_print(\"To Send Mail press any number key:\", \"-\")\n choice = get_int_input()\n if choice in email_dict.keys():\n email_list.append(email_dict[choice])\n\n else:\n if len(email_list) != 0:\n again = False\n\n else:\n again = True\n pretty_print(\"No Email Added\", \"-\")\n\n clear()\n\n return email_list", "def _get_all_entities(self, path, result_list_element_name='items',\n items_per_page=100, **params):\n params['limit'] = items_per_page\n response = self.get(path, **params)\n for item in response.get(result_list_element_name, ()):\n yield item\n while 'nextLink' in response and response['nextLink']:\n response = self.get(response['nextLink'], prepend_path=False)\n for item in response.get(result_list_element_name, ()):\n yield item", "def _get_allpages(self, url:str, paramsdict:Dict[str,str]):\n r1 = self._get_dict_from_url(url, paramsdict)\n r = [r1]\n #display(r)\n if 'total_pages' in r1:\n # print('more than one page')\n for next_page in range(2, r1['total_pages']+1):\n # print(f\"load page {next_page} \")\n r.append(self._get_dict_from_url(url, {**paramsdict, 'page':next_page}))\n # print(len(r))\n # print([len(rx['results']) for rx in r])\n results = [entry for rx in r for entry in rx['results'] ]\n\n return results", "def __iter__(self) -> Generator:\n\t\treturn (article for article in self._articles)", "def iteratePageItems(self, page, func=dict):\n\n for item in page.items:\n yield func(**item)\n\n if page.nextPageUrl:\n res = self.getRequest(page.nextPageUrl)\n nextPage = vsdModels.Pagination(**res)\n for nextItem in self.iteratePageItems(nextPage, func=func):\n yield nextItem", "def read_poems(poet, start, end):\r\n\r\n failed = []\r\n\r\n for i in range(start, end + 1):\r\n url = URL + str(i)\r\n try:\r\n info_dict = process_poem(url)\r\n write_file(poet, info_dict)\r\n if info_dict['multipage']:\r\n keep_going = True\r\n pagenum = 2\r\n while keep_going:\r\n try:\r\n tempurl = url + '&lim=20&pageno=' + str(pagenum)\r\n info_dict = process_poem(tempurl)\r\n print('here')\r\n write_file(poet, info_dict)\r\n pagenum = pagenum + 1\r\n except:\r\n keep_going = False\r\n\r\n except:\r\n failed.append(i)\r\n\r\n print('Failed for %d out of %d pages'%( len(failed), end - start + 1 ), failed)", "def __update_page_results(self):\n \n pages = []\n\n # Request id for pages associated to search term \n page_fields='page&fields=id,name,username,link'\n term = self.track[self.track_index]\n self.track_index += 1\n \n # Define url for http request to get pages id associated to search term \n page_request_url = 'https://graph.facebook.com/search?q=%s&type=%s&limit=%d&access_token=%s'%(term,page_fields,self.page_lim,self.access_token)\n \n while(True):\n # Try 100 times\n for i in range(100):\n \n page_response = requests.get(page_request_url)\n \n if 'error' in page_response.json() or page_response.status_code <> 200:\n print \"\\n !---- ERROR IN SEARCH REQUEST ----!\"\n print time.ctime()\n print \"Status Code: \", page_response.status_code\n print page_response.json()\n #raise StopIteration()\n time.sleep(1800) # Wait 30 minutes\n else:\n break\n \n page_json = page_response.json()\n pages 
= pages + page_json['data']\n time.sleep(5)\n \n if 'next' in page_json['paging']:\n page_request_url = page_json['paging']['next']\n else:\n break\n \n print \"Term: %s, Pages: %d\"%(term, len(pages))\n return pages", "def _get_pages(self,url,params,section):\n if self.verbose:\n print('Get Pages for {}'.format(url))\n print(params)\n page = 1\n maxPage = 1\n \n all_results = []\n this_batch = []\n while page <= maxPage: \n \n params['page']=page\n resp = self._get(url=url,params=params)\n maxPage = int(resp.headers.get('X-Total-Page-Count',0))\n try:\n results=resp.json()\n except:\n results=None\n if isinstance(results,(list,dict)):\n if 'errors' in results:\n print(results['errors'])\n return results\n \n this_batch = results[section]\n all_results.extend(this_batch)\n\n page+=1\n else:\n if self.verbose:\n print(\"PROBLEM\")\n return results\n\n return all_results", "def parse_recipients(self, response):\n self.logger.info('Landed on %s', response.url)\n\n table = response.xpath(self.X_TABLE)\n rows = table.xpath(self.X_ROWS)\n\n for row in rows:\n reference = row.xpath(self.X_REFERENCE).extract_first()\n details_url = self.details_url_format % (self.year, reference)\n yield Request(details_url, callback=self.parse_subsidies)", "def get_all_links(driver,site,URL_exclusions):\r\n all_links = []\r\n n = 0\r\n while n <= 10: #Maximum number of pages to scrape is 10.\r\n n += 1\r\n links = get_links_from_one_page(driver,site,URL_exclusions)\r\n for link in links:\r\n all_links.append(link)\r\n try:\r\n next_button = driver.find_element_by_id('pnnext') #Button to go to next page of results\r\n while True:\r\n try:\r\n next_button.click() #Go to next page of results\r\n break\r\n except:\r\n continue\r\n except NoSuchElementException: #when no 'next' button element, we have gone through every results page.\r\n break #end infinite loop\r\n driver.quit()\r\n return all_links", "def get_pages(epObject, fileDict):\r\n homePage = DOMAIN + epObject.ViewLink\r\n soup = make_soup(homePage)\r\n fileDict['pageUrls'].append(homePage)\r\n fileDict['pageFileNames'].append('index.html')\r\n fileDict['pageIds'].append(str(epObject.ObjectId))\r\n for a in soup.find_all('a', {'href': 'javascript://'}):\r\n if a['onclick'].find('GotoPage') > 0:\r\n pageId = get_page_id(str(a['onclick']), str(epObject.ObjectId))\r\n if pageId not in fileDict['pageIds']:\r\n address = homePage + \"&pageId={0}\".format(pageId)\r\n fileName = a.string.replace(' ', '').lower() + \".html\"\r\n fileDict['pageUrls'].append(address)\r\n fileDict['pageFileNames'].append(fileName)\r\n fileDict['pageIds'].append(pageId)\r\n return fileDict", "def image_generator(self, some_messages):\n offset = 0\n outer = 0\n inner = 0\n\n for a_message in some_messages:\n msg_id = a_message.gmail_id\n for att in a_message.attachments():\n if att.type in ATTACHMENT_MIMES:\n att_type = att.type.split(\"/\")[1]\n an_image = Image(a_message, att)\n\n # map each image id with a corresponding message id for later parsing\n if an_image.id in self.mapping:\n self.mapping[msg_id].append(a_message)\n else:\n self.mapping[msg_id] = [a_message]\n\n self.num_attachments = self.count_attachments(self.num_attachments)\n\n yield an_image", "def iterResponsePages(service, payload, verbose, slow_down):\n token = 0\n next_page = True\n data = {'reports': []}\n\n\n while next_page:\n if verbose:\n print(f'Fetching rows starting at position: {token}')\n if slow_down > 0:\n time.sleep(slow_down)\n \n data_tmp = service.reports().batchGet(body=payload).execute()\n token = 
data_tmp.get('reports')[0].get('nextPageToken')\n\n if token != None:\n payload.get('reportRequests')[0].update({'pageToken': token})\n else:\n next_page = False\n payload.get('reportRequests')[0].update({'pageToken': '0'})\n\n for report in data_tmp.get('reports'):\n data.get('reports').append(report)\n\n return data", "def iterator(self):\n yield", "def emails(self):\r\n return UserEmails(self)", "def get_emails(self, search_criteria=None, retries=10):\n\n if not search_criteria:\n search_criteria = dict()\n params = dict()\n if isinstance(search_criteria, str):\n params['search'] = search_criteria\n else:\n params = search_criteria\n params['key'] = self.api_key\n\n emails = None\n\n url = \"%s/mailboxes/%s/emails\" % (self.base_url, self.mailbox)\n response = requests.get(url, params=params)\n response.raise_for_status()\n data = response.json()\n emails = [Email(k) for k in data]\n\n return emails", "def __iter__(self):\n\t\tif self.is_paginated:\n\t\t\treturn self\n\n\t\traise PaginationError(\"The response is not paginated.\")", "def get_users_for(self, email):\r\n # this is a list rather than a generator because we probably want to do a len() on it right away\r\n return [address.user for address in EmailAddress.objects.filter(verified=True, email=email)]", "def start_requests(self):\n NUM_PAGES = 74\n warnings.warn(\n 'ECACCSpider: Num pages is hard-coded!'\n )\n \n URL_TEMPLATE = \"https://www.phe-culturecollections.org.uk/products/celllines/generalcell/browse.jsp?a2z=All&d-49653-p={}\"\n urls = [\n URL_TEMPLATE.format(i) \n for i in range(1, NUM_PAGES+1)\n ]\n for url in urls:\n yield scrapy.Request(\n url=url,\n callback=self.parse_catalog_page\n )", "def iter_row(self):\n yield from self.url.generator.iter_rp", "def get_unread_email_data(gmail_client):\n unread_ids = get_unread_email_ids(gmail_client)\n\n for message_id in unread_ids:\n remove_unread_label = {'removeLabelIds': ['UNREAD']}\n gmail_client.users().messages().modify(userId='me', id=message_id, body=remove_unread_label).execute()\n\n message_data = gmail_client.users().messages().get(userId='me',id=message_id).execute()\n message_payload = message_data['payload']\n has_attachment = 0 < len([part for part in message_payload['parts'] if part['mimeType'] == 'image/jpeg'])\n \n message_headers = message_payload['headers']\n sender = [header['value'] for header in message_headers if header['name'] == 'Return-Path'][0]\n yield sender, has_attachment", "def __call__(self, *paths):\n\n for item in self.site.items:\n if item.is_page() and item.match(*paths):\n yield item", "def enumerate_profiles(inhandle, page):\n html = inhandle.read()\n soup = BeautifulSoup(html, 'html.parser')\n \n urls = [ node.find('a')['href'] for node in soup.findAll('h1', {'class':'entry-title'})]\n return urls", "def get_emails(self, token):\n user_email_url = get_config('login.github.emails_info_url')\n headers = {\n \"Authorization\": \"token %s\" % token\n }\n email_info_resp = get_remote(user_email_url, headers)\n email_list = json.loads(email_info_resp)\n\n return email_list", "def get_data(self):\n has_next_page = True\n page = 1\n while has_next_page:\n print(f'Getting page {page}')\n response = self.get_articles(\n page=page,\n size=200,\n order_by='extracted_at',\n order_type='asc'\n )\n pagination = response.get('pagination')\n has_next_page = pagination.get('has_next')\n self.save_articles(response.get('articles'))\n page += 1\n time.sleep(2.5)", "def __iter__(self) -> Generator[Optional[dict], None, None]:\n data_list, response, result 
= self.retrieve_data(self.url)\n\n if result != GithubApiResult.SUCCESS:\n self.logger.debug(\"Failed to retrieve the data even though 10 attempts were given\")\n yield None\n return\n\n # yield the first page data\n for data in data_list:\n yield data\n\n while 'next' in response.links.keys():\n next_page = response.links['next']['url']\n\n # Here we don't need to pass in params with the page, or the default params because the url from the headers already has those values\n data_list, response, result = self.retrieve_data(next_page)\n\n if result != GithubApiResult.SUCCESS:\n self.logger.debug(\"Failed to retrieve the data even though 10 attempts were given\")\n return\n\n for data in data_list:\n yield data", "def page_query(q):\n\toffset = 0\n\twhile True:\n\t\tr = False\n\t\tfor elem in q.limit(1000).offset(offset):\n\t\t r = True\n\t\t yield elem\n\t\toffset += 1000\n\t\tif not r:\n\t\t\tbreak", "def search_email_advanced(M):\n print \"\\n==============================\"\n print \"Search emails in advanced mode\"\n print \"==============================\\n\"\n\n till_date = 50\n date_range = datetime.date.today() - datetime.timedelta(till_date)\n date = date_range.strftime(\"%d-%b-%Y\")\n # rv, data = M.uid('search', None, \\\n # '(SENTSINCE {date} FROM \"[email protected]\")'.format(date=date))\n rv, data = M.uid(\n 'search',\n None,\n '(SENTSINCE {date} FROM \"[email protected]\")'\n .format(date=date)\n )\n if check_response(rv):\n return data\n else:\n return None", "def parsing_all_page(url):\n html_doc = get_html(url)\n# html_doc = get_html_local()\n page_count = get_html_count(html_doc)\n print 'All have find pages %d' % page_count\n\n projects = []\n\n for page in range(1, page_count + 1):\n print 'Parsing %d%%' % (page*100/page_count)\n\n url = BASE_URL + '?page=%d' % page\n projects.extend(process_page(url))\n\n return projects", "def get_mail_docs_in_bucket():\n if BUCKET_ID not in settings.DOCUMENT_BUCKETS:\n raise ImproperlyConfigured(f'Bucket \"{BUCKET_ID}\" is missing in settings')\n\n config = settings.DOCUMENT_BUCKETS[BUCKET_ID]\n if 'bucket' not in config:\n raise ImproperlyConfigured(f'Bucket \"{BUCKET_ID}\" not configured properly in settings')\n\n name = config['bucket']\n if not name:\n raise ImproperlyConfigured(\n f'Bucket \"{BUCKET_ID}\" bucket value not configured properly in settings',\n )\n\n client = documents.get_s3_client_for_bucket(bucket_id=BUCKET_ID)\n\n paginator = client.get_paginator('list_objects')\n for page in paginator.paginate(Bucket=name):\n for doc in page.get('Contents') or []:\n key = doc['Key']\n with tempfile.TemporaryFile(mode='w+b') as f:\n client.download_fileobj(Bucket=name, Key=key, Fileobj=f)\n f.seek(0)\n content = f.read()\n yield {'source': key, 'content': content}", "async def __anext__(self):\n if self._next_query is None:\n raise StopAsyncIteration()\n page = await self._executor(self._next_query)\n self._next_query = page.next_query\n return page.content", "def _iterate_domains(self):\n\n class DomainIter:\n # Indices refer to positions between the nucleotides, as usual for \n # slices in python.\n\n def __init__(self, domain, cursor, rel_start, rel_end):\n self.domain = domain\n self.start = cursor\n self.rel_start = rel_start\n self.rel_end = rel_end\n\n def __repr__(self):\n return ('DomainIter('\n 'domain={0.domain!r}, '\n 'start={0.start}, '\n 'rel_start={0.rel_start}, '\n 'rel_end={0.rel_end})'.format(self))\n @property\n def len(self):\n return self.rel_end - self.rel_start\n\n @property\n def end(self):\n return 
self.start + self.len\n\n def rel_index(self, index):\n return index - self.start + self.rel_start\n\n def abs_index(self, rel_index):\n return self.start + rel_index - self.rel_start\n\n domain_cursor = 0\n index_cursor = 0\n \n while domain_cursor < len(self._domains):\n domain = self._domains[domain_cursor]\n\n # If this domain doesn't have anything attached to it, then we can \n # just yield the whole thing right away.\n\n if domain not in self._attachments:\n yield DomainIter(domain, index_cursor, 0, len(domain))\n index_cursor += len(domain)\n\n # If this domain does have something attached to it, then we need \n # to carefully yield only the parts of it that aren't covered by \n # the attachment.\n\n else:\n attachment = self._attachments[domain]\n\n # Yield whatever fraction of this domain comes before the \n # attachment.\n\n yield DomainIter(domain,\n index_cursor, 0, attachment.start_index)\n index_cursor += attachment.start_index\n\n # Yield the domains in the attachment itself by recursively \n # calling this method.\n\n for domain_iter in attachment.construct._iterate_domains():\n domain_iter.start += index_cursor\n yield domain_iter\n index_cursor += len(attachment.construct)\n\n # Skip domains until we reach the one where the attachment \n # ends.\n\n while domain is not attachment.end_domain:\n domain_cursor += 1\n domain = self._domains[domain_cursor]\n\n # Yield whatever fraction of that domain comes after the \n # attachment.\n\n yield DomainIter(domain,\n index_cursor, attachment.end_index, len(domain))\n index_cursor += len(domain) - attachment.end_index\n\n domain_cursor += 1", "def links(cls, page):\r\n for match in cls.HREF_RE.finditer(page):\r\n yield cls.href_match_to_url(match)", "def get_accounts_for_emails(cls, emails):\n return cls.get_by_key_name(['<%s>' % email for email in emails])", "def onpage(inp, user, oldest_first):\n yield lex.onpage.working\n total, _ = _members_on_page(1)\n pages = range(1, total + 1)\n if not oldest_first:\n pages = reversed(pages)\n\n for page in pages:\n if user in _members_on_page(page)[1]:\n yield lex.onpage.found(user=user, page=page)\n return\n yield lex.onpage.not_found(user=user)", "def getEmail(self, data):\r\n\t\tprint('test')\r\n\t\t# Empty array to hold unique emails\r\n\t\tno_dp_email = []\r\n\r\n\t\t# Loop through each row in the dataframe...\r\n\t\tfor row in data.itertuples():\r\n\t\t\tprint('test')\r\n\r\n\t\t\t# Parse through the row's keywords string for emails...\r\n\t\t\temails = re.findall(\"[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,4}\", row.keywords)\r\n\t\t\tprint(emails)\r\n\t\t\tprint('test')\r\n\r\n\t\t\t# For each email in the array...\r\n\t\t\tfor email in emails:\r\n\t\t\t\tprint('test')\r\n\r\n\t\t\t\temail = str(email)\r\n\r\n\t\t\t\t# Append this email onto the array if it is not a repeat\r\n\t\t\t\tif email not in no_dp_email:\r\n\t\t\t\t\tprint('test')\r\n\r\n\t\t\t\t\tno_dp_email.append(email)\r\n\t\t\r\n\t\t# return array of unique emails\r\n\t\treturn no_dp_email", "def carve_email(self, payload):\n\n regex = re.compile(b\"\\r\\nDATA\\r\\n(.*?)(?:\\r\\n.\\r\\n|\\Z)\", re.M | re.S)\n matches = re.findall(regex, payload)\n if matches:\n for match in matches:\n yield match\n else:\n yield payload", "def parse_index(self, response):\n items = response.css('.item')\n for item in items:\n href = item.css('.top a::attr(href)').extract_first()\n detail_url = response.urljoin(href)\n logger.info('detail url %s', detail_url)\n yield PyppeteerRequest(detail_url, callback=self.parse_detail, 
wait_for='.item .name')\n \n # next page\n match = re.search(r'page/(\\d+)', response.url)\n if not match: return\n page = int(match.group(1)) + 1\n next_url = f'{self.base_url}/page/{page}'\n yield PyppeteerRequest(next_url, callback=self.parse_index, wait_for='.item .name')", "def get_recipients(item_container):\n if item_container.item.string_1 != '':\n user_folder = get_item_container_by_path(item_container.item.string_1)\n return get_all_users_with_email(user_folder)\n else:\n while not check_userfolder(item_container):\n item_container = item_container.get_parent()\n return get_all_users_with_email(item_container)", "def get_emails(self, is_verified=True, include_primary=True):\n if include_primary:\n emails = self.associated_emails.filter(is_verified=is_verified)\n else:\n emails = self.associated_emails.filter(is_verified=is_verified,\n is_primary_email=False)\n return [ae.email for ae in emails]", "def recipients(self, page=1, page_size=1000, order_field=\"email\", order_direction=\"asc\"):\n params = {\n \"page\": page,\n \"pagesize\": page_size,\n \"orderfield\": order_field,\n \"orderdirection\": order_direction}\n response = self._get(self.uri_for(\"recipients\"), params=params)\n return json_to_py(response)", "def get_emails_by_recipient(self, recipient_email):\n params = dict()\n params['recipient'] = recipient_email\n return self.get_emails(params, 2)", "def user_request_iterator(batch_size):\n\n print('Establishing connection to search API (to collect users)')\n\n for letter in 'abcdefghijklmnopqrstuvwxyz0123456789':\n page = 1\n print('Fetching users with query \"%s\"' % letter)\n while True:\n url = 'http://api.are.na/v2/search/users/'\n payload = {'q':letter, 'page':page, 'per':batch_size}\n\n\n req = requests.get(url, params=payload)\n\n user_json = req.json()\n user_data = user_json['users']\n num_pages = user_json['total_pages']\n\n if req.status_code != 200 or len(user_data) == 0:\n break\n\n print('Writing user data to csv (page %i of %i)' % (page, num_pages))\n page += 1\n\n for user in user_data:\n yield user", "def find_all(self):" ]
[ "0.6516295", "0.64715", "0.6371025", "0.6256334", "0.6235852", "0.61884665", "0.61240774", "0.6112633", "0.6023784", "0.60078543", "0.59824973", "0.59707236", "0.5946463", "0.5928901", "0.5901219", "0.58981884", "0.5887079", "0.5860919", "0.5854434", "0.58382696", "0.5836329", "0.5761337", "0.57366735", "0.572208", "0.57137436", "0.5700502", "0.5698886", "0.5681073", "0.5669405", "0.56630343", "0.5637139", "0.56304485", "0.56198376", "0.5591311", "0.5590721", "0.55457383", "0.5544133", "0.5534655", "0.55332816", "0.55310595", "0.55299217", "0.5494316", "0.5491941", "0.54910743", "0.54846376", "0.54735404", "0.5463632", "0.5463632", "0.5461037", "0.54609716", "0.5429553", "0.54175746", "0.54173136", "0.5411409", "0.5410241", "0.5403607", "0.5401379", "0.5400421", "0.53985023", "0.539845", "0.53808296", "0.5376834", "0.53617126", "0.53457767", "0.53444296", "0.5340078", "0.53356445", "0.53331023", "0.5324361", "0.5323847", "0.5320582", "0.531154", "0.531019", "0.53071094", "0.53052735", "0.5299641", "0.52986926", "0.5298421", "0.5286857", "0.52830654", "0.528047", "0.52788526", "0.5264601", "0.52549577", "0.5253278", "0.52480817", "0.5246483", "0.52439", "0.5239037", "0.52351165", "0.52342427", "0.52294683", "0.5224177", "0.5214183", "0.5212444", "0.52100205", "0.52093786", "0.52015233", "0.5193804", "0.5193489" ]
0.71289283
0
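Several of the negative examples in the record above share one paginated-API pattern: fetch a page, yield its items, then follow a next-page token or Link header until none remains (the Google Analytics `nextPageToken` loop and the GitHub `response.links['next']` walker are both instances of it). The sketch below is not taken from the dataset; it is a minimal, generic version of that pattern built on the `requests` library, assuming each page body is a JSON array. The endpoint URL and `per_page` parameter in the usage comment are illustrative assumptions.

```python
# Minimal sketch (not from the dataset): walk a paginated JSON API by
# following the RFC 5988 Link header, yielding one item at a time.
import requests


def iter_paginated(url, params=None, session=None):
    """Yield items from every page, following the 'next' link relation."""
    session = session or requests.Session()
    while url:
        response = session.get(url, params=params, timeout=30)
        response.raise_for_status()
        # Assumes each page body is a JSON array of items.
        for item in response.json():
            yield item
        # requests parses the Link header into response.links; an empty
        # dict (no 'next' relation) ends the loop.
        url = response.links.get("next", {}).get("url")
        params = None  # the next URL already carries its own query string


# Hypothetical usage:
# for issue in iter_paginated("https://api.github.com/repos/org/repo/issues",
#                             params={"per_page": 100}):
#     print(issue["number"])
```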
Calculate the masked ratio.
def get_masked_ratio(mask): hist = mask.histogram() return hist[0] / np.prod(mask.size)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def maskedFraction(self):\n\n\t\tif not self._masked:\n\t\t\treturn 0.0\n\t\telse:\n\t\t\treturn self._masked_fraction", "def maskedFraction(self):\n\n\t\treturn self._masked_fraction", "def bw_ratio(self):\r\n bw = self.bwstats.mean\r\n if bw == 0.0: return 0\r\n else: return self.bw/(1024.*bw)", "def adv_ratio(self): # XXX\r\n bw = StatsRouter.global_bw_mean\r\n if bw == 0.0: return 0\r\n else: return self.bw/bw", "def medicalMask(self) -> float:\n return self._coreEstimation.maskScore", "def masked_mre_cal(inputs, target, mask):\n return torch.sum(torch.abs(inputs - target) * mask) / (\n torch.sum(torch.abs(target * mask)) + 1e-9\n )", "def correct(self) -> float:\n return self._coreEstimation[DetailedMaskType.CorrectMask]", "def mask_density(mask):\n return get_number_of_unpruned_weights(mask).float() / get_number_of_weights(mask).float()", "def depolarization_ratio(self):\r\n if self._depol_ratio is not None:\r\n return round(self._depol_ratio,3)\r\n else:\r\n return self._depol_ratio", "def mask_percentage(self):\n return 100 - self.tissue_percentage", "def mask_rate(rate, error, maxsig):\n # initialise mask array with existing NaNs\n mask = ~isnan(error)\n # original Nan count\n orig = np.count_nonzero(mask)\n # determine where error is larger than the maximum sigma threshold\n mask[mask] &= error[mask] > maxsig\n # replace values with NaNs\n rate[mask] = nan\n error[mask] = nan\n # calculate percentage of masked pixels\n nummasked = int(np.count_nonzero(mask)/orig*100)\n log.info('Percentage of pixels masked = {}%'.format(nummasked))\n\n return rate, error", "def _ratio(sim: xr.DataArray, ref: xr.DataArray) -> xr.DataArray:\n out = sim / ref\n out.attrs[\"units\"] = \"\"\n return out", "def denominator(self):\n return 1", "def mapd(self) -> float:\n a = np.sum(np.abs(self.predicted - self.true))\n b = np.sum(np.abs(self.true))\n return float(a / b)", "def running_ratio(self) -> np.ndarray:\n result_array = self.result_array\n result = result_array.sum(axis=1) / result_array.sum()\n\n if isinstance(result, np.ndarray):\n result_out = result\n else:\n result_out = np.array(result)\n\n return result_out", "def sharpe_ratio(self, r_f):\n return (\n self.cumulative_returns().last('1D').iat[0] - r_f\n ) / self.cumulative_returns().std()", "def totaled_ratio_calculator(numerator, denominator):\n if denominator != 0:\n ratio = round(float(numerator) / denominator, 3)\n else:\n ratio = 0\n return ratio", "def calcmask(self, *args, **kwargs):\n return _image.image_calcmask(self, *args, **kwargs)", "def mask_sparsity(mask: Mask):\n return 1 - mask_density(mask)", "def ratio(self):\n return float(self.max_width) / self.max_height", "def infected_ratio(self):\n if self.max_pop != 0:\n return int(self.infected_pop) / self.max_pop\n else:\n return 1", "def circle_mask(width, ratio):\n # taken from Paul's code\n mask = np.zeros((width, width), dtype=np.float32)\n center = width // 2\n radius = ratio * center\n y, x = np.ogrid[-center:width - center, -center:width - center]\n mask_check = x * x + y * y <= radius * radius\n mask[mask_check] = 1.0\n return mask", "def Mask(self) -> int:", "def distance_image(self):\n return exclusion_distance(self.mask)", "def fe_ratio(self):\n return self._fe_ratio", "def span_rbw_ratio(self):\r\n res = self._visa.query(f\"SENSE{self._screen()}:BANDWIDTH:RESOLUTION:RATIO?\")\r\n return 1 / float(res)", "def rrint(self):\n if len(self.data.peaks):\n return (np.diff(self.data._masked) / self.data.fs).compressed()", "def f_a(self):\n return 
np.count_nonzero(self.label_mask) / float(self.label_mask.size)", "def ratio_calc(first_strandI, second_strandI):\n if first_strandI + second_strandI != 0:\n Ratio = first_strandI / float(first_strandI + second_strandI)\n return Ratio\n else:\n return np.nan", "def normalization_brain(img, mask):\n zone1 = img[mask != 0]\n imge = img.copy()\n imge[mask != 0] = (zone1 - zone1.min()) / (zone1.max() - zone1.min())\n imge[mask == 0] = 0\n return imge", "def _ratio(a1, a2):\n abs_residues = np.abs(a1 - a2).sum()\n avg_abs_sum = 0.5 * np.abs(a1).sum() + 0.5 * np.abs(a2).sum()\n return abs_residues / avg_abs_sum", "def _compute_raw_image_norm(self):\n return np.sum(self._data, dtype=float)", "def ds_ratio(group):\n nix_count = (group=='nix').sum()\n top_count = (group=='top').sum()\n ratio = nix_count/(nix_count+top_count) #could smooth this\n return ratio", "def exceeded_ratio(self) -> float:\n return self.amount_spent / self.total_amount", "def sum_normed (self):\n return self / self.sum", "def masked_mae_cal(inputs, target, mask):\n return torch.sum(torch.abs(inputs - target) * mask) / (torch.sum(mask) + 1e-9)", "def radial_profile(self,center,binsize,totalsize,pa=0,ratio=1):\n nsteps = int(totalsize/binsize)\n radii = np.zeros(nsteps)\n radial = np.zeros(nsteps)\n eradial = np.zeros(nsteps)\n binpix = binsize/self.xyscale\n megamask = np.zeros(self.size)\n for i in range(nsteps):\n inner,outer = i*binpix,(i+1)*binpix\n if i > 0:\n mask = anullarmask(self.image,center,self.size,inner,outer,pa=pa,ratio=ratio)\n else:\n mask = circmask(self.image,center,self.size,outer,pa=pa,ratio=ratio)\n megamask += mask\n avg = np.average(self.image,weights=mask)\n err = rms_masked(self.image,mask)\n radial[i] = avg\n eradial[i] = err\n radii[i] = (outer+inner)/2.\n\n self.radii = radii*self.xyscale\n self.radial = radial\n self.eradial = np.sqrt(eradial**2+self.noise**2)\n self.megamask = megamask\n return radii*self.xyscale,radial,eradial", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def flux_ratio(self):\n return self._flux_ratio", "def ratio(original, compressed):\n olen = len(original)\n clen = len(compressed)\n return (olen - clen) / olen", "def density(self):\r\n return self.count_ones() / 
float(self.xspan * self.yspan)", "def _compute_raw_image_norm(self):\n xypos = (self._nx / 2.0, self._ny / 2.0)\n # TODO: generalize \"radius\" (ellipse?) is oversampling is\n # different along x/y axes\n radius = self._norm_radius * self.oversampling[0]\n aper = CircularAperture(xypos, r=radius)\n flux, _ = aper.do_photometry(self._data, method='exact')\n return flux[0] / np.prod(self.oversampling)", "def sharpe_ratio(adr,sddr,sf=252,rfr=0.0):\n rfr=((1.0 + rfr) ** (1/sf)) - 1 # Daily risk free return. This is the shortcut to calculate daily (sf=252) risk free return\n return sf**(1.0/2)*(adr-rfr)/sddr", "def normalizing_constant(self):\n\t\tdim = self.train_data.shape[1]\n\t\treturn 1 / (2 * np.pi * ((self.bandwidth) ** 2)) ** (dim / 2)", "def strm_bw_ratio(self):\r\n bw = self.bwstats.mean\r\n if StatsRouter.global_strm_mean == 0.0: return 0\r\n else: return (1.0*bw)/StatsRouter.global_strm_mean", "def make_circle_mask(width, ratio):\n mask = np.zeros((width, width), dtype=np.float32)\n center = width // 2\n radius = ratio * center\n y, x = np.ogrid[-center:width - center, -center:width - center]\n mask_check = x * x + y * y <= radius * radius\n mask[mask_check] = 1.0\n return mask", "def masked_preconditioner(self, x, prec, shapebc='R', ratio=1.0):\n x = x.reshape(self.shape)\n if shapebc == 'R':\n mask_f = equations.make_mask\n mask_f_dual = equations.make_mask_dual\n elif shapebc == 'L':\n mask_f = equations.make_mask_L\n mask_f_dual = equations.make_mask_L_dual\n\n x_masked = np.multiply(x, mask_f(self.shape[0], ratio))\n x_dual = np.multiply(x, mask_f_dual(self.shape[0], ratio))\n x = np.multiply(\n prec(x_masked.ravel()).reshape(self.shape), mask_f(\n self.shape[0], ratio)) + x_dual\n return x.ravel()", "def pixel_size_ratio(self):\n return 2**(self.levels[-1] - self.levels[0])", "def flag_fraction(data):\n occ_f = np.sum(data.mask, axis=0) / float(data.shape[0])\n occ_t = np.sum(data.mask, axis=1) / float(data.shape[1])\n \n bad_f = occ_f > params.max_frac_f\n bad_t = occ_t > params.max_frac_t\n \n data.mask[bad_t, :] = True\n data.mask[:, bad_f] = True\n \n return data.mask", "def full(self) -> float:\n return self._coreEstimation[DetailedMaskType.FullMask]", "def get_scaling_ratio(img):\n\n healthy_img_area = 4872 * 6496\n input_img_area = img.shape[0] * img.shape[1]\n ratio = input_img_area / healthy_img_area\n return ratio", "def radial4(self) -> float:\n return self.distortion_coefficients[1]", "def _compute_pie_fraction(sources, pointing_position, fov_radius):\n sources = _add_column_and_sort_table(sources, pointing_position)\n radius = Angle(sources[\"Radius\"])[0]\n separation = Angle(sources[\"separation\"])[0]\n if separation > fov_radius:\n return 0\n else:\n return (2 * np.arctan(radius / separation) / (2 * np.pi)).value", "def pe_ratio(self):\n if self._pe_ratio == None:\n return float('inf')\n return self._pe_ratio", "def rmspe(self) -> float:\n return float(np.sqrt(np.mean(np.square(((self.true - self.predicted) / self.true)), axis=0)))", "def fractionPassing(self):\n return self.cut.entries / self.entries", "def reciprocal(self):\n return Rational(self.denominator, self.numerator)", "def get_granger_sig_mask(self):\n if not hasattr(self, 'percentile_granger'):\n self.calc_shuffle_threshold()\n if not hasattr(self, 'granger_actual'):\n self.calc_granger_actual()\n mean_granger_actual = np.mean(self.granger_actual, axis=0)\n self.masked_granger = np.ma.masked_where(\n mean_granger_actual < self.percentile_granger, mean_granger_actual)\n self.mask_array = 
np.ma.getmask(self.masked_granger)", "def _reduce(self) -> None:\n divisor = self._gcd(self._numerator, self._denominator)\n self._numerator = self._numerator // divisor\n self._denominator = self._denominator // divisor", "def getMoleFractions(self):\n\t\tnon_negative_amounts = self.amounts.clip(0)\n\t\ttotal_amount = non_negative_amounts.sum()\n\t\tif total_amount > ZERO_CUTOFF:\n\t\t\treturn self.amounts / total_amount\n\t\telse:\n\t\t\treturn self.amounts * 0", "def golden_ratio():\n return 1.61803398875", "def modulation(minima, contrast, distance):\n \n numerator = contrast - minima\n denominator = contrast + minima\n \n return numerator / denominator", "def calculateRatio(levelDims):\n highestReso = np.asarray(levelDims[0])\n lowestReso = np.asarray(levelDims[-1])\n Xratio, Yratio = highestReso/lowestReso\n return (Xratio, Yratio)", "def _blinking_ratio(self, landmarks, points):\n left = (landmarks.part(points[0]).x, landmarks.part(points[0]).y)\n right = (landmarks.part(points[3]).x, landmarks.part(points[3]).y)\n top = self._middle_point(landmarks.part(points[1]), landmarks.part(points[2]))\n bottom = self._middle_point(landmarks.part(points[5]), landmarks.part(points[4]))\n eye_width = math.hypot((left[0] - right[0]), (left[1] - right[1]))\n eye_height = math.hypot((top[0] - bottom[0]), (top[1] - bottom[1]))\n\n try:\n ratio = eye_width / eye_height\n except ZeroDivisionError:\n ratio = None\n\n return ratio", "def calmar_ratio(returns, period=DAILY):\n\n temp_max_dd = max_drawdown(returns=returns)\n if temp_max_dd < 0:\n temp = annual_return(\n returns=returns,\n period=period\n ) / abs(max_drawdown(returns=returns))\n else:\n return np.nan\n\n if np.isinf(temp):\n return np.nan\n\n return temp", "def normalization_mask(img, mask):\n zone1 = img[mask != 0]\n zone2 = img[mask == 0]\n zone1 = (zone1 - zone1.min()) / (zone1.max() - zone1.min())\n zone2 = (zone2 - zone2.min()) / (zone2.max() - zone2.min())\n imge = img.copy()\n imge[mask != 0] = zone1\n imge[mask == 0] = zone2\n return imge", "def norm_mask(self, mask):\n lmax = max(self.labels.values())\n return (mask * (lmax / mask.max())).astype(int)", "def masked_mse_cal(inputs, target, mask):\n return torch.sum(torch.square(inputs - target) * mask) / (torch.sum(mask) + 1e-9)", "def _mask_and_avg(values, padding_mask):\n\tdec_lens = torch.sum(padding_mask,dim=1)\n\tlosses = torch.stack(values, dim=1)\n\tlosses = losses * padding_mask\n\tvalues_per_ex = torch.sum(losses, dim=1)/dec_lens\n\treturn torch.sum(values_per_ex)", "def grey(self):\n return sum((self.value(0), self.value(1), self.value(2)))/3", "def mdape(self) -> float:\n return float(np.median(np.abs(self._percentage_error())) * 100)", "def clean_ratio(sub, total, default=0.0):\n return sub / total if total else default", "def adjusted_distance_reciprocal(distance: float) -> float:\n return 1 / (1 + distance)", "def missing(self) -> float:\n return self._coreEstimation.noMaskScore", "def pulse_width_percent(self) -> float:", "def rmdspe(self) -> float:\n return float(np.sqrt(np.median(np.square(self._percentage_error()))) * 100.0)", "def __weights(self):\n r, c = np.mgrid[:self.size, :self.size] + 0.5\n rad = np.sqrt((r - self.size/2)**2 + (c - self.size/2)**2)\n img = np.zeros((self.size, self.size))\n rmin = np.sqrt(2) * 0.5 * self.damp * rad.max()\n rmax = np.sqrt(2) * 0.5 * rad.max()\n zone = np.logical_and(rad > rmin, rad < rmax)\n img[rad < rmin] = 1.0\n img[rad > rmax] = 0.0\n img[zone] = (rmax - rad[zone]) / (rmax - rmin)\n return img", "def ratio_func(a, b):\n 
return a / b", "def reduce(self):\n import math\n g = math.gcd(self.num, self.den)\n return Fraction(self.num//g, self.den//g)", "def reciprocal(self):\r\n s, e = self.share\r\n s = 0.5*(1/s) # TODO: no normalization for 1/s as 1/2<=abs(s)<=1 (s<0 test still needed)\r\n return type(self)((s, 1-e))", "def pe_ratio(self):\n try:\n return self.price / self.dividend_yield\n except ZeroDivisionError:\n return 0.0", "def mask_evaluation(annotation_mask, result_mask, idx):\n\n true_positive = np.sum(np.logical_and(annotation_mask == 255, result_mask == 255)) \n false_positive = np.sum(np.logical_and(result_mask == 255, annotation_mask != result_mask))\n false_negative = np.sum(np.logical_and(annotation_mask == 255, annotation_mask != result_mask))\n\n precision = true_positive / (true_positive + false_positive)\n recall = true_positive / (true_positive + false_negative)\n f1_measure = 2 * ((precision * recall) / (precision + recall))\n\n return recall, precision, f1_measure", "def global_inhomogeneity(self, mask=np.ones((32, 32), dtype=bool)):\n return np.sum(np.absolute(self.__image[mask] - self.image_median(mask)))/np.sum(self.__image[mask])", "def _limit_by_ratio(self):\n\n if self._ratio_bounds is None:\n return\n\n numerator_col, denominator_col = self._ratio_cols\n min_ratio, max_ratio = sorted(self._ratio_bounds)\n\n overlap_idx = self._hybrid_meta[MERGE_COLUMN].isin(\n self.data.merge_col_overlap_values\n )\n\n numerator_vals = self._hybrid_meta[numerator_col].copy()\n denominator_vals = self._hybrid_meta[denominator_col].copy()\n\n ratios = (\n numerator_vals.loc[overlap_idx]\n / denominator_vals.loc[overlap_idx]\n )\n ratio_too_low = (ratios < min_ratio) & overlap_idx\n ratio_too_high = (ratios > max_ratio) & overlap_idx\n\n numerator_vals.loc[ratio_too_high] = (\n denominator_vals.loc[ratio_too_high].values * max_ratio\n )\n denominator_vals.loc[ratio_too_low] = (\n numerator_vals.loc[ratio_too_low].values / min_ratio\n )\n\n h_num_name = \"hybrid_{}\".format(numerator_col)\n h_denom_name = \"hybrid_{}\".format(denominator_col)\n self._hybrid_meta[h_num_name] = numerator_vals.values\n self._hybrid_meta[h_denom_name] = denominator_vals.values", "def getDurationReciprocal(self):\n return 1/self.duration" ]
[ "0.7183577", "0.6947266", "0.6375616", "0.62425804", "0.6177991", "0.6146051", "0.6002309", "0.5985765", "0.59175396", "0.58450127", "0.5783589", "0.5759599", "0.57562935", "0.56993043", "0.56441855", "0.56413704", "0.5576878", "0.55284727", "0.54961735", "0.5492076", "0.5457063", "0.54535043", "0.5447613", "0.54475504", "0.54434675", "0.54419243", "0.5419726", "0.5415708", "0.54018736", "0.5400798", "0.5386955", "0.5385443", "0.5380829", "0.53793037", "0.5377009", "0.5361214", "0.53592956", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.53568435", "0.5349506", "0.5341618", "0.5340016", "0.5327851", "0.5324222", "0.5316932", "0.53117085", "0.5311232", "0.5304579", "0.53024364", "0.52962565", "0.5285501", "0.5281854", "0.52642596", "0.52535146", "0.52511054", "0.5246217", "0.5245185", "0.52443045", "0.5240466", "0.52399296", "0.52363217", "0.5230673", "0.5230032", "0.52288336", "0.52146953", "0.52107894", "0.5202003", "0.5192192", "0.5185177", "0.5179807", "0.5174851", "0.5169572", "0.5164935", "0.5163482", "0.51603353", "0.5155759", "0.5153878", "0.51470566", "0.5139917", "0.5135665", "0.5133516", "0.5130815", "0.51128125", "0.5106106", "0.5105293", "0.5102967" ]
0.78421235
0
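The record above pairs the query with a PIL-based implementation: `Image.histogram()` returns 256 per-value counts for an 8-bit mask, so `hist[0]` is the number of zero-valued (masked) pixels and `np.prod(mask.size)` is the total pixel count. The sketch below re-expresses the same ratio with numpy alone so that it is self-contained and runnable; it is an illustration of the idea, not the dataset's code.

```python
# Fraction of zero-valued (masked) pixels in a single-channel mask,
# expressed with numpy instead of PIL's Image.histogram().
import numpy as np


def masked_ratio(mask) -> float:
    """Return the fraction of pixels equal to 0 in a 2-D mask array."""
    mask = np.asarray(mask)
    return float(np.count_nonzero(mask == 0)) / mask.size


if __name__ == "__main__":
    demo = np.ones((4, 4), dtype=np.uint8)
    demo[:2, :2] = 0           # mask out a 2x2 corner
    print(masked_ratio(demo))  # 0.25
```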
Retrieve or create a key/value pair in the request extra_context. If the key is found, it is simply returned. If not, and a value has been passed, the key is set and returned.
def get_or_create(key, value=None): if not hasattr(g, key): value = value() if callable(value) else value setattr(g, key, value) return getattr(g, key)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_context(self, extra_ctx=None, **kwargs):\n ctx = {\n 'user': self.user,\n }\n if extra_ctx:\n ctx.update(extra_ctx)\n ctx.update(kwargs)\n return ctx", "def get(self, key, default=None):", "def addToExtra(self,key,val):\n if self.extra == None: \n self.extra = {} \n self.extra[key] = val", "def get_request_with_default(request, key, default_value):\n try:\n return request.GET[key]\n except:\n return default_value", "def get_request_get(request, key):\n return get_request_with_default(request, key, None)", "def get(self, key, default=None):\n try:\n return self.context.get(self.prefix+'.'+key, default)\n except AttributeError:\n return default", "def get_value_from_extras(self, extras, key):\n return [x[\"value\"] for x in extras if x[\"key\"] == key][0]", "def __getitem__(self, key):\n try:\n att = getattr(self, key)\n return att\n except AttributeError:\n # mimic pylons context\n return None", "def get_GET_parameter(req, key, default_value):\n try:\n return req.GET[key]\n except:\n return default_value", "def meta_value(request_object, dictkey):\n \n try:\n val = request_object.META[dictkey]\n except: # Exception as ex:\n val = ''\n return val", "def get(self, key, default=None, from_global=None, as_tuple=False):\n if from_global is None:\n value = self.get(\n key,\n default=None,\n from_global=False,\n as_tuple=as_tuple\n )\n if (isinstance(value, tuple) and value[0] is None) or value is None:\n value = self.get(\n key,\n default=default,\n from_global=True,\n as_tuple=as_tuple\n )\n return value\n elif from_global:\n return self.get_global(key, default, as_tuple)\n else:\n return self.get_local(key, default, as_tuple)", "def get(self, key, default=None):\n pass", "def set_upward(self, key, value):\n context = self.dicts[-1]\n for d in reversed(self.dicts):\n if key in d:\n context = d\n break\n context[key] = value", "def other_service(user, key, default_value=\"\"):\r\n try:\r\n value = OtherServiceInfo.objects.get(user=user, key=key).value\r\n except OtherServiceInfo.DoesNotExist:\r\n value = default_value\r\n return value", "def get_value(self, key):\n pass", "def assert_key_type_value(self,\n context_item,\n caller,\n extra_error_text=''):\n assert context_item, (\"context_item parameter must be specified.\")\n\n if extra_error_text is None or extra_error_text == '':\n append_error_text = ''\n else:\n append_error_text = f' {extra_error_text}'\n\n if not context_item.key_in_context:\n raise KeyNotInContextError(f'{caller} couldn\\'t find '\n f'{context_item.key} in context.'\n f'{append_error_text}')\n\n if not context_item.has_value:\n raise KeyInContextHasNoValueError(\n f'{caller} found {context_item.key} in '\n f'context but it doesn\\'t have a value.'\n f'{append_error_text}')\n\n if not context_item.is_expected_type:\n raise KeyInContextHasNoValueError(\n f'{caller} found {context_item.key} in context, but it\\'s '\n f'not a {context_item.expected_type}.'\n f'{append_error_text}')", "def get(self, key: str, default: Any = None) -> Any:\n return getattr(self, key, self.ctx.get(key, default))", "def _get_from_environ(self, key, default, proxied=False):\n env_key = self._get_key(key, omit_prefix=proxied)\n value = os.environ.get(env_key, default)\n if hasattr(value, \"strip\"):\n match = _PROXIED_PATTERN.match(value)\n if match: # Proxied variable\n proxied_key = match.groups()[0]\n return (\n key,\n self._get_from_environ(proxied_key, default, proxied=True)[1],\n proxied_key,\n )\n return env_key, value, None", "def lookup(my_dict, my_key, default_value=None):\n if 
my_key in my_dict:\n return my_dict[my_key]\n else:\n return default_value", "def get(self, key):", "def get(self, key):", "def get_param(environ, key, default=None):\n\n # this may not work in other applications.\n # in twister, the query string is simple enough\n # that we can get away with it\n environ['QUERY_STRING'].replace(';', '&')\n\n fs = cgi.FieldStorage(environ=environ)\n try:\n return fs[key].value\n except:\n return default", "def get_value(self, query_dict, k): \n if k in query_dict:\n return query_dict[k]\n return ''", "def get(self, key):\n if key in self.fields:\n return self.fields.get(key).get()\n return None", "def get(self, key):\n keystr = str(key)\n res = None\n\n try:\n res = self.ctx[keystr]\n except KeyError:\n for k, v in self.ctx.items():\n if \"name\" in v and v[\"name\"].lower() == keystr.lower():\n res = v\n break\n\n return res", "def get_POST_parameter(req, key, default_value):\n try:\n return req.POST[key]\n except:\n return default_value", "def get(self, key, default=None):\n return self[key] if key in self else default", "def get(self, key, default=None):\n if key in self:\n return self[key]\n return default", "def getKey(kwargs,key,default=None):\n value = kwarg.get(key,default)\n if value==None:\n return default\n return value", "def get_item(obj, key):\n val = None\n if obj and type(obj) == dict:\n val = obj.get(key)\n elif obj and hasattr(obj, key):\n val = getattr(obj, key)\n val = val or ''\n return val", "def _get(self, key: str):\n pass", "def get(self, key, default=None):\n return self.metadata_dict.get(key, default)", "def on_get(self, req, resp, key=None):\n\t\tif not key:\n\t\t\traise falcon.HTTPMethodNotAllowed({'PUT'})\n\n\t\tservice = KeyValueService()\n\t\tkey = service.get_key(key=key)\n\t\tif key:\n\t\t\tresp.body = json.dumps(key.to_dict())\n\t\t\tresp.status = falcon.HTTP_200\n\t\telse:\n\t\t\tresp.status = falcon.HTTP_404", "def add_field(key: str = None, value=None):\n data = {}\n if value is not None:\n data = {key: value}\n return data", "def value(self, key):\n item = self.default(key)\n return self.__getSafeValue(key, item)", "def get(self, key, totype='', default=None, verbose=False) :\n \n # element found ?\n if self.values.has_key(key) :\n # copy value:\n value = self.values[key]\n # convert ?\n if totype == 'bool' :\n # convert to boolean:\n if value in ['T', 'True', 'yes', '1'] :\n value = True\n elif value in ['F', 'False', 'no', '0'] :\n value = False\n else :\n logging.error(\"value of key '%s' is not a boolean : %s\" % (key, str(value)))\n raise Exception\n #endif\n elif len(totype) > 0 :\n # convert to other type ...\n value = eval('%s(%s)' % (totype, value))\n #endif\n # for debugging ...\n if verbose : logging.debug('rc setting \"%s\" : \"%s\"' % (key, str(value)))\n else :\n # default value specified ?\n if default != None :\n # copy default:\n value = default\n # for debugging ...\n if verbose : logging.debug('rc setting \"%s\" : \"%s\" (deault)' % (key, str(value)))\n else :\n # something wrong ...\n logging.error(\"key '%s' not found in '%s' and no default specified\" % (key, self.filename))\n raise Exception\n #endif\n #endif\n \n # ok\n return value", "def get_value(value, key, client):\n if client is None:\n return value.__dict__[key]\n elif \"glance\" in str(client):\n return value[key]\n elif \"cinder\" in str(client):\n return value.__dict__[key]\n elif \"nova\" in str(client):\n return value.__dict__[key]", "def get_if_exist(self, data, key):\n if key in data:\n return data[key]\n return None", "def 
_access_dict(self, d, key):\n try:\n # try to get access to the value by using the key\n value = d[key]\n return value\n except:\n # fail to access the value from the key\n # namely, the feature does not exist in the \n # feature dictionary of a specific apartment\n return None", "def get(self, key, default=None):\n raise NotImplementedError()", "def __get_value(name: str, strict: bool = True, upwards: bool = True, context: th.Optional[ContextType] = None):\n\n var, name = greedy_import_context(name, upwards=upwards) if context is None else (context, name)\n for split in name.split(\".\") if name else []:\n if isinstance(var, dict):\n if split not in var:\n if strict:\n raise KeyError('Invalid key \"%s\"' % name)\n else:\n return None\n var = var[split]\n else:\n if not hasattr(var, split):\n if strict:\n raise AttributeError(\"Invalid attribute %s\" % name)\n else:\n return None\n var = getattr(var, split)\n return var", "def getitem(value, key):\n try:\n return value[key]\n except Exception:\n return \"\"", "def get_value(self, key: str) -> Optional[str]:\n raise NotImplementedError", "def get(self, key):\n pass", "def get(self, key: K)-> Optional[V]:\n return self._func(key)", "def get_value(dct, key):\n return dct.get(key)", "def _f_params(key, default=None):\n from flask import request # pylint: disable=import-outside-toplevel\n\n # query params from simple method\n value = request.values.get(key, default)\n if value is not None:\n return value\n\n # query params from json request\n json_value = getattr(request, \"json\")\n if json_value and isinstance(json_value, dict):\n return json_value.get(key, default)\n\n return default", "def get(self, metakey, default=None):\n if metakey in self:\n return self[metakey]\n return default", "def _get_raw_key(args, key_field_name):\n flag_key = getattr(args, key_field_name, None)\n if flag_key is not None:\n return flag_key\n return _read_key_store_file().get(key_field_name)", "def _get(self, key):\n try:\n val = getattr(self, f\"_{key}\")\n if val is not None:\n return val\n else:\n self._load()\n return getattr(self, f\"_{key}\")\n except AttributeError:\n return None", "def get(self, k, d=None):\n try:\n return self[k]\n except KeyError:\n return d", "def create_raw(self, key, value):\n data = None\n if key is not None and value is not None:\n try:\n data = self.tcex.key_value_store.create(self._context, key.strip(), value)\n except RuntimeError as e:\n self.log.error(e)\n else:\n self.log.warning('The key or value field was None.')\n return data", "def get_object(self, obj, key, val):\n f = {key: value}\n try:\n return obj.objects.get(**f)\n except obj.DoesNotExist:\n return None", "def get(self, key):\n pass", "def update_context(self, ctx):\r\n assert isinstance(ctx, dict)\r\n ctx[str(self.context_id)] = self.value", "def get_value(key, dic, default_dic):\n\n v = dic.get(key)\n\n if v is None:\n if key in default_dic:\n v = default_dic.get(key)\n else:\n print_log_msg(\n 'ERROR', 'get_param', 'key not in default_dic', key\n )\n\n return v", "def parse_tag_key_value(key_value: str, value_required=True) -> Tuple[str, Any]:\n if not key_value:\n raise ValueError(\"key must be specified.\")\n\n if \"=\" not in key_value:\n if value_required:\n raise ValueError(f\"key=value pair expected: '{key_value}'\")\n return (key_value, ANY_VALUE)\n\n key, value = key_value.split(\"=\", 1)\n if not key:\n raise ValueError(f\"key must be specified: '{key_value}'\")\n return (key, parse_tag_value(value))", "def get_value(self, key):\n return self[key]", "def 
_single_getitem(self, key):\n try:\n return self._dict[key]\n except KeyError:\n return self.default", "def get(self, key, default=None):\n def find(found_item, _):\n \"\"\" This is the closer function which will be passed to find by key function , if key found than return the value \n otherwise return blanck\"\"\"\n if found_item:\n return found_item[1]\n else:\n return default\n\n return self._find_by_key(key, find)", "def _extra_context(self):\r\n return {}", "def safe_get(self,section,key,default_value=None):\n try:\n return self.get(section,key)\n except:\n return default_value", "def get(self, key: str, default: Any = None) -> Any:\n return self.attributes.get(key, default)", "def set(self, key, value):\n task = Task.current_task()\n try:\n context = task._context\n except AttributeError:\n task._context = context = {}\n context[key] = value", "def getValue(dictionary, key, value):\n if not key in dictionary.keys():\n return value\n else:\n return dictionary[key]", "def get_key(self, key, default=_MISSING):\n if '.' in key:\n # Nested key of form \"foo.bar\"\n key, remainder = key.split('.', 1)\n if default != _MISSING:\n try:\n value = self[key].get_key(remainder, default)\n except KeyError:\n # subdict exists, but doesn't contain key\n return default\n except AttributeError:\n # key points to non-dict thing, so no get_key attribute\n return default\n else:\n value = self[key].get_key(remainder)\n else:\n # Single, non-nested key of form \"foo\"\n if default != _MISSING:\n return self.get(key, default)\n else:\n return self[key]\n return value", "def get(self, key, alternative=None):\n try:\n return self[key]\n except KeyError:\n return alternative", "def get(self, key: K) -> Optional[V]:\n return self.mget([key])[0]", "def get(self, key, default=None, type=None):\n if key not in self:\n return default\n value = self[key]\n if type is not None:\n value = type(value)\n return value", "def getSpecific(self, keyword, key):", "def get(self, key, alternative=None):\n try:\n return self[key]\n except (KeyError, TypeError, ValueError):\n return alternative", "def get(self, key: str, default: t.Optional[object] = None) -> t.Any:\n try:\n index = self.__keys.index(str(key))\n except ValueError:\n return default\n if 0 <= index < len(self):\n return self._super_getitem_single(index)\n else:\n return default", "def get(self, key, default=None):\n try:\n return self[key]\n except KeyError:\n return default", "def get_value_from_object(obj, key):\n if is_dict(obj):\n return obj.get(key)\n return getattr(obj, key, None)", "def get_key_wrapper(o):\n new_key = get_key(o)\n print 'key_wrapper (%s) -> %s' %(o,new_key)\n return new_key", "def tags_handler(ctx, param, value):\n retval = from_like_context(ctx, param, value)\n if retval is None and value:\n try:\n retval = dict(p.split('=') for p in value)\n except:\n raise click.BadParameter(\n \"'%s' contains a malformed tag.\" % value,\n param=param, param_hint='transform')\n return retval", "def get(self, key: str, default=None):\n value = self._get(key)\n\n if value is None:\n return self._parent.get(key, default) if self._parent else default\n\n return value", "def get(self, key, default=None):\n try:\n return self.__getitem__(key)\n except ValueError:\n if default is not None:\n return default\n else:\n raise", "def get(self, key, key_type=None):\n pass", "def get(self, key, default=None):\r\n try:\r\n return self[key]\r\n except KeyError:\r\n return default", "def get_value(self, key):\n return self[key]['value']", "def context(key, value = Void, 
type = Void):\r\n def is_in_context(client, response, testcase):\r\n # If multiple templates are called, context\r\n # is actually a list of contexts, so we check\r\n # the value in all of them.\r\n if isinstance(response.context, list):\r\n contexts = response.context\r\n else:\r\n contexts = [response.context]\r\n \r\n for context in contexts:\r\n assert key in context\r\n if value is not Void:\r\n testcase.assertEqual(\r\n value, \r\n context[key]\r\n )\r\n if type is not Void:\r\n testcase.assertTrue(\r\n isinstance(\r\n context[key], \r\n type\r\n )\r\n )\r\n return is_in_context", "def get(self, key, default=None):\n result = self._get_raw_input().get(key, default)\n return result[0] if isinstance(result, list) else result", "def _get(self, key, current_node):\n pass", "def get(self, key: Any, default: Optional[Any] = None) -> Any:\n try:\n return self[key]\n except (KeyError, ValueError, IndexError):\n return default", "def get(self, key, default=None):\r\n return self.data.get(IdentityRef(key),default)", "def get_value(key, obj, default=missing):\n if isinstance(key, int):\n return _get_value_for_key(key, obj, default)\n return _get_value_for_keys(key.split('.'), obj, default)", "def get(self, key):\n\t\treturn self.__get(key, key[1:])", "def set_item(self, key, value):\n key, value = str(key), str(value)\n key = self.locate.match_context_key(key)\n replaced = self.selector.get(key, None)\n self.selector[key] = value\n return key, replaced", "def get_one(self, uuid: str) -> t.Optional[KeyValueEntity]: # dead: disable\n if sys.platform == \"win32\":\n uuid = uuid.upper() # pragma: no cover\n val = os.getenv(uuid)\n\n return None if val is None else KeyValueEntity(uuid=uuid, val=val)", "def get_value(self, key: str) -> Any:\r\n if key is None:\r\n return self.data\r\n try:\r\n return self.data[key]\r\n except KeyError:\r\n return None", "def get(self, key, default=None):\n try:\n value = self.list[key]\n except KeyError:\n return default\n else:\n return value", "def get_value(self, request_key):\n\n if request_key in self.key_value_db:\n result = 'The value for <b>%s</b> is <b>%s</b>' % (request_key, self.key_value_db[request_key])\n http_code = 200\n else:\n result = 'The requested key (<b>%s</b>) does not exist' % request_key\n http_code = 404\n\n return result, http_code", "def _get_value(self, main_key, sub_key):\n # pylint: disable=no-else-return\n if main_key in self.json_dict:\n if sub_key in self.json_dict[main_key]:\n return self.json_dict[main_key][sub_key]\n else:\n print(\n 'Sub key \"%s\" not in main key \"%s\"' %\n (sub_key, main_key))\n return None\n else:\n print('Main key \"%s\" not in JSON dict' % main_key)\n return None", "def get(self, key, default=None):\n try:\n return self._get(key)\n except Exception:\n return default", "def get_value(self, key, args, kwargs):\n if self.default is not None:\n try:\n return string.Formatter.get_value(self, key, args, kwargs)\n except KeyError:\n return self.default\n else:\n return string.Formatter.get_value(self, key, args, kwargs)", "def get(self, key, default=''):\n key = self.optionxform(key)\n cached = self._cache.get(key, _use_default)\n if cached is not _use_default:\n return cached\n name_str = self.name\n key_str = to_unicode(key)\n settings = ProductSetting.select(self.env,\n where={'product': self.product,\n 'section': name_str,\n 'option': key_str})\n if len(settings) > 0:\n value = settings[0].value\n else:\n for parent in self.config.parents:\n value = parent[self.name].get(key, _use_default)\n if value is not 
_use_default:\n break\n else:\n if default is not _use_default:\n option = Option.registry.get((self.name, key))\n value = option.default if option else _use_default\n else:\n value = _use_default\n if value is _use_default:\n return default\n if not value:\n value = u''\n elif isinstance(value, basestring):\n value = to_unicode(value)\n self._cache[key] = value\n return value", "def set_value(self, request_key, request_value):\n\n self.key_value_db[request_key] = request_value\n response = 'Stored the value <b>%s</b> for the key <b>%s</b>' % (request_value, request_key)\n http_code = 200\n\n return response, http_code", "def get_or_call(self, key, callback, ttl=None):\n if self.contains(key):\n res = self[key]\n else:\n res = callback()\n self.set(key, res, ttl=ttl)\n return res", "def test_create_contextual(self, extra_in, extra_out):\n kwargs = dict(\n type=int,\n metadata={'meta': 'data'},\n )\n fparam = FParameter.create_contextual(**kwargs, **extra_in)\n assert isinstance(fparam, FParameter)\n assert immutable.asdict(fparam) == \\\n {**FPARAM_CTX_DEFAULTS, **kwargs, **extra_out}" ]
[ "0.57630116", "0.5627676", "0.5543135", "0.55403066", "0.5527593", "0.5498205", "0.54873115", "0.5451134", "0.54109406", "0.5372317", "0.5307185", "0.525926", "0.5251889", "0.5233425", "0.5200257", "0.51947916", "0.51554173", "0.51229984", "0.508491", "0.50824934", "0.50824934", "0.5060603", "0.5038417", "0.5031132", "0.5026163", "0.5020251", "0.5011697", "0.50041884", "0.49933738", "0.49855122", "0.498496", "0.4984641", "0.49578995", "0.49523506", "0.49276188", "0.49143627", "0.49137446", "0.4909089", "0.49025765", "0.48959827", "0.488886", "0.4885787", "0.4880938", "0.4870742", "0.48693287", "0.48572028", "0.48503444", "0.48494443", "0.483493", "0.4825403", "0.48220783", "0.48096916", "0.48065788", "0.48042193", "0.4801542", "0.48004204", "0.47957906", "0.47931877", "0.47894382", "0.478358", "0.47795442", "0.47735927", "0.47699347", "0.47526708", "0.475086", "0.47506547", "0.4744491", "0.47419843", "0.4741315", "0.4736493", "0.47353882", "0.47338015", "0.47258028", "0.47182152", "0.47157905", "0.47124964", "0.47101387", "0.47087714", "0.4706577", "0.4705821", "0.47051868", "0.47004938", "0.46888587", "0.46886405", "0.46875918", "0.468292", "0.46797344", "0.46741956", "0.46723416", "0.46685505", "0.46645817", "0.4659615", "0.4653339", "0.46475238", "0.4646358", "0.46431574", "0.4639582", "0.46377203", "0.46373424", "0.4635976" ]
0.5090194
18
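The document in the record above lazily caches a value on a request-global object `g`, invoking the factory only when the attribute is missing. The sketch below assumes `g` is Flask's application/request-context globals (the record itself does not name the framework) and shows the create-once, reuse-afterwards behaviour; the key name and the lambda factory are illustrative assumptions.

```python
# Lazy get-or-create on Flask's context globals: the factory callable runs
# only on the first access for a given key; later calls reuse the cached
# attribute for the lifetime of the context.
from flask import Flask, g

app = Flask(__name__)


def get_or_create(key, value=None):
    """Return g.<key>, creating it from `value` (or value()) on first use."""
    if not hasattr(g, key):
        resolved = value() if callable(value) else value
        setattr(g, key, resolved)
    return getattr(g, key)


with app.app_context():
    print(get_or_create("answer", lambda: 21 * 2))  # 42 (factory invoked)
    print(get_or_create("answer"))                  # 42 (cached, no factory)
```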
Create a dictionary with domain architectures exclusive to a single pathogen type group.
def generateArchitectureDataStructure(db, collapse_pathogen_groups=False): # Calculate total numbers of species and strains for each pathogen group counts_species_pathogen_dict = defaultdict(lambda: defaultdict(int)) for row in db.getNumSpeciesPathogen(): counts_species_pathogen_dict[row['pathogen_type']]['num_species'] = row['num_species'] counts_species_pathogen_dict[row['pathogen_type']]['num_strains'] = row['num_strains'] architecture_pathogen_dict = defaultdict(list) arch_strains_species_dict = defaultdict(lambda: defaultdict(list)) for row in db.getArchitecturePathogenTypeIterator(): strains = row['species'] species = str(strains).split(' (')[0] pathogen_type = row['pathogen_type'] architecture_id = row['architecture'] architecture_acc = row['architecture_acc'] architecture_pathogen_dict[(architecture_id, architecture_acc)].append(pathogen_type) arch_strains_species_dict[(architecture_id, architecture_acc)]['species'].append(species) arch_strains_species_dict[(architecture_id, architecture_acc)]['strains'].append(strains) for architecture in architecture_pathogen_dict.keys(): # If an architecture is only present in proteins of a certain pathogen_type, # it should have only 1 pathogen_type pathogen_groups_set = set(architecture_pathogen_dict[architecture]) if not exclusive_arch(pathogen_groups_set, collapse_pathogen_groups): architecture_pathogen_dict.pop(architecture) arch_strains_species_dict.pop(architecture) else: # Check if the architecture is present in all species and strains species_set = set(arch_strains_species_dict[architecture]['species']) strains_set = set(arch_strains_species_dict[architecture]['strains']) total_num_species, total_num_strains = get_number_ssp_stt_members(counts_species_pathogen_dict, pathogen_groups_set, collapse_pathogen_groups) arch_strains_species_dict[architecture]['total_num_species'] = total_num_species arch_strains_species_dict[architecture]['total_num_strains'] = total_num_strains if total_num_species == len(species_set): arch_strains_species_dict[architecture]['all_species'] if total_num_strains == len(strains_set): arch_strains_species_dict[architecture]['all_strains'] return architecture_pathogen_dict, arch_strains_species_dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def environments_of(groups):\n types = {}\n for group in groups:\n for env in group.environments:\n et = env.environmentType\n envs = types.setdefault((et.id, et.name), set())\n envs.add((env.id, env.name))\n return types", "def build_groupings(idir: str) -> dict:\n bkg_group = {key: [ifile for ifile in glob(f'{idir}/*_{key}_*.root')] for key in bkgs}\n pw_group = {key: [ifile for ifile in glob(f'{idir}/{key}*.root')] for key in powhegs}\n wh_pw_group = [ifile for name in wh_powhegs for ifile in glob(f'{idir}/{name}*.root')]\n ungrouped = [ifile for ifile in glob(f'{idir}/*.root') if 'madgraph' in ifile or 'JHU' in ifile]\n\n group = {}\n for key, files in bkg_group.items():\n if len(files) > 0:\n group[key] = files\n\n for key, files in pw_group.items():\n if len(files) > 0:\n group[key] = files\n\n for ifile in ungrouped:\n name = ifile.split('/')[-1].replace('.root', '')\n name = name.split('_SYST')[0].replace('-', '_')\n name = name.replace('_ggH125', '').replace('_VBF125', '').replace('_WH125', '').replace('_ZH125', '')\n group[name] = [ifile]\n\n if len(wh_pw_group) > 0:\n group['wh125_powheg'] = wh_pw_group\n\n return group", "def typeMapping(self):\n statemachines = self.package.getStateMachines()\n classes = {}\n for sm in statemachines:\n workflowId = sm.getCleanName()\n for klass in sm.getClasses():\n # We allow to bound a workflow to a <<stub>>\n if klass.isabstract:\n continue\n elif not self.atgenerator._isContentClass(klass) and \\\n not klass.hasStereoType(self.atgenerator.stub_stereotypes):\n continue\n name = klass.getTaggedValue('portal_type') or \\\n klass.getCleanName()\n classes.setdefault(name, []).append(workflowId)\n\n classNames = classes.keys()\n classNames.sort()\n result = []\n for id_ in classNames:\n item = {}\n item['id'] = id_ # portal type\n item['workflowIds'] = classes[id_]\n result.append(item)\n\n # no need to check use_workflow, it's already done by xmiparser.XMIModel.associateClassesToStateMachines,\n # so the sm.getClasses() already returns classes which uses use_workflow tgv.\n # if you uncomment thoses lines, you will have the bound-workflow twice\n #handle the use_workflow tgvs\n #for klass in self.package.getProduct().getClasses(recursive=True):\n # if klass.hasTaggedValue('use_workflow'):\n # result.append(dict(id=klass.getCleanName(),workflowId=klass.getTaggedValue('use_workflow')))\n # remember special case\n remembertypes = []\n self.atgenerator.getRememberTypes(remembertypes, self.package)\n for remembertype in remembertypes:\n existent = False\n for type in result:\n if type['id'] == remembertype['portal_type']:\n existent = True\n if existent:\n continue\n additionaltype = dict()\n additionaltype['id'] = remembertype['portal_type']\n additionaltype['workflowIds'] = [remembertype['workflow']]\n result.append(additionaltype)\n\n # take tgv on state machine itself into account\n for sm in statemachines:\n bindings = sm.getTaggedValue('bindings', '')\n bindings = [b.strip() for b in bindings.split(', ') if b.strip()]\n for binding in bindings:\n item = {}\n item['id'] = binding\n item['workflowIds'] = [sm.getCleanName()]\n result.append(item)\n\n return result", "def _get_type_mapping():\n return {\n Box.SPACE_NAME: Box,\n Dict.SPACE_NAME: Dict,\n Discrete.SPACE_NAME: Discrete\n }", "def _part_group_cell_mapper(bd_type):\n js, iss = np.meshgrid(range(smt.cols), range(smt.rows)) # zero indexed to agree with python interpretation\n idx = bd_type.flatten() != -1\n out = dict(zip(range(1, idx.sum() + 1), list(zip(iss.flatten()[idx], 
js.flatten()[idx]))))\n return out", "def build_doctype_map(self):\n\t\tself.doctype_map = {}\n\n\t\tactive_domains = frappe.get_active_domains()\n\t\tall_doctypes = frappe.get_all(\n\t\t\t\"DocType\",\n\t\t\tfields=[\n\t\t\t\t\"name\",\n\t\t\t\t\"in_create\",\n\t\t\t\t\"module\",\n\t\t\t\t\"istable\",\n\t\t\t\t\"issingle\",\n\t\t\t\t\"read_only\",\n\t\t\t\t\"restrict_to_domain\",\n\t\t\t],\n\t\t)\n\n\t\tfor dt in all_doctypes:\n\t\t\tif not dt.restrict_to_domain or (dt.restrict_to_domain in active_domains):\n\t\t\t\tself.doctype_map[dt[\"name\"]] = dt", "def get_info_dict(\n X: AnyStr,\n Y: AnyStr,\n ecod_df: pd.DataFrame = ecod_df,\n group_df: pd.DataFrame = group_df,\n) -> Dict:\n X = int(X)\n Y = int(Y)\n dom1, dom2 = get_proper_domains_id(X, Y)\n if dom1 is None:\n return None\n info_dict = {\"X\": X, \"Y\": Y}\n info_dict.update({\"domain1\": dom1, \"domain2\": dom2, \"swapFlag\": (X > Y)})\n return info_dict", "def build_network_definition(rsn_oms):\n if log.isEnabledFor(logging.DEBUG):\n log.debug(\"build_network_definition. rsn_oms class: %s\",\n rsn_oms.__class__.__name__)\n\n # platform types:\n platform_types = rsn_oms.config.get_platform_types()\n if log.isEnabledFor(logging.DEBUG):\n log.debug(\"got platform_types %s\", str(platform_types))\n\n # platform map:\n map = rsn_oms.config.get_platform_map()\n if log.isEnabledFor(logging.DEBUG):\n log.debug(\"got platform map %s\", str(map))\n\n # build topology:\n pnodes = NetworkUtil.create_node_network(map)\n dummy_root = pnodes['']\n root_pnode = pnodes[dummy_root.subplatforms.keys()[0]]\n if log.isEnabledFor(logging.DEBUG):\n log.debug(\"topology's root platform_id=%r\", root_pnode.platform_id)\n\n # now, populate the attributes and ports for the platforms\n\n def build_attributes_and_ports(pnode):\n \"\"\"\n Recursive routine to call set_attributes and set_ports on each pnode.\n \"\"\"\n set_attributes(pnode)\n set_ports(pnode)\n\n for sub_platform_id, sub_pnode in pnode.subplatforms.iteritems():\n build_attributes_and_ports(sub_pnode)\n\n def set_attributes(pnode):\n platform_id = pnode.platform_id\n attr_infos = rsn_oms.attr.get_platform_attributes(platform_id)\n if not isinstance(attr_infos, dict):\n raise PlatformDriverException(\n \"%r: get_platform_attributes returned: %s\" % (\n platform_id, attr_infos))\n\n if log.isEnabledFor(logging.TRACE):\n log.trace(\"%r: attr_infos: %s\", platform_id, attr_infos)\n\n if not platform_id in attr_infos:\n raise PlatformDriverException(\n \"%r: get_platform_attributes response does not \"\n \"include entry for platform_id: %s\" %(\n platform_id, attr_infos))\n\n ret_infos = attr_infos[platform_id]\n for attrName, attr_defn in ret_infos.iteritems():\n attr = AttrNode(attrName, attr_defn)\n pnode.add_attribute(attr)\n\n def set_ports(pnode):\n platform_id = pnode.platform_id\n port_infos = rsn_oms.port.get_platform_ports(platform_id)\n if not isinstance(port_infos, dict):\n raise PlatformDriverException(\n \"%r: get_platform_ports response is not a dict: %s\" % (\n platform_id, port_infos))\n\n if log.isEnabledFor(logging.TRACE):\n log.trace(\"%r: port_infos: %s\", platform_id, port_infos)\n\n if not platform_id in port_infos:\n raise PlatformDriverException(\n \"%r: get_platform_ports response does not include \"\n \"platform_id: %s\" % (platform_id, port_infos))\n\n ports = port_infos[platform_id]\n\n if not isinstance(ports, dict):\n raise PlatformDriverException(\n \"%r: get_platform_ports: entry for platform_id is \"\n \"not a dict: %s\" % (platform_id, ports))\n\n for port_id, 
dic in ports.iteritems():\n port = PortNode(port_id, dic['network'])\n port.set_state(dic['state'])\n pnode.add_port(port)\n\n # add connected instruments:\n instrs_res = rsn_oms.instr.get_connected_instruments(platform_id, port_id)\n if not isinstance(instrs_res, dict):\n log.warn(\"%r: port_id=%r: get_connected_instruments \"\n \"response is not a dict: %s\" % (platform_id, port_id, instrs_res))\n continue\n\n if log.isEnabledFor(logging.TRACE):\n log.trace(\"%r: port_id=%r: get_connected_instruments \"\n \"returned: %s\" % (platform_id, port_id, instrs_res))\n\n if not platform_id in instrs_res:\n raise PlatformDriverException(\n \"%r: port_id=%r: get_connected_instruments response\"\n \"does not have entry for platform_id: %s\" % (\n platform_id, ports))\n\n if not port_id in instrs_res[platform_id]:\n raise PlatformDriverException(\n \"%r: port_id=%r: get_connected_instruments response \"\n \"for platform_id does not have entry for port_id: %s\" % (\n platform_id, port_id, instrs_res[platform_id]))\n\n instr = instrs_res[platform_id][port_id]\n for instrument_id, attrs in instr.iteritems():\n port.add_instrument(InstrumentNode(instrument_id, attrs))\n\n # call the recursive routine\n build_attributes_and_ports(root_pnode)\n\n # we got our whole network including platform attributes and ports.\n\n # and finally create and return NetworkDefinition:\n ndef = NetworkDefinition()\n ndef._platform_types = platform_types\n ndef._pnodes = pnodes\n ndef._dummy_root = dummy_root\n return ndef", "def get_domains(graph: Graph, property_to_id: Dict[str, int], entity_type_to_id: Dict[str, int]) -> Dict[int, int]:\n # dictionary pointing from object property id to an entity type id\n domains = {}\n\n # add all domain triples for which the subject is an object property and the object is an entity type\n for subject, predicate, object in graph.triples((None, RDFS.domain, None)):\n if subject in property_to_id and object in entity_type_to_id:\n domains[property_to_id[subject]] = entity_type_to_id[object]\n\n return domains", "def _identify_media(self):\n\n mediapaths = {k: v['medium'] for k, v in self.labels.items() if v.get('medium') is not None}\n\n media_dict = {}\n for label, path in mediapaths.items():\n if path.lower() == 'air':\n media_dict[label] = Air()\n else:\n media_dict[label] = from_yaml(path)\n return media_dict", "def create_package_dict(self):\n dep_node = list()\n param_list = ['name', 'version', 'dir', 'description']\n inp_list = list()\n dep_node_list = list()\n pkg_dict = dict()\n for line in self.full_ed_lines:\n inp_list.append(line.text())\n dep_pkg = inp_list[6].split(', ')\n if dep_pkg[len(dep_pkg) - 1] == '':\n dep_pkg.pop()\n for dep in self.manager.wid.sub_list:\n dep_node.append(dep['msg_type'])\n for dep in self.manager.wid.pub_list:\n dep_node.append(dep['msg_type'])\n for dep in dep_node:\n msg, msg_type = dep.split('/')\n dep_node_list.append({'name': msg, 'type': msg_type})\n for param, value in zip(param_list, inp_list):\n pkg_dict[param] = value\n pkg_dict['maintainer'] = {'name': inp_list[4], 'email': inp_list[5]}\n pkg_dict['depend'] = dep_pkg\n pkg_dict['node'] = dict()\n pkg_dict['node']['name'] = inp_list[7]\n pkg_dict['node']['depend'] = dep_node_list\n pkg_dict['node']['subscribers'] = self.manager.wid.sub_list\n pkg_dict['node']['publishers'] = self.manager.wid.pub_list\n return pkg_dict", "def _get_entity_mappings(query_list: ProcessedQueryList) -> Dict:\n entity_labels = set()\n logger.info(\"Generating Entity Labels...\")\n for d, i, entities in zip(\n 
query_list.domains(), query_list.intents(), query_list.entities()\n ):\n if len(entities):\n for entity in entities:\n e = str(entity.entity.type)\n entity_labels.add(f\"{d}.{i}.B|{e}\")\n entity_labels.add(f\"{d}.{i}.I|{e}\")\n entity_labels.add(f\"{d}.{i}.S|{e}\")\n entity_labels.add(f\"{d}.{i}.E|{e}\")\n\n e = \"O|\"\n entity_labels.add(f\"{d}.{i}.{e}\")\n\n entity_labels = sorted(list(entity_labels))\n return dict(zip(entity_labels, range(len(entity_labels))))", "def metro_phil_to_basis_dict(metro):\n for o in metro.objects:\n if o.is_scope:\n #one of the subkeys of the root object will be the detector phil. it will be the only one not extracted.\n detector_phil = o.extract()\n break\n #metro = metro.extract() # not needed\n\n bd = {(detector_phil.serial,): basis(matrix.col(detector_phil.orientation),\n matrix.col(detector_phil.translation)*1000) }\n for p in detector_phil.panel:\n bd[(detector_phil.serial,p.serial)] = basis(matrix.col(p.orientation),\n matrix.col(p.translation)*1000)\n for s in p.sensor:\n bd[(detector_phil.serial,p.serial,s.serial)] = basis(matrix.col(s.orientation),\n matrix.col(s.translation)*1000)\n for a in s.asic:\n bd[(detector_phil.serial,p.serial,s.serial,a.serial)] = basis(matrix.col(a.orientation),\n matrix.col(a.translation)*1000)\n\n return bd", "def save_network_architecture(self,network_path):\n net_architecture = {}\n net_architecture['y_res'] = self.y_res\n net_architecture['x_res'] = self.x_res\n net_architecture['n_input_channels'] = self.n_input_channels\n net_architecture['n_output_classes'] = self.n_output_classes\n net_architecture['fc1_n_chan'] = self.fc1_n_chan\n net_architecture['fc1_dropout'] = self.fc1_dropout\n net_architecture['alpha'] = self.alpha\n net_architecture['n_samples_trained'] = self.n_samples_trained\n net_architecture['n_class_samples_trained'] = self.n_class_samples_trained\n net_architecture['n_samples_list'] = self.n_samples_list\n net_architecture['n_class_samples_list'] = self.n_class_samples_list\n net_architecture['accuracy_list'] = self.accuracy_list\n net_architecture['precision_list'] = self.precision_list\n net_architecture['recall_list'] = self.recall_list\n net_architecture['F1_list'] = self.F1_list\n np.save(os.path.join( \\\n network_path,'net_architecture.npy'), net_architecture)\n self.log(\"Network architecture saved to file:\\n{}\".format(\n os.path.join(network_path,'net_architecture.npy')))", "def save_network_architecture(self,network_path):\n net_architecture = {}\n net_architecture['y_res'] = self.y_res\n net_architecture['x_res'] = self.x_res\n net_architecture['n_input_channels'] = self.n_input_channels\n net_architecture['n_output_classes'] = self.n_output_classes\n net_architecture['conv1_size'] = self.conv1_size\n net_architecture['conv1_n_chan'] = self.conv1_n_chan\n net_architecture['conv1_n_pool'] = self.conv1_n_pool\n net_architecture['conv2_size'] = self.conv2_size\n net_architecture['conv2_n_chan'] = self.conv2_n_chan\n net_architecture['conv2_n_pool'] = self.conv2_n_pool\n net_architecture['fc1_n_chan'] = self.fc1_n_chan\n net_architecture['fc1_dropout'] = self.fc1_dropout\n net_architecture['alpha'] = self.alpha\n net_architecture['n_samples_trained'] = self.n_samples_trained\n net_architecture['n_class_samples_trained'] = self.n_class_samples_trained\n net_architecture['n_samples_list'] = self.n_samples_list\n net_architecture['n_class_samples_list'] = self.n_class_samples_list\n net_architecture['accuracy_list'] = self.accuracy_list\n net_architecture['precision_list'] = 
self.precision_list\n net_architecture['recall_list'] = self.recall_list\n net_architecture['F1_list'] = self.F1_list\n np.save(os.path.join( \\\n network_path,'net_architecture.npy'), net_architecture)\n self.log(\"Network architecture saved to file:\\n{}\".format(\n os.path.join(network_path,'net_architecture.npy')))", "def dict_of_domains(fc):\r\n # need to find root database (GDB or SDE)\r\n db_root = os.path.dirname(fc)\r\n while db_root[-4:].lower() != '.gdb' and db_root[-4:].lower() != '.sde':\r\n old_db_root = db_root # protect against infinite loop\r\n db_root = os.path.dirname(db_root)\r\n if old_db_root == db_root: # protect against infinite loop\r\n break\r\n arcpy.AddMessage(\"Retrieving Domains from \" + str(db_root))\r\n return {domain.name: domain.codedValues for domain in arcpy.da.ListDomains(db_root)}", "def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n\n return {\n \"product\": [\n self.from_entity(entity=\"product\", intent=[\"inform\"]),\n ],\n \"applicant_name\": [\n self.from_entity(entity=\"applicant_name\", intent=[\"inform\"]),\n ],\n \"applicant_dob\": [\n self.from_entity(entity=\"applicant_dob\", intent=[\"inform\"]),\n ],\n \"applicant_phoneno\": [\n self.from_entity(entity=\"applicant_phoneno\", intent=[\"inform\"]),\n ],\n \"applicant_address\": [\n self.from_entity(entity=\"applicant_address\", intent=[\"inform\"]),\n ]\n }", "def save_network_architecture(self,network_path):\n net_architecture = {}\n net_architecture['y_res'] = self.y_res\n net_architecture['x_res'] = self.x_res\n net_architecture['n_input_channels'] = self.n_input_channels\n net_architecture['n_output_classes'] = self.n_output_classes\n net_architecture['fc1_dropout'] = self.fc1_dropout\n net_architecture['alpha'] = self.alpha\n net_architecture['n_samples_trained'] = self.n_samples_trained\n net_architecture['n_class_samples_trained'] = self.n_class_samples_trained\n net_architecture['n_samples_list'] = self.n_samples_list\n net_architecture['n_class_samples_list'] = self.n_class_samples_list\n net_architecture['accuracy_list'] = self.accuracy_list\n net_architecture['precision_list'] = self.precision_list\n net_architecture['recall_list'] = self.recall_list\n net_architecture['F1_list'] = self.F1_list\n np.save(os.path.join( \\\n network_path,'net_architecture.npy'), net_architecture)\n self.log(\"Network architecture saved to file:\\n{}\".format(\n os.path.join(network_path,'net_architecture.npy')))", "def detect_domains (nffg):\n return {infra.domain for infra in nffg.infras}", "def structure_to_dict(structure):\n from aiida.common.exceptions import InputValidationError\n\n for kind in structure.kinds:\n if kind.is_alloy():\n raise InputValidationError(\n \"Kind '{}' is an alloy. This is not allowed for CRYSTAL input structures.\"\n \"\".format(kind.name))\n if kind.has_vacancies():\n raise InputValidationError(\n \"Kind '{}' has vacancies. 
This is not allowed for CRYSTAL input structures.\"\n \"\".format(kind.name))\n\n kindname_symbol_map = {\n kind.name: kind.symbols[0]\n for kind in structure.kinds\n }\n kindname_id_map = {kind.name: i for i, kind in enumerate(structure.kinds)}\n id_kind_map = {i: kind for i, kind in enumerate(structure.kinds)}\n kind_names = [site.kind_name for site in structure.sites]\n symbols = [kindname_symbol_map[name] for name in kind_names]\n equivalent = [kindname_id_map[name] for name in kind_names]\n kinds = [id_kind_map[e] for e in equivalent]\n\n sdata = {\n \"lattice\": structure.cell,\n \"atomic_numbers\": [ATOMIC_SYMBOL2NUM[sym] for sym in symbols],\n \"ccoords\": [site.position for site in structure.sites],\n \"pbc\": structure.pbc,\n \"equivalent\": equivalent,\n \"kinds\": kinds,\n }\n\n return sdata", "def known_domain_data(known_uid, known_verbose_name, known_os_type):\n return {\n 'id': known_uid,\n 'verbose_name': known_verbose_name,\n 'os_type': known_os_type\n }", "def by_type(environments):\n types = {}\n for env in environments:\n et = env.environmentType\n options = types.setdefault(et.id, set())\n options.add(env.id)\n return types", "def _init_group_dicts(self):\n\n all_groups = set()\n\n for detection in config['detections'].values():\n if 'action' in detection and detection['action'] == 'buy':\n if 'groups' in detection:\n for group in detection['groups']:\n all_groups.add(group)\n\n for group in all_groups:\n self.trade_sizes[group] = config['trade_min_size']\n self.trade_proceeds[group] = {}\n\n self.trade_sizes['default'] = config['trade_min_size']\n self.trade_proceeds['default'] = {}", "def createDict( self ):\n d = {}\n devTup = ( 'endcap', 'comp', 'shutter','397intensity' )\n for dev in devTup:\n d[dev] = {'devChannels':{}}\n endcap = ( ( 1, 1 ), ( 2, 0 ) )\n comp = ( ( 1, 4 ), ( 2, 2 ), ( 'common', 3 ) )\n shutter = ( ( 1, 5 ), ( 2, 6 ), ( 3, 7 ) )\n intensity397 = (('397intensity',8),)\n chanTup = ( endcap, comp, shutter ,intensity397 )\n for dev, value in zip( devTup, chanTup ):\n for chanPair in value:\n d[dev]['devChannels'][chanPair[0]] = {'value':None, 'channel':chanPair[1]}\n ecRange = ( 0.0, 40.0 )\n compRange = ( -40.0, 40.0 )\n shutterRange = ( 0.0, 5.0 )\n intensity397Range = (0.0,2500.0)\n rangeTup = ( ecRange, compRange, shutterRange, intensity397Range )\n for dev, value in zip( devTup, rangeTup ): d[dev]['range'] = value\n self.dcDict = d", "def get_type_dag(graph: Graph, entity_type_to_id: Dict[str, int]) -> Dict[int, DAGNode]:\n # dictionary pointing from entity type id to the corresponding node in the entity type DAG\n entity_type_dag = {}\n\n # extract equivalence class relation\n equivalent_classes = {}\n for subject, predicate, object in graph.triples((None, OWL.equivalentClass, None)):\n equivalent_classes[subject] = object\n equivalent_classes[object] = subject\n\n # iterate over class hierarchy\n for subject, predicate, object in graph.triples((None, RDFS.subClassOf, None)):\n\n # is the subject is an entity type or equivalent to an entity type\n subject_is_entity_type = (subject in entity_type_to_id or\n (subject in equivalent_classes and equivalent_classes[subject] in entity_type_to_id))\n # is the object is an entity type or equivalent to an entity type\n object_is_entity_type = (object in entity_type_to_id or\n (object in equivalent_classes and equivalent_classes[object] in entity_type_to_id))\n\n # if the subject is an entity type or equivalent to an entity type AND the object is an entity type or\n # equivalent to an entity type\n if 
subject_is_entity_type and object_is_entity_type:\n # replace subject and object with their equivalent entity type if thhey are not an entity type themselves\n if subject not in entity_type_to_id:\n subject = equivalent_classes[subject]\n if object not in entity_type_to_id:\n object = equivalent_classes[object]\n\n subject_id = entity_type_to_id[subject]\n object_id = entity_type_to_id[object]\n # add subject and object and their relation to the DAG\n if subject_id != object_id:\n if object_id not in entity_type_dag:\n entity_type_dag[object_id] = DAGNode(object_id, object)\n if subject_id not in entity_type_dag:\n entity_type_dag[subject_id] = DAGNode(subject_id, subject)\n\n # add DAG node of object as parent to the subject DAG node\n entity_type_dag[subject_id].parents.append(entity_type_dag[object_id])\n # add DAG node of the subject as child to the object DAG node\n entity_type_dag[object_id].children.append(entity_type_dag[subject_id])\n\n return entity_type_dag", "def define_group_properties(self):\n\n # PropertyGroup\n self.propertygroup['debug']['x86'] = get_propertygroup(\n 'debug', 'x86', ' and @Label=\"Configuration\"'\n )\n self.propertygroup['debug']['x64'] = get_propertygroup(\n 'debug', 'x64', ' and @Label=\"Configuration\"'\n )\n self.propertygroup['release']['x86'] = get_propertygroup(\n 'release', 'x86', ' and @Label=\"Configuration\"'\n )\n self.propertygroup['release']['x64'] = get_propertygroup(\n 'release', 'x64', ' and @Label=\"Configuration\"'\n )\n\n # ItemDefinitionGroup\n self.definitiongroups['debug']['x86'] = get_definitiongroup('debug', 'x86')\n self.definitiongroups['debug']['x64'] = get_definitiongroup('debug', 'x64')\n self.definitiongroups['release']['x86'] = get_definitiongroup('release', 'x86')\n self.definitiongroups['release']['x64'] = get_definitiongroup('release', 'x64')", "def make_grp(self):\n try:\n self.base['grp']\n except:\n self.base['grp'] = np.zeros(len(self.base),dtype='i')\n\n for halo in self._halos.values():\n halo[name][:] = halo._halo_id\n\n if config['verbose']: print \"writing %s\"%(self._base().filename+'.grp')\n self._base().write_array('grp',overwrite=True,binary=False)", "def bases(layout, mvClass=MultiVector, grades=None):\n\n dict = {}\n for i in range(layout.gaDims):\n grade = layout.gradeList[i]\n if grade != 0:\n if grades is not None and grade not in grades:\n continue\n v = np.zeros((layout.gaDims,), dtype=int)\n v[i] = 1\n dict[layout.names[i]] = mvClass(layout, v)\n return dict", "def _get_domain_mappings(domain_to_intents: Dict) -> Dict:\n domain2id = {}\n domains = list(domain_to_intents)\n for index, domain in enumerate(domains):\n domain2id[domain] = index\n return domain2id", "def format_domain(domain):\n domain.ns_converted = []\n for ns in domain.ns :\n if isinstance(ns, objects.DomainHostAttr) :\n ns_item = {\n 'hostname' : ns.hostname,\n 'ips' : []\n }\n\n for hostaddr in ns.hostAddr :\n ns_item['ips'].append(hostaddr.ip)\n else :\n ns_item = {\n 'hostname' : ns.name,\n 'ips' : [],\n 'hostobj' : 1\n }\n domain.ns_converted.append(ns_item)\n\n return domain", "def __build_inventory_groups():\r\n inventory_lists = {}\r\n for type_val, display in MEDIA_CHOICES:\r\n inventory_choices = [{\r\n 'id': inv.id,\r\n 'desc': inv.inventory_text,\r\n 'container': inv.container,\r\n 'notes': inv.notes_inv,\r\n 'vol': inv.volume,\r\n 'cost': str(inv.cost),\r\n 'media_code': type_val\r\n } for inv\r\n in Inventory.objects.filter(media_type=type_val)]\r\n inventory_lists[type_val] = inventory_choices\r\n return 
simplejson.dumps(inventory_lists)", "def factory_type_dict():\n return {'filter' : filters.generate_filter,\n 'global_options' : global_options.generate_global_options,\n 'input_device' : input_devices.generate_input_device,\n 'input_stream' : input_streams.generate_input_stream,\n 'output_device' : output_devices.generate_output_device,\n 'output_stream' : output_streams.generate_output_stream}", "def as_dict(self):\n d = {\n 'name': self.name,\n 'description': self.description,\n 'reset': self.reset,\n 'width': self.width,\n 'lsb': self.lsb,\n 'access': self.access,\n 'hardware': self.hardware,\n 'enums': [enum.as_dict() for enum in self.enums]\n }\n d.update(self.etc)\n return d", "def get_common_os(tuple_list):\n common_os_dict = {}\n # common_protocol_dict = {}\n itr = 0\n\n for t in tuple_list:\n \n os_key = get_group_number_from_name(t[0])\n\n if os_key in common_os_dict:\n common_os_dict[os_key].append(itr)\n else:\n common_os_dict[os_key] = [itr]\n\n itr += 1\n return common_os_dict", "def separate_types(obj_list):\n\n obj_dict = {\n 'R':[],\n 'L':[],\n 'C':[],\n 'V':[],\n 'I':[],\n 'E':[],\n 'G':[],\n 'H':[],\n 'F':[]\n }\n\n for obj in obj_list:\n obj_dict[obj.el_type].append(obj)\n\n return obj_dict", "def type_classes(self) -> Dict[str, int]:\n return {\n \"bg\": 0,\n \"neutrophil\": 1,\n \"epithelial\": 2,\n \"lymphocyte\": 3,\n \"plasma\": 4,\n \"eosinophil\": 5,\n \"connective\": 6,\n }", "def _compile_packers(endian):\n return {\n \"B\": struct.Struct(endian + \"B\"),\n \"b\": struct.Struct(endian + \"b\"),\n \"h\": struct.Struct(endian + \"h\"),\n \"H\": struct.Struct(endian + \"H\"),\n \"l\": struct.Struct(endian + \"l\"),\n \"L\": struct.Struct(endian + \"L\"),\n \"d\": struct.Struct(endian + \"d\"),\n \"f\": struct.Struct(endian + \"f\"),\n }", "def _BuildNamespaceFolderMap(self, type_folders):\n for folder in type_folders:\n self.namespace_folder_map[folder.local_namespace.namespace] = folder", "def serials(self) -> dict[str, int | lcn_defs.HardwareType]:\n return {\n \"hardware_serial\": self.hardware_serial,\n \"manu\": self.manu,\n \"software_serial\": self.software_serial,\n \"hardware_type\": self.hardware_type,\n }", "def botorch_modular_to_dict(class_type: Type[Any]) -> Dict[str, Any]:\n for _class in CLASS_TO_REGISTRY:\n if issubclass(class_type, _class):\n registry = CLASS_TO_REGISTRY[_class]\n if class_type not in registry:\n raise ValueError(\n f\"Class `{class_type.__name__}` not in Type[{_class.__name__}] \"\n \"registry, please add it. 
BoTorch object registries are \"\n \"located in `ax/storage/botorch_modular_registry.py`.\"\n )\n return {\n \"__type\": f\"Type[{_class.__name__}]\",\n \"index\": registry[class_type],\n \"class\": f\"{_class}\",\n }\n raise ValueError(\n f\"{class_type} does not have a corresponding parent class in \"\n \"CLASS_TO_REGISTRY.\"\n )", "def generate_dict(suffix):\n rules = {\n # If more than the first letter should be capitalized, we have to do it\n # manually.\n 'nsset' : 'NSSet',\n 'keyset' : 'KeySet',\n 'publicrequest' : 'PublicRequest',\n 'bankstatement' : 'BankStatement',\n 'statementhead' : 'StatementHead',\n }\n result = dict(\n [\n (\n item['classname'], \n rules.get(\n item['classname'], item['classname'].capitalize()) + suffix)\n for item in filter_type_items\n ])\n return result", "def _build_driver_dict(self):\n self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, False)", "def _load_orgs_and_genes(self):\n organisms = {}\n genes = {}\n for gene in self.gene_ids:\n org_file_path = self._get_organisms_file_path(gene[self.GENE_NAME_IDX], gene[self.GENE_ID_IDX])\n with open(org_file_path, \"r\") as orgs:\n org = orgs.read().splitlines()\n genes[gene[self.GENE_NAME_IDX]] = {}\n # we only care about unique organisms\n for o in org:\n if not o.startswith(\">\"):\n continue\n clean_o = o.replace(\">\", \"\", 1).replace(\"_\", \" \").title()\n # I hate to do this but there's a special case for Canis Familiaris\n # EBI does not recognize it but it does recognize Canis Lupus (Canis Lupus Familiaris)\n if \"Canis Familiaris\" in clean_o:\n clean_o = \"Canis lupus\"\n if not organisms.get(clean_o):\n organisms[clean_o] = {self.FREQ_KEY: 1, self.GENE_IDS_KEY: [gene]}\n else:\n organisms[clean_o][self.FREQ_KEY] = organisms[clean_o][self.FREQ_KEY] + 1\n organisms[clean_o][self.GENE_IDS_KEY].append(gene)\n genes[gene[self.GENE_NAME_IDX]][clean_o] = 1\n return organisms, genes", "def edge_encoder_dict():\n\n from .encoders import geometric, mixed\n from mlreco.models.layers.gnn.encoders.cnn import ClustCNNMinkEdgeEncoder\n # from mlreco.models.scn.gnn.encoders.cnn import ClustCNNEdgeEncoder\n\n encoders = {\n \"geo\" : geometric.ClustGeoEdgeEncoder,\n \"mix_debug\" : mixed.ClustMixEdgeEncoder,\n \"cnn\": ClustCNNMinkEdgeEncoder\n }\n\n return encoders", "def constraints_to_dataset(model_run):\n data_dict = dict()\n\n # FIXME: hardcoding == bad\n def _get_set(constraint):\n \"\"\"\n return the set of loc_techs over which the given constraint should be\n built\n \"\"\"\n if \"_area\" in constraint:\n return \"loc_techs_area\"\n elif any(\n i in constraint for i in [\"resource_cap\", \"parasitic\", \"resource_min_use\"]\n ):\n return \"loc_techs_supply_plus\"\n elif (\n \"resource\" in constraint\n ): # i.e. 
everything with 'resource' in the name that isn't resource_cap\n return \"loc_techs_finite_resource\"\n elif (\n \"storage\" in constraint\n or \"charge_rate\" in constraint\n or \"energy_cap_per_storage_cap\" in constraint\n ):\n return \"loc_techs_store\"\n elif \"purchase\" in constraint:\n return \"loc_techs_purchase\"\n elif \"units_\" in constraint:\n return \"loc_techs_milp\"\n elif \"export\" in constraint:\n return \"loc_techs_export\"\n else:\n return \"loc_techs\"\n\n # find all constraints which are actually defined in the yaml file\n relevant_constraints = set(\n i.split(\".constraints.\")[1]\n for i in model_run.locations.as_dict_flat().keys()\n if \".constraints.\" in i and \".carrier_ratios.\" not in i\n )\n for constraint in relevant_constraints:\n data_dict[constraint] = dict(dims=_get_set(constraint), data=[])\n for loc_tech in model_run.sets[_get_set(constraint)]:\n loc, tech = loc_tech.split(\"::\", 1)\n # for transmission technologies, we also need to go into link nesting\n if \":\" in tech: # i.e. transmission technologies\n tech, link = tech.split(\":\")\n loc_tech_dict = model_run.locations[loc].links[link].techs[tech]\n else: # all other technologies\n loc_tech_dict = model_run.locations[loc].techs[tech]\n constraint_value = loc_tech_dict.constraints.get(constraint, np.nan)\n # inf is assumed to be string on import, so we need to np.inf it\n if constraint_value == \"inf\":\n constraint_value = np.inf\n # add the value for the particular location & technology combination to the list\n data_dict[constraint][\"data\"].append(constraint_value)\n # once we've looped through all technology & location combinations, add the array to the dataset\n\n group_share_data = {}\n group_constraints = [\"energy_cap_min\", \"energy_cap_max\", \"energy_cap_equals\"]\n group_constraints_carrier = [\n \"carrier_prod_min\",\n \"carrier_prod_max\",\n \"carrier_prod_equals\",\n ]\n\n for constraint in [ # Only process constraints that are defined\n c\n for c in group_constraints\n if c\n in \"\".join(model_run.model.get_key(\"group_share\", AttrDict()).keys_nested())\n ]:\n group_share_data[constraint] = [\n model_run.model.get_key(\n \"group_share.{}.{}\".format(techlist, constraint), np.nan\n )\n for techlist in model_run.sets[\"techlists\"]\n ]\n\n for constraint in [ # Only process constraints that are defined\n c\n for c in group_constraints_carrier\n if c\n in \"\".join(model_run.model.get_key(\"group_share\", AttrDict()).keys_nested())\n ]:\n group_share_data[constraint] = [\n [\n model_run.model.get_key(\n \"group_share.{}.{}.{}\".format(techlist, constraint, carrier), np.nan\n )\n for techlist in model_run.sets[\"techlists\"]\n ]\n for carrier in model_run.sets[\"carriers\"]\n ]\n\n # Add to data_dict and set dims correctly\n for k in group_share_data:\n data_dict[\"group_share_\" + k] = {\n \"data\": group_share_data[k],\n \"dims\": \"techlists\"\n if k in group_constraints\n else (\"carriers\", \"techlists\"),\n }\n\n return data_dict", "def node_encoder_dict():\n\n from .encoders import geometric, mixed\n from mlreco.models.layers.gnn.encoders.cnn import ClustCNNMinkNodeEncoder\n # from mlreco.models.scn.gnn.encoders.cnn import ClustCNNNodeEncoder\n\n encoders = {\n \"geo\" : geometric.ClustGeoNodeEncoder,\n \"mix_debug\" : mixed.ClustMixNodeEncoder,\n \"cnn\": ClustCNNMinkNodeEncoder\n }\n\n return encoders", "def mapped(cls, package=None):\n mapping = {}\n for extension in cls.all(package):\n signature = extension.signature()\n assert signature not in mapping, \\\n \"%s and 
%s have identical signatures: %r\" \\\n % (mapping[signature], extension, signature)\n mapping[signature] = extension\n return mapping", "def gen_outdict(symbols_it):\n #\n # TODO change all ``mode`` and ``atom`` vals to lists\n from collections.abc import MutableSequence\n outdict = {}\n for groupname, group in symbols_it:\n newgroup = {}\n for ntup in group:\n # ntup = ntup._replace(mode=[ntup.mode])\n if ntup.name not in newgroup:\n newgroup.update({ntup.name: ntup._asdict()})\n else:\n existing = newgroup[ntup.name]\n for field in 'font symbol'.split():\n assert existing[field] == ntup._asdict()[field]\n for field in 'atom mode'.split():\n if isinstance(existing[field], MutableSequence):\n # For now, this can't exist without implementing above.\n assert False\n if ntup._asdict()[field] not in existing[field]:\n existing[field].append(ntup._asdict()[field])\n existing[field].sort()\n else:\n if existing[field] != ntup._asdict()[field]:\n existing.update({field: sorted(\n [existing[field], ntup._asdict()[field]])})\n outdict.update({groupname: newgroup})\n return outdict", "def get_feature_domain_dict(self):\n feature_domain_dict = {}\n for feature_index in range(len(self.train_examples[0])):\n domain = set([example[feature_index] for example in self.train_examples])\n feature_domain_dict[self.features[feature_index]] = domain\n\n return feature_domain_dict", "def generate_manifest_dict(self):\n\n annotations = dict()\n\n for build_project in self.projects.get('build', []):\n for annotation in build_project.get('annotation', []):\n annotations[annotation['name']] = annotation['value']\n\n product = annotations.get('PRODUCT', 'unknown')\n version = annotations.get('VERSION', 'unknown')\n bld_num = annotations.get('BLD_NUM', '9999')\n manifest_name = '{}-{}-{}'.format(product, version, bld_num)\n\n return {\n manifest_name: {\n 'remotes': self.remotes,\n 'defaults': self.defaults,\n 'projects': self.projects\n }\n }", "def group_brakedown(group_list):\r\n group_types = {}\r\n for group1 in group_list:\r\n group_types[(group1.size,group1.valence)] = 1 + group_types.get((group1.size,group1.valence), 0)\r\n return group_types", "def to_dict(cls, obj):\n\n if isinstance(obj, iotbx_pdbh.model):\n labs = ('model')\n info = cls._format_mo(obj)\n elif isinstance(obj, iotbx_pdbh.chain):\n labs = ('model','chain')\n info = cls._format_ch(obj)\n elif isinstance(obj, iotbx_pdbh.residue_group):\n labs = ('model','chain','resseq','icode')\n info = cls._format_rg(obj)\n elif isinstance(obj, iotbx_pdbh.atom_group):\n labs = ('model','chain','resseq','icode','resname','altloc')\n info = cls._format_ag(obj)\n elif isinstance(obj, iotbx_pdbh.conformer):\n labs = ('model','chain','altloc')\n info = cls._format_co(obj)\n elif isinstance(obj, iotbx_pdbh.residue):\n labs = ('model','chain','altloc','resname','resseq','icode')\n info = cls._format_re(obj)\n elif isinstance(obj, iotbx_pdbh.atom):\n raise Exception('Not implemented')\n labs = ('model','chain','resseq','icode','resname','altloc','name')\n if hasattr(obj, 'chain_id'): info = cls._format_al(obj)\n else: info = cls._format_at(obj)\n elif isinstance(obj, iotbx_pdbh.atom_with_labels):\n labs = ('model','chain','resseq','icode','resname','altloc','name')\n info = cls._format_al(obj)\n else:\n raise Exception('Invalid object type provided: {}'.format(type(obj)))\n\n assert len(labs) == len(info)\n return dict(zip(labs, info))", "def convert_genotype_to_config(arch):\n base_string = 'NetworkSelectorDatasetInfo:darts:'\n config = {}\n\n for cell_type in 
['normal', 'reduce']:\n cell = eval('arch.' + cell_type)\n\n start = 0\n n = 2\n for node_idx in range(4):\n end = start + n\n ops = cell[2 * node_idx: 2 * node_idx + 2]\n\n # get edge idx\n edges = {base_string + 'edge_' + cell_type + '_' + str(start + i): op for\n op, i in ops}\n config.update(edges)\n\n if node_idx != 0:\n # get node idx\n input_nodes = sorted(list(map(lambda x: x[1], ops)))\n input_nodes_idx = '_'.join([str(i) for i in input_nodes])\n config.update({base_string + 'inputs_node_' + cell_type + '_' + str(node_idx + 2):\n input_nodes_idx})\n\n start = end\n n += 1\n return config", "def _map_segments(self, type_: Any) -> Dict:\n mapping: Dict = {}\n for seg in self.segments:\n if seg.name and isinstance(seg, type_):\n if mapping.get(seg.name) and mapping.get(seg.name) != seg:\n raise ValueError(f\"Duplicate segment: {seg.name}\")\n mapping[seg.name] = seg\n return mapping", "def process_data_group(folder:Path, type:str, light:bool = False) -> dict:\n\n if type == dm.Delivery:\n data_folder = folder / 'data'\n else:\n data_folder = folder\n\n # check for non-existent or empty folder\n if not data_folder.exists():\n raise FileNotFoundError\n try:\n next((data_folder).glob(\"**/*\"))\n except StopIteration:\n # folder is empty can't process it\n raise FileNotFoundError\n\n # Get file sizes, last modified dates, and names to count,\n # sum size, and hash the file data provided\n file_sizes, file_modified_dates, file_metamodified_dates, file_names = zip(\n *[\n (f.stat().st_size, f.stat().st_mtime, f.stat().st_ctime, f)\n for f in (data_folder).glob(\"**/*\")\n if f.is_file() and f.name != 'receipt.rst'\n ]\n )\n\n last_modified = datetime.fromtimestamp(\n max(max(file_modified_dates),\n max(file_metamodified_dates)))\n\n # Hash the files in the delivery\n if light:\n folder_hash = 'skipped'\n else:\n folder_hash = hash_files(file_names)\n\n dg = {\n 'name' : folder.name,\n 'type' : type.__name__,\n 'last_update' : datetime.now(),\n 'size' : sum(file_sizes),\n 'num_files' : len(file_sizes),\n 'group_hash' : folder_hash,\n 'group_last_modified' : last_modified,\n }\n\n return dg", "def set_platform(self, platform_dict):\n if not os.path.exists(self.file_path):\n print(\"netCDF file does not exist, exiting without saving Platform group...\")\n elif self.format == '.nc':\n with netCDF4.Dataset(self.file_path, 'a', format='NETCDF4') as ncfile:\n plat = ncfile.createGroup('Platform')\n [plat.setncattr(k, v) for k, v in platform_dict.items()]\n elif self.format == '.zarr' and not self.append_zarr: # Do not save platform if appending\n zarrfile = zarr.open(self.file_path, mode='a')\n plat = zarrfile.create_group('Platform')\n for k, v in platform_dict.items():\n plat.attrs[k] = v", "def gencode_dic(gencode_file,gene_type_dic):\n gen_dic = {}\n for i in range(1,len(gencode_file)):\n words_gen = gencode_file[i].strip().split('\\t')\n chr_no = words_gen[2]\n trans_id = words_gen[1]\n cds_info = words_gen[13]\n cde_info = words_gen[14]\n gene_type = gene_type_dic[trans_id]\n gene_name = words_gen[12]\n TSS_start = int(words_gen[4])\n TSS_end = int(words_gen[5])\n CDS_start = int(words_gen[6])\n CDS_end = int(words_gen[7])\n strand = words_gen[3]\n start_list = [int(x) for x in words_gen[9].split(',')[:-1]]\n end_list = [int(x) for x in words_gen[10].split(',')[:-1]]\n exon_no = int(words_gen[8])\n# if (chr_no,trans_id) in gen_dic: #Some trans_id are not unique, especially transcripts in chrX and chrY\n# print trans_id\n interval_list = [P.closedopen(start_list[x],end_list[x]) for x in 
range(0,exon_no)]\n interval_merge = P.empty()\n for i in range(0,len(interval_list)):\n interval_merge = interval_merge | interval_list[i]\n if gene_type == 'protein_coding':\n if (cds_info == 'cmpl') and (cde_info == 'cmpl'):\n # print (interval_merge)\n gen_dic.setdefault((chr_no,strand),[]).append([TSS_start,TSS_end,CDS_start,CDS_end,\\\n gene_name,gene_type,interval_merge])\n else:\n gen_dic.setdefault((chr_no,strand),[]).append([TSS_start,TSS_end,CDS_start,CDS_end,\\\n gene_name,gene_type,interval_merge])\n return gen_dic", "def getAlignedPack4Structure(dataTypeManager: ghidra.program.model.data.DataTypeManager, categoryPath: ghidra.program.model.data.CategoryPath, structureName: unicode) -> ghidra.program.model.data.StructureDataType:\n ...", "def construct_type(self):\n return \"domain_ancillary\"", "def get_signal_type_configs(self) -> t.Mapping[str, SignalTypeConfig]:", "def makeMapping(globalMap):\n \n from memops.xml.Implementation import bool2str, str2bool\n\n # Set up top level dictionaries\n loadMaps = globalMap.get('loadMaps')\n mapsByGuid = globalMap.get('mapsByGuid')\n\n abstractTypes = globalMap.get('ANAP').get('abstractTypes')\n exolinks = globalMap.get('ANAP').get('exolinks')\n\n # DataType GraphicsHandlerType\n currentMap = {}\n abstractTypes['GraphicsHandlerType'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-10-03-11:26:03_00001'] = currentMap\n loadMaps['ANAP.GraphicsHandlerType'] = currentMap\n currentMap['tag'] = 'ANAP.GraphicsHandlerType'\n currentMap['type'] = 'simple'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-03-11:26:03_00001'\n currentMap['toStr'] = 'text'\n currentMap['cnvrt'] = 'text'\n\n # Class AnalysisProfile\n currentMap = {}\n abstractTypes['AnalysisProfile'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00004'] = currentMap\n loadMaps['ANAP.AnalysisProfile'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00004'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'analysisProfiles'\n currentMap['isTop'] = True\n currentMap['objkey'] = 'name'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.AnalysisProfile\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute AnalysisProfile.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute AnalysisProfile.bgColor\n currentMap = {}\n contentMap['bgColor'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00031'] = currentMap\n loadMaps['ANAP.AnalysisProfile.bgColor'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.bgColor'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00031'\n currentMap['name'] = 'bgColor'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['default'] = '#FFFFFF'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00007')\n\n # Attribute AnalysisProfile.createdBy\n contentMap['createdBy'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:00:59_00002__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute AnalysisProfile.fgColor\n currentMap = {}\n contentMap['fgColor'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00032'] = currentMap\n loadMaps['ANAP.AnalysisProfile.fgColor'] = currentMap\n currentMap['tag'] = 
'ANAP.AnalysisProfile.fgColor'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00032'\n currentMap['name'] = 'fgColor'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['default'] = '#000000'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00007')\n\n # Attribute AnalysisProfile.font\n currentMap = {}\n contentMap['font'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00030'] = currentMap\n loadMaps['ANAP.AnalysisProfile.font'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.font'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00030'\n currentMap['name'] = 'font'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Attribute AnalysisProfile.graphicsHandler\n currentMap = {}\n contentMap['graphicsHandler'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00040'] = currentMap\n loadMaps['ANAP.AnalysisProfile.graphicsHandler'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.graphicsHandler'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00040'\n currentMap['name'] = 'graphicsHandler'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = 'Tk'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-10-03-11:26:03_00001')\n\n # Attribute AnalysisProfile.guid\n contentMap['guid'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:26_00002')\n\n # Attribute AnalysisProfile.isModifiable\n contentMap['isModifiable'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-17-14:16:26_00010__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute AnalysisProfile.lastUnlockedBy\n contentMap['lastUnlockedBy'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:00:59_00003__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute AnalysisProfile.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00029'] = currentMap\n loadMaps['ANAP.AnalysisProfile.name'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00029'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Attribute AnalysisProfile.panView\n currentMap = {}\n contentMap['panView'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00036'] = currentMap\n loadMaps['ANAP.AnalysisProfile.panView'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.panView'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00036'\n currentMap['name'] = 'panView'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = True\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute AnalysisProfile.sendBugReports\n currentMap = {}\n contentMap['sendBugReports'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2010-11-17-16:21:37_00004'] = currentMap\n loadMaps['ANAP.AnalysisProfile.sendBugReports'] = currentMap\n currentMap['tag'] 
= 'ANAP.AnalysisProfile.sendBugReports'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2010-11-17-16:21:37_00004'\n currentMap['name'] = 'sendBugReports'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = 'maybe'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2010-11-17-16:21:33_00001')\n\n # Attribute AnalysisProfile.transientDialogs\n currentMap = {}\n contentMap['transientDialogs'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00037'] = currentMap\n loadMaps['ANAP.AnalysisProfile.transientDialogs'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.transientDialogs'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00037'\n currentMap['name'] = 'transientDialogs'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = True\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute AnalysisProfile.transientWindows\n currentMap = {}\n contentMap['transientWindows'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00038'] = currentMap\n loadMaps['ANAP.AnalysisProfile.transientWindows'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.transientWindows'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00038'\n currentMap['name'] = 'transientWindows'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = False\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute AnalysisProfile.twoCharShortcuts\n currentMap = {}\n contentMap['twoCharShortcuts'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00039'] = currentMap\n loadMaps['ANAP.AnalysisProfile.twoCharShortcuts'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.twoCharShortcuts'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00039'\n currentMap['name'] = 'twoCharShortcuts'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = False\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute AnalysisProfile.useCrosshair\n currentMap = {}\n contentMap['useCrosshair'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00034'] = currentMap\n loadMaps['ANAP.AnalysisProfile.useCrosshair'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.useCrosshair'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00034'\n currentMap['name'] = 'useCrosshair'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = True\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute AnalysisProfile.useGlobalShortcuts\n currentMap = {}\n contentMap['useGlobalShortcuts'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00035'] = currentMap\n loadMaps['ANAP.AnalysisProfile.useGlobalShortcuts'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.useGlobalShortcuts'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00035'\n currentMap['name'] = 'useGlobalShortcuts'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['default'] = False\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute AnalysisProfile.userEmail\n currentMap 
= {}\n contentMap['userEmail'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2010-11-17-16:21:37_00003'] = currentMap\n loadMaps['ANAP.AnalysisProfile.userEmail'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.userEmail'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2010-11-17-16:21:37_00003'\n currentMap['name'] = 'userEmail'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-09-12-18:31:28_00003')\n\n # Attribute AnalysisProfile.userName\n currentMap = {}\n contentMap['userName'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2010-11-17-16:21:37_00001'] = currentMap\n loadMaps['ANAP.AnalysisProfile.userName'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.userName'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2010-11-17-16:21:37_00001'\n currentMap['name'] = 'userName'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Attribute AnalysisProfile.userOrganisation\n currentMap = {}\n contentMap['userOrganisation'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2010-11-17-16:21:37_00002'] = currentMap\n loadMaps['ANAP.AnalysisProfile.userOrganisation'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.userOrganisation'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2010-11-17-16:21:37_00002'\n currentMap['name'] = 'userOrganisation'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Attribute AnalysisProfile.webBrowser\n currentMap = {}\n contentMap['webBrowser'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00033'] = currentMap\n loadMaps['ANAP.AnalysisProfile.webBrowser'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.webBrowser'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00033'\n currentMap['name'] = 'webBrowser'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Role AnalysisProfile.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role AnalysisProfile.colorSchemes\n currentMap = {}\n contentMap['colorSchemes'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00026'] = currentMap\n loadMaps['ANAP.AnalysisProfile.colorSchemes'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.colorSchemes'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00026'\n currentMap['name'] = 'colorSchemes'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('ANAP').get('abstractTypes')\n\n # Role AnalysisProfile.macros\n currentMap = {}\n contentMap['macros'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00022'] = currentMap\n loadMaps['ANAP.AnalysisProfile.macros'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.macros'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00022'\n currentMap['name'] = 'macros'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n 
currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('ANAP').get('abstractTypes')\n\n # Role AnalysisProfile.marksColor\n currentMap = {}\n contentMap['marksColor'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00028'] = currentMap\n loadMaps['ANAP.AnalysisProfile.marksColor'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.marksColor'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00028'\n currentMap['name'] = 'marksColor'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['implSkip'] = True\n currentMap['copyOverride'] = True\n\n # Role AnalysisProfile.refExpProfiles\n currentMap = {}\n contentMap['refExpProfiles'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00024'] = currentMap\n loadMaps['ANAP.AnalysisProfile.refExpProfiles'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.refExpProfiles'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00024'\n currentMap['name'] = 'refExpProfiles'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('ANAP').get('abstractTypes')\n\n # Role AnalysisProfile.residueProfiles\n currentMap = {}\n contentMap['residueProfiles'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00020'] = currentMap\n loadMaps['ANAP.AnalysisProfile.residueProfiles'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.residueProfiles'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00020'\n currentMap['name'] = 'residueProfiles'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('ANAP').get('abstractTypes')\n\n # Role AnalysisProfile.rulersColor\n currentMap = {}\n contentMap['rulersColor'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00027'] = currentMap\n loadMaps['ANAP.AnalysisProfile.rulersColor'] = currentMap\n currentMap['tag'] = 'ANAP.AnalysisProfile.rulersColor'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00027'\n currentMap['name'] = 'rulersColor'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['implSkip'] = True\n currentMap['copyOverride'] = True\n # End of AnalysisProfile\n\n currentMap = abstractTypes.get('AnalysisProfile')\n aList = ['createdBy', 'graphicsHandler', 'guid', 'isModifiable', 'lastUnlockedBy', 'name', 'panView', 'sendBugReports', 'transientDialogs', 'transientWindows', 'twoCharShortcuts', 'useCrosshair', 'useGlobalShortcuts', 'userEmail', 'webBrowser']\n currentMap['headerAttrs'] = aList\n aList = ['bgColor', 'fgColor', 'font', 'userName', 'userOrganisation', 'marksColor', 'rulersColor']\n currentMap['simpleAttrs'] = aList\n aList = ['residueProfiles', 'refExpProfiles', 'macros', 'colorSchemes', 'access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n aList = ['colorSchemes', 'macros', 'refExpProfiles', 'residueProfiles']\n currentMap['children'] = aList\n\n # Class ColorScheme\n currentMap = {}\n abstractTypes['ColorScheme'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-08-11:06:22_00002'] = currentMap\n loadMaps['ANAP.ColorScheme'] = currentMap\n currentMap['tag'] = 'ANAP.ColorScheme'\n currentMap['type'] = 'class'\n currentMap['guid'] = 
'www.ccpn.ac.uk_Fogh_2008-05-08-11:06:22_00002'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'colorSchemes'\n currentMap['objkey'] = 'name'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.ColorScheme\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute ColorScheme.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute ColorScheme.colors\n currentMap = {}\n contentMap['colors'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00043'] = currentMap\n loadMaps['ANAP.ColorScheme.colors'] = currentMap\n currentMap['tag'] = 'ANAP.ColorScheme.colors'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00043'\n currentMap['name'] = 'colors'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00007')\n\n # Attribute ColorScheme.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00007'] = currentMap\n loadMaps['ANAP.ColorScheme.name'] = currentMap\n currentMap['tag'] = 'ANAP.ColorScheme.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00007'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Role ColorScheme.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n # End of ColorScheme\n\n currentMap = abstractTypes.get('ColorScheme')\n aList = ['colors', 'name']\n currentMap['simpleAttrs'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Class Macro\n currentMap = {}\n abstractTypes['Macro'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-08-11:06:22_00001'] = currentMap\n loadMaps['ANAP.Macro'] = currentMap\n currentMap['tag'] = 'ANAP.Macro'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-08-11:06:22_00001'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'macros'\n currentMap['objkey'] = 'serial'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.Macro\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute Macro.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute Macro.details\n currentMap = {}\n contentMap['details'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00005'] = currentMap\n loadMaps['ANAP.Macro.details'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.details'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00005'\n currentMap['name'] = 'details'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00036')\n\n # Attribute Macro.function\n currentMap = {}\n contentMap['function'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00002'] = currentMap\n loadMaps['ANAP.Macro.function'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.function'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00002'\n currentMap['name'] = 'function'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n 
currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Attribute Macro.isInMenu\n currentMap = {}\n contentMap['isInMenu'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-09-29-13:48:16_00005'] = currentMap\n loadMaps['ANAP.Macro.isInMenu'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.isInMenu'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-09-29-13:48:16_00005'\n currentMap['name'] = 'isInMenu'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = False\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute Macro.isInMouseMenu\n currentMap = {}\n contentMap['isInMouseMenu'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-09-29-13:48:16_00006'] = currentMap\n loadMaps['ANAP.Macro.isInMouseMenu'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.isInMouseMenu'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-09-29-13:48:16_00006'\n currentMap['name'] = 'isInMouseMenu'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = False\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute Macro.module\n currentMap = {}\n contentMap['module'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00003'] = currentMap\n loadMaps['ANAP.Macro.module'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.module'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00003'\n currentMap['name'] = 'module'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Attribute Macro.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:10_00001'] = currentMap\n loadMaps['ANAP.Macro.name'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:10_00001'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Attribute Macro.ordering\n currentMap = {}\n contentMap['ordering'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00004'] = currentMap\n loadMaps['ANAP.Macro.ordering'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.ordering'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00004'\n currentMap['name'] = 'ordering'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['proc'] = 'direct'\n currentMap['default'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032')\n\n # Attribute Macro.path\n currentMap = {}\n contentMap['path'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00001'] = currentMap\n loadMaps['ANAP.Macro.path'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.path'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00001'\n currentMap['name'] = 'path'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00003')\n\n # Attribute Macro.serial\n currentMap = {}\n contentMap['serial'] = currentMap\n 
mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:09_00001'] = currentMap\n loadMaps['ANAP.Macro.serial'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.serial'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:09_00001'\n currentMap['name'] = 'serial'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032')\n\n # Attribute Macro.shortcut\n currentMap = {}\n contentMap['shortcut'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00006'] = currentMap\n loadMaps['ANAP.Macro.shortcut'] = currentMap\n currentMap['tag'] = 'ANAP.Macro.shortcut'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-15:11:12_00006'\n currentMap['name'] = 'shortcut'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Role Macro.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n # End of Macro\n\n currentMap = abstractTypes.get('Macro')\n aList = ['function', 'isInMenu', 'isInMouseMenu', 'module', 'ordering', 'serial', 'shortcut']\n currentMap['headerAttrs'] = aList\n aList = ['details', 'name', 'path']\n currentMap['simpleAttrs'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Class RefExpProfile\n currentMap = {}\n abstractTypes['RefExpProfile'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00006'] = currentMap\n loadMaps['ANAP.RefExpProfile'] = currentMap\n currentMap['tag'] = 'ANAP.RefExpProfile'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00006'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'refExpProfiles'\n currentMap['objkey'] = 'name'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.RefExpProfile\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute RefExpProfile.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute RefExpProfile.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00046'] = currentMap\n loadMaps['ANAP.RefExpProfile.name'] = currentMap\n currentMap['tag'] = 'ANAP.RefExpProfile.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00046'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Attribute RefExpProfile.peakSymbolColors\n currentMap = {}\n contentMap['peakSymbolColors'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00048'] = currentMap\n loadMaps['ANAP.RefExpProfile.peakSymbolColors'] = currentMap\n currentMap['tag'] = 'ANAP.RefExpProfile.peakSymbolColors'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00048'\n currentMap['name'] = 'peakSymbolColors'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00007')\n\n # Attribute RefExpProfile.peakTextColors\n currentMap = {}\n contentMap['peakTextColors'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00049'] = currentMap\n loadMaps['ANAP.RefExpProfile.peakTextColors'] = currentMap\n 
currentMap['tag'] = 'ANAP.RefExpProfile.peakTextColors'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00049'\n currentMap['name'] = 'peakTextColors'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00007')\n\n # Attribute RefExpProfile.refExpNames\n currentMap = {}\n contentMap['refExpNames'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00047'] = currentMap\n loadMaps['ANAP.RefExpProfile.refExpNames'] = currentMap\n currentMap['tag'] = 'ANAP.RefExpProfile.refExpNames'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00047'\n currentMap['name'] = 'refExpNames'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Role RefExpProfile.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role RefExpProfile.negColorSchemes\n currentMap = {}\n contentMap['negColorSchemes'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00045'] = currentMap\n loadMaps['ANAP.RefExpProfile.negColorSchemes'] = currentMap\n currentMap['tag'] = 'ANAP.RefExpProfile.negColorSchemes'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00045'\n currentMap['name'] = 'negColorSchemes'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['copyOverride'] = True\n\n # Role RefExpProfile.posColorSchemes\n currentMap = {}\n contentMap['posColorSchemes'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00041'] = currentMap\n loadMaps['ANAP.RefExpProfile.posColorSchemes'] = currentMap\n currentMap['tag'] = 'ANAP.RefExpProfile.posColorSchemes'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00041'\n currentMap['name'] = 'posColorSchemes'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['copyOverride'] = True\n # End of RefExpProfile\n\n currentMap = abstractTypes.get('RefExpProfile')\n aList = ['name']\n currentMap['headerAttrs'] = aList\n aList = ['peakSymbolColors', 'peakTextColors', 'refExpNames', 'negColorSchemes', 'posColorSchemes']\n currentMap['simpleAttrs'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Class ResidueProfile\n currentMap = {}\n abstractTypes['ResidueProfile'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00005'] = currentMap\n loadMaps['ANAP.ResidueProfile'] = currentMap\n currentMap['tag'] = 'ANAP.ResidueProfile'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00005'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'residueProfiles'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.ResidueProfile\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute ResidueProfile.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute ResidueProfile.ccpCode\n currentMap = {}\n contentMap['ccpCode'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00051'] = currentMap\n loadMaps['ANAP.ResidueProfile.ccpCode'] = currentMap\n currentMap['tag'] = 'ANAP.ResidueProfile.ccpCode'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00051'\n 
currentMap['name'] = 'ccpCode'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-09-12-18:31:28_00003')\n\n # Attribute ResidueProfile.guiName\n currentMap = {}\n contentMap['guiName'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00052'] = currentMap\n loadMaps['ANAP.ResidueProfile.guiName'] = currentMap\n currentMap['tag'] = 'ANAP.ResidueProfile.guiName'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00052'\n currentMap['name'] = 'guiName'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-09-12-18:31:28_00003')\n\n # Attribute ResidueProfile.molType\n currentMap = {}\n contentMap['molType'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00050'] = currentMap\n loadMaps['ANAP.ResidueProfile.molType'] = currentMap\n currentMap['tag'] = 'ANAP.ResidueProfile.molType'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:52_00050'\n currentMap['name'] = 'molType'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:52_00024')\n\n # Role ResidueProfile.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n # End of ResidueProfile\n\n currentMap = abstractTypes.get('ResidueProfile')\n aList = ['ccpCode', 'guiName', 'molType']\n currentMap['headerAttrs'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Out-of-package link to AnalysisProfile\n currentMap = {}\n exolinks['AnalysisProfile'] = currentMap\n loadMaps['ANAP.exo-AnalysisProfile'] = currentMap\n currentMap['tag'] = 'ANAP.exo-AnalysisProfile'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00004'\n currentMap['name'] = 'AnalysisProfile'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.AnalysisProfile\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n\n # Out-of-package link to ColorScheme\n currentMap = {}\n exolinks['ColorScheme'] = currentMap\n loadMaps['ANAP.exo-ColorScheme'] = currentMap\n currentMap['tag'] = 'ANAP.exo-ColorScheme'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-08-11:06:22_00002'\n currentMap['name'] = 'ColorScheme'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.ColorScheme\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033'))\n\n # Out-of-package link to Macro\n currentMap = {}\n exolinks['Macro'] = currentMap\n loadMaps['ANAP.exo-Macro'] = currentMap\n currentMap['tag'] = 'ANAP.exo-Macro'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-08-11:06:22_00001'\n currentMap['name'] = 'Macro'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.Macro\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032'))\n\n # Out-of-package link to RefExpProfile\n currentMap = {}\n exolinks['RefExpProfile'] = currentMap\n 
loadMaps['ANAP.exo-RefExpProfile'] = currentMap\n currentMap['tag'] = 'ANAP.exo-RefExpProfile'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00006'\n currentMap['name'] = 'RefExpProfile'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.RefExpProfile\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037'))\n\n # Out-of-package link to ResidueProfile\n currentMap = {}\n exolinks['ResidueProfile'] = currentMap\n loadMaps['ANAP.exo-ResidueProfile'] = currentMap\n currentMap['tag'] = 'ANAP.exo-ResidueProfile'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2008-05-05-15:12:50_00005'\n currentMap['name'] = 'ResidueProfile'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccpnmr.api.AnalysisProfile.ResidueProfile\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:52_00024'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-09-12-18:31:28_00003'))", "def base_mappings():\n return {\n 'from_1': {\n 'to_1': {\n 'mol_1': ({}, {}, []),\n 'mol_2': ({}, {}, []),\n },\n },\n }", "def assemble(predefs=PREDEFS, symbol_hash=None):\r\n d = {}\r\n for k in PREDEFS:\r\n v = PREDEFS[k]\r\n if \"suppress\" in v and v[\"suppress\"]: continue\r\n d[k] = v\r\n if symbol_hash is None:\r\n symbol_hash = get_syms()\r\n for k in symbol_hash:\r\n bdi = get_builddict_info(symbol_hash[k])\r\n if bdi is None: continue\r\n d[k] = bdi.copy()\r\n if not \"defn\" in d[k]:\r\n d[k][\"defn\"] = entry_for_one(k, symbol_hash[k])\r\n return d", "def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n\n return {\n \"bug\":[self.from_entity(\n entity=\"bug\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"beverage\": [self.from_entity(\n entity=\"beverage\", \n intent=\"inform\"), \n self.from_text(\n intent=\"inform\")],\n \"second_person_plural\": [self.from_entity(\n entity=\"second_person_plural\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"cot_caught\": [self.from_entity(\n entity=\"cot_caught\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"rain_sun\": [self.from_entity(\n entity=\"rain_sun\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"crawfish\": [self.from_entity(\n entity=\"crawfish\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"halloween\": [self.from_entity(\n entity=\"halloween\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"sandwich\": [self.from_entity(\n entity=\"sandwich\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"side_road\": [self.from_entity(\n entity=\"side_road\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"shoes\": [self.from_entity(\n entity=\"shoes\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"highway\": [self.from_entity(\n entity=\"highway\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"yard_sale\": [self.from_entity(\n entity=\"yard_sale\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"rubbernecking\": [self.from_entity(\n entity=\"rubbernecking\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"frosting\": 
[self.from_entity(\n entity=\"frosting\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"lawyer\": [self.from_entity(\n entity=\"lawyer\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"kitty_corner\": [self.from_entity(\n entity=\"kitty_corner\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"firefly\": [self.from_entity(\n entity=\"firefly\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"verge\": [self.from_entity(\n entity=\"verge\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"brew_thru\": [self.from_entity(\n entity=\"brew_thru\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")],\n \"water_fountain\": [self.from_entity(\n entity=\"water_fountain\", \n intent=\"inform\"),\n self.from_text(\n intent=\"inform\")]\n }", "def build_subsets(self, field):\n sss = defaultdict(list)\n for r in self.__elements__:\n sss[getattr(r, field)].append(r)\n return dict(sss)", "def _get_domain(self):\n self.ensure_one()\n domain = ['|', ('active', '=', True), ('active', '=', False)]\n # Check active\n if self.active == 'true':\n domain += [('active', '=', True)]\n elif self.active == 'false':\n domain += [('active', '=', False)]\n # Check partner type\n if self.partner_type == 'customer_or_supplier':\n domain += ['|', ('customer', '=', True), ('supplier', '=', True)]\n elif self.partner_type == 'customer_and_supplier':\n domain += [('customer', '=', True), ('supplier', '=', True)]\n elif self.partner_type == 'customer':\n domain += [('customer', '=', True)]\n elif self.partner_type == 'supplier':\n domain += [('supplier', '=', True)]\n # Check category\n if self.category_ids:\n domain += [('category_id', 'in', self.category_ids.ids)]\n return domain", "def makeMapping(globalMap):\n \n from memops.xml.Implementation import bool2str, str2bool\n\n # Set up top level dictionaries\n loadMaps = globalMap.get('loadMaps')\n mapsByGuid = globalMap.get('mapsByGuid')\n\n abstractTypes = globalMap.get('CHEL').get('abstractTypes')\n exolinks = globalMap.get('CHEL').get('exolinks')\n\n # DataType HalfLifeType\n currentMap = {}\n abstractTypes['HalfLifeType'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-06-07-18:18:10_00002'] = currentMap\n loadMaps['CHEL.HalfLifeType'] = currentMap\n currentMap['tag'] = 'CHEL.HalfLifeType'\n currentMap['type'] = 'simple'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-06-07-18:18:10_00002'\n currentMap['toStr'] = 'text'\n currentMap['cnvrt'] = 'text'\n\n # Class ChemElement\n currentMap = {}\n abstractTypes['ChemElement'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00004'] = currentMap\n loadMaps['CHEL.ChemElement'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElement'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00004'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'chemElements'\n currentMap['objkey'] = 'symbol'\n currentMap['class'] = ccp.api.molecule.ChemElement.ChemElement\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute ChemElement.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute ChemElement.atomNumber\n currentMap = {}\n contentMap['atomNumber'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00017'] = currentMap\n loadMaps['CHEL.ChemElement.atomNumber'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElement.atomNumber'\n currentMap['type'] = 
'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00017'\n currentMap['name'] = 'atomNumber'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['proc'] = 'direct'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032')\n\n # Attribute ChemElement.atomicRadius\n currentMap = {}\n contentMap['atomicRadius'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00018'] = currentMap\n loadMaps['CHEL.ChemElement.atomicRadius'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElement.atomicRadius'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00018'\n currentMap['name'] = 'atomicRadius'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00031')\n\n # Attribute ChemElement.covalentRadius\n currentMap = {}\n contentMap['covalentRadius'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00019'] = currentMap\n loadMaps['CHEL.ChemElement.covalentRadius'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElement.covalentRadius'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00019'\n currentMap['name'] = 'covalentRadius'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00031')\n\n # Attribute ChemElement.mass\n currentMap = {}\n contentMap['mass'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00020'] = currentMap\n loadMaps['CHEL.ChemElement.mass'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElement.mass'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00020'\n currentMap['name'] = 'mass'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00031')\n\n # Attribute ChemElement.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00021'] = currentMap\n loadMaps['CHEL.ChemElement.name'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElement.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00021'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00055')\n\n # Attribute ChemElement.symbol\n currentMap = {}\n contentMap['symbol'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00015'] = currentMap\n loadMaps['CHEL.ChemElement.symbol'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElement.symbol'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00015'\n currentMap['name'] = 'symbol'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00055')\n\n # Role ChemElement.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role ChemElement.isotopes\n currentMap = {}\n contentMap['isotopes'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00004'] = currentMap\n loadMaps['CHEL.ChemElement.isotopes'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElement.isotopes'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00004'\n currentMap['name'] = 'isotopes'\n 
currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['content'] = globalMap.get('CHEL').get('abstractTypes')\n # End of ChemElement\n\n currentMap = abstractTypes.get('ChemElement')\n aList = ['atomNumber', 'atomicRadius', 'covalentRadius', 'mass', 'name', 'symbol']\n currentMap['headerAttrs'] = aList\n aList = ['isotopes', 'access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n aList = ['isotopes']\n currentMap['children'] = aList\n\n # Class ChemElementStore\n currentMap = {}\n abstractTypes['ChemElementStore'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00005'] = currentMap\n loadMaps['CHEL.ChemElementStore'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElementStore'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00005'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'chemElementStores'\n currentMap['isTop'] = True\n currentMap['objkey'] = 'name'\n currentMap['class'] = ccp.api.molecule.ChemElement.ChemElementStore\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute ChemElementStore.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute ChemElementStore.createdBy\n contentMap['createdBy'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:00:59_00002__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute ChemElementStore.guid\n contentMap['guid'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:26_00002')\n\n # Attribute ChemElementStore.isModifiable\n contentMap['isModifiable'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-17-14:16:26_00010__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute ChemElementStore.lastUnlockedBy\n contentMap['lastUnlockedBy'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:00:59_00003__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute ChemElementStore.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00024'] = currentMap\n loadMaps['CHEL.ChemElementStore.name'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElementStore.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00024'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Role ChemElementStore.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role ChemElementStore.chemElements\n currentMap = {}\n contentMap['chemElements'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00023'] = currentMap\n loadMaps['CHEL.ChemElementStore.chemElements'] = currentMap\n currentMap['tag'] = 'CHEL.ChemElementStore.chemElements'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00023'\n currentMap['name'] = 'chemElements'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('CHEL').get('abstractTypes')\n # End of ChemElementStore\n\n currentMap = abstractTypes.get('ChemElementStore')\n aList = ['createdBy', 'guid', 'isModifiable', 
'lastUnlockedBy']\n currentMap['headerAttrs'] = aList\n aList = ['name']\n currentMap['simpleAttrs'] = aList\n aList = ['chemElements', 'access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n aList = ['chemElements']\n currentMap['children'] = aList\n\n # Class Isotope\n currentMap = {}\n abstractTypes['Isotope'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00003'] = currentMap\n loadMaps['CHEL.Isotope'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00003'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'isotopes'\n currentMap['objkey'] = 'massNumber'\n currentMap['class'] = ccp.api.molecule.ChemElement.Isotope\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute Isotope.abundance\n currentMap = {}\n contentMap['abundance'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00011'] = currentMap\n loadMaps['CHEL.Isotope.abundance'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.abundance'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00011'\n currentMap['name'] = 'abundance'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00058')\n\n # Attribute Isotope.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute Isotope.gyroMagneticRatio\n currentMap = {}\n contentMap['gyroMagneticRatio'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00008'] = currentMap\n loadMaps['CHEL.Isotope.gyroMagneticRatio'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.gyroMagneticRatio'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00008'\n currentMap['name'] = 'gyroMagneticRatio'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00031')\n\n # Attribute Isotope.halfLife\n currentMap = {}\n contentMap['halfLife'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-06-07-18:18:13_00001'] = currentMap\n loadMaps['CHEL.Isotope.halfLife'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.halfLife'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-06-07-18:18:13_00001'\n currentMap['name'] = 'halfLife'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00007')\n\n # Attribute Isotope.halfLifeError\n currentMap = {}\n contentMap['halfLifeError'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-06-07-18:18:13_00002'] = currentMap\n loadMaps['CHEL.Isotope.halfLifeError'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.halfLifeError'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-06-07-18:18:13_00002'\n currentMap['name'] = 'halfLifeError'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00007')\n\n # Attribute Isotope.halfLifeType\n currentMap = {}\n contentMap['halfLifeType'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-06-07-18:18:13_00003'] = currentMap\n loadMaps['CHEL.Isotope.halfLifeType'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.halfLifeType'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-06-07-18:18:13_00003'\n 
currentMap['name'] = 'halfLifeType'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = 'unknown'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-06-07-18:18:10_00002')\n\n # Attribute Isotope.magneticMoment\n currentMap = {}\n contentMap['magneticMoment'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00013'] = currentMap\n loadMaps['CHEL.Isotope.magneticMoment'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.magneticMoment'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00013'\n currentMap['name'] = 'magneticMoment'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00031')\n\n # Attribute Isotope.mass\n currentMap = {}\n contentMap['mass'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00010'] = currentMap\n loadMaps['CHEL.Isotope.mass'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.mass'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00010'\n currentMap['name'] = 'mass'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00031')\n\n # Attribute Isotope.massNumber\n currentMap = {}\n contentMap['massNumber'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00007'] = currentMap\n loadMaps['CHEL.Isotope.massNumber'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.massNumber'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00007'\n currentMap['name'] = 'massNumber'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['proc'] = 'direct'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032')\n\n # Attribute Isotope.quadrupoleMoment\n currentMap = {}\n contentMap['quadrupoleMoment'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00014'] = currentMap\n loadMaps['CHEL.Isotope.quadrupoleMoment'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.quadrupoleMoment'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00014'\n currentMap['name'] = 'quadrupoleMoment'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00031')\n\n # Attribute Isotope.receptivity\n currentMap = {}\n contentMap['receptivity'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00012'] = currentMap\n loadMaps['CHEL.Isotope.receptivity'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.receptivity'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00012'\n currentMap['name'] = 'receptivity'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00031')\n\n # Attribute Isotope.spin\n currentMap = {}\n contentMap['spin'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00009'] = currentMap\n loadMaps['CHEL.Isotope.spin'] = currentMap\n currentMap['tag'] = 'CHEL.Isotope.spin'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:12_00009'\n currentMap['name'] = 'spin'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Role Isotope.access\n 
contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n # End of Isotope\n\n currentMap = abstractTypes.get('Isotope')\n aList = ['abundance', 'gyroMagneticRatio', 'halfLife', 'halfLifeError', 'halfLifeType', 'magneticMoment', 'mass', 'massNumber', 'quadrupoleMoment', 'receptivity', 'spin']\n currentMap['headerAttrs'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Out-of-package link to ChemElement\n currentMap = {}\n exolinks['ChemElement'] = currentMap\n loadMaps['CHEL.exo-ChemElement'] = currentMap\n currentMap['tag'] = 'CHEL.exo-ChemElement'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00004'\n currentMap['name'] = 'ChemElement'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccp.api.molecule.ChemElement.ChemElement\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00055'))\n\n # Out-of-package link to ChemElementStore\n currentMap = {}\n exolinks['ChemElementStore'] = currentMap\n loadMaps['CHEL.exo-ChemElementStore'] = currentMap\n currentMap['tag'] = 'CHEL.exo-ChemElementStore'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00005'\n currentMap['name'] = 'ChemElementStore'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccp.api.molecule.ChemElement.ChemElementStore\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n\n # Out-of-package link to Isotope\n currentMap = {}\n exolinks['Isotope'] = currentMap\n loadMaps['CHEL.exo-Isotope'] = currentMap\n currentMap['tag'] = 'CHEL.exo-Isotope'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:19:49_00003'\n currentMap['name'] = 'Isotope'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccp.api.molecule.ChemElement.Isotope\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00055'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032'))", "def shp2idomain(shp_path,g,idomain,features_type=\"polygon\",layer=0):\n \n if len(idomain.shape) != 1:\n idomain = idomain.reshape(g.nlay*g.nrow*g.ncol) # reshape idomain if not in the right format\n res = g.intersect(shp_path,features_type,layer)\n idomain[res.nodenumber] = 1\n idomain = idomain.reshape(g.nlay,g.nrow,g.ncol)", "def _buildSpecializeMap(cls, namespaces, interwikimap):\n\n from mwlib.lang import languages\n \n res = {}\n\n def reg(name, num):\n name = name.lower()\n if num == namespace.NS_CATEGORY:\n res[name] = (CategoryLink, num)\n elif num == namespace.NS_FILE:\n res[name] = (ImageLink, num)\n else:\n res[name] = (NamespaceLink, num)\n\n for name, num in namespaces.iteritems():\n if isinstance(name, basestring):\n reg(name, num)\n else:\n for n in name:\n reg(n, num)\n\n for prefix, d in interwikimap.items():\n if 'language' in interwikimap[prefix] or prefix in languages:\n res[prefix] = (LangLink, prefix)\n else:\n res[prefix] = (InterwikiLink, d.get('renamed', prefix))\n \n return res", "def _preprocess(*elements):\n output_dict = {}\n num_domains = len(elements)\n for idx, (domain, elem) in enumerate(zip(domains, elements)):\n uint8_img = elem['image']\n patch = 
data_provider.full_image_to_patch(uint8_img, patch_size)\n label = tf.one_hot(idx, num_domains)\n output_dict[domain] = {'images': patch, 'labels': label}\n return output_dict", "def setup_units():\n # The number of tetrodes, probes, etc - any kind of grouping\n num_groups = 8\n\n # The region that each group belongs to\n regions = [\"SUB\"] * num_groups\n\n # A group number for each group, for example the tetrode number\n groups = [1, 2, 3, 4, 9, 10, 11, 12]\n\n output_dict = {\n \"num_groups\": num_groups,\n \"region\": regions,\n \"group\": groups,\n }\n\n return output_dict", "def get_storable_dict(self):\n d = super().get_storable_dict()\n d.update(grp=turn_keys_into_str(self._grp), grp_order=self._grp_order)\n return d", "def CRITs_mappings(self):\n self.crits_type_mapping = {}\n self.crits_type_mapping[\"DOMAIN\"] = \"URI - Domain Name\"\n self.crits_type_mapping[\"URI - Domain Name\"] = \"DOMAIN\"\n self.crits_type_mapping[\"IP\"] = \"Address - ipv4-addr\"\n self.crits_type_mapping[\"Address - ipv4-addr\"] = \"IP\"", "def dict_initialise(metadata, analysistype):\n for sample in metadata:\n sample[analysistype].dnaseq = dict()\n sample[analysistype].protseq = dict()\n sample[analysistype].ntindex = dict()\n sample[analysistype].aaindex = dict()\n sample[analysistype].ntalign = dict()\n sample[analysistype].aaalign = dict()\n sample[analysistype].aaidentity = dict()\n return metadata", "def network_nodes_species(self):\n G, mapping = self.network()\n waste, resources, intmed_products = self.amenities()\n\n node_dict = {}\n\n for nd in G:\n # print(nd)\n if isinstance(nd, int):\n node_dict[nd] = \"r\"\n elif nd in self.commodity:\n node_dict[nd] = \"Xc\"\n elif nd in waste:\n node_dict[nd] = \"w\"\n elif nd in resources:\n node_dict[nd] = \"Xr\"\n elif nd in intmed_products:\n node_dict[nd] = \"InPr\"\n\n return node_dict", "def determine_package_architecture(self, has_shared_object_files):\n logger.debug(\"Checking package architecture ..\")\n if has_shared_object_files:\n logger.debug(\"Package contains shared object files, tagging with %s architecture.\",\n self.converter.debian_architecture)\n return self.converter.debian_architecture\n else:\n logger.debug(\"Package doesn't contain shared object files, dealing with a portable package.\")\n return 'all'", "def makeMapping(globalMap):\n \n from memops.xml.Implementation import bool2str, str2bool\n\n # Set up top level dictionaries\n loadMaps = globalMap.get('loadMaps')\n mapsByGuid = globalMap.get('mapsByGuid')\n\n abstractTypes = globalMap.get('CCLB').get('abstractTypes')\n exolinks = globalMap.get('CCLB').get('exolinks')\n\n # Class AtomLabel\n currentMap = {}\n abstractTypes['AtomLabel'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-14:25:09_00018'] = currentMap\n loadMaps['CCLB.AtomLabel'] = currentMap\n currentMap['tag'] = 'CCLB.AtomLabel'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:25:09_00018'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'atomLabels'\n currentMap['class'] = ccp.api.molecule.ChemCompLabel.AtomLabel\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute AtomLabel.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute AtomLabel.isotopeCode\n currentMap = {}\n contentMap['isotopeCode'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:37_00092'] = currentMap\n loadMaps['CCLB.AtomLabel.isotopeCode'] = currentMap\n currentMap['tag'] = 
'CCLB.AtomLabel.isotopeCode'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:37_00092'\n currentMap['name'] = 'isotopeCode'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Attribute AtomLabel.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00003'] = currentMap\n loadMaps['CCLB.AtomLabel.name'] = currentMap\n currentMap['tag'] = 'CCLB.AtomLabel.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00003'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Attribute AtomLabel.subType\n currentMap = {}\n contentMap['subType'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00004'] = currentMap\n loadMaps['CCLB.AtomLabel.subType'] = currentMap\n currentMap['tag'] = 'CCLB.AtomLabel.subType'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00004'\n currentMap['name'] = 'subType'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['proc'] = 'direct'\n currentMap['default'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032')\n\n # Attribute AtomLabel.weight\n currentMap = {}\n contentMap['weight'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:37_00093'] = currentMap\n loadMaps['CCLB.AtomLabel.weight'] = currentMap\n currentMap['tag'] = 'CCLB.AtomLabel.weight'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:37_00093'\n currentMap['name'] = 'weight'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = 1.0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00009')\n\n # Role AtomLabel.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n # End of AtomLabel\n\n currentMap = abstractTypes.get('AtomLabel')\n aList = ['isotopeCode', 'name', 'subType', 'weight']\n currentMap['headerAttrs'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Class ChemCompLabel\n currentMap = {}\n abstractTypes['ChemCompLabel'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-14:25:09_00014'] = currentMap\n loadMaps['CCLB.ChemCompLabel'] = currentMap\n currentMap['tag'] = 'CCLB.ChemCompLabel'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:25:09_00014'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'chemCompLabels'\n currentMap['class'] = ccp.api.molecule.ChemCompLabel.ChemCompLabel\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute ChemCompLabel.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute ChemCompLabel.ccpCode\n currentMap = {}\n contentMap['ccpCode'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:37_00073'] = currentMap\n loadMaps['CCLB.ChemCompLabel.ccpCode'] = currentMap\n currentMap['tag'] = 'CCLB.ChemCompLabel.ccpCode'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:37_00073'\n currentMap['name'] = 'ccpCode'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = 
mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-09-12-18:31:28_00003')\n\n # Attribute ChemCompLabel.molType\n currentMap = {}\n contentMap['molType'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-18:23:37_00072'] = currentMap\n loadMaps['CCLB.ChemCompLabel.molType'] = currentMap\n currentMap['tag'] = 'CCLB.ChemCompLabel.molType'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-18:23:37_00072'\n currentMap['name'] = 'molType'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:52_00024')\n\n # Role ChemCompLabel.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role ChemCompLabel.isotopomers\n currentMap = {}\n contentMap['isotopomers'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-10-24-11:30:13_00001'] = currentMap\n loadMaps['CCLB.ChemCompLabel.isotopomers'] = currentMap\n currentMap['tag'] = 'CCLB.ChemCompLabel.isotopomers'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-24-11:30:13_00001'\n currentMap['name'] = 'isotopomers'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['content'] = globalMap.get('CCLB').get('abstractTypes')\n # End of ChemCompLabel\n\n currentMap = abstractTypes.get('ChemCompLabel')\n aList = ['ccpCode', 'molType']\n currentMap['headerAttrs'] = aList\n aList = ['isotopomers', 'access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n aList = ['isotopomers']\n currentMap['children'] = aList\n\n # Class Isotopomer\n currentMap = {}\n abstractTypes['Isotopomer'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-10-24-11:28:54_00001'] = currentMap\n loadMaps['CCLB.Isotopomer'] = currentMap\n currentMap['tag'] = 'CCLB.Isotopomer'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-24-11:28:54_00001'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'isotopomers'\n currentMap['objkey'] = 'serial'\n currentMap['class'] = ccp.api.molecule.ChemCompLabel.Isotopomer\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute Isotopomer.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute Isotopomer.serial\n currentMap = {}\n contentMap['serial'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00005'] = currentMap\n loadMaps['CCLB.Isotopomer.serial'] = currentMap\n currentMap['tag'] = 'CCLB.Isotopomer.serial'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00005'\n currentMap['name'] = 'serial'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032')\n\n # Attribute Isotopomer.weight\n currentMap = {}\n contentMap['weight'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00006'] = currentMap\n loadMaps['CCLB.Isotopomer.weight'] = currentMap\n currentMap['tag'] = 'CCLB.Isotopomer.weight'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00006'\n currentMap['name'] = 'weight'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = 1.0\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00009')\n\n # Role Isotopomer.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role 
Isotopomer.atomLabels\n currentMap = {}\n contentMap['atomLabels'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00001'] = currentMap\n loadMaps['CCLB.Isotopomer.atomLabels'] = currentMap\n currentMap['tag'] = 'CCLB.Isotopomer.atomLabels'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-24-11:30:14_00001'\n currentMap['name'] = 'atomLabels'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['content'] = globalMap.get('CCLB').get('abstractTypes')\n # End of Isotopomer\n\n currentMap = abstractTypes.get('Isotopomer')\n aList = ['serial', 'weight']\n currentMap['headerAttrs'] = aList\n aList = ['atomLabels', 'access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n aList = ['atomLabels']\n currentMap['children'] = aList\n\n # Class LabelingScheme\n currentMap = {}\n abstractTypes['LabelingScheme'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-01-24-12:23:14_00001'] = currentMap\n loadMaps['CCLB.LabelingScheme'] = currentMap\n currentMap['tag'] = 'CCLB.LabelingScheme'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-01-24-12:23:14_00001'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'labelingSchemes'\n currentMap['isTop'] = True\n currentMap['objkey'] = 'name'\n currentMap['class'] = ccp.api.molecule.ChemCompLabel.LabelingScheme\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute LabelingScheme.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute LabelingScheme.createdBy\n contentMap['createdBy'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:00:59_00002__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute LabelingScheme.details\n currentMap = {}\n contentMap['details'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-01-24-12:23:55_00005'] = currentMap\n loadMaps['CCLB.LabelingScheme.details'] = currentMap\n currentMap['tag'] = 'CCLB.LabelingScheme.details'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-01-24-12:23:55_00005'\n currentMap['name'] = 'details'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00036')\n\n # Attribute LabelingScheme.guid\n contentMap['guid'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:26_00002')\n\n # Attribute LabelingScheme.isModifiable\n contentMap['isModifiable'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-17-14:16:26_00010__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute LabelingScheme.lastUnlockedBy\n contentMap['lastUnlockedBy'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:00:59_00003__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute LabelingScheme.longName\n currentMap = {}\n contentMap['longName'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-01-24-12:23:55_00004'] = currentMap\n loadMaps['CCLB.LabelingScheme.longName'] = currentMap\n currentMap['tag'] = 'CCLB.LabelingScheme.longName'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-01-24-12:23:55_00004'\n currentMap['name'] = 'longName'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['data'] = 
mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Attribute LabelingScheme.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-01-24-12:23:55_00003'] = currentMap\n loadMaps['CCLB.LabelingScheme.name'] = currentMap\n currentMap['tag'] = 'CCLB.LabelingScheme.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-01-24-12:23:55_00003'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Role LabelingScheme.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role LabelingScheme.chemCompLabels\n currentMap = {}\n contentMap['chemCompLabels'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2007-01-24-12:23:55_00002'] = currentMap\n loadMaps['CCLB.LabelingScheme.chemCompLabels'] = currentMap\n currentMap['tag'] = 'CCLB.LabelingScheme.chemCompLabels'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-01-24-12:23:55_00002'\n currentMap['name'] = 'chemCompLabels'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('CCLB').get('abstractTypes')\n # End of LabelingScheme\n\n currentMap = abstractTypes.get('LabelingScheme')\n aList = ['createdBy', 'guid', 'isModifiable', 'lastUnlockedBy', 'name']\n currentMap['headerAttrs'] = aList\n aList = ['details', 'longName']\n currentMap['simpleAttrs'] = aList\n aList = ['chemCompLabels', 'access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n aList = ['chemCompLabels']\n currentMap['children'] = aList\n\n # Out-of-package link to AtomLabel\n currentMap = {}\n exolinks['AtomLabel'] = currentMap\n loadMaps['CCLB.exo-AtomLabel'] = currentMap\n currentMap['tag'] = 'CCLB.exo-AtomLabel'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:25:09_00018'\n currentMap['name'] = 'AtomLabel'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccp.api.molecule.ChemCompLabel.AtomLabel\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:52_00024'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-09-12-18:31:28_00003'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037'))\n\n # Out-of-package link to ChemCompLabel\n currentMap = {}\n exolinks['ChemCompLabel'] = currentMap\n loadMaps['CCLB.exo-ChemCompLabel'] = currentMap\n currentMap['tag'] = 'CCLB.exo-ChemCompLabel'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:25:09_00014'\n currentMap['name'] = 'ChemCompLabel'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccp.api.molecule.ChemCompLabel.ChemCompLabel\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:52_00024'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-09-12-18:31:28_00003'))\n\n # Out-of-package link to Isotopomer\n 
currentMap = {}\n exolinks['Isotopomer'] = currentMap\n loadMaps['CCLB.exo-Isotopomer'] = currentMap\n currentMap['tag'] = 'CCLB.exo-Isotopomer'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-10-24-11:28:54_00001'\n currentMap['name'] = 'Isotopomer'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccp.api.molecule.ChemCompLabel.Isotopomer\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:52_00024'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2007-09-12-18:31:28_00003'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00032'))\n\n # Out-of-package link to LabelingScheme\n currentMap = {}\n exolinks['LabelingScheme'] = currentMap\n loadMaps['CCLB.exo-LabelingScheme'] = currentMap\n currentMap['tag'] = 'CCLB.exo-LabelingScheme'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2007-01-24-12:23:14_00001'\n currentMap['name'] = 'LabelingScheme'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = ccp.api.molecule.ChemCompLabel.LabelingScheme\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))", "def build_intrenal_hap_dict(self, alleles, group2):\n\n hap_dict = self.hap_dict_per_group[group2]\n\n internal = {}\n for i, haplotype in enumerate(alleles):\n if type(haplotype[0])==tuple: #Checks if X is a tuple/list of alleles.\n n = len(haplotype)\n if n==1:\n internal[1 << i] = hap_dict[haplotype[0]]\n elif n==2:\n internal[1 << i] = hap_dict[haplotype[0]] & hap_dict[haplotype[1]]\n else:\n internal[1 << i] = reduce(and_,itemgetter(*haplotype)(hap_dict))\n\n elif type(haplotype[0])==int: #Checks if X is a single allele.\n internal[1 << i] = hap_dict[haplotype]\n else:\n raise Exception('error: joint_frequencies only accepts alleles and tuple/list of alleles.')\n\n return internal", "def map_to_homo_nid(self, ids, ntype):\n ...", "def map_to_homo_nid(self, ids, ntype):\n ...", "def create_platforms(plat_map):\n platform_group = set()\n for plat in plat_map:\n platform_group.add(Platform([(plat[1] + 0.5) * TILE_DIM, (plat[0] + 0.5)\n * TILE_DIM, plat[2], plat[3]]))\n return platform_group", "def build_attributes_and_ports(pnode):\n set_attributes(pnode)\n set_ports(pnode)\n\n for sub_platform_id, sub_pnode in pnode.subplatforms.iteritems():\n build_attributes_and_ports(sub_pnode)", "def map_detector_to_basis_dict(detector):\n\n root = detector.hierarchy()\n\n d = 0 # only allow one detector for now\n metro = {(d,):basis(panelgroup=root)}\n\n for q, quad in enumerate(root):\n metro[(d,q)] = basis(panelgroup=quad)\n for s, sensor in enumerate(quad):\n metro[(d,q,s)] = basis(panelgroup=sensor)\n for a, asic in enumerate(sensor):\n # at the asic level, need to subtract off the d0 vector so this asic's basis does not include\n # the shift from the asic center to the asic corner. 
Also need to flip the Y back around\n # to be consistent with how it was originally stored\n d_mat = asic.get_local_d_matrix()\n fast = matrix.col((d_mat[0],d_mat[3],d_mat[6])).normalize()\n slow = matrix.col((d_mat[1],d_mat[4],d_mat[7])).normalize()\n orig = matrix.col((d_mat[2],d_mat[5],d_mat[8]))\n\n v3 = fast.cross(slow).normalize()\n\n r3 = matrix.sqr((fast[0],slow[0],v3[0],\n fast[1],slow[1],v3[1],\n fast[2],slow[2],v3[2]))\n\n transform = matrix.sqr((1, 0, 0,\n 0,-1, 0,\n 0, 0,-1))\n\n pix_size = asic.get_pixel_size()\n img_size = asic.get_image_size()\n\n offset = matrix.col((-pix_size[0]*(img_size[0])/2,\n +pix_size[1]*(img_size[1])/2,0))\n\n metro[(d,q,s,a)] = basis(orientation=(r3*transform).r3_rotation_matrix_as_unit_quaternion(),\n translation=orig-offset)\n\n return metro", "def _BuildTypeMaps(self, type_namespaces):\n for type_namespace in type_namespaces:\n self.type_namespaces_map[type_namespace.namespace] = type_namespace\n for entity_type in type_namespace.valid_types_map.values():\n if entity_type.guid:\n if entity_type.guid in self.type_guids_map:\n dup_guid_entry = self.type_guids_map[entity_type.guid]\n dup_guid_type = self.GetEntityType(dup_guid_entry.namespace,\n dup_guid_entry.typename)\n if dup_guid_type is None:\n raise RuntimeError('Duplicate type with guid ' + entity_type.guid\n + ' should always be mapped')\n entity_type.AddFinding(\n findings_lib.DuplicateGuidsError(type_namespace.namespace,\n entity_type, dup_guid_type))\n dup_guid_type.AddFinding(\n findings_lib.DuplicateGuidsError(dup_guid_entry.namespace,\n dup_guid_type, entity_type))\n self.type_guids_map[entity_type.guid] = EntityIdByEntry(\n namespace=type_namespace.namespace, typename=entity_type.typename)", "def _packaged_dict_for_entity(rt):\n entity = rt.entity\n return {u'entity_id': entity.id,\\\n u'name': entity.aggregation_paths['_geo'][-1]}", "def layout_method_mapper(self):\n return {\n \"kamada_kawai_layout\": kamada_kawai_layout,\n \"fruchterman_reingold_layout\": fruchterman_reingold_layout,\n \"spectral_layout\": spectral_layout,\n }", "def create_system_data():\n system_data = dict()\n system_data['system'] = dict()\n system_data['system']['primary'] = dict()\n system_data['system']['primary']['controllers'] = dict()\n system_data['system']['primary']['controllers']['re0'] = dict()\n system_data['system']['primary']['controllers']['re0']['hostname'] = 'abc'\n system_data['system']['primary']['controllers']['re0']['mgt-ip'] = '1.1.1.1'\n system_data['system']['primary']['controllers']['re0']['osname'] = 'Paragon'\n system_data['system']['primary']['name'] = 'abc'\n system_data['system']['primary']['model'] = 'Paragon'\n system_data['system']['primary']['make'] = 'Calnex'\n system_data['system']['primary']['server-ip'] = '1.1.1.2'\n system_data['system']['primary']['osname'] = 'Paragon'\n return system_data", "def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n\n # return { \"faq_choice\": self.from_entity(\"faq_choice\"),\"faq_question\": self.from_entity(\"faq_question\"), \"faq_text\": [self.from_text()]}\n\n return {\"faq_choice\": [self.from_entity(\"faq_choice\"), self.from_text()], \"faq_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]}", "def translate_network_types(self, hosts):\n nt_requirements = self._aggregate_networks(hosts)\n nt_map = {}\n for network_type, count in nt_requirements.items():\n network_name = self._pick_network(network_type, count)\n nt_map[network_type] = network_name\n\n for host in hosts:\n # skip hosts which have 
low-level network names defined\n names = host.get(\"networks\")\n if names:\n continue\n\n nt = host.get(\"network\")\n\n # skip if nt is not network type\n if not self.config[\"networks\"].get(nt):\n continue\n\n network_name = nt_map[nt]\n host[\"network\"] = network_name", "def base_type_dict():\n return {'filter' : filters.Filter,\n 'global_options' : global_options.GlobalOptions,\n 'input_device' : input_devices.InputDevice,\n 'input_stream' : input_streams.InputStream,\n 'output_device' : output_devices.OutputDevice,\n 'output_stream' : output_streams.OutputStream}", "def crm2dict(conf_list=None):\n if conf_list is None:\n conf_list=configure_parse()\n conf_dict=dict(conf_list)\n results={}\n groupkeys = getkeys(conf_dict, 'group')\n primitivekeys = getkeys(conf_dict, 'primitive')\n for gk in groupkeys:\n results.setdefault(gk.split()[1], {})\n locationkeys = getkeys(conf_dict, 'location')\n for key in conf_dict.keys():\n conf_type, tag = key.split()\n if conf_type == 'group':\n members=[x for x in conf_dict[key] if not (x.startswith('target-role') or x == 'meta')]\n results[tag].update({'members' : members })\n elif conf_type == 'location':\n service_name, loc=parse_tag(tag)\n balancer = conf_dict[key][2]\n if service_name not in results.keys():\n results.setdefault(service_name, {'loadbalancers' : {loc:balancer}})\n elif 'loadbalancers' not in results[service_name].keys():\n results[service_name].update({'loadbalancers' : {loc:balancer}})\n else:\n results[service_name]['loadbalancers'].update({loc:balancer})\n elif conf_type == 'primitive':\n service_name, service_type = parse_tag(tag)\n if service_type == 'ld':\n results[service_name].update({'type' : 'ldirectord'})\n elif service_type[:2] == 'ip':\n params = conf_dict[key]\n parsed_params={}\n for param in params:\n if param[:3] == 'ip=':\n parsed_params.setdefault('ip', param[4:-1])\n elif param[:13] == 'cidr_netmask=':\n parsed_params.setdefault('cidr_netmask', param[14:-1])\n elif param[:4] == 'nic=':\n parsed_params.setdefault('nic', param[5:-1])\n if 'ips' not in results[service_name].keys():\n results[service_name].update({'ips' : [haipstr(parsed_params)]})\n else:\n results[service_name]['ips'].append(haipstr(parsed_params))\n return results", "def extend_inventory(self, datapath, variable_type='all', extra_pref=None, first_suffix=None):\n if extra_pref is None:\n extra_pref = self.extra_pref\n if first_suffix is None:\n first_suffix = self.first_suffix\n \n files = [file for file in listdir(datapath) if '.nc' in file and not 'xyz' in file]\n # file_prefixes = list(set([ file.split('_')[0] for file in files ]))\n # file_prefixes = list(set([ \"_\".join(file.split('_')[0:2]) for file in files ]))\n if extra_pref:\n file_prefixes = list(set([ \"_\".join(file.split('_')[0:2] + [extra_pref]) for file in files ]))\n else:\n file_prefixes = list(set([ \"_\".join(file.split('_')[0:2]) for file in files ]))\n \n inventory = {}\n for file_prefix in file_prefixes:\n fname = path.join(datapath,f'{file_prefix}{first_suffix}')\n if not self.metafile:\n self.metafile = fname\n vars = [ var for var in list(Dataset(fname).variables) if var not in self.skip_vars ]\n for var in vars:\n inventory[var] = {'files': sorted([path.join(datapath,file) \n for file in listdir(datapath) if file_prefix in file])}\n \n if variable_type == 'predictors':\n self.predictor_inventory = {**self.predictor_inventory, **inventory}\n self.predictors = self.predictor_inventory.keys()\n elif variable_type == 'predictands':\n self.predictand_inventory = 
{**self.predictand_inventory, **inventory}\n self.predictands = self.predictand_inventory.keys()\n else:\n self.predictor_inventory = {**self.predictor_inventory, **inventory}\n self.predictors = self.predictor_inventory.keys()\n self.predictand_inventory = {**self.predictand_inventory, **inventory}\n self.predictands = self.predictand_inventory.keys()", "def construct_relation_group(\n self,\n group_name,\n domain_type,\n range_type,\n group_members = None):\n if not group_members:\n group_members = sorted([\n rel for rel in self.get_relation_names()\n if self.get_domain(rel) == domain_type and\n self.get_range(rel) == range_type\n ])\n if self.is_type(group_name):\n raise RelationNameError(group_name, 'Group already exists.')\n\n self.declare_entity_type(\n group_name, fixed_vocab=group_members, unknown_marker=None)\n\n for r in group_members:\n if self.is_dense(r):\n raise ValueError('Dense relation %r is unsupported.' % r)\n\n group = RelationGroup(group_name, group_members)\n self._group[group_name] = group\n # declare the schema for the necessary extension to the KG\n self.declare_relation(group.relation_rel, group.triple_type, group_name)\n self.declare_relation(group.subject_rel, group.triple_type, domain_type)\n self.declare_relation(group.object_rel, group.triple_type, range_type)\n self.declare_relation(group.weight_rel, group.triple_type,\n group.triple_type)\n # relation i in this group has num_rows[i] rows\n try:\n num_rows = [self._np_initval[r].data.shape[0] for r in group.members]\n except KeyError as err:\n raise RelationNameError(\n str(err), 'An undefined relation was encountered. '\n 'All relations in a relation group must be defined before calling '\n 'construct_relation_group.')\n total_num_rows = sum(num_rows)\n # names of all those triples\n self.extend_type(\n group.triple_type,\n [group.triple_prefix + str(i) for i in range(total_num_rows)])\n # now populate the sparse matrixes\n triple_indices = np.arange(total_num_rows, dtype='int32')\n rel_indices = np.hstack([\n np.ones(num_rows[i], dtype='int32') * i\n for i in range(len(group.members))\n ])\n subj_indices = np.hstack([self._np_initval[r].col for r in group.members])\n obj_indices = np.hstack([self._np_initval[r].row for r in group.members])\n weight_data = np.hstack([self._np_initval[r].data for r in group.members])\n ones_data = np.ones_like(weight_data)\n # weights are in a diagonal matrix\n self._np_initval[group.weight_rel] = scipy.sparse.coo_matrix(\n (weight_data, (triple_indices, triple_indices)),\n shape=(total_num_rows, total_num_rows),\n dtype='float32')\n self._np_initval[group.relation_rel] = scipy.sparse.coo_matrix(\n (weight_data, (rel_indices, triple_indices)),\n shape=(len(group.members), total_num_rows),\n dtype='float32')\n self._np_initval[group.subject_rel] = scipy.sparse.coo_matrix(\n (ones_data, (subj_indices, triple_indices)),\n shape=(self.get_max_id(domain_type), total_num_rows),\n dtype='float32')\n self._np_initval[group.object_rel] = scipy.sparse.coo_matrix(\n (ones_data, (obj_indices, triple_indices)),\n shape=(self.get_max_id(range_type), total_num_rows),\n dtype='float32')\n self.freeze(group.triple_type, unknown_marker=None)\n return group", "def get_platform_combinations():\n mapped_osname = platform_map(g_osname)\n mapped_osarch = g_osarch\n ret = [mapped_osname]\n while True:\n ret += [mapped_osarch, mapped_osname + \"-\" + mapped_osarch]\n mapped_osarch = platform_map_iterate(mapped_osarch)\n if not mapped_osarch:\n break\n return sorted(ret, reverse=True) + 
[\"default\"]", "def _getDefaultGroupDict(self, container):\n ddict = dict(container._dict_)\n ddict.update({\n \"_def_for_repos\": container.for_repos,\n \"_def_for_paths\": container.for_paths,\n })\n\n return ddict", "def hierachy_nomenclature(a2_data):\n ret_dic = OrderedDict()\n ret_dic['X'] = OrderedDict()\n ret_dic['X']['name'] = a2_data['xs'].keys()\n ret_dic['X']['N'] = len(a2_data['xs'].keys())\n ret_dic['I'] = OrderedDict()\n ret_dic['I']['name'] = a2_data['xs']['1'].keys()\n ret_dic['I']['N'] = len(a2_data['xs']['1'].keys())\n ret_dic['R'] = OrderedDict()\n ret_dic['R']['name'] = a2_data['xs']['1']['U235'].keys()\n ret_dic['R']['N'] = len(a2_data['xs']['1']['U235'].keys())\n ret_dic['G'] = OrderedDict()\n ret_dic['G']['name'] = a2_data['xs']['1']['U235']['abso'].keys()\n ret_dic['G']['N'] = len(a2_data['xs']['1']['U235']['abso'].keys())\n return ret_dic", "def group_by_domain(hash_entries):\n entries = (get_entry(h) for h in hash_entries)\n domains = {}\n for e in entries:\n domains[e['url_domain']] = domains.get(e['url_domain']) or []\n domains[e['url_domain']].append(e)\n return [{'domain': name, 'entries': ent} for name, ent in domains.items()]", "def make_melon_type_lookup(melon_types):\n\n melon_dict = {}\n for melon in melon_types:\n melon_dict[melon.code] = [melon]", "def proc_group(inp):\n dic = {}\n dic.update(proc_attr(inp))\n for key in inp.keys():\n if isinstance(inp[key], h5py.Group):\n dic.update({key:proc_group(inp[key])})\n else:\n dic[key] = inp[key][()]\n pass\n return dic", "def make_melon_type_lookup(melon_types):\n\n # Fill in the rest\n melon_dict = {}\n\n for melon in melon_types:\n key = melon.code\n melon_dict[key] = melon\n\n return melon_dict" ]
[ "0.53147066", "0.5278428", "0.51913285", "0.5105832", "0.5089798", "0.5063621", "0.5043128", "0.5004288", "0.49817312", "0.4942777", "0.49404955", "0.49214888", "0.49084446", "0.48987442", "0.48879838", "0.4885036", "0.48838812", "0.48718145", "0.4864769", "0.4852137", "0.48391476", "0.48375362", "0.48369786", "0.48351386", "0.48169854", "0.48155892", "0.4813881", "0.48055103", "0.47993198", "0.47892964", "0.47742972", "0.4752725", "0.47482404", "0.47419754", "0.4733186", "0.47279978", "0.4715346", "0.47145873", "0.4708937", "0.47086725", "0.4706479", "0.47037098", "0.4702962", "0.4698565", "0.46949318", "0.46815643", "0.46793324", "0.46726972", "0.4663308", "0.46605814", "0.46604353", "0.46574825", "0.46572146", "0.46570534", "0.46499085", "0.4646502", "0.46449068", "0.46448532", "0.46430197", "0.46429804", "0.46285895", "0.46283725", "0.46178755", "0.46126467", "0.46107572", "0.46098736", "0.4609327", "0.46044484", "0.45979008", "0.45923302", "0.45795867", "0.4578957", "0.45772806", "0.45754552", "0.457492", "0.45716563", "0.45656535", "0.45648268", "0.45647314", "0.45647314", "0.45645216", "0.45563334", "0.4552386", "0.4551677", "0.4549725", "0.45426056", "0.4540961", "0.45363697", "0.45342442", "0.45278615", "0.45264593", "0.45224133", "0.45197883", "0.45196548", "0.4518576", "0.45185152", "0.451709", "0.45156735", "0.45100787", "0.45097205" ]
0.56843174
0
Boolean function to check if a given architecture is exclusive.
def exclusive_arch(pathogen_groups_set, collapse_pathogen_groups):
    if len(pathogen_groups_set) == 1:
        return True
    # Only check pathogen grouping when the flag is on
    if collapse_pathogen_groups:
        if len(pathogen_groups_set) > 2:
            return False
        if 0 in pathogen_groups_set and 1 in pathogen_groups_set:
            return True
        if 3 in pathogen_groups_set and 4 in pathogen_groups_set:
            return True
    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_logical(*args):\n return _ida_hexrays.is_logical(*args)", "def __bool__(self):\n return any(self.smask)", "def is_infrastructure (self):\n return sum([1 for i in self.infras]) != 0", "def is_exclusive(self):\n return self.exclusive", "def incompatible_architecture(self) -> bool:\n return pulumi.get(self, \"incompatible_architecture\")", "def available_on_system(cls):\n return (cls.reason_to_be_disabled() is None)", "def incompatible_architecture(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"incompatible_architecture\")", "def check_masked(self):\n if self._alternate == 'N': # If our alternate allele is masked, or an 'N'\n return True # Return True\n else: # Otherwise\n return False # Return False", "def osarch_is_amd64():\n return osarch_match(\"amd64\")", "def _check_state(enabled_states, paper_states):\n enabled_states = set(enabled_states)\n paper_states = set(paper_states)\n return bool(enabled_states.intersection(paper_states))", "def isShiftHeld():\n return False if pm.about(batch=True) else (pm.getModifiers() & 1) > 0", "def in_state(self, state: str) -> bool:\n if state.startswith('APP_SPECIFIC'):\n app_state = int(state[len('APP_SPECIFIC'):])\n value = (app_state << 3) | 0b00000111\n return self.value == value\n return super().in_state(state)", "def is64Bit(program: ghidra.program.model.listing.Program) -> bool:\n ...", "def check_allowed(self):\n if self.state_model.op_state in [\n DevState.FAULT,\n DevState.UNKNOWN,\n DevState.DISABLE,\n ]:\n return False\n\n return True", "def g3(a, b): \n return not (a and b)", "def xor(a: bool, b: bool) -> bool:\n return (a and not b) or (not a and b)", "def _common_check(self, flag):\n has_perms = self.user.is_active and self.user.is_staff and (\n self.user.has_perm('blog.change_membership') or\n self.user.has_perm('blog.change_blog'))\n return has_perms or (self.role in ['O', 'A'] and\n not self.is_left() and\n not self.is_banned() and\n (flag or self.role == 'O'))", "def canUnlockAll(boxes):\n for key in range(1, len(boxes) - 1):\n res = False\n for index in range(len(boxes)):\n res = key in boxes[index] and key != index\n if res:\n break\n if res is False:\n return res\n return True", "def is_enabled(self):\n for arch in self.inputs:\n if arch.place.M < arch.weight:\n return False\n return True", "def ExclusiveAddressUse(self) -> bool:", "def ExclusiveAddressUse(self) -> bool:", "def ExclusiveAddressUse(self) -> bool:", "def ExclusiveAddressUse(self) -> bool:", "def is_non_exclusive(self, variable):\n non_exclusive = False\n for sub_effect in self._sub_effects:\n if sub_effect.get_variable() == variable:\n if not sub_effect.is_exclusive():\n non_exclusive = True\n elif len(sub_effect.get_value()) > 0 and not sub_effect.is_negated():\n return False\n return non_exclusive", "def is_commutative(*args):\n return _ida_hexrays.is_commutative(*args)", "def check_allowed(self):\n if self.state_model.op_state in [\n DevState.FAULT,\n DevState.UNKNOWN,\n DevState.ON,\n ]:\n tango.Except.throw_exception(\n f\"Disable() is not allowed in current state {self.state_model.op_state}\",\n \"Failed to invoke Disable command on SdpMasterLeafNode.\",\n \"SdpMasterLeafNode.Disable() \",\n tango.ErrSeverity.ERR,\n )\n return True", "def osarch_match(op):\n arch = g_osarch\n while True:\n if op == arch:\n return True\n arch = platform_map_iterate(arch)\n if not arch:\n break\n return False", "def has_exclusive_attributes(self):\n return any(schema.is_exclusive for schema in itervalues(self.schema))", "def is_commutative(self):\n 
try:\n return self.universe().is_commutative()\n except Exception:\n # This is not the mathematically correct default, but agrees with\n # history -- we've always assumed factored things commute\n return True", "def is_negated(x) -> bool:\n return not (x & 1 == 0)", "def _is_broadcast(self, op, op_reg_manager):\n op_slices = op_reg_manager.get_op_slices(op)\n op_groups = [op_reg_manager.get_op_group(op_slice)\n for op_slice in op_slices]\n return op_handler_util.get_op_size(op) == 1 and all(op_groups)", "def __bool__(self):\n return not self.undefine", "def is_32bit(self):\n return self.machine in ['i386', 'i586', 'i686']", "def isActive(state):\n return state in [State.enabled, State.softDisabling]", "def is_known(combo):\n return not _NONE_NONE & combo", "def mcu_enabled():\n\n return (_read_device_state() & _get_addr_for_bit(_mcu_bit)) == 0", "def is_logic(self):\n return self.value in ('and_logic', 'or_logic')", "def is_exclusive_attribute(self, name):\n return self.get_attribute_schema(name).is_exclusive", "def isUnConditional(self) -> bool:\n ...", "def osarch_is_64_bit():\n return osarch_match(\"64-bit\")", "def is_64bit(self):\n return self.machine == 'x86_64'", "def disabled(name):\n return not enabled(name)", "def g2(a, b): \n if a and b:\n return False\n else:\n return True", "def is_non_reducing(self):\n return bool(set(self.kind) & set(\"ABC\"))", "def osarch_is_ia32():\n return osarch_match(\"ia32\")", "def sdi_bus_valid(sdi_bus):\n bus_upper = sdi_bus.upper()\n if (\"PORT1\" in bus_upper) or (\"PORT2\" in bus_upper) or (\"RS485\" in bus_upper):\n return True\n else:\n return False", "def is_bitop(*args):\n return _ida_hexrays.is_bitop(*args)", "def is_bit_mask(enumeration, potential_mask):\n if not isinstance(potential_mask, six.integer_types):\n return False\n\n mask_enumerations = (\n CryptographicUsageMask,\n ProtectionStorageMask,\n StorageStatusMask\n )\n if enumeration not in mask_enumerations:\n return False\n\n mask = 0\n for value in [e.value for e in enumeration]:\n if (value & potential_mask) == value:\n mask |= value\n\n if mask != potential_mask:\n return False\n\n return True", "def is64bit(self):\n return platform.machine().endswith('64')", "def can_overwrite ( self, mask=None ):\n if mask is None:\n return self.value == self.OV_NONE\n else:\n return self.value & mask", "def f2(a, b): \n if not a and b:\n return True\n else:\n return False", "def __bool__(self):\n raise ValueError(\"bool() not permitted\")", "def not_op(target):\n if not isa(target, bool):\n return False\n return not target", "def check_architecture(target_architecture):\n if target_architecture == ARCH_16_BIT:\n # should be fine, most computers are at least 32 bit these days\n pass\n elif target_architecture == ARCH_32_BIT:\n # should be fine, most computers are at least 32 bit these days\n pass\n elif target_architecture == ARCH_64_BIT:\n # needs to be a 64 bit system\n is_64_bit_system = platform.machine().endswith(\"64\")\n if not is_64_bit_system:\n log_error(\"you are unable to analyze a 64-bit binary on a non-64-bit system\")\n else:\n log_error(f\"something is strange with the architecture type '{target_architecture}'\")", "def is_commutative(self, node):\n return node.name in commutative_operations", "def isEnabled(state):\n return (isActive(state) or state == State.preEnabled)", "def mask_pass(owned_permissions: int, requested_operation: int,) -> bool:\n return bool(owned_permissions & requested_operation)", "def can_be_disabled(self) -> bool:\n return True", "def logical_xor(a, 
b):\n return bool(a) ^ bool(b)", "def can_mi():\n pass", "def DualMode(self) -> bool:", "def is_system(self) -> undefined.UndefinedOr[bool]:", "def is_pure(self) -> bool:\r\n return self.is_valid and np.all([x[\"operation\"].is_pure for x in self.operations_by_name.values()])", "def f3(a, b): \n return not a and b", "def is_in_use(self):\n return bool(self.flags & 1)", "def check_flag(self):\n return self._flag is 0 or self._flag is 16", "def on_dedicated(self):\n\n return self.is_valid_platform() and self['MODE'] == 'enterprise'", "def is_system(self) -> bool:", "def opOk(op, validRegs):\n for operand in op.operands:\n if not operand in reversed(validRegs):\n return False\n # If we make it here, they're all valid\n return True", "def eeprom_enabled():\n\n return (_read_device_state() & _get_addr_for_bit(_eeprom_bit)) != 0", "def disabled(self):\n check1 = \"pf-m-disabled\" in self.browser.classes(self)\n check2 = \"pf-m-aria-disabled\" in self.browser.classes(self)\n return check1 or check2 or self.browser.get_attribute(\"disabled\", self) is not None", "def check(self):\n\n if not self.target.ok():\n return False\n\n if not self.progid.ok():\n return False\n\n if not self.prinapp.ok():\n return False\n\n if not self.observers.ok():\n return False\n\n return True", "def isLogicalOp( cond ):\n if( cond == CT.AND or cond == CT.OR or cond == CT.NOT ):\n return True\n else:\n return False", "def non_root_available(self):\n return self._adb_available and self._dev_emu", "def msan_supported(goroot: GoRoot) -> bool:\n if goroot.goos == \"linux\":\n return goroot.goarch in (\"amd64\", \"arm64\")\n elif goroot.goos == \"freebsd\":\n return goroot.goarch == \"amd64\"\n else:\n return False", "def is_nonbool_type(*args):\n return _ida_hexrays.is_nonbool_type(*args)", "def asan_supported(goroot: GoRoot) -> bool:\n if goroot.goos == \"linux\":\n return goroot.goarch in (\"arm64\", \"amd64\", \"riscv64\", \"ppc64le\")\n else:\n return False", "def is_simplifiable_logical_op(node, node_inputs):\n if isinstance(node, (LogicalAnd, LogicalOr)):\n lhs = node_inputs[0]\n rhs = node_inputs[1]\n return is_constant(lhs) or is_constant(rhs)\n elif isinstance(node, LogicalNot):\n return is_constant(node_inputs[0])\n else:\n return False", "def valid_prohibited_none_role(arch, **kwargs):\n xpath = '//*[@role=\"none\" or @role=\"presentation\"]'\n if arch.xpath(xpath):\n return \"Warning\"\n return True", "def Nand(*args):\n return Not(And(*args))", "def has_mask(self):\r\n return hasattr(self, '_has_mask')", "def matches(self, name):\n return name is not None and name in (self.leader, self.sync_standby)", "def _is_stack_cannery_check_bb_x86(self, xbb):\n # XXX TODO NOTE: seperate out arch-dependent code!!!\n\n # <exit block>\n # ...\n # mov 0x*(%rsp), %REG\n # xor %fs:0x*, %REG\n # jne *\n # check instruction sequence\n if len(xbb.insts) < 3:\n return False\n mov_inst = xbb.insts[-3]\n xor_inst = xbb.insts[-2]\n jne_inst = xbb.insts[-1]\n if not mov_inst.op.startswith(\"mov\") or \\\n not xor_inst.op.startswith(\"xor\") or \\\n not jne_inst.op.startswith(\"jne\"):\n return False\n # one of outs should be a return\n have_ret = False\n for out_bb in xbb.outs:\n if out_bb.insts[-1].op.startswith(\"ret\"):\n have_ret = True\n break\n if not have_ret:\n return False\n # check mov\n if mov_inst.opd[0].find(\"(%rsp)\") == -1 or \\\n not mov_inst.opd[0].startswith(\"0x\") or \\\n not mov_inst.opd[1].startswith(\"%\"):\n return False\n tmp_reg = mov_inst.opd[1]\n # check xor\n if xor_inst.opd[1] == tmp_reg:\n 
thread_local_mem = xor_inst.opd[0]\n elif xor_inst.opd[0] == tmp_reg:\n thread_local_mem = xor_inst.opd[1]\n else:\n return False\n if not thread_local_mem.startswith(\"%fs:0x\"):\n return False\n return True", "def is_odd(self):\n return S(self.parity()).is_odd", "def is_logical_and_not(node):\n if not isinstance(node, LogicalAnd):\n return False\n lhs = node.get_input(0)\n rhs = node.get_input(0)\n def input_predicate(op):\n return isinstance(op, LogicalNot) or is_logical_and_not(op)\n return input_predicate(lhs) and input_predicate(rhs)", "def _like_rnncell(cell):\n conditions = [hasattr(cell, \"output_size\"), hasattr(cell, \"state_size\"),\n hasattr(cell, \"zero_state\"), callable(cell)]\n return all(conditions)", "def in_state(self, state: str) -> bool:\n bits = self._MAP.get(state)\n if bits is None:\n raise ValueError(\"Invalid state\")\n return self.value & bits[0] == bits[0] and self.value & bits[1] == 0", "def _check_mutual_preference(resident, hospital):\n\n return resident in hospital.prefs and hospital in resident.prefs", "def __bool__(self) -> bool:\n return self.return_code == 0", "def __bool__(self) -> bool:\n return self.return_code == 0", "def __bool__(self) -> bool:\n return self.return_code == 0", "def valid(black, white, x, y):\n return (not black & gobit[(x, y)]) and (not white & gobit[(x, y)])", "def is_Disable_allowed(self):\n handler = self.get_command_object(\"Disable\")\n return handler.check_allowed()", "def is_pure_electric(self):\n klist = [key for key in self.components if not self.component_is_zero(key)]\n return all([key.startswith('electric') for key in klist])", "def exclusive_in(in_list,master_list):\n\tif in_list==[]:\n\t\treturn True\n\telse:\n\t\tfor elem in in_list:\n\t\t\tif elem not in master_list:\n\t\t\t\treturn False\n\t\treturn True", "def check_if_mask(address):\n bin_address = address_to_bin(address)\n bin_str = ''.join(bin_address.split('.'))\n i = 0\n while i < len(bin_str) and bin_str[i] == '1':\n i += 1\n\n if i == 0:\n return False\n\n for j in range(i, len(bin_str)):\n if bin_str[j] == '1':\n return False\n\n return True", "def is_legal(self, start, end) -> bool:\n return self.board(end) == 0 \\\n and self.board(start) > 0 \\\n and self._check_zone_locks(start, end) \\\n and self.exists_path(start, end)", "def determine_azel_are_safe(_az, _el):\n if _az and _el:\n return True", "def checkCapabilities(hostmask, capabilities, requireAll=False):\n for capability in capabilities:\n if requireAll:\n if not checkCapability(hostmask, capability):\n return False\n else:\n if checkCapability(hostmask, capability):\n return True\n return requireAll", "def skip_unlock(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"skip_unlock\")" ]
[ "0.588355", "0.5876618", "0.58541375", "0.58166057", "0.5798117", "0.5712255", "0.5689939", "0.5669322", "0.5637539", "0.5548849", "0.5544105", "0.5517736", "0.54952157", "0.5487494", "0.5465993", "0.5461534", "0.54453385", "0.5438241", "0.5419825", "0.53650993", "0.53650993", "0.53650993", "0.53650993", "0.53577167", "0.5331902", "0.53083897", "0.5296869", "0.5295731", "0.52924204", "0.5283284", "0.5274991", "0.52690446", "0.5261469", "0.52487767", "0.52435106", "0.52374554", "0.5236058", "0.52204555", "0.52183", "0.52080935", "0.51969767", "0.51953673", "0.51936954", "0.5189355", "0.5188642", "0.51882505", "0.5175984", "0.51704556", "0.515916", "0.51559377", "0.51551867", "0.5152203", "0.5151861", "0.514326", "0.5138287", "0.5137787", "0.51375294", "0.51343405", "0.5132263", "0.5126078", "0.5118968", "0.5118944", "0.5116026", "0.51126176", "0.5112017", "0.5098533", "0.5087344", "0.5085582", "0.50830096", "0.507765", "0.5073603", "0.50732976", "0.5072479", "0.50598204", "0.5056413", "0.5051229", "0.5050637", "0.50483", "0.5039844", "0.5039702", "0.50272137", "0.5020803", "0.5020439", "0.50134176", "0.50083274", "0.5005416", "0.4996461", "0.49941048", "0.49923202", "0.49923202", "0.49923202", "0.49917337", "0.49873894", "0.49866667", "0.498278", "0.49795383", "0.49782607", "0.4976701", "0.49696532", "0.4964077" ]
0.59786093
0
Loads the labels file. Supports files with or without index numbers.
def load_labels(path):
    with open(path, 'r', encoding='utf-8') as f:
        lines = f.readlines()
    labels = {}
    for row_number, content in enumerate(lines):
        pair = re.split(r'[:\s]+', content.strip(), maxsplit=1)
        if len(pair) == 2 and pair[0].strip().isdigit():
            labels[int(pair[0])] = pair[1].strip()
        else:
            labels[row_number] = pair[0].strip()
    return labels
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_labels(idx_filename):\n return IdxFileLoader().load(idx_filename, gzip_compressed=True)[-1]", "def load_labels():\n filename = os.path.join(config['inference']['model_dir'], 'output_labels.txt')\n global labels\n labels = [line.rstrip() for line in tf.gfile.FastGFile(filename)]", "def load_labels(label_path):\r\n\r\n with open(label_path, \"r\") as f:\r\n\r\n lines = f.readlines()\r\n \r\n label = {}\r\n index = []\r\n for i, line in enumerate(lines):\r\n sp = line.split()\r\n label[sp[0]] = [int(sp[1]),int(sp[2]),int(sp[3])]\r\n index.append([int(sp[3]),int(sp[2]),int(sp[1])])\r\n\r\n return label, index", "def load_labels(filename):\n return [line.rstrip() for line in tf.gfile.GFile(filename)]", "def load_labels(filename):\n return [line.rstrip() for line in tf.gfile.GFile(filename)]", "def load_labels(filename):\n return [line.rstrip() for line in tf.gfile.GFile(filename)]", "def load_labels_index_map(self, file_path):\n with open(file_path) as handle:\n self._labels_2_index = json.loads(handle.read())\n self._index_2_labels = {i: label.lower() for label, i in self._labels_2_index.items()}\n self._labels_dim = len(self._labels_2_index)", "def _load_labels(self, label_path: str) -> List[str]:\n with open(label_path, 'r') as f:\n return [line.strip() for _, line in enumerate(f.readlines())]", "def load_labels(path):\n with open(path, \"r\", encoding=\"utf-8\") as f:\n lines = f.readlines()\n labels = {}\n for row_number, content in enumerate(lines):\n pair = re.split(r\"[:\\s]+\", content.strip(), maxsplit=1)\n if len(pair) == 2 and pair[0].strip().isdigit():\n labels[int(pair[0])] = pair[1].strip()\n else:\n labels[row_number] = pair[0].strip()\n # print(labels)\n return labels", "def load_labels(labels_dir, trial_name):\n labels_path = labels_dir + trial_name + \".txt\"\n raw_labels_data = np.genfromtxt(labels_path, dtype=np.int,\n converters=LABELS_CONVERTERS,\n usecols=LABELS_USECOLS)\n #print(\"rawlabelsdata: \", raw_labels_data)\n #print(get_first_frame(labels_path))\n frames = np.arange(get_first_frame(labels_path), get_last_frame(labels_path)+1, dtype=np.int)\n #print(\"frames: \", frames)\n #print(frames.shape)\n #labels = np.zeros(frames.shape, dtype=np.int)\n labels1 = []\n #print(labels)\n for start, end, label in raw_labels_data:\n #mask = (frames >= start) & (frames <= end)\n #print(start)\n #print(end)\n i = start\n while(i<end):\n if(i%6 == 0):\n labels1.append(label)\n i = i+1\n\n #labels[mask] = label\n #print(\"labels[mask]: \",labels[mask])\n labels1 = np.array(labels1)\n #print(labels1)\n labels_data = labels1.reshape(-1,1)\n #print(labels1.shape)\n #print(\"labels: \", labels_data)\n \n return labels_data", "def _read_labels(test_data=False):\n if not test_data:\n filename = os.path.join(FOLDER_PATH, 'train-labels.idx1-ubyte')\n else:\n filename = os.path.join(FOLDER_PATH, 't10k-labels.idx1-ubyte')\n if not os.path.exists(filename):\n raise ValueError('The file dose not exist.')\n \n # Create a queue that produces the filenames to read.\n filename_queue = tf.train.string_input_producer([filename])\n \n # The first 8 bytes contain file information:\n # [offset] [type] [value] [description]\n # 0000 32 bit integer 0x00000801(2049) magic number\n # 0004 32 bit integer 60000/10000 number of items \n # ...(label value)\n header_bytes = 8\n # Every record consists of a label, with a fixed number of bytes for each.\n record_bytes = 1\n \n # Create a FixedLengthRecordReader to read record.\n reader = tf.FixedLengthRecordReader(record_bytes=record_bytes,\n 
header_bytes=header_bytes)\n _, value = reader.read(filename_queue)\n\n # Convert from a string to a vector of uint8, then cast to int32.\n record = tf.cast(tf.decode_raw(value, tf.uint8), tf.int32)\n \n # Reshape from [1] to a scalar shape [].\n label = tf.reshape(record, [])\n\n return label", "def load_file(filename):\n\tlabels = []\n\tdocs = []\n\n\twith open(filename) as f:\n\t\tfor line in f:\n\t\t\tcontent = line.split('\\t')\n\n\t\t\tif len(content) > 2:\n\t\t\t\tprint('incorrect read')\n\t\t\t\texit()\n\n\t\t\tif len(content[1]) == 0: continue\n\n\t\t\tdocs.append(str(content[1]).strip('\\r').strip('\\n').strip('\\r\\n'))\n\t\t\tlabels.append(content[0])\n\n\treturn docs, labels", "def load_labels(self, labels):\n self.labels = pd.DataFrame(labels, index=[\"label\"]).T", "def load_labels(self, pathLabel):\n self.pathLabel = pathLabel\n self.labelList = os.listdir(pathLabel)", "def load_labels(label_file):\n\n label = []\n proto_as_ascii_lines = tf.io.gfile.GFile(label_file).readlines()\n for l in proto_as_ascii_lines:\n label.append(l.rstrip())\n return label", "def LoadLabels(filepath):\n with open(filepath, 'r') as datafile:\n lines = [line.strip() for line in datafile.readlines()]\n # filter for empty lines\n return [line for line in lines if line]", "def load_labels(label_file) :\n df = pd.read_csv(label_file, index_col=\"p_index\",\n dtype=str, na_values=['nan', 'NaN', '']).dropna()\n\n return df", "def load_for_sklearn(self):\n\n labels = [] # string labels\n examples = [] # examples as strings\n\n # document number -> label mapping\n doc2label = n2b2.map_patients_to_labels(\n self.xml_dir,\n self.category)\n\n for f in os.listdir(self.cui_dir):\n doc_id = f.split('.')[0]\n file_path = os.path.join(self.cui_dir, f)\n file_as_string = open(file_path).read()\n\n string_label = doc2label[doc_id]\n int_label = LABEL2INT[string_label]\n labels.append(int_label)\n examples.append(file_as_string)\n\n return examples, labels", "def load_label(self, idx):\n im = open('{}/GTTXT/{}.txt'.format(root_dir, idx))\n\t#print(type(im.readlines()[0].rstrip(\"\\n\")))\n rgb_label = [i.rstrip(\"\\n\").split(\" \") for i in im.readlines()]\n\tlabel=[]\t\n\tfor i in rgb_label:\n\t\tlabel+=[int(j) for j in i]\n\tlabel=np.array(label).reshape(720,960)\n\tlabel[label==-1]=12\n\t#print(np.unique(label))\n #label = label[np.newaxis, ...]\n return label", "def load_labels(path):\n with open(path, 'r', encoding='utf-8') as f:\n lines = f.readlines()\n labels = []\n for row_number, content in enumerate(lines):\n pair = re.split(r'[:\\s]+', content.strip(), maxsplit=1)\n #if len(pair) == 2 and pair[0].strip().isdigit():\n labels.append(np.array([int(pair[0].strip()),pair[1].strip()]))\n #else:\n # labels.append(pair[0].strip())\n return np.array(labels)", "def load_labels(path, encoding='utf-8'):\r\n with open(path, 'r', encoding=encoding) as f:\r\n lines = f.readlines()\r\n if not lines:\r\n return {}\r\n\r\n if lines[0].split(' ', maxsplit=1)[0].isdigit():\r\n pairs = [line.split(' ', maxsplit=1) for line in lines]\r\n return {int(index): label.strip() for index, label in pairs}\r\n else:\r\n return {index: line.strip() for index, line in enumerate(lines)}", "def read_label_file(self, label_file_name = None): #completed\n if label_file_name is None:\n label_file_name = self.label_file_name\n try:\n label_data = sp.loadmat(label_file_name)['labels'].astype(np.int32)\n return label_data#[:,1], label_data[:,0]#in MATLAB format\n except IOError:\n print \"Unable to open \", label_file_name, \"... 
Exiting now\"\n sys.exit()", "def retrieve_labels(file, label_indices):\n\n\t# Initialize numpy matrix to store the images\n\tlabels = np.zeros((len(label_indices), 10))\n\n\twith open(file, \"rb\") as f:\n\t\t# Intialize counters\n\t\ti = 0\n\t\tlabel_number = 0\n\n\t\t# Read first byte\n\t\tbyte = f.read(1)\n\n\t\t# Find each image in the data file\n\t\tfor label_index in label_indices:\n\t\t\t# Read in bytes until you arrive at the label\n\t\t\twhile byte and (i < (label_index + 8)):\n\t\t\t\tbyte = f.read(1)\n\t\t\t\ti += 1\n\n\t\t\t# Store label value in numpy array\n\t\t\tvalue = int.from_bytes(byte, \"big\")\n\t\t\tlabels[label_number] = np.zeros(10)\n\t\t\tlabels[label_number, value] = 1\n\n\t\t\t# Increment to next label\n\t\t\tlabel_number += 1\n\n\treturn labels", "def load_data_and_labels():\n # Load data from files\n positive_examples = []\n for file in os.listdir('with_datarace'):\n filename = os.fsdecode(file)\n ast_file = open('with_datarace\\\\' + filename, 'r')\n token_vector = ast_file.read()\n positive_examples.append(token_vector)\n file_names.append(filename)\n\n negative_examples = []\n for file in os.listdir('without_datarace\\\\'):\n filename = os.fsdecode(file)\n ast_file = open('without_datarace\\\\' + filename, 'r')\n token_vector = ast_file.read()\n negative_examples.append(token_vector) # List of lists\n file_names.append(filename)\n\n positive_examples = [s.strip() for s in positive_examples]\n negative_examples = [s.strip() for s in negative_examples]\n\n # Split by words\n x_text = positive_examples + negative_examples # why we didn't cobine it from the beginning?\n x_text = [clean_str(sent) for sent in x_text]\n x_text = [s.split(\" \") for s in x_text]\n\n # Generate labels\n positive_labels = [[0, 1] for _ in positive_examples]\n negative_labels = [[1, 0] for _ in negative_examples]\n y = np.concatenate([positive_labels, negative_labels], 0)\n\n return [x_text, y]", "def read_data(feature_file, label_file):", "def load_data(filename, use_labels=True):\n # load column 1 to 8 (ignore last one)\n data = np.loadtxt(open( filename), delimiter=',',\n usecols=range(1, 9), skiprows=1)\n if use_labels:\n labels = np.loadtxt(open( filename), delimiter=',',\n usecols=[0], skiprows=1)\n else:\n labels = np.zeros(data.shape[0])\n return labels, data", "def load_file(self, dset_type):\r\n path = './data/{0}.{1}'.format(self.name, dset_type)\r\n try:\r\n file_contents = np.genfromtxt(path, missing_values=0, skip_header=0,\r\n dtype=int, delimiter=\",\")\r\n self.labels[dset_type] = file_contents[:, 0]\r\n self.examples[dset_type] = file_contents[:, 1:]\r\n\r\n except RuntimeError:\r\n print('ERROR: Unable to load file ''{0}''. 
Check path and try again.'.format(path))", "def load_labels(filename):\n\n file_path = os.path.join(DATA_DIR, filename)\n with open(file_path, 'rb') as f:\n b = f.read()\n\n magic, n_labels = (struct.unpack('>i', b[i*4:(i+1)*4]) for i in range(2))\n\n assert magic[0] == 2049, \"bad magic number, what do?\"\n\n label_stream = array.array('B', b[8:])\n \n assert len(label_stream) == n_labels[0], \"mismatch in label length\"\n \n # label_stream is actually type array.array, which is iterable surely.\n # i'll convert it anyway...\n return tuple(label_stream)", "def load_from_path(self, paths, label_key='labels'):\n data = []\n labels = []\n for path in paths:\n with tf.io.gfile.GFile(path, 'rb') as f:\n d = {\n k.decode('utf8'): v\n for k, v in cPickle.load(f, encoding='bytes').items()\n }\n data.append(d['data'])\n labels.append(d[label_key])\n data = np.concatenate(data, axis=0)\n data = data.reshape((data.shape[0], 3, 32, 32))\n labels = np.concatenate(labels, axis=0)\n labels = np.reshape(labels, (len(labels), 1))\n\n if tf.keras.backend.image_data_format() == 'channels_last':\n data = data.transpose(0, 2, 3, 1)\n\n return data, labels", "def load_labels(path, kmer=True, rg=True, clip=True, rna=True, go=True):\n\n labels = dict()\n if go: labels[\"X_GO\"] = gzip.open(os.path.join(path,\n \"matrix_GeneOntology.tab.gz\")).readline().split(\"\\t\")\n if kmer: labels[\"X_KMER\"] = gzip.open(os.path.join(path,\n \"matrix_RNAkmers.tab.gz\")).readline().split(\"\\t\")\n if rg: labels[\"X_RG\"] = gzip.open(os.path.join(path,\n \"matrix_RegionType.tab.gz\")).readline().split(\"\\t\")\n if clip: labels[\"X_CLIP\"] = gzip.open(os.path.join(path,\n \"matrix_Cobinding.tab.gz\")).readline().split(\"\\t\")\n if rna: labels[\"X_RNA\"] = gzip.open(os.path.join(path,\n \"matrix_RNAfold.tab.gz\")).readline().split(\"\\t\")\n return labels", "def read_labels(labels_file):\n if not labels_file:\n print 'WARNING: No labels file provided. 
Results will be difficult to interpret.'\n return None\n\n labels = []\n with open(labels_file) as infile:\n for line in infile:\n label = line.strip()\n if label:\n labels.append(label)\n assert len(labels), 'No labels found'\n return labels", "def load_labeled_data(files):\n\tx = []\n\ty = []\n\tfor filename in files:\n\t\tdata = []\n\t\twith open(filename) as infile:\n\t\t\tlabel = int(infile.readline())\n\t\t\tfor line in infile:\t\n\t\t\t\tdata.append(dna_string_to_array(line.strip()))\n\t\ty += [label]*len(data)\n\t\tx += data\n\n\treturn (np.array(x), np.array(y))", "def get_labels(self):\n\n print 'Loading label data from', self.label_file, '...'\n labels = {}\n with open(self.label_file, 'rb') as f:\n f.next() # skip header line\n for line in f:\n index, answer = line.rstrip('\\n').split(',')\n labels[index] = answer\n\n return labels", "def load_label(path: str) -> dict:\n if not os.path.exists(path):\n print(f\"Warning, try to load non-exist label {path}\")\n return None\n return np.load(path, allow_pickle=True).tolist()", "def read_labels_from_file(label_identifier) -> list:\n global _LABELS\n if _LABELS is None:\n _LABELS = read_label_definitions(NORM_LABEL_FILE)\n filename = _LABELS['label_files'][label_identifier]\n print(f\"Reading {filename}\")\n with open(filename, 'r') as f:\n labels = [l.strip() for l in f.readlines() if l.strip() != '']\n return labels", "def read(path, label2int):\n\n labels = [] # int labels\n samples = [] # examples as strings\n\n for label_dir in os.listdir(path):\n label_dir_path = os.path.join(path, label_dir)\n\n for file in os.listdir(label_dir_path):\n file_path = os.path.join(label_dir_path, file)\n file_text = open(file_path).read().rstrip()\n int_label = label2int[label_dir.lower()]\n samples.append(file_text)\n labels.append(int_label)\n\n return samples, labels", "def read_idx_2_label():\n with open('../Data/imagenet_class_index.json') as f:\n dictionary = json.load(f)\n return dictionary", "def read_label(filepath, read_scalars=False):\n label_array = np.loadtxt(filepath, dtype=np.int, skiprows=2, usecols=[0])\n if read_scalars:\n scalar_array = np.loadtxt(filepath, skiprows=2, usecols=[-1])\n return label_array, scalar_array\n return label_array", "def read_labels(labels_path):\n with open(labels_path, 'r') as file:\n data = file.read()\n data = data.split()\n data = np.array(data)\n data = np.reshape(data, (-1, 2))\n return data", "def load_data(self):\n sets = ['train', 'val']\n images = []\n labels = []\n self.labels_dic = {}\n file = open(self.path + 'wnids.txt')\n train_labels = file.read().split()\n if self.train:\n for fn in range(self.num_classes):\n f = train_labels[fn]\n for i in os.listdir(self.path + 'train/' + f + '/images/'):\n images.append(Image.open(self.path + 'train/' + f + '/images/' + i))\n labels.append(f)\n #image label n link to folder names of TinyImageNet\n self.labels_dic[f] = fn\n\n else:\n for fn in range(self.num_classes):\n f = train_labels[fn]\n self.labels_dic[f] = fn\n file_val = open(self.path + 'val/val_annotations.txt')\n val_labels = file_val.read().split('\\n')\n for im in val_labels:\n im_data = im.split(\"\t\")[:2]\n if len(im_data) < 2:\n continue\n if im_data[1] in self.labels_dic:\n images.append(Image.open(self.path + 'val/images/' + im_data[0]))\n labels.append(im_data[1])\n\n self.images = images\n self.labels = labels", "def load_batch(fpath, label_key='labels'):\n f = open(fpath, 'rb')\n if sys.version_info < (3,):\n d = cPickle.load(f)\n else:\n d = cPickle.load(f, encoding='bytes')\n # 
decode utf8\n d_decoded = {}\n for k, v in d.items():\n d_decoded[k.decode('utf8')] = v\n d = d_decoded\n f.close()\n data = d['data']\n labels = d[label_key]\n\n data = data.reshape(data.shape[0], 3, 32, 32)\n return data, labels", "def loadLabelMap(self):\n print(\">>> load Label Map: {}\".format(self.PATH_TO_LABELS))\n self.label_map = self.label_map_util.load_labelmap(\n self.PATH_TO_LABELS)\n self.categories = self.label_map_util.convert_label_map_to_categories(\n self.label_map,\n max_num_classes=self.NUM_CLASSES,\n use_display_name=True)\n\n self.category_index = self.label_map_util.create_category_index(\n self.categories)\n # create a list filled with 0\n self.categoryNames = [0 for x in range(self.NUM_CLASSES)]\n for cc in self.categories:\n self.categoryNames[int(cc[\"id\"]) - 1] = cc[\"name\"]", "def load_labels(source_dir, label_pattern):\r\n\r\n logging.info(\"Loading labels from %s with pattern %s\"\r\n % (source_dir, label_pattern))\r\n label_files = glob(path.join(source_dir, label_pattern))\r\n if len(label_files) == 0:\r\n raise ValueError(\"No label files found with pattern %s\"\r\n % label_pattern)\r\n if len(label_files) > 1:\r\n raise ValueError(\"Only one label file supported ATM.\")\r\n labels = np.load(label_files[0]).flatten()\r\n logging.info(\"Label loading complete. Shape is %r\" % (labels.shape,))\r\n return labels", "def LoadBatch(filename):", "def load_mnist(path, kind='train'):\n\tlabels_path = os.path.join(path,'%s-labels.idx1-ubyte'%kind)\n\timages_path = os.path.join(path,'%s-images.idx3-ubyte'%kind)\n\t\n\twith open(labels_path, 'rb') as lbpath:\n\t\tmagic, n = struct.unpack('>II', lbpath.read(8))\n\t\tlabels = np.fromfile(lbpath, dtype=np.uint8)\n\t\t\n\twith open(images_path, 'rb') as imgpath:\n\t\tmagic, num, row, cols = struct.unpack('>IIII', imgpath.read(16))\n\t\timages = np.fromfile(imgpath, dtype=np.uint8).reshape(len(labels), 784)\n\t\n\treturn images, labels", "def load_mnist(kind='train'):\r\n with open('%s-labels.idx1-ubyte' % kind, 'rb') as lbpath:\r\n magic, n = struct.unpack('>II', lbpath.read(8))\r\n labels = np.fromfile(lbpath, dtype=np.uint8)\r\n\r\n with open('%s-images.idx3-ubyte' % kind, 'rb') as imgpath:\r\n magic, num, rows, cols = struct.unpack('>IIII', imgpath.read(16))\r\n images = np.fromfile(imgpath, dtype=np.uint8).reshape(len(labels), 784)\r\n\r\n return images, labels", "def load_idx_to_label(dataset_name):\n if dataset_name == 'imagenet':\n path = 'https://gist.githubusercontent.com/yrevar/'\n path += '6135f1bd8dcf2e0cc683/raw/'\n path += 'd133d61a09d7e5a3b36b8c111a8dd5c4b5d560ee'\n path += '/imagenet1000_clsid_to_human.pkl'\n idx_to_label = pickle.load(urllib.request.urlopen(path))\n \n elif dataset_name == 'indoor_scenes':\n label_to_idx = {'airport_inside': 0,\n 'bar': 1,\n 'bedroom': 2,\n 'casino': 3,\n 'inside_subway': 4,\n 'kitchen': 5,\n 'livingroom': 6,\n 'restaurant': 7,\n 'subway': 8,\n 'warehouse': 9}\n idx_to_label = {idx: label for label, idx in label_to_idx.items()}\n \n elif dataset_name == 'pubfig10':\n celebs = ['Aaron-Eckhart', 'Adriana-Lima',\n 'Angela-Merkel', 'Beyonce-Knowles', \n 'Brad-Pitt', 'Clive-Owen', \n 'Drew-Barrymore', 'Milla-Jovovich', \n 'Quincy-Jones', 'Shahrukh-Khan']\n idx_to_label = { i: celebs[i] for i in range(len(celebs)) }\n\n elif dataset_name == 'pubfig83':\n celebs = ['adam-sandler', 'alex-baldwin', 'angelina-jolie', 'anna-kournikova', 'ashton-kutcher', 'avril-lavigne',\n 'barack-obama', 'ben-affleck', 'beyonce-knowles', 'brad-pitt', 'cameron-diaz', 'cate-blanchett', 
'charlize-theron',\n 'christina-ricci', 'claudia-schiffer', 'clive-owen', 'colin-farell', 'colin-powell', 'cristiano-ronaldo', 'daniel-craig',\n 'daniel-radcliffe', 'david-beckham', 'david-duchovny', 'denise-richards', 'drew-barrymore', 'dustin-hoffman', 'ehud-olmert',\n 'eva-mendes', 'faith-hill', 'george-clooney', 'gordon-brown', 'gwyneth-paltrow', 'halle-berry', 'harrison-ford',\n 'hugh-jackman', 'hugh-laurie', 'jack-nicholson', 'jennifer-aniston', 'jennifer-lopez', 'jennifer-lovehewitt',\n 'jessica-alba', 'jessica-simpson', 'joaquin-phoenix', 'john-travolta', 'julia-roberts', 'jula-stiles', 'kate-moss',\n 'kate-winslet', 'katherine-heigl', 'keira-knightley', 'kiefer-sutherland', 'leonardo-dicaprio', 'lindsay-lohan', 'mariah-carey',\n 'martha-stewart', 'matt-damon', 'meg-ryan', 'meryl-streep', 'michael-bloomberg', 'mickey-rourke', 'miley-cyrus',\n 'morgan-freeman', 'nicole-kidman', 'nicole-richie', 'orlando-bloom', 'reese-witherspoon', 'renee-zellweger', 'ricky-martin',\n 'robert-gates', 'sania-mirza', 'scarlett-johansson', 'shahrukh-khan', 'shakira', 'sharon-stone', 'silvio-berlusconi',\n 'stephen-colbert', 'steve-carell', 'tom-cruise', 'uma-thurman', 'victoria-beckham', 'viggo-mortensen', 'will-smith', 'zac-efron']\n idx_to_label = { i: celebs[i] for i in range(len(celebs)) }\n\n elif dataset_name == 'vggface2':\n path = \"../utils/vggface2_80_to_complete.pkl\"\n with open(path, 'rb') as file:\n idx_to_label = pickle.load(file)\n\n else:\n raise NotImplementedError\n \n return idx_to_label", "def labels(labels_file, labels = []):\n\n print(f\"Parsing labels '{labels_file}'\")\n with open(labels_file, 'r') as f:\n for i, line in enumerate(f):\n labels.append(line.split(':')[-1].strip())\n return pd.Series(labels)", "def load_nli_file(data_path, num_par=2):\n tokenizer = tokenization.NltkTokenizer()\n dataset = tf.data.TextLineDataset(data_path)\n dataset = dataset.map(\n functools.partial(_nli_line_to_tensors, tokenizer=tokenizer),\n num_parallel_calls=num_par)\n dataset = dataset.filter(lambda x: tf.greater_equal(x[\"label\"], 0))\n return dataset", "def read_labels(fn: str, count: int) -> SampleLabels:\n with open(fn, \"rb\") as f:\n assert unpack(\">i\", f.read(4))[0] == 0x00000801\n assert unpack(\">i\", f.read(4))[0] == count\n labels: List[int] = [\n ord(f.read(1))\n for _ in range(count)\n ]\n return labels", "def create_labels(filename, class_indices):\n \n _logger.debug(\"Mapping labels\")\n label={}\n label['category']=[]\n for key in class_indices:\n label['category'].append({\n 'name' : key,\n 'index' : class_indices[key]\n })\n label_path = os.path.join(config.TRAINED_MODELS_DATA, filename)\n with open(os.path.join(label_path, 'labels.txt'), 'w') as outfile:\n json.dump(label, outfile)\n return label_path", "def _read_train_datas(self):\r\n with open(self.train_label_path, 'r') as fb:\r\n lines = fb.readlines()\r\n return self._parse_raw_labels(lines)", "def load_data_and_labels():\n # Load data from files\n positive_examples = list(\n open(\"./data/rt-polarity.pos\", \"r\", encoding='latin-1').readlines())\n positive_examples = [s.strip() for s in positive_examples]\n negative_examples = list(\n open(\"./data/rt-polarity.neg\", \"r\", encoding='latin-1').readlines())\n negative_examples = [s.strip() for s in negative_examples]\n # Split by words\n x_text = positive_examples + negative_examples\n x_text = [clean_str(sent) for sent in x_text]\n x_text = [s.split(\" \") for s in x_text]\n # Generate labels\n positive_labels = [[0, 1] for _ in positive_examples]\n 
negative_labels = [[1, 0] for _ in negative_examples]\n y = np.concatenate([positive_labels, negative_labels], 0)\n return [x_text, y]", "def load_data(self):\n with open(self.file_name) as f:\n lines = f.readlines()\n\n labels = list()\n all_dat = list()\n for i, l in enumerate(lines):\n\n labels.append(int(l[0]))\n\n l = gensim.utils.any2unicode(l)\n all_dat.append(LabeledSentence(l.split(\"\\t\")[-1], [i]))\n\n return all_dat, np.asarray(labels)", "def load_fmnist(path, kind='train'):\n labels_path = os.path.join(path,\n '%s-labels-idx1-ubyte.gz'\n % kind)\n images_path = os.path.join(path,\n '%s-images-idx3-ubyte.gz'\n % kind)\n\n with gzip.open(labels_path, 'rb') as lbpath:\n labels = np.frombuffer(lbpath.read(), dtype=np.uint8,\n offset=8)\n\n with gzip.open(images_path, 'rb') as imgpath:\n images = np.frombuffer(imgpath.read(), dtype=np.uint8,\n offset=16).reshape(len(labels), 784)\n\n return images, labels", "def from_label_file(cls, label_file_path, out_path=FEATURES_DATA_PATH, source_path=RAW_DATA_PATH):\n df = pd.read_csv(label_file_path)\n filenames = df['filename']\n labels = df['label']\n return cls(filenames, labels, out_path=out_path, source_path=source_path)", "def label_names_file():\n return tfds.core.tfds_path(_LABELS_FNAME)", "def load_labeled_data():\n\n images = []\n labels = []\n\n for i in range(1, 10):\n path = (\"selflabeled\", str(i), \"*.jpg\")\n filenames = glob.glob(\"/\".join(path))\n images_one_type = [cv2.imread(img) for img in filenames]\n labels_one_type = [i] * len(images_one_type)\n images += images_one_type\n labels += labels_one_type\n\n return images, labels", "def load_mnist(path, kind='train'):\n labels_path = os.path.join(path,'{}-labels-idx1-ubyte'.format(kind))\n images_path = os.path.join(path,'{}-images-idx3-ubyte'.format(kind))\n with open(labels_path, 'rb') as lbpath:\n magic, n = struct.unpack('>II',\n lbpath.read(8))\n labels = np.fromfile(lbpath,\n dtype=np.uint8).reshape(n)\n\n with open(images_path, 'rb') as imgpath:\n magic, num, rows, cols = struct.unpack('>IIII',\n imgpath.read(16))\n images = np.fromfile(imgpath,\n dtype=np.uint8).reshape((num,1,rows,cols))\n print(kind)\n print(\"label num:\",n)\n print(\"image num:\",num)\n print(\"image rows:\",rows)\n print(\"image cols:\",cols)\n images = images/255\n return images, labels", "def _label_loader(self, prefix):\n return self._base_loader(prefix, 'labels')", "def read_stanford_labels():\n # First get the hardi data\n fetch_stanford_hardi()\n hard_img, gtab = read_stanford_hardi()\n\n # Fetch and load\n files, folder = fetch_stanford_labels()\n labels_file = pjoin(folder, \"aparc-reduced.nii.gz\")\n labels_img = nib.load(labels_file)\n return hard_img, gtab, labels_img", "def read_labels(labels_path):\n data = []\n with open(labels_path, 'r') as f:\n for line in f:\n line = line.split()\n sample = (line[0], int(line[1]))\n data.append(sample)\n \n dtype = [('video', '<U50'), ('label', int)]\n X = np.array(data, dtype=dtype)\n X = np.sort(X, order='video')\n return X", "def loadlabels_aslist(filename=None):\n if filename is None:\n filename = os.path.join(os.getenv('HOME'), 'ddc', 'data', 'bpti_labels_ms.txt')\n with open(filename) as src:\n lines = src.read().strip().split('\\n')\n label = [int(l.split()[1]) for l in lines]\n # Account for the last one:\n label.append(label[-1])\n return label", "def read_label_map(path):\n with tf.io.gfile.GFile(path) as f:\n if path.endswith('.json'):\n return json.load(f)\n else:\n label_map = {}\n empty_line_encountered = False\n for tag in 
f:\n tag = tag.strip()\n if tag:\n label_map[tag] = len(label_map)\n else:\n if empty_line_encountered:\n raise ValueError(\n 'There should be no empty lines in the middle of the label map '\n 'file.'\n )\n empty_line_encountered = True\n return label_map", "def extract_labels(filename):\n print('Extracting', filename)\n with gzip.open(filename) as bytestream:\n bytestream.read(8)\n buf = bytestream.read(10000)\n labels = numpy.frombuffer(buf, dtype=numpy.uint8).astype(numpy.int64)\n return labels", "def load_libsvm_file(file, labels_format=\"list\", sort_indices=False):\n if labels_format == 'list':\n labels, features = _load_libsvm_file_labels_list(file, sort_indices)\n return csr_matrix(features), labels\n elif labels_format == 'csr_matrix':\n labels, features = _load_libsvm_file_labels_csr_matrix(file, sort_indices)\n return csr_matrix(features), csr_matrix(labels)\n else:\n raise ValueError(\"Label format {} is not valid format\".format(labels_format))", "def load_data(fname):\n pathname = \"data/\" + fname\n data = pickle.load(open(pathname, 'rb'), encoding='latin1')\n images = np.array([img[:-1] for img in data])\n ys = [int(img[-1]) for img in data]\n length = len(ys)\n labels = np.zeros((length, 10))\n\n for i in range(length):\n labels[i, ys[i]] = 1\n\n return images, labels", "def loadLabeled(self):\n\n maxNumChannels = self._maxNumChannels # 4\n\n baseFilePath, ext = os.path.splitext(self.path)\n baseFilePath = baseFilePath.replace('_ch1', '')\n baseFilePath = baseFilePath.replace('_ch2', '')\n\n # load mask\n #labeledPath = dvMaskPath + '_mask.tif'\n #labeledData = tifffile.imread(labeledPath)\n\n maskFromLabelGreaterThan = 0\n\n # load labeled\n for channelIdx in range(maxNumChannels):\n channelNumber = channelIdx + 1 # for _ch1, _ch2, ...\n stackListIdx = maxNumChannels + channelIdx # for index into self._stackList\n\n chStr = '_ch' + str(channelNumber)\n labeledPath = baseFilePath + chStr + '_labeled.tif'\n maskPath = baseFilePath + chStr + '_mask.tif'\n\n # if we find _labeeled.tif, load and make a mask\n # o.w. 
if we find _mask.tif then load that\n if os.path.isfile(maskPath):\n print(' bStack.loadLabeled() loading _mask.tif channelNumber:', channelNumber, 'maskPath:', maskPath)\n maskData = tifffile.imread(maskPath)\n self._stackList[stackListIdx] = maskData\n elif os.path.isfile(labeledPath):\n print(' bStack.loadLabeled() loading channelNumber:', channelNumber, 'labeledPath:', labeledPath)\n labeledData = tifffile.imread(labeledPath)\n self._stackList[stackListIdx] = labeledData > maskFromLabelGreaterThan\n else:\n # did not find _mask or _labeled file\n pass\n\n # erode _mask by 1 (before skel) as skel was getting mized up with z-collisions\n #self._dvMask = bimpy.util.morphology.binary_erosion(self._dvMask, iterations=2)\n\n # bVascularTracing.loadDeepVess() uses mask to make skel", "def extract_labels(nlabels,filename, one_hot=False):\n print('Extracting', filename,'bbbccicicicicib')\n\n labels=numpy.loadtxt(filename,dtype='int64')\n \n if one_hot:\n print(\"LABELS ONE HOT\")\n print(labels.shape)\n XXX=dense_to_one_hot(labels,nlabels)\n print(XXX.shape)\n return dense_to_one_hot(labels,nlabels)\n print(\"LABELS\")\n print(labels.shape)\n return labels", "def load_mnist(path, kind='train'):\n labels_path = os.path.join(path,\n '%s-labels-idx1-ubyte' % kind)\n images_path = os.path.join(path,\n '%s-images-idx3-ubyte' % kind)\n \n with open(labels_path, 'rb') as lbpath:\n magic, n = struct.unpack('>II',\n lbpath.read(8))\n labels = np.fromfile(lbpath,\n dtype=np.uint8)\n \n with open(images_path, 'rb') as imgpath:\n magic, num, rows, cols = struct.unpack(\">IIII\",\n imgpath.read(16))\n images = np.fromfile(imgpath,\n dtype=np.uint8).reshape(len(labels), 784)\n \n return images, labels", "def _extract_labels(self, filename, one_hot=False):\n print('Extracting', filename)\n with gzip.open(filename) as bytestream:\n magic = self._read32(bytestream)\n if magic != 2049:\n raise ValueError(\n 'Invalid magic number %d in MNIST label file: %s' %\n (magic, filename))\n num_items = self._read32(bytestream)\n buf = bytestream.read(num_items)\n labels = np.frombuffer(buf, dtype=np.uint8)\n if one_hot:\n return self._dense_to_one_hot(labels)\n return labels", "def label(filenames, train_path='../data/train_molecules_30.mat'):\n unlabeled = [scipy.io.loadmat(fname) for fname in filenames]\n unlabeled_X = np.vstack([data['X'] for data in unlabeled])\n X, Y = load_data(train_path, shape=(-1, 30, 30, 30))\n\n num_unlabeled = unlabeled_X.shape[0]\n unlabeled_Y = np.zeros(num_unlabeled) - 1\n unlabeled_Y = unlabeled_Y.reshape((-1, 1))\n Y = Y.reshape((-1, 1))\n Y_all = np.vstack((Y, unlabeled_Y))\n\n X_all = np.vstack((X, unlabeled_X))\n X_all = X_all.reshape((-1, 27000))\n\n label_prop_model = LabelSpreading()\n label_prop_model.fit(X_all, Y_all)\n Y_all = label_prop_model.transduction_\n unlabeled_Y = Y_all[num_unlabeled:]\n return (unlabeled_X, unlabeled_Y), (X_all, Y_all)", "def load_mnist(path, kind = 'train'):\n label_path = os.path.join(path, '%s-labels-idx1-ubyte' % kind)\n images_path = os.path.join(path, '%s-images-idx3-ubyte' % kind)\n\n\n with open(label_path, 'rb') as lbpath:\n magic, n = struct.unpack('>II', lbpath.read(8))\n\n labels = np.fromfile(lbpath, dtype= np.uint8)\n\n with open(images_path, 'rb') as imgpath:\n magic, num, rows, cols = struct.unpack('>IIII', imgpath.read(16))\n\n images = np.fromfile(imgpath, dtype=np.uint8).reshape(len(labels),784)\n\n\n return images, labels", "def load_data_and_labels(data_file, labels_file):\r\n x_text = []\r\n y = []\r\n \r\n with open(data_file, 
encoding = \"utf-8\") as csvFile:\r\n readCSV = csv.reader(csvFile, delimiter = \",\")\r\n for row in readCSV:\r\n row = \"\".join(row)\r\n x_text.append(row) \r\n \r\n with open(labels_file, encoding = \"utf-8\") as csvFile2:\r\n readCSV = csv.reader(csvFile2, delimiter = \",\")\r\n for row in readCSV:\r\n d = defaultdict(list)\r\n for k,va in [(v,i) for i,v in enumerate(row)]:\r\n d[k].append(va)\r\n \r\n for k in range(len(d.get(\"1.0\"))):\r\n index = d.get(\"1.0\")[k]\r\n row[index] = 1\r\n for k in range(len(d.get(\"0.0\"))):\r\n index = d.get(\"0.0\")[k]\r\n row[index] = 0\r\n \r\n# print(len(row))\r\n y.append(row)\r\n \r\n\r\n\r\n\r\n \r\n print(\"x = {}\".format(len(x_text)))\r\n print(\"y = {}\".format(len(y)))\r\n \r\n return x_text, y", "def load(self):\n\n x = [] # input documents (n_docs, max_seq_len)\n labels = [] # targets we are predicting for each input\n\n for file_path in glob.glob(self.train_dir + '*.txt'):\n tokens = read_tokens(file_path)\n unique = list(set(tokens))\n x_count = round(len(unique) * 0.85)\n\n for _ in range(self.samples_per_doc):\n random.shuffle(unique)\n x.append(' '.join(unique[:x_count]))\n labels.append(' '.join(unique[x_count:]))\n\n # make x and y\n pkl = open('Model/tokenizer.p', 'rb')\n self.tokenizer = pickle.load(pkl)\n x = self.tokenizer.texts_to_matrix(x, mode='binary')\n y = self.tokenizer.texts_to_matrix(labels, mode='binary')\n\n # column zero is empty\n return x, y[:,1:]", "def load_data(filename):\r\n with open(filename,'rb') as f:\r\n data = pk.load(f,encoding='bytes')\r\n return data[b'data'],data[b'labels']", "def assign_labels(basename, data_folder=Path(\"/data\"), verbose=False):\n urls_path = data_folder / \"graphs\" / basename / (basename + \".urls\")\n assert urls_path.exists(), \"Urls file not found!\"\n # check if labels dict already existing\n labels_path = data_folder / \"models\" / basename / (\"labels.json\")\n if labels_path.exists():\n print(\"Labels json already existing.\")\n else:\n print(\"Building labels json..\")\n # count number of lines in file\n num_lines = sum(1 for line in urls_path.open())\n labels_array = [0] * num_lines\n with urls_path.open() as f:\n clusters_count = Counter()\n labels = dict()\n class_index = 0\n for pos, line in enumerate(tqdm(f, total=num_lines)):\n # extract the TLD\n complete_domain = tldextract.extract(line).suffix\n # we only need the country domain now\n domain = complete_domain.split(\".\")[-1]\n # if domain unseen add it to class indices\n if domain not in labels:\n class_index += 1\n labels[domain] = class_index\n # assign label and add it to array\n y = labels[domain]\n labels_array[pos] = y\n clusters_count[domain] += 1\n labels_data = dict()\n # labels_data['labels'] = labels # do we really need this?\n labels_data['labels'] = {int(v): k for k, v in labels.items()}\n labels_data['count'] = clusters_count\n labels_data['array'] = labels_array\n if verbose:\n print(\"Found following labels:\")\n print(labels)\n with open(labels_path, 'w', encoding='utf-8') as outfile:\n json.dump(labels_data, outfile, ensure_ascii=False, indent=4)\n return labels_path", "def load_expected(filename):\n\n all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]])\n data = numpy.loadtxt(filename, dtype='float64', skiprows=1)\n return all_labels, data[:,0].astype('int64'), data[:,1:]", "def load_data():\r\n global labelNames\r\n print(\"Loading Data...\")\r\n\r\n fnpath = \"rawdata\\\\cifar-10-batches-py\"\r\n fnprefix = 'data_batch_'\r\n fnlblnames = 'batches.meta'\r\n 
fntstbatch = 'test_batch'\r\n\r\n labelNames = unpickle(path.join(fnpath, fnlblnames))\r\n label_names = []\r\n for label in labelNames['label_names']:\r\n label_names.append(\"\".join(map(chr, label)))\r\n labelNames['label_names'] = label_names\r\n\r\n CIFAR_Data.append(unpickle(path.join(fnpath, fntstbatch)))\r\n for n in range(1, 6):\r\n CIFAR_Data.append(unpickle(path.join(fnpath, fnprefix + str(n))))", "def load_mnist(path, kind='train'):\n labels_path = os.path.join(path,'%s-labels-idx1-ubyte.gz'% kind)\n\n images_path = os.path.join(path,'%s-images-idx3-ubyte.gz'% kind)\n\n with gzip.open(labels_path, 'rb') as lbpath:\n labels = np.frombuffer(lbpath.read(), dtype=np.uint8,offset=8)\n\n with gzip.open(images_path, 'rb') as imgpath:\n images = np.frombuffer(imgpath.read(), dtype=np.uint8,offset=16).reshape(len(labels), 784)\n\n print(\"Dataset Loaded\")\n \n return images, labels", "def read_traindata (filename, labels = ['pos', 'neg']):\n def split (l):\n \"\"\"split one line into words and label\"\"\"\n segs = l.strip().split ('\\t')\n label = segs [-1]\n words = segs [:-1]\n return words, label\n \n encoding = chardet.detect(open (filename).read ()) ['encoding']\n \n with codecs.open (filename, 'r', encoding) as f:\n for line in f.readlines ():\n row = split (line)\n assert len (row) == 2\n assert isinstance(row [0], list)\n assert isinstance(row [1], basestring)\n print row [1]\n assert row [1] in labels\n yield row", "def load_mnist(path, kind='train'):\n\n labels_path = os.path.join(path,\n '%s-labels-idx1-ubyte.gz'\n % kind)\n images_path = os.path.join(path,\n '%s-images-idx3-ubyte.gz'\n % kind)\n\n with gzip.open(labels_path, 'rb') as lbpath:\n labels = np.frombuffer(lbpath.read(), dtype=np.uint8,\n offset=8)\n\n with gzip.open(images_path, 'rb') as imgpath:\n images = np.frombuffer(imgpath.read(), dtype=np.uint8,\n offset=16).reshape(len(labels), 784)\n\n return images, labels", "def load_index(self, fn):\n name = fn.split('.pkl')[0]\n return utils.load_obj(name)", "def read_labelmap_vidor(labelmap_file):\n\n labelmap = []\n class_ids = set()\n name = \"\"\n class_id = \"\"\n\n with open('idx_to_pred.pkl', 'rb') as f:\n idx_to_pred = pickle.load(f)\n\n # with PathManager.open(labelmap_file, \"r\") as f:\n # import pdb; pdb.set_trace()\n # for line in f:\n # if line.startswith(\" name:\"):\n # name = line.split('\"')[1]\n # elif line.startswith(\" id:\") or line.startswith(\" label_id:\"):\n # class_id = int(line.strip().split(\" \")[-1])\n # labelmap.append({\"id\": class_id, \"name\": name})\n # class_ids.add(class_id)\n # return labelmap, class_ids\n\n \"\"\"\n (Pdb) categories\n [{'id': 1, 'name': 'bend/bow (at the waist)'}, {'id': 3, 'name': 'crouch/kneel'}, {'id': 4, 'name': 'dance'}, {'id': 5, 'name': 'fall down'}, {'id': 6, 'name': 'get up'}, {'id': 7, 'name': 'jump/leap'}, {'id': 8, 'name': 'lie/sleep'}, {'id': 9, 'name': 'martial art'}, {'id': 10, 'name': 'run/jog'}, {'id': 11, 'name': 'sit'}, {'id': 12, 'name': 'stand'}, {'id': 13, 'name': 'swim'}, {'id': 14, 'name': 'walk'}, {'id': 15, 'name': 'answer phone'}, {'id': 17, 'name': 'carry/hold (an object)'}, {'id': 20, 'name': 'climb (e.g., a mountain)'}, {'id': 22, 'name': 'close (e.g., a door, a box)'}, {'id': 24, 'name': 'cut'}, {'id': 26, 'name': 'dress/put on clothing'}, {'id': 27, 'name': 'drink'}, {'id': 28, 'name': 'drive (e.g., a car, a truck)'}, {'id': 29, 'name': 'eat'}, {'id': 30, 'name': 'enter'}, {'id': 34, 'name': 'hit (an object)'}, {'id': 36, 'name': 'lift/pick up'}, {'id': 37, 'name': 'listen 
(e.g., to music)'}, {'id': 38, 'name': 'open (e.g., a window, a car door)'}, {'id': 41, 'name': 'play musical instrument'}, {'id': 43, 'name': 'point to (an object)'}, {'id': 45, 'name': 'pull (an object)'}, {'id': 46, 'name': 'push (an object)'}, {'id': 47, 'name': 'put down'}, {'id': 48, 'name': 'read'}, {'id': 49, 'name': 'ride (e.g., a bike, a car, a horse)'}, {'id': 51, 'name': 'sail boat'}, {'id': 52, 'name': 'shoot'}, {'id': 54, 'name': 'smoke'}, {'id': 56, 'name': 'take a photo'}, {'id': 57, 'name': 'text on/look at a cellphone'}, {'id': 58, 'name': 'throw'}, {'id': 59, 'name': 'touch (an object)'}, {'id': 60, 'name': 'turn (e.g., a screwdriver)'}, {'id': 61, 'name': 'watch (e.g., TV)'}, {'id': 62, 'name': 'work on a computer'}, {'id': 63, 'name': 'write'}, {'id': 64, 'name': 'fight/hit (a person)'}, {'id': 65, 'name': 'give/serve (an object) to (a person)'}, {'id': 66, 'name': 'grab (a person)'}, {'id': 67, 'name': 'hand clap'}, {'id': 68, 'name': 'hand shake'}, {'id': 69, 'name': 'hand wave'}, {'id': 70, 'name': 'hug (a person)'}, {'id': 72, 'name': 'kiss (a person)'}, {'id': 73, 'name': 'lift (a person)'}, {'id': 74, 'name': 'listen to (a person)'}, {'id': 76, 'name': 'push (another person)'}, {'id': 77, 'name': 'sing to (e.g., self, a person, a group)'}, {'id': 78, 'name': 'take (an object) from (a person)'}, {'id': 79, 'name': 'talk to (e.g., self, a person, a group)'}, {'id': 80, 'name': 'watch (a person)'}]\n (Pdb) class_whitelist\n {1, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17, 20, 22, 24, 26, 27, 28, 29, 30, 34, 36, 37, 38, 41, 43, 45, 46, 47, 48, 49, 51, 52, 54, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 72, 73, 74, 76, 77, 78, 79, 80}\n \"\"\"", "def load_data_and_labels(positive_data_file, negative_data_file):\n # Load data from files\n positive_examples = list(open(positive_data_file, \"r\",encoding='utf-8').readlines())\n positive_examples = [s.strip() for s in positive_examples]\n print (\"len of pos\"+positive_data_file, len(positive_examples))\n negative_examples = list(open(negative_data_file, \"r\",encoding='latin-1').readlines())\n negative_examples = [s.strip() for s in negative_examples]\n print (\"len of neg\"+negative_data_file,len(negative_examples))\n # Split by words\n x_text = positive_examples + negative_examples\n x_text = [clean_str(sent) for sent in x_text]\n positive_labels = [[0, 1] for _ in positive_examples]\n negative_labels = [[1, 0] for _ in negative_examples]\n y = np.concatenate([positive_labels, negative_labels], 0)\n return [x_text, y]", "def load_mnist(path, kind='train'):\n labels_path = os.path.join(path,\n '%s-labels-idx1-ubyte.gz'\n % kind)\n images_path = os.path.join(path,\n '%s-images-idx3-ubyte.gz'\n % kind)\n\n with gzip.open(labels_path, 'rb') as lbpath:\n labels = np.frombuffer(lbpath.read(), dtype=np.uint8,\n offset=8)\n\n with gzip.open(images_path, 'rb') as imgpath:\n images = np.frombuffer(imgpath.read(), dtype=np.uint8,\n offset=16).reshape(len(labels), 784)\n\n return images, labels", "def loadFromLocalFile(image_path, label_path):\n #Load label\n with gzip.open(label_path, 'rb') as file:\n magic, size = struct.unpack(\">II\", file.read(8))\n if magic != 2049:\n raise ValueError('Magic number mismatch, expected 2049, got {}'.format(magic))\n labels = np.array(array(\"B\", file.read()))\n # Load label\n with gzip.open(image_path, 'rb') as file:\n magic, size, rows, cols = struct.unpack(\">IIII\", file.read(16))\n if magic != 2051:\n raise ValueError('Magic number mismatch, expected 2051, got 
{}'.format(magic))\n x_train = array(\"B\", file.read())\n images = []\n for i in range(size):\n images.append([0] * rows * cols)\n for i in range(size):\n img = np.array(x_train[i * rows * cols:(i + 1) * rows * cols])\n img = img.reshape(28, 28)\n images[i][:] = img\n images = np.array(images)\n return images, labels", "def load_data_multilabel(traning_data_path,vocab_word2index, vocab_label2index,sentence_len,training_portion=0.95):\n file_object = codecs.open(traning_data_path, mode='r', encoding='utf-8')\n lines = file_object.readlines()\n random.shuffle(lines)\n label_size=len(vocab_label2index)\n X = []\n Y = []\n for i,line in enumerate(lines):\n raw_list = line.strip().split(\"__label__\")\n input_list = raw_list[0].strip().split(\" \")\n input_list = [x.strip().replace(\" \", \"\") for x in input_list if x != '']\n x=[vocab_word2index.get(x,UNK_ID) for x in input_list]\n label_list = raw_list[1:]\n label_list=[l.strip().replace(\" \", \"\") for l in label_list if l != '']\n label_list=[vocab_label2index[label] for label in label_list]\n y=transform_multilabel_as_multihot(label_list,label_size)\n X.append(x)\n Y.append(y)\n if i<10:print(i,\"line:\",line)\n\n X = pad_sequences(X, maxlen=sentence_len, value=0.) # padding to max length\n number_examples = len(lines)\n training_number=int(training_portion* number_examples)\n train = (X[0:training_number], Y[0:training_number])\n\n test_number=int((number_examples-training_number)/2)\n\n\n test = (X[training_number+ 1:training_number+test_number], Y[training_number + 1:training_number+test_number])\n valid = (X[training_number + test_number + 1:],\n Y[training_number + test_number + 1:])\n\n return train,test,valid", "def load_data_multilabel(traning_data_path,vocab_word2index, vocab_label2index,sentence_len,training_portion=0.95):\n file_object = codecs.open(traning_data_path, mode='r', encoding='utf-8')\n lines = file_object.readlines()\n random.shuffle(lines)\n label_size=len(vocab_label2index)\n X = []\n Y = []\n for i,line in enumerate(lines):\n raw_list = line.strip().split(\"__label__\")\n input_list = raw_list[0].strip().split(\" \")\n input_list = [x.strip().replace(\" \", \"\") for x in input_list if x != '']\n x=[vocab_word2index.get(x,UNK_ID) for x in input_list]\n label_list = raw_list[1:]\n label_list=[l.strip().replace(\" \", \"\") for l in label_list if l != '']\n label_list=[vocab_label2index[label] for label in label_list]\n y=transform_multilabel_as_multihot(label_list,label_size)\n X.append(x)\n Y.append(y)\n if i<10:print(i,\"line:\",line)\n\n X = pad_sequences(X, maxlen=sentence_len, value=0.) 
# padding to max length\n number_examples = len(lines)\n training_number=int(training_portion* number_examples)\n train = (X[0:training_number], Y[0:training_number])\n valid_number=min(1000,number_examples-training_number)\n test = (X[training_number+ 1:training_number+valid_number+1], Y[training_number + 1:training_number+valid_number+1])\n return train,test", "def _read_one_file(file_name, label_list):\n lines = tf.io.gfile.GFile(file_name, \"r\").readlines()\n examples = []\n label_id_map = {label: i for i, label in enumerate(label_list)}\n sentence_id = 0\n example = InputExample(sentence_id=0)\n for line in lines:\n line = line.strip(\"\\n\")\n if line:\n # The format is: <token>\\t<label> for train/dev set and <token> for test.\n items = line.split(\"\\t\")\n assert len(items) == 2 or len(items) == 1\n token = items[0].strip()\n\n # Assign a dummy label_id for test set\n label_id = label_id_map[items[1].strip()] if len(items) == 2 else 0\n example.add_word_and_label_id(token, label_id)\n else:\n # Empty line indicates a new sentence.\n if example.words:\n examples.append(example)\n sentence_id += 1\n example = InputExample(sentence_id=sentence_id)\n\n if example.words:\n examples.append(example)\n return examples", "def load_labels(db_dir, patient_id, flatten=True, unzipped=False):\n if unzipped:\n flat_labels = np.load(os.path.join(db_dir, '{:05d}_batched_lbls.npz'.format(patient_id)), allow_pickle=True)\n return flat_labels\n else:\n raw_labels = load_pkl(os.path.join(db_dir, '{:05d}_batched_lbls.pkl.gz'.format(patient_id)))\n if flatten:\n flat_labels = flatten_raw_labels(raw_labels)\n return flat_labels\n else:\n return raw_labels", "def load_label_map(location=\"configs/label_map.txt\"):\n ret = dict()\n num_class = 0\n with open(location) as f:\n for line in f:\n line = line.strip('\\n')\n index, relation = line.split(' ')\n ret[relation] = int(index)\n ret[int(index)] = relation\n num_class += 1\n return ret", "def load_words_from_file(path, voc_path=None):\n label_to_idx = {}\n dict_size = 0\n label_ids = []\n with open(path, \"r\") as fin:\n for label in fin:\n if label not in label_to_idx:\n label_to_idx[label] = dict_size\n dict_size += 1\n label_ids.append(label_to_idx[label])\n if voc_path:\n with open(voc_path, \"w+\") as fout:\n json.dump(label_to_idx, fout)\n return torch.tensor(label_ids)", "def get_labeled_data(filename):\n e = []\n y = []\n with open(filename) as f:\n for line in f:\n e.append(line[1:-1])\n y.append(category_mapping[abbreviation_mapping[line[0]]])\n return e, y", "def load_features_labels(self):\n MFCCs = torch.from_numpy(np.load(self.feature_file))\n labels = torch.from_numpy(np.load(self.label_file))\n 'Loading from files finished!'\n return MFCCs.view(-1,1,128,128), labels.long()", "def extract_labels(filename, num_images):\n filepath = os.path.join(WORK_DIRECTORY, filename)\n print('Extracting', filepath)\n with open(filepath, mode='rb') as bytestream:\n buf = bytestream.read(1 * num_images)\n labels = numpy.frombuffer(buf, dtype=numpy.uint8).astype(numpy.int64)\n return labels", "def load_label(path_file):\n if '.csv' not in path_file:\n raise FileNotFoundError('Only CSV format is supported currently')\n\n t0 = time()\n df = pd.DataFrame()\n\n with open(path_file, 'r') as f:\n # TODO: Implement the logic once the format is finalised\n pass\n\n logging.info('Loading label data with {} rows from {} takes {} secs'.format(df.shape[0],\n path_file, time() - t0))\n return df", "def extract_labels(filename, one_hot=False):\n print('Extracting', 
filename)\n with gzip.open(filename) as bytestream:\n magic = _read32(bytestream)\n if magic != 2049:\n raise ValueError(\n 'Invalid magic number %d in MNIST label file: %s' %\n (magic, filename))\n num_items = _read32(bytestream)[0]\n #print('check', magic, num_items)\n buf = bytestream.read(num_items)\n labels = numpy.frombuffer(buf, dtype=numpy.uint8)\n if one_hot:\n return dense_to_one_hot(labels)\n return labels", "def load_data():\n\n training_files_dir = \"digits/trainingDigits\"\n training_files = os.listdir(training_files_dir)\n file_num = len(training_files)\n hw_labels = []\n\n training_mat = zeros((file_num, 32 * 32))\n for i in xrange(file_num):\n filename = training_files[i]\n file_label = int((filename.split(\".\")[0]).split(\"_\")[0])\n hw_labels.append(file_label)\n training_mat[i, :] = img2vector(training_files_dir + '/' + filename)\n\n return training_mat, hw_labels", "def _labels(path):\r\n with gzip.open(path) as f:\r\n # First 8 bytes are magic_number, n_labels\r\n integer_labels = np.frombuffer(f.read(), 'B', offset=8)\r\n\r\n def _onehot(integer_labels):\r\n \"\"\"Return matrix whose rows are onehot encodings of integers.\"\"\"\r\n n_rows = len(integer_labels)\r\n n_cols = integer_labels.max() + 1\r\n onehot = np.zeros((n_rows, n_cols), dtype='uint8')\r\n onehot[np.arange(n_rows), integer_labels] = 1\r\n return onehot\r\n\r\n return _onehot(integer_labels)" ]
[ "0.7585093", "0.73918986", "0.7356385", "0.7295458", "0.7295458", "0.7251934", "0.72350484", "0.72224325", "0.69650286", "0.6930105", "0.6915016", "0.68840927", "0.6850086", "0.683416", "0.68133795", "0.6811225", "0.6805284", "0.67956245", "0.67892665", "0.6772183", "0.6754984", "0.67337954", "0.6703709", "0.6683473", "0.6667396", "0.6658817", "0.6648577", "0.66400075", "0.6639709", "0.6607544", "0.660347", "0.6578529", "0.6560403", "0.65400577", "0.65360963", "0.6481974", "0.6455631", "0.644909", "0.64252216", "0.64217895", "0.6418149", "0.6415866", "0.64082325", "0.6367961", "0.63610786", "0.6358924", "0.63587296", "0.6342076", "0.6322458", "0.6318979", "0.63063216", "0.6304317", "0.6298423", "0.6284338", "0.6282497", "0.62650555", "0.6256077", "0.62283325", "0.6223639", "0.6203575", "0.6195742", "0.6189949", "0.618924", "0.6185547", "0.61819863", "0.61798096", "0.6176982", "0.61658347", "0.615426", "0.61527497", "0.6142496", "0.6131467", "0.6111759", "0.61110383", "0.6101861", "0.6089731", "0.6089503", "0.6061444", "0.60575736", "0.60552174", "0.6050332", "0.60456306", "0.6045378", "0.6040671", "0.60318357", "0.6030383", "0.60284954", "0.6026798", "0.60201365", "0.6007718", "0.600146", "0.599465", "0.5988869", "0.5986981", "0.5960748", "0.59596515", "0.5956201", "0.5950592", "0.5944397", "0.5943295" ]
0.684912
13
Sets the input tensor.
def set_input_tensor(self, image):
    tensor_index = self.interpreter.get_input_details()[0]['index']
    input_tensor = self.interpreter.tensor(tensor_index)()[0]
    input_tensor[:, :] = image
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_input_tensor(self, image):\n tensor_index = self.model.get_input_details()[0]['index']\n input_tensor = self.model.tensor(tensor_index)()[0]\n input_tensor[:, :] = image", "def _set_input_tensor(self, image):\n tensor_index = self._interpreter.get_input_details()[0]['index']\n input_tensor = self._interpreter.tensor(tensor_index)()[0]\n input_tensor[:, :] = image", "def set_input_tensor(image):\n tensor_index = interpreter.get_input_details()[0]['index']\n input_tensor = interpreter.tensor(tensor_index)()[0]\n input_tensor[:, :] = image", "def set_input_tensor(interpreter, image):\n tensor_index = interpreter.get_input_details()[0]['index']\n input_tensor = interpreter.tensor(tensor_index)()[0]\n input_tensor[:, :] = image", "def set_input_tensor(interpreter, image):\n tensor_index = interpreter.get_input_details()[0][\"index\"]\n input_tensor = interpreter.tensor(tensor_index)()[0]\n input_tensor[:, :] = image", "def set_input_tensor(interpreter, image):\n tensor_index = interpreter.get_input_details()[0][\"index\"]\n input_tensor = interpreter.tensor(tensor_index)()[0]\n input_tensor[:, :] = image", "def set_input_tensor(interpreter, image):\n tensor_index = interpreter.get_input_details()[0][\"index\"]\n input_tensor = interpreter.tensor(tensor_index)()[0]\n input_tensor[:, :] = image", "def setTensor(self, tensor):\t\t\n\t\tself.cur_tensor = tensor\n\t\tif tensor is not None:\n\t\t\tself.output_shape[self.cur_id] = self.cur_tensor.size()\n\t\telse:\n\t\t\tself.output_shape[self.cur_id] = None", "def set_input(self, input):\n self.input = transfer_to_device(input, self.device)", "def set_input(self, input):\r\n\r\n self.reset()\r\n self.input = input", "def set_input(self, input):\n pass", "def set_input(self, input):\n pass", "def __init__(self, input_tensor_spec):\n self._input_tensor_spec = input_tensor_spec\n super().__init__()", "def input_tensor(self):\n if self._direct_feed_dict:\n raise TypeError('This loom has direct_feed_dict set, '\n 'so it has no input tensor')\n return self._loom_input_tensor", "def call(self, input_tensor: th.Tensor) -> th.Tensor:\n return th.zeros(input_tensor.shape[0], dtype=input_tensor.dtype, device=input_tensor.device)", "def set_input(self, input):\n self.real_A = input['A'].to(self.device)\n self.image_paths = input['A_paths']", "def set_input(self, idx, input_stream):\n \n raise NotImplementedError", "def set_task_input(self, task, input_id, value):\n self._write_transaction(tx.set_task_input, task=task, input_id=input_id, value=value)", "def SetInput(self, , , p_float_6):\n ...", "def set_task_input(self, task, input_id, value):\n self._gdb_interface.set_task_input(task, input_id, value)", "def set_input(self, nodeVal: NodeValue) -> None:\n\n self.inputs_.append(nodeVal)", "def set_inputs(\n self,\n train_X: Tensor,\n train_Y: Tensor,\n train_Yvar: Optional[Tensor],\n task_feature: int,\n task_rank: Optional[int] = None,\n ):\n super().set_inputs(train_X, train_Y, train_Yvar)\n # obtain a list of task indicies\n all_tasks = train_X[:, task_feature].unique().to(dtype=torch.long).tolist()\n self.task_feature = task_feature\n self.num_tasks = len(all_tasks)\n self.task_rank = task_rank or 1\n # assume there is one column for task feature\n self.ard_num_dims = self.train_X.shape[-1] - 1", "def set_input(self, input, test_pose=None):\n self.image = input['image']\n self.batchsize = len(self.image)\n self.pose = input['pose'].long() #convert to LongTensor\n\n if self.is_Train:\n self.input_pose = one_hot(self.pose, self.N_p)\n else:\n 
self.input_pose = one_hot(test_pose.long(), self.N_p)\n\n self.identity = input['identity'].long() #convert to LongTensor\n self.name = input['name']\n self.fake_identity = torch.zeros(self.batchsize).long() # 0 indicates fake\n self.noise = torch.FloatTensor(np.random.normal(loc=0.0, scale=0.3, size=(self.batchsize, self.N_z)))\n\n #cuda\n if self.opt.gpu_ids:\n self.image = self.image.cuda()\n self.pose = self.pose.cuda()\n self.input_pose = self.input_pose.cuda()\n self.identity = self.identity.cuda()\n self.fake_identity = self.fake_identity.cuda()\n self.noise = self.noise.cuda()\n\n self.image = Variable(self.image)\n self.pose = Variable(self.pose)\n self.input_pose = Variable(self.input_pose)\n self.identity = Variable(self.identity)\n self.fake_identity = Variable(self.fake_identity)\n self.noise = Variable(self.noise)", "def set_tf(self, x):\n x = float(x)\n if self.tf != x:\n self.tf = x", "def coord(self, tensor: Union[Tensor, np.ndarray]) -> None:\n try:\n tensor = tensor.reshape(self.shape[0], 3)\n except (RuntimeError, ValueError): # for torch.Tensor and np.ndarray\n raise ValueError(f'got unexpected shape {tensor.shape}')\n if not isinstance(tensor, Tensor):\n tensor = self.tensor.new_tensor(tensor)\n self.tensor[:, :3] = tensor", "def forward_tensor(self, x):\n pass", "def get_tensor_from_input(self, input_data: Dict[str, Any],\n **kwargs) -> torch.Tensor:\n raise NotImplementedError", "def input(self, input):\n\n self._input = input", "def to(self, *args, **kwargs):\n self._tensor = self._tensor.to(*args, **kwargs)\n return self", "def _create_train_input(self, input_batch):\n self.raw_image = input_batch\n self.image = tf.reshape(self.raw_image, (-1, self._im_size[0], self._im_size[1]))\n self.lr = tf.placeholder(tf.float32, name='lr')\n self.keep_prob = tf.placeholder(tf.float32, name='keep_prob')", "def SetInput(self, *args):\n return _itkSpatialObjectWriterPython.itkSpatialObjectWriter3_SetInput(self, *args)", "def identity(self, input_tensor, name):\n return tf.identity(input_tensor, name=name)", "def set(self, enc_tensor):\n if not isinstance(enc_tensor, CrypTensor):\n enc_tensor = self.new(enc_tensor)\n return self.copy_(enc_tensor)", "def set_training_data(self, *, inputs: Inputs) -> None:\n\t\tsuper().set_training_data(inputs=inputs)", "def tensor(data, **context):\n raise NotImplementedError", "def tensor(self, X):\n return tf.convert_to_tensor(X, dtype=self.dtype)", "def __init__(self, tensor, df):\n super().__init__()\n self.tensor = tensor\n self.df = df", "def __set_inputs__(self):\n self.__set_in_out_var__(None, 0) # TODO: inspect None", "def set_inputs(self, inputs):\n self.attributes[\"inputs\"] = inputs", "def set_ivt_variable(self, var):\n self.set_input_variable(var)", "def __init__(self, input_mask):\n super().__init__()\n if isinstance(input_mask, torch.Tensor):\n self.register_buffer('input_mask', input_mask.float(), persistent=False)\n else:\n self.input_mask = input_mask", "def train(self, X):\n self.X = X", "def set_input(vtk_object, current_input):\n if isinstance(current_input, vtk.vtkPolyData):\n if vtk.VTK_MAJOR_VERSION <= 5:\n vtk_object.SetInput(current_input)\n else:\n vtk_object.SetInputData(current_input)\n elif isinstance(input, vtk.vtkAlgorithmOutput):\n vtk_object.SetInputConnection(current_input)\n\n vtk_object.Update()\n return vtk_object", "def __call__(self, x, **kwargs):\n x = as_tensor_variable(x)\n return super().__call__(x, dtype=x.dtype, **kwargs)", "def __init__(self, input, weight_init=None):\n n_in = 
input.get_shape()[1].value\n \n self.input = input\n \n # Initiate the weight for the input layer\n r = 4*np.sqrt(3.0/n_in)\n\n if weight_init is None:\n self.w = tf.Variable(tf.random_uniform([n_in,],-r, r), name='w')\n else: \n self.w = tf.Variable(weight_init, name='w')\n\n self.output = self.w * self.input", "def train_set_input(self, input):\r\n self.real_video = input['video'].cuda()\r\n self.source_image = input['trimmed'].cuda()\r\n self.source_image_label = input['trimmed_label'].cuda()\r\n self.video_label = input['video_label'].cuda()", "def input_tensor(interpreter):\n tensor_index = interpreter.get_input_details()[0]['index']\n return interpreter.tensor(tensor_index)()[0]", "def SetInput(self, *args):\n return _itkSpatialObjectWriterPython.itkSpatialObjectWriter2_SetInput(self, *args)", "def setInput(self, x, fadetime=0.05):\n self._input = x\n self._in_fader.setInput(x, fadetime)", "def setInput(self, input, layerName = None):\n if layerName == None:\n # try each of the current states\n for key in self.currentStates.keys():\n state = self.currentStates[key]\n newStateName = state.inputs.get(input)\n if newStateName:\n return self.gotoState(newStateName, key)\n return\n\n # use the specified layer / currentState\n currentState = self.currentStates.get(layerName)\n if not currentState:\n raise StateException(\"No layer %s\" % layerName)\n newStateName = currentState.inputs.get(input)\n if newStateName:\n return self.gotoState(newStateName, layerName)", "def set_input(self, *arg, **kw):\n # Convert arguments into keyword arguments\n for i, a in enumerate(arg):\n kw[str(i)] = a\n\n for name, value in six.iteritems(kw):\n if name not in self._inputs:\n raise ValueError(\"Invalid port name '{0}'\".format(name))\n\n if isinstance(value, Port):\n port = value\n else:\n port = Task.create_source(value).get_output()\n port.connect(self._inputs[name])\n\n self._dirty = True\n return self", "def copy(tensor):\n raise NotImplementedError", "def init_tensors(self, sample, *args):\n raise NotImplementedError", "def set_inputs(self,inputs):\n raise NotImplementedError(\"Robot.set_inputs\")", "def __init__(self, x: tf.Tensor):\n self.__frame = x", "def set_val(self, input):\n return", "def setInputSentence(self, sentence):\n self.inputSentence = sentence", "def set_input(self, input):\n AtoB = self.opt.direction == 'AtoB'\n self.real_A = input['A' if AtoB else 'B'].to(self.device)\n self.inst = input['inst'].to(self.device)\n self.real_B = input['B' if AtoB else 'A'].to(self.device)\n self.image_paths = input['A_paths' if AtoB else 'B_paths']", "def changeInputShape(self,shape):\n self.input_shape = shape", "def share(self, value):\n self._tensor = value", "def transform(self, X: Tensor) -> Tensor:\n pass # pragma: no cover", "def set_value(self, indices, val):\r\n assert len(indices) == 3, indices\r\n if self.model_tensor is None:\r\n raise ValueError(\"Please set the tensor\")\r\n self.model_tensor[indices[0], indices[1], indices[2]] = val\r\n return val", "def input(self, source) -> None:\n if source is self._source:\n return\n self._source = source\n if self._socket is not None:\n self._output.input = source", "def input_layer(self, input_layer):\n x = self._from_nhwc(input_layer)\n x = tf.cast(x, self.dtype)\n # Rescale and shift to [-1,1]\n x = x * (1./127.5) - 1\n return x", "def __init__(self, tensor, size_x, size_y):\n self.size_x = size_x\n self.size_y = size_y\n self.tensor = self.zero_pad(tensor, size_x, WIDTH)\n self.tensor = self.zero_pad(self.tensor, size_y, HEIGHT)\n 
assert(self.tensor.shape[WIDTH] % size_x == 0)\n assert(self.tensor.shape[HEIGHT] % size_y == 0)\n self.steps_x = self.tensor.shape[WIDTH] // size_x\n self.steps_y = self.tensor.shape[HEIGHT] // size_y\n self.max_steps = self.steps_x * self.steps_y\n self.out_shape = (tensor.shape[0], tensor.shape[1], self.steps_y, self.steps_x)", "def set_tensor_data(self, data: dict) -> None:\n assert isinstance(data,\n dict), f'data should be a `dict` but got {data}'\n for k, v in data.items():\n if k == 'gt_label':\n self.set_gt_label(v)\n elif k == 'prompt':\n self.set_field(v, k, dtype=(str, list))\n else:\n self.set_field(all_to_tensor(v), k, dtype=torch.Tensor)", "def Update(self, value):\n self.SetValue(self.GetValue() + tf.cast(value, self.dtype))", "def Update(self, value):\n self.SetValue(self.GetValue() + tf.cast(value, self.dtype))", "def set_task_input_type(self, task, input_id, type_):\n self._write_transaction(tx.set_task_input_type, task=task, input_id=input_id, type_=type_)", "def convert_set_value(g, op, block):\n\n x = g.get_node(op.input(\"Input\")[0])\n if op.input(\"StartsTensorList\"):\n starts = g.get_node(op.input(\"StartsTensorList\")[0])\n else:\n starts = op.attr(\"starts\")[0]\n\n if op.input(\"EndsTensorList\"):\n ends = g.get_node(op.input(\"EndsTensorList\")[0])\n else:\n ends = op.attr(\"ends\")[0]\n\n axes = op.attr(\"axes\")\n assert len(axes) == 1, \"Only support one axes now.\"\n axes = axes[0]\n\n input_shape = infer_shape(x)\n ends = min(ends, input_shape[axes])\n\n if op.input(\"StepsTensorList\"):\n steps = g.get_node(op.input(\"StepsTensorList\")[0])\n else:\n steps = op.attr(\"steps\")[0]\n\n if op.input(\"ValueTensor\"):\n value = g.get_node(op.input(\"ValueTensor\")[0])\n else:\n input_dtype = infer_type(x).checked_type.dtype\n if input_dtype == \"float64\":\n value = _expr.const(op.attr(\"fp64_values\"), dtype=\"float64\")\n elif input_dtype == \"float32\":\n value = _expr.const(op.attr(\"fp32_values\"), dtype=\"float32\")\n elif input_dtype == \"int32\":\n value = _expr.const(op.attr(\"int32_values\"), dtype=\"int32\")\n elif input_dtype == \"int64\":\n value = _expr.const(op.attr(\"int64_values\"), dtype=\"int64\")\n else:\n raise tvm.error.OpNotImplemented(\n \"dtype {} is not supported for set_value\".format(input_dtype)\n )\n\n sliced_data = _op.strided_slice(x, begin=[starts], end=[ends], strides=[steps], axes=[axes])\n sliced_shape = infer_shape(sliced_data)\n\n if infer_shape(value) != sliced_shape:\n expand_value = _op.broadcast_to(value, sliced_shape)\n else:\n expand_value = value\n\n if starts < 0:\n starts = starts + input_shape[axes]\n if ends < 0:\n ends = ends + input_shape[axes]\n\n indices = _op.arange(\n start=_expr.const(starts, dtype=\"int32\"),\n stop=_expr.const(ends, dtype=\"int32\"),\n step=_expr.const(steps, dtype=\"int32\"),\n dtype=\"int32\",\n )\n indices = _op.expand_dims(indices, axis=0)\n out = _op.scatter_nd(x, indices, expand_value, \"update\")\n g.add_node(op.output(\"Out\")[0], out)", "def setValue(self,val):\n self.input.setValues(val)", "def set_inputs(self, new_inputs: Iterable[tensor.Tensor]):\n for t in new_inputs:\n if t.graph != self.graph:\n raise ValueError(\"Tensor {} points to graph {}, but this node is in a \"\n \"different graph {}\".format(t, t.graph, self.graph))\n self._inputs = list(new_inputs)\n self._graph.increment_version_counter() # New edges added to graph", "def set_input(self, in_stream):\n self._in = self._wrap_stream(in_stream, 'in')\n return self._in", "def set_input(self, input):\n AtoB = 
self.opt.direction == 'AtoB'\n self.real_A = input['A' if AtoB else 'B'].to(self.device)\n self.real_B = input['B' if AtoB else 'A'].to(self.device)\n self.true_time = input['time_period'][0]\n self.image_paths = input['A_paths' if AtoB else 'B_paths']", "def set_parameter(self, output):\n self.model.set_parameter(output);", "def switch_to_tuned_inputs(self):\n \n self.h_e=self.inputs_flat.T\n self.h=np.vstack([self.h_e,self.h_i])", "def astensor(self, tensor_in, dtype='float'):\n dtypemap = {'float': torch.float, 'int': torch.int, 'bool': torch.uint8}\n try:\n dtype = dtypemap[dtype]\n except KeyError:\n print('Invalid dtype: dtype must be float, int, or bool.')\n raise\n\n tensor = torch.as_tensor(tensor_in, dtype=dtype)\n # Ensure non-empty tensor shape for consistency\n try:\n tensor.shape[0]\n except IndexError:\n tensor = tensor.expand(1)\n return tensor", "def __init__(self, tfInputGraph=None, inputMapping=None, outputMapping=None, tfHParms=None):\n super(TFTransformer, self).__init__()\n kwargs = self._input_kwargs\n self.setParams(**kwargs)", "def action(self):\n operator = self.create_operator()\n operator.set_name(self.node_name)\n if not isinstance(operator, zautograd.Variable):\n z_tensor = operator(self.inputs)\n operator.set_weights(self.format_params(self.params))\n else:\n z_tensor = operator\n operator.node.element().set_weights(self.format_params(self.params))\n\n self.all_tensors[self.output] = z_tensor # update the all_tensors\n return z_tensor", "def __init__(self, shape, input_var=None):\n\n self.output = layers.InputLayer(shape, input_var=input_var)", "def build(self, input_shape):\n shape = np.ones(len(input_shape), dtype=np.int32)\n shape[self._axis] = input_shape[self._axis]\n self._rand_shape = tf.constant(shape, dtype=tf.dtypes.int32)", "def __init__(self, incoming, name='RNNInputLayer'):\n super(RNNInputLayer, self).__init__()\n self.incoming, self.incoming_shape = get_input(incoming)\n with tf.variable_scope(name) as self.layer_scope:\n self.out = self.incoming()\n self.name = name", "def initialise_symbolic_input(self):\n\n\t\tself.symbolic_input = theano.tensor.dmatrix(\"x\")", "def set_input_values(self, input_values):\n raise NotImplementedError(\n 'Derived ExternalGreyBoxModel classes need'\n ' to implement the method: set_input_values'\n )", "def send_tensor_input(stream_name_, plugin_id, input_data, input_shape, stream_manager):\n tensor_list = MxpiDataType.MxpiTensorPackageList()\n tensor_pkg = tensor_list.tensorPackageVec.add()\n # init tensor vector\n tensor_vec = tensor_pkg.tensorVec.add()\n tensor_vec.deviceId = 0\n tensor_vec.memType = 0\n tensor_vec.tensorShape.extend(input_shape)\n tensor_vec.tensorDataType = 0\n tensor_vec.dataStr = input_data\n tensor_vec.tensorDataSize = len(input_data)\n\n return send_protobuf(stream_name_, plugin_id, tensor_list, stream_manager)", "def __update_mu(self, mu):\n assert mu.shape in [(self.n_components, self.n_features), (1, self.n_components,\n self.n_features)], \"Input mu does not have required tensor dimensions (%i, %i) or (1, %i, %i)\" % (\n self.n_components, self.n_features, self.n_components, self.n_features)\n\n if mu.shape == (self.n_components, self.n_features):\n self.mu = tf.expand_dims(mu, 0)\n elif mu.shape == (1, self.n_components, self.n_features):\n self.mu = mu", "def SetTensorAlias(tensor, alias):\n return _C.SetTensorAlias(_stringify_tensor(tensor), alias)", "def _set_input(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_input_pyangbind_example__input, is_container='container', 
yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"input must be of a type compatible with base=yc_input_pyangbind_example__input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__input = t\n if hasattr(self, '_set'):\n self._set()", "def forward(self, input: torch.Tensor) -> torch.Tensor:\n return swish(input)", "def setParams(self, inputCol=None, outputCol=None, graph=None,\n inputTensor=utils.IMAGE_INPUT_PLACEHOLDER_NAME, outputTensor=None,\n outputMode=\"vector\"):\n kwargs = self._input_kwargs\n return self._set(**kwargs)", "def setParams(self, tfInputGraph=None, inputMapping=None, outputMapping=None, tfHParms=None):\n super(TFTransformer, self).__init__()\n kwargs = self._input_kwargs\n # Further conanonicalization, e.g. converting dict to sorted str pairs happens here\n return self._set(**kwargs)", "def set_X(self, X):\n assert isinstance(X, np.ndarray)\n state = self.update_model()\n self.update_model(False)\n self.X = ObsAr(X)\n self.update_model(state)", "def setTransition(self,input,target):\n\t\tself.trans[input] = target\n\t\t# self.outputs[input] = output\n\t\t# self.direction[input] = direction if direction >= -1 and direction <= 1 else 1", "def set_input_metadata(self, input_metadata):\n self.input_metadata = input_metadata\n if input_metadata is not None:\n with contextlib.suppress(AttributeError):\n self.data_loader.input_metadata = input_metadata", "def _to_tensor(cls, tensor):\n if isinstance(tensor, Tensor):\n return tensor\n return Tensor(data=tensor)", "def _fill_input(self):\n for sc in self.initial:\n if sc not in self.litter:\n self.litter[sc] = [0., 0., 0., 0., 0., 0.,\n 0., 0., 0., 0., 0., 0.]\n for sc in self.litter:\n if sc not in self.initial:\n self.initial[sc] = [0., 0., 0., 0., 0., 0.,\n 0., 0., 0., 0., 0., 0.]", "def call(self, input_tensor):\n x = self.conv_1(input_tensor)\n x = self.bn_1(x, training=False)\n x = self.relu(x)\n x = self.conv_2(x)\n x = self.bn_2(x, training=False)\n x = self.relu(x)\n x = self.conv_3(x)\n x = self.bn_3(x, training=False)\n x_shortcut = self.shortcut(input_tensor)\n x_shortcut = self.bn_shortcut(x_shortcut, training=False)\n x = keras.layers.add([x, x_shortcut])\n x = self.relu(x)\n return x", "def get_tensor_from_input(input_data: Dict[str, Any]) -> torch.Tensor:\n return input_data['img']", "def __init__(self, inputCol=None, outputCol=None, graph=None,\n inputTensor=utils.IMAGE_INPUT_PLACEHOLDER_NAME, outputTensor=None,\n outputMode=\"vector\"):\n super(TFImageTransformer, self).__init__()\n self._setDefault(inputTensor=utils.IMAGE_INPUT_PLACEHOLDER_NAME)\n self._setDefault(outputMode=\"vector\")\n kwargs = self._input_kwargs\n self.setParams(**kwargs)", "def __init__(self, input, init_w, init_b, activation='sigmoid'):\n\n n_in = input.get_shape()[1].value\n self.input = input\n\n # Initiate the weight for the input layer\n \n w = tf.Variable(init_w, name='w')\n b = tf.Variable(init_b, name='b')\n\n output = tf.add(tf.matmul(input, w), b)\n output = activate(output, activation)\n \n self.w = w\n self.b = b\n self.output = output\n self.params = [w]" ]
[ "0.78925943", "0.7880494", "0.7589295", "0.74826103", "0.74208987", "0.74208987", "0.74208987", "0.73879915", "0.6989476", "0.67201304", "0.66477954", "0.66477954", "0.65359837", "0.64077353", "0.6385685", "0.6378955", "0.6283016", "0.62689894", "0.6259443", "0.62375194", "0.6205585", "0.6185332", "0.61306477", "0.61286664", "0.6084893", "0.6081315", "0.60671747", "0.60534513", "0.59708554", "0.5957335", "0.5947927", "0.5907492", "0.5893956", "0.5866987", "0.58645576", "0.57944775", "0.57691944", "0.5757153", "0.5747513", "0.5746367", "0.5738332", "0.5728428", "0.57255524", "0.57220435", "0.5700817", "0.56942624", "0.5669548", "0.566921", "0.5663712", "0.5655328", "0.5641265", "0.55543107", "0.5553763", "0.554983", "0.554317", "0.55404913", "0.5525811", "0.551697", "0.55160594", "0.54841083", "0.5482248", "0.54755473", "0.54748744", "0.54746217", "0.5468877", "0.5452204", "0.54463196", "0.54463196", "0.5433756", "0.54271203", "0.54234105", "0.54102665", "0.5407598", "0.539805", "0.53911006", "0.5389536", "0.5389145", "0.5382806", "0.53704447", "0.5360565", "0.5356847", "0.5352924", "0.53468114", "0.53424853", "0.53333944", "0.53319633", "0.5318746", "0.53145635", "0.5312823", "0.530189", "0.5299653", "0.52973694", "0.52928144", "0.529188", "0.52858853", "0.52838355", "0.52802706", "0.52584136", "0.5257825", "0.524895" ]
0.78595096
2
Returns the output tensor at the given index.
def get_output_tensor(self, index):
    output_details = self.interpreter.get_output_details()[index]
    tensor = np.squeeze(self.interpreter.get_tensor(output_details['index']))
    return tensor
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_output_tensor(index):\n output_details = interpreter.get_output_details()[index]\n tensor = np.squeeze(interpreter.get_tensor(output_details['index']))\n return tensor", "def get_output_tensor(self, index):\n output_details = self.model.get_output_details()[index]\n tensor = np.squeeze(self.model.get_tensor(output_details['index']))\n return tensor", "def _get_output_tensor(self, index):\n output_details = self._interpreter.get_output_details()[index]\n tensor = np.squeeze(self._interpreter.get_tensor(output_details['index']))\n return tensor", "def get_output_tensor(interpreter, index):\n output_details = interpreter.get_output_details()[index]\n tensor = np.squeeze(interpreter.get_tensor(output_details['index']))\n return tensor", "def get_output_tensor(interpreter, index):\n output_details = interpreter.get_output_details()[index]\n tensor = np.squeeze(interpreter.get_tensor(output_details[\"index\"]))\n return tensor", "def get_output_tensor(interpreter, index):\n output_details = interpreter.get_output_details()[index]\n tensor = np.squeeze(interpreter.get_tensor(output_details[\"index\"]))\n return tensor", "def get_output_tensor(interpreter, index):\n output_details = interpreter.get_output_details()[index]\n tensor = np.squeeze(interpreter.get_tensor(output_details[\"index\"]))\n return tensor", "def get_output(self, idx):\n raise NotImplementedError", "def output_tensor(interpreter, i):\n output_details = interpreter.get_output_details()[i]\n output_data = np.squeeze(interpreter.tensor(output_details['index'])())\n if 'quantization' not in output_details:\n return output_data\n scale, zero_point = output_details['quantization']\n if scale == 0:\n return output_data - zero_point\n return scale * (output_data - zero_point)", "def output(index: int = 0) -> str:\n return outputs()[index]", "def __getitem__(self, index):\n return self.input_[index], self.output[index]", "def get_output_tensor_size(self, index):\n return self._engine.get_output_tensor_size(index)", "def output(x_tensor, num_outputs, name=\"output\"):\n with tf.name_scope(name):\n return tf.layers.dense(x_tensor, num_outputs)", "def output_tensor(self, type_shape):\n return self._output[self._type_shape_to_idx[type_shape]]", "def output(x_tensor, num_outputs):\n # TODO: Implement Function\n y = tf.layers.dense(x_tensor,num_outputs)\n return y", "def output(x_tensor, num_outputs):\n shape = x_tensor.get_shape().as_list()\n weight = tf.Variable(tf.truncated_normal([shape[-1], num_outputs], stddev=0.1))\n bias = tf.Variable(tf.zeros(num_outputs))\n return tf.add(tf.matmul(x_tensor, weight), bias)", "def get_output(self, **kwargs):\n with tf.variable_scope(self.layer_scope):\n return self.out", "def output(x_tensor, num_outputs):\n w = tf.Variable(tf.random_normal([x_tensor.shape[1].value, num_outputs], stddev=0.1))\n b = tf.Variable(tf.random_normal([num_outputs], stddev=0.1))\n out = tf.add(tf.matmul(x_tensor, w), b)\n return out", "def output(x_tensor, num_outputs):\n # TODO: Implement Function\n out_w = tf.Variable(tf.truncated_normal([x_tensor.get_shape().as_list()[1], num_outputs],stddev=0.1,dtype=tf.float32))\n out_b = tf.Variable(tf.zeros(num_outputs))\n return tf.matmul(x_tensor,out_w)+out_b", "def gather_tensors_using_index(src_tensor, index_tensor) -> torch.FloatTensor:\n if index_tensor.size()[-1] != 1:\n raise ValueError(\"Expecting last index to be 1. 
Found {}\".format(index_tensor.size()))\n flat_idx_tensor = index_tensor.view(index_tensor.size(0), -1, 1, 1) # B * CP * 1 * 1\n\n # B * CP * T * h\n expanded_index_tensor = flat_idx_tensor.expand(flat_idx_tensor.shape[:-2]\n + src_tensor.shape[-2:]).long() # B * CP * T * h\n\n flat_extracted = torch.gather(src_tensor, 1, expanded_index_tensor) # B * CP * T * h\n\n extracted = flat_extracted.view(src_tensor.size(0), index_tensor.size(1),\n index_tensor.size(2), src_tensor.size(2), -1) # B * C * P * T * h\n return extracted", "def get_output_slice_idx(self, output_index):\r\n ipos = 0\r\n opos = output_index\r\n for otaps in zip(self.mitmot_out_taps()):\r\n if len(otaps) > 0:\r\n return ipos\r\n else:\r\n opos = opos - 1\r\n ipos += len(otaps)\r\n return ipos + opos", "def output(x_tensor, num_outputs):\n tensor = tf.contrib.layers.fully_connected(inputs=x_tensor, num_outputs=num_outputs, activation_fn=None)\n return tensor", "def __getitem__(self, idx):\n # check if idx is valid:\n if idx < 0:\n idx += self.__len__()\n if idx >= self.__len__():\n raise IndexError(\n f'index {idx} is out of bound with size {self.__len__()}.')\n \n # get sample\n kernel = int(self.kernel * self.fs)\n stride = int(self.stride * self.fs)\n idx_start = idx * stride\n idx_stop = idx_start + kernel\n data = self.data[:, idx_start: idx_stop].copy()\n \n # apply padding if needed\n nsamp = data.shape[-1]\n if nsamp < kernel:\n pad = kernel - nsamp\n data = np.pad(data, ((0, 0), (0, pad)), mode=self.pad_mode)\n \n # separate into target HOFT and aux channel\n target = data[self.target_idx]\n aux = np.delete(data, self.target_idx, axis=0)\n \n # convert into Tensor\n target = torch.Tensor(target)\n aux = torch.Tensor(aux)\n \n return aux, target", "def __getitem__(self, index):\r\n\r\n #current input in the sequence at t\r\n x = self.yInput[:, index]\r\n #input = x[t] not like that to create a matrix and not a vector\r\n \r\n #current target value at t\r\n target = self.yTarget[:, index]\r\n\r\n return (x, target)", "def __getitem__(self, index):\r\n\r\n #current input in the sequence at t\r\n x = self.yInput[:, index]\r\n #input = x[t] not like that to create a matrix and not a vector\r\n \r\n #current target value at t\r\n target = self.yTarget[:, index]\r\n\r\n return (x, target)", "def layer_output(x_tensor, num_outputs):\n weights = tf.Variable(\n tf.random_normal(\n [x_tensor.shape[1].value, num_outputs],\n stddev=0.1\n )\n )\n bias = tf.Variable(tf.zeros([num_outputs]))\n return tf.add(tf.matmul(x_tensor, weights), bias)", "def getNeuron(self, index):\n\t\treturn self.loader.getNeuron(index)", "def __getitem__(self, index):\n x = self._input_data[index]\n if self._output_data is None:\n return x, x\n else:\n y = self._output_data[index]\n return x, y", "def get(self, index):\n return self._get_node(index)", "def output(self, x_tensor, num_outputs):\n shape = x_tensor.get_shape().as_list()\n weights = tf.Variable(tf.truncated_normal([shape[-1], num_outputs], mean=0, stddev=0.01))\n biases = tf.Variable(tf.zeros([num_outputs]))\n logits = tf.add(tf.matmul(x_tensor, weights), biases)\n return logits", "def get_value(self, indices):\r\n assert len(indices) == 3, indices\r\n if self.model_tensor is None:\r\n raise ValueError(\"Please set the tensor\")\r\n return self.model_tensor[indices[0], indices[1], indices[2]]", "def __getitem__(self, index):\n img = Image.open(os.path.join(self.img_path, self.imgs[index][0]))\n label = float(self.imgs[index][1]) if self._fine_tune or self._test else 
int(float(self.imgs[index][1])) - 1\n return self.preproc(img), torch.tensor(label)", "def get(self, index):\n assert isinstance(index, np.ndarray)\n return self.weight[index]", "def __getitem__(self, index): # data for one dialogue file\n input_tensor, target_tensor, bs_tensor, db_tensor = \\\n self.input_tensor[index], self.target_tensor[index], self.bs_tensor[index], self.db_tensor[index]\n mask_tensor = self.mask_tensor[index] if self.mask_tensor else None\n return input_tensor, target_tensor, bs_tensor, db_tensor, mask_tensor", "def get_encoded_item(self, encoded, index):\n\n #for vanilla RNN and GRU, since they have a hidden state represented as a single tensor\n ##return encoded[:, index:index+1]\n\n #for LSTM, since it has a hidden state represented as a tuple of two tensors: the cell state and the hidden state\n return encoded[0][:, index:index+1].contiguous(), encoded[1][:, index:index+1].contiguous()", "def get_outputs():\n all_hidden_states = get_states()\n all_attention = tf.map_fn(get_attention, all_hidden_states)\n a_values = tf.nn.softmax(all_attention, axis = 0)\n final_hidden_state = tf.einsum('ijk,ijl->jkl', a_values, \n all_hidden_states)\n output = tf.nn.sigmoid(tf.matmul(final_hidden_state[:,0,:], Wo) + bo, \n name='outputs')\n return output, a_values", "def __getitem__(self, index):\n\n # Generate indexes of the batch\n indexes = self.indexes[index * self.batch_size:(index + 1) * self.batch_size]\n\n X, y = self.__data_generation(indexes)\n return X, y", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMUC2_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMUS2_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_GraftNthOutput(self, idx, output)", "def test_output(data,idx,model):\n x,y = data[idx]\n out = model(x)\n return y.data.cpu().numpy(), out.data.cpu().numpy()", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_GraftNthOutput(self, idx, output)", "def get_index(self, index):\n return self.get_node_from_index(index).data", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSUC2_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSUS2_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMUC3_GraftNthOutput(self, idx, output)", "def getTensor(self):\n\t\treturn self.cur_tensor", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSUC3_GraftNthOutput(self, idx, output)", "def 
__getitem__(self, index):\n\n source = self.data[\"noisy_images\"][index,:,:]\n target = self.data[\"clean_images\"][index,:,:]\n\n source = torch.from_numpy(source.reshape(1,source.shape[0],source.shape[1])).cuda()\n target = torch.from_numpy(target.reshape(1,target.shape[0],target.shape[1])).cuda()\n \n if self.args.loss_function == 'MSE_Affine' or self.args.loss_function == 'N2V':\n target = torch.cat([source,target], dim = 0)\n\n return source, target", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMUS3_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMSS3_GraftNthOutput(self, idx, output)", "def __getitem__(self, index):\n img, target = self.data[index], self.targets[index]\n\n return img, target", "def _index_select_nd(source: torch.Tensor, index: torch.Tensor) -> torch.Tensor:\n index_size = index.size() # (num_atoms/num_bonds, max_num_bonds)\n suffix_dim = source.size()[1:] # (hidden_size,)\n final_size = index_size + suffix_dim # (num_atoms/num_bonds, max_num_bonds, hidden_size)\n\n target = source.index_select(dim=0, index=index.view(\n -1)) # (num_atoms/num_bonds * max_num_bonds, hidden_size)\n target = target.view(\n final_size) # (num_atoms/num_bonds, max_num_bonds, hidden_size)\n\n return target", "def _decode_to_index(self, decoder_output):\n value, index = torch.topk(decoder_output, 1)\n index = index.transpose(0, 1) # S = 1 x B, 1 is the index of top1 class\n if self.use_cuda:\n index = index.cuda()\n return index", "def map_model_to_obs(fwd, t_obs, output_index):\n\n # old code\n # t_ind = [np.where(fwd[O_TIME, :] == a)[0][0] for a in t_obs]\n # return fwd[output_index, t_ind]\n\n t_ind = [np.where(fwd[O_TIME, :] == a)[0][0] for a in t_obs]\n\n retval = fwd[output_index[0], t_ind]\n for i, num in enumerate(output_index):\n if i > 0:\n r = fwd[num, t_ind]\n retval = np.append(retval, r)\n return retval", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSUS3_GraftNthOutput(self, idx, output)", "def advanced_indexing_op(input, index):\n batch_size = tf.shape(input)[0]\n max_length = int(input.get_shape()[1])\n dim_size = int(input.get_shape()[2])\n index = tf.range(0, batch_size) * max_length + (index - 1)\n flat = tf.reshape(input, [-1, dim_size])\n relevant = tf.gather(flat, index)\n return relevant", "def __getitem__(self, index):\n full_index = index * self.batch_size\n\n if full_index + self.batch_size >= self.max_index:\n self.rows = self.max_index - full_index\n else:\n self.rows = min(self.batch_size, self.max_index)\n\n # print(\"index {}; full index: {}; rows: {}\".format(index, full_index, self.rows))\n\n # Generate data\n X, y = self.__data_generation(self.rows)\n\n return X, y", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMSS2_GraftNthOutput(self, idx, output)", "def __getitem__(self, index):\n dataset = self.train_dataset if self.mode == 'train' else self.test_dataset\n filename, label = dataset[index]\n image = Image.open(os.path.join(self.image_dir, filename))\n return self.transform(image), torch.FloatTensor(label)", "def __getitem__(self, index: int) -> T:\n node_at_index = self.__get_node_at_index(index)\n return node_at_index.item", "def _index(tensor_3d, tensor_2d):\n x, y, z = tensor_3d.size()\n t = 
tensor_3d.reshape(x * y, z)\n tt = tensor_2d.reshape(x * y)\n v = t[torch.arange(x * y), tt]\n v = v.reshape(x, y)\n return v", "def __getitem__(self, index) -> torch.nn.Parameter:\n return self.parameters[index]", "def __getitem__(self, index: int) -> List[torch.Tensor]:\n return [torch.randn(self.shape), torch.randn(self.shape)]", "def default_output(self):\r\n\r\n do = getattr(self.op, 'default_output', None)\r\n if do is None:\r\n if len(self.outputs) == 1:\r\n return self.outputs[0]\r\n else:\r\n raise AttributeError(\"%s.default_output should be an output index.\" % self.op)\r\n elif do < 0 or do >= len(self.outputs):\r\n raise AttributeError(\"%s.default_output is out of range.\" % self.op)\r\n return self.outputs[do]", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSSS2_GraftNthOutput(self, idx, output)", "def output(self, i):\n if i >= len(self._writer._buffer):\n raise RuntimeError(\"Output {} does not exist.\".format(i))\n\n return self._writer._buffer[i]", "def gather_vectors_using_index(src_tensor, index_tensor) -> torch.FloatTensor:\n if index_tensor.size()[-1] != 1:\n raise ValueError(\"Expecting last index to be 1. Found {}\".format(index_tensor.size()))\n flat_idx_tensor = index_tensor.view(index_tensor.size(0), -1, 1) # B * CP * 1\n\n # B * CP * Th\n expanded_index_size = [x for x in flat_idx_tensor.size()[:-1]] + [src_tensor.size()[-1]]\n expanded_index_tensor = flat_idx_tensor.expand(expanded_index_size).long() # B * CP * H\n\n flat_extracted = torch.gather(src_tensor, 1, expanded_index_tensor) # B * CP * H\n\n extracted = flat_extracted.view(src_tensor.size(0), index_tensor.size(1),\n index_tensor.size(2), -1) # B * C * P * H\n return extracted", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSF2_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMF2_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSF3_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMF3_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSD2_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSD3_GraftNthOutput(self, idx, output)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourcePSSS3_GraftNthOutput(self, idx, output)", "def __getitem__(self, index: int) -> Tuple[Any, Any]:\n datum, target = self.data[index], self.targets[index]\n \n if self.transform is not None:\n datum = self.transform(datum)\n\n if self.target_transform is not None:\n target = self.target_transform(target)\n\n datum = datum.squeeze()\n return datum, target", "def get(self, index):\n raise NotImplementedError() # pragma: no cover", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMD2_GraftNthOutput(self, idx, output)", "def input_tensor(interpreter):\n 
tensor_index = interpreter.get_input_details()[0]['index']\n return interpreter.tensor(tensor_index)()[0]", "def batched_index_select(input, dim, index):\n views = [input.shape[0]] + [1 if i != dim else -1 for i in range(1, len(input.shape))]\n expanse = list(input.shape)\n expanse[0] = -1\n expanse[dim] = -1\n index = index.view(views).expand(expanse)\n return torch.gather(input, dim, index)", "def GraftNthOutput(self, idx: 'unsigned int', output: 'itkDataObject') -> \"void\":\n return _itkMeshSourcePython.itkMeshSourceMD3_GraftNthOutput(self, idx, output)", "def __getitem__(self, index):\n return self.data[index[0] - 1][index[1] - 1]", "def get_output_pos(self, input_index):\r\n ipos = input_index\r\n opos = 0\r\n for otaps, itaps in zip(self.mitmot_out_taps(), self.mitmot_taps()):\r\n if len(itaps) > ipos:\r\n return opos\r\n else:\r\n opos += len(otaps)\r\n ipos -= len(itaps)\r\n for dx, taps in enumerate(self.mitsot_taps()):\r\n if len(taps) > ipos:\r\n return opos\r\n else:\r\n opos += 1\r\n ipos -= len(taps)\r\n if ipos < self.info['n_sit_sot']:\r\n return ipos + opos\r\n else:\r\n return -1", "def get_idx_to_target(self, idx):\n metadata = self.data.loc[idx]\n target = metadata['label']\n return target", "def get_efg_tensor(self, atom_index: int) -> ArrayLike:\n return self._efg_tensors[atom_index - 1]", "def __getitem__(self, index: int) -> (torch.Tensor, torch.Tensor):\n # retrieve sentence and label (correct class index)\n example, label = self.examples[index], self.labels[index]\n\n # tokenize sentence into words and other symbols\n tokenizer = get_tokenizer(\"spacy\")\n tokens = tokenizer(example)\n\n # convert tokens to their corresponding indices, according to\n # vocabulary\n token_indices = []\n for i in tokens:\n token_indices.append(self.vocab.get_index_of_token(i))\n\n return torch.LongTensor(token_indices), torch.LongTensor(label)", "def __getitem__(self, index):\n ceiling = len(self.train)\n if index < ceiling:\n return self.train[index]\n\n offset = ceiling\n ceiling += len(self.val)\n if index < ceiling:\n return self.val[index - offset]\n\n offset = ceiling\n ceiling += len(self.test)\n if index < ceiling:\n return self.test[index - offset]", "def __getitem__(self, index):\n s = index * self.batch_size % self._len\n e = s + self.batch_size\n indices = self.indices[s:e]\n\n return self.__data_generator(indices)", "def __getitem__(self, index):\n return (self.train_stats, self.preprocessed_data, self.output_directory)[index]", "def get(self, node_index):\n return self.nodes[node_index + self.low_idx]", "def output(self):\n try:\n return self.outputs[-1]\n except IndexError:\n pass\n raise ValueError(\"The sample method has not been called\")", "def label_from_index(self, index):\n assert self.labels is not None, \"Labels not processed\"\n #return self.labels[index, :, :]\n return self.labels[index]", "def get(self, index):\n return self.board[index]", "def __getitem__(self, index):\n return self.dataset[index]", "def __getitem__(self, index):\n # NOTE: this automatically supports slicing :-)\n return self._main._sequence[index]", "def __getitem__(self, idx):\n return self.batches[idx]", "def __getitem__(self, index: int) -> Tuple[torch.Tensor, int]:\n path, target = self.samples[index]\n try:\n sample = self.loader(path)\n except UnidentifiedImageError as e:\n msg = 'ImageNet22k could not load picture at {}. 
Unidentified image error.'.format(path)\n self.logwarning(msg, e)\n sample = transforms.ToPILImage()(torch.zeros(self.size[1:]).byte())\n except OSError as e:\n msg = 'ImageNet22k could not load picture at {}. OS Error.'.format(path)\n self.logwarning(msg, e)\n sample = transforms.ToPILImage()(torch.zeros(self.size[1:]).byte())\n except sre_constants_error as e:\n msg = 'ImageNet22k could not load picture at {}. SRE Constants Error.'.format(path)\n self.logwarning(msg, e)\n sample = transforms.ToPILImage()(torch.zeros(self.size[1:]).byte())\n if self.transform is not None:\n sample = self.transform(sample)\n\n return sample, 1", "def get_single_output(self, input_review, index=-1):\r\n if index >= 0:\r\n self.review_wordId_print[index] = tf.Print(\r\n input_review,\r\n [input_review],\r\n message=\"review_wordId_input%d\" % (index),\r\n summarize=10\r\n )\r\n\r\n reshaped_embedding_input = tf.nn.embedding_lookup(self.word_embedding_matrix, input_review)\r\n review_embedding_input = tf.reshape(reshaped_embedding_input, [-1, self.maxReviewLength, self.wordVec_size])\r\n\r\n if index >= 0:\r\n self.review_input_print[index] = tf.Print(\r\n review_embedding_input,\r\n [review_embedding_input],\r\n message=\"review_input_%d\" % (index),\r\n summarize=600\r\n )\r\n\r\n review_input_expanded = tf.expand_dims(review_embedding_input, -1)\r\n\r\n pooled_outputs = []\r\n # Create a convolution + maxpool layer for each filter size\r\n for i, filter_size in enumerate(self.filter_sizes):\r\n with tf.variable_scope(\"conv-maxpool-%s\" % filter_size, reuse=True):\r\n # Convolution Layer\r\n # filter_shape = [filter_height, filter_width, in_channels, out_channels]\r\n filter_shape = [filter_size, self.wordVec_size, 1, self.num_filters]\r\n W = tf.get_variable(name=\"W\", initializer=tf.truncated_normal(filter_shape, stddev=0.1),\r\n dtype=tf.float32)\r\n b = tf.get_variable(name=\"b\", initializer=tf.constant(0.1, shape=[self.num_filters]),\r\n dtype=tf.float32)\r\n # conv = [batchsize, self.maxReviewLength - filter_size + 1, in_channels, out_channels]\r\n conv = tf.nn.conv2d(\r\n input=review_input_expanded,\r\n filter=W,\r\n strides=[1, 1, 1, 1],\r\n padding=\"VALID\",\r\n name=\"conv_layer\",\r\n use_cudnn_on_gpu=True)\r\n # Apply nonlinearity\r\n h = tf.nn.sigmoid(tf.nn.bias_add(conv, b), name=\"relu\")\r\n # Maxpooling over the outputs\r\n # pooled = [batchsize, 1, in_channels, out_channels]\r\n pooled = tf.nn.max_pool(\r\n value=h,\r\n ksize=[1, self.maxReviewLength - filter_size + 1, 1, 1],\r\n strides=[1, 1, 1, 1],\r\n padding='VALID',\r\n name=\"pool_layer\")\r\n pooled_outputs.append(pooled)\r\n\r\n # Combine all the pooled features\r\n h_pool = tf.concat(pooled_outputs, 3)\r\n h_pool_flat = tf.reshape(h_pool, [-1, self.num_filters_total])\r\n\r\n # Add dropout\r\n with tf.name_scope(\"dropout\"):\r\n h_drop = tf.nn.dropout(h_pool_flat, self.dropout_keep_prob)\r\n\r\n with tf.variable_scope(\"cnn_final_output\", reuse=True):\r\n output_W = tf.get_variable(\r\n \"W\",\r\n shape=[self.num_filters_total, self.output_size],\r\n initializer=tf.contrib.layers.xavier_initializer(),\r\n dtype=tf.float32\r\n )\r\n b = tf.get_variable(\r\n name=\"b\",\r\n initializer=tf.constant(0.1, shape=[self.output_size]),\r\n dtype=tf.float32\r\n )\r\n scores = tf.nn.xw_plus_b(h_drop, output_W, b, name=\"scores\")\r\n\r\n return scores" ]
[ "0.88110566", "0.8773681", "0.86581725", "0.8473132", "0.8446493", "0.8446493", "0.8446493", "0.75534517", "0.7171569", "0.68134344", "0.6715133", "0.6542007", "0.65397805", "0.65058136", "0.6436549", "0.6430624", "0.63161963", "0.6215072", "0.6104049", "0.6095873", "0.6082486", "0.60789025", "0.6071977", "0.60635924", "0.60635924", "0.6061082", "0.60455656", "0.5949376", "0.59474033", "0.5946361", "0.5936355", "0.5927495", "0.59196603", "0.5904108", "0.5892459", "0.58845246", "0.5878025", "0.5872847", "0.58663625", "0.58636034", "0.58461446", "0.5844126", "0.5842538", "0.5834923", "0.583354", "0.5822195", "0.5798261", "0.579757", "0.57906115", "0.57733244", "0.5773292", "0.5758369", "0.5757597", "0.57564145", "0.5754503", "0.5748947", "0.57485914", "0.57402706", "0.5738229", "0.57305485", "0.57289773", "0.5718648", "0.57178545", "0.57098293", "0.57061756", "0.57045174", "0.56995434", "0.5698474", "0.5695025", "0.569342", "0.56804", "0.5670404", "0.56679827", "0.56662506", "0.5664355", "0.5656347", "0.56520164", "0.56281245", "0.5624675", "0.5619234", "0.5615727", "0.55993086", "0.5597896", "0.5587494", "0.5584081", "0.55720735", "0.55640155", "0.5562339", "0.5551982", "0.5547728", "0.55286896", "0.55230016", "0.55206704", "0.5511907", "0.55051154", "0.54940546", "0.54924726", "0.54866624", "0.5486268", "0.54751414" ]
0.87153566
2
Returns a list of detection results, each a dictionary of object info.
def detect_objects(self, image, threshold):
    self.set_input_tensor(image)
    self.interpreter.invoke()

    # Get all output details
    boxes = self.get_output_tensor(0)
    classes = self.get_output_tensor(1)
    scores = self.get_output_tensor(2)
    count = int(self.get_output_tensor(3))

    results = []
    for i in range(count):
        if scores[i] >= threshold:
            result = {
                'bounding_box': boxes[i],
                'class_id': classes[i],
                'score': scores[i]
            }
            results.append(result)
    return results
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def detect_objects(interpreter, image):\n set_input_tensor(interpreter, image)\n interpreter.invoke()\n\n # Get all output details\n boxes = get_output_tensor(interpreter, 0)\n classes = get_output_tensor(interpreter, 1)\n scores = get_output_tensor(interpreter, 2)\n count = int(get_output_tensor(interpreter, 3))\n\n results = []\n for i in range(count):\n # Only check for people that meet the threshold\n if classes[i] == 0.0 and scores[i] >= THRESHOLD:\n result = {\n \"bounding_box\": boxes[i],\n \"class_id\": classes[i],\n \"score\": scores[i],\n }\n results.append(result)\n return results", "def detect_objects(self, image):\n # Feed the input image to the model\n self.set_input_tensor(image)\n self.model.invoke()\n\n # Get all outputs from the model\n boxes = self.get_output_tensor(0)\n classes = self.get_output_tensor(1)\n scores = self.get_output_tensor(2)\n count = int(self.get_output_tensor(3))\n\n results = []\n for i in range(count):\n result = {\n 'bounding_box': boxes[i],\n 'class_id': int(classes[i]),\n 'score': scores[i]\n }\n results.append(result)\n return results", "def get_detect_result(self):\n\n resultdict = {'class_index' : self.class_index,\n 'obj_name' : self.obj_name,\n 'score' : self.score,\n 'bounding_box' : {\n 'x_min' : self.x_min,\n 'y_min' : self.y_min,\n 'width' : self.width,\n 'height' : self.height}\n }\n return resultdict", "def detect_objects(interpreter, image):\n set_input_tensor(interpreter, image)\n interpreter.invoke()\n\n # Get all output details\n #boxes = get_output_tensor(interpreter, 0)\n classes = get_output_tensor(interpreter, 1)\n scores = get_output_tensor(interpreter, 2)\n #count = int(get_output_tensor(interpreter, 3))\n\n #results = []\n #for i in range(count):\n # if scores[i] >= threshold:\n # result = {\n # #'bounding_box': boxes[i],\n # 'class_id': classes[i],\n # 'score': scores[i]\n # }\n # results.append(result)\n \n \n #print(\"detection results:\\n\" + str(results))\n #return results\n return np.array([int(_class) for _class in classes]), np.array(scores)", "def detect_objects(self, image):\n preprocessed_image = self._preprocess_image(image)\n\n # Feed the input image to the model\n self._set_input_tensor(preprocessed_image)\n self._interpreter.invoke()\n\n # Get all outputs from the model\n boxes = self._get_output_tensor(0)\n classes = self._get_output_tensor(1)\n scores = self._get_output_tensor(2)\n count = int(self._get_output_tensor(3))\n\n results = []\n for i in range(count):\n if scores[i] >= self.THRESHOLD:\n result = {\n 'bounding_box': boxes[i],\n 'class_id': classes[i],\n 'score': scores[i]\n }\n results.append(result)\n return results", "def get_object_detections(self):\n detections = self.__get_cropped_detections(self.image)\n return detections", "def get_detections(self):\n frame = self.get_still()\n return detector.process_frame(frame, False)", "def detect_objects(image, threshold, classes_incl=None):\n set_input_tensor(image)\n interpreter.invoke()\n\n # Get all output details\n boxes = get_output_tensor(0)\n classes = get_output_tensor(1)\n scores = get_output_tensor(2)\n count = int(get_output_tensor(3))\n\n results = []\n for i in range(count):\n if scores[i] >= threshold:\n result = {\n 'bounding_box': boxes[i],\n 'class_id': int(classes[i]),\n 'score': scores[i]\n }\n if not classes_incl:\n results.append(result)\n elif classes[i] in classes_incl:\n results.append(result)\n return results", "def object_detector(detector, img_location: str, num_detection=5 ) -> list:\n img = PIL.Image.open(img_location)\n 
img = np.array(img)\n img = tf.expand_dims(img, axis=0)\n result = detector(img)\n\n ret = []\n\n for i in range(num_detection):\n detection_class_number = int(result['detection_classes'].numpy()[0][i])\n detection_class_name = CLASSES_90[detection_class_number]\n\n detection_score = result['detection_scores'].numpy()[0][i]\n rounded_detection_score = round(float(detection_score), 2)\n\n # Append as a tuple\n ret.append( (detection_class_name, rounded_detection_score) )\n\n return ret", "def get_detections(self, image):\n self.img = jetson.utils.cudaFromNumpy(image)\n self.width = image.shape[1]\n self.height = image.shape[0]\n detections = self._net.Detect(self.img, self.width, self.height)\n print(\"The inference is happening at \" + str(self._net.GetNetworkFPS()) + \" FPS\")\n return detections, jetson.utils.cudaToNumpy(self.img)", "def detect_objects(snap):\n client = vision.ImageAnnotatorClient()\n print(snap)\n\n with open(snap, 'rb') as im_file:\n content = im_file.read()\n image = vision.Image(content=content)\n\n objects = client.object_localization(image=image).localized_object_annotations\n\n print(f\"Found {len(objects)} objects\")\n [print(f\"{objet.name} : {round(objet.score*100,2)}\") for objet in objects]\n \n return objects", "def extract_face_detections(self):\n self.detector.setInput(self.image_blob)\n self.detections = self.detector.forward()", "def detected(self):\n return self.detections", "def extract_detections(self):\n self.rescue_model.setInput(self.human_blob)\n self.predictions = self.rescue_model.forward()", "def draw_detections(self, img, yolo_results):\n\n _, height, _ = img.shape\n for yolo_result in yolo_results:\n class_index = yolo_result.class_index\n obj_name = yolo_result.obj_name\n x = yolo_result.x_min\n y = yolo_result.y_min\n w = yolo_result.width\n h = yolo_result.height\n\n offset = class_index * 123457 % self.meta.classes\n\n red = self._get_color(2, offset, self.meta.classes)\n green = self._get_color(1, offset, self.meta.classes)\n blue = self._get_color(0, offset, self.meta.classes)\n box_width = int(height * 0.006)\n cv2.rectangle(img, (int(x), int(y)), (int(x+w)+1, int(y+h)+1), (red, green, blue), box_width)\n cv2.putText(img, obj_name, (int(x) -1, int(y) -1), cv2.FONT_HERSHEY_PLAIN, 2, (red, green, blue), 2)\n\n return img", "def get_candidate_objects(output, img_size, classes, anchors, threshold):\n\n #threshold = 0.8\n iou_threshold = 0.4\n\n boxes, probs = parse_yolo_output_v2(output, img_size, len(classes), anchors)\n filter_mat_probs = (probs >= threshold)\n filter_mat_boxes = np.nonzero(filter_mat_probs)[0:3]\n boxes_filtered = boxes[filter_mat_boxes]\n probs_filtered = probs[filter_mat_probs]\n classes_num_filtered = np.argmax(probs, axis=3)[filter_mat_boxes]\n\n idx = np.argsort(probs_filtered)[::-1]\n boxes_filtered = boxes_filtered[idx]\n probs_filtered = probs_filtered[idx]\n classes_num_filtered = classes_num_filtered[idx]\n\n # too many detections - exit\n if len(boxes_filtered) > 1e3:\n print(\"Too many detections, maybe an error? 
: {}\".format(\n len(boxes_filtered)))\n return []\n\n probs_filtered = non_maxima_suppression(boxes_filtered, probs_filtered,\n classes_num_filtered, iou_threshold)\n\n filter_iou = (probs_filtered > 0.0)\n boxes_filtered = boxes_filtered[filter_iou]\n probs_filtered = probs_filtered[filter_iou]\n classes_num_filtered = classes_num_filtered[filter_iou]\n\n result = []\n for class_id, box, prob in zip(classes_num_filtered, boxes_filtered, probs_filtered):\n result.append([classes[class_id], box[0], box[1], box[2], box[3], prob])\n\n return result", "def parseResult(x, results, target_classes, detected_object_list):\n vertex1 = tuple(x[1:3].int()) # The first vertex\n vertex2 = tuple(x[3:5].int()) # The other vertex, which is opposite to c1\n\n img = results\n\n if vertex1 == vertex2:\n return img\n\n cls = int(x[-1])\n\n print('[DEBUG] cls = ' + str(cls))\n\n # to avoid the IndexError\n if cls >= len(target_classes) or cls < 0:\n return img\n\n label = \"{0}\".format(target_classes[cls]) # get the label name\n\n obj = DetectedObject()\n obj.setLabel(label)\n obj.setVertices(vertex1, vertex2)\n\n # push the detected object to the list\n detected_object_list.append(obj)\n\n return img", "def __detect_objs(self):\n while True:\n # Wait for input images\n if (not self.__predict_start) or \\\n (self.__img is None):\n continue\n\n # Client for detection\n client = vision.ImageAnnotatorClient()\n\n # Encode image to binary\n _, img_buffer = cv2.imencode(\".jpg\", self.__img)\n img_bytes = img_buffer.tobytes()\n\n # Change to vision Image type\n image = vision.Image(content=img_bytes)\n # Detect Person\n self.__detect_info = client.object_localization(image=image,\n max_results=self.__max_results\n ).localized_object_annotations\n cv2.waitKey(30)", "def json(self) -> Dict[str, List]:\n from app.dl_model.image import ClassifierInput\n return {\n \"total_detections\": len(self.detected_objects),\n \"total_classes\": ClassifierInput.get_total_classes(),\n \"detected_products\": [detected_object.json()\n for detected_object in self.detected_objects]\n }", "def main():\n with open(IMAGEPATH_LIST_PATH, \"rt\") as imagepath_list_handle:\n imagepath_list = [line.strip() for line in imagepath_list_handle.readlines()]\n\n object_detector = ObjectDetector(MODEL_PATH)\n\n dataset_json = []\n for imagepath in imagepath_list:\n image = scipy.misc.imread(imagepath)\n detections = object_detector.run(image)\n\n detections_json = {\"path\": imagepath, \"detections\": [det.to_dict() for det in detections]}\n dataset_json.append(detections_json)\n\n with open(DATASET_PATH, \"wt\") as json_handle:\n json.dump(dataset_json, json_handle, sort_keys=True, indent=4)", "def analyze_objects_detections_predictions(_context, config_path):\n\n import yaml\n\n import net.analysis\n import net.data\n import net.ml\n\n with open(config_path, encoding=\"utf-8\") as file:\n config = yaml.safe_load(file)\n\n ssd_model_configuration = config[\"vggish_model_configuration\"]\n\n network = net.ml.VGGishNetwork(\n model_configuration=ssd_model_configuration,\n categories_count=len(config[\"categories\"]))\n\n network.model.load_weights(config[\"model_checkpoint_path\"])\n\n validation_samples_loader = net.data.VOCSamplesDataLoader(\n data_directory=config[\"voc\"][\"data_directory\"],\n data_set_path=config[\"voc\"][\"validation_set_path\"],\n categories=config[\"categories\"],\n size_factor=config[\"size_factor\"])\n\n logger = net.utilities.get_logger(config[\"log_path\"])\n\n default_boxes_factory = 
net.ssd.DefaultBoxesFactory(model_configuration=ssd_model_configuration)\n\n thresholds_matching_data_map = net.analysis.MatchingDataComputer(\n samples_loader=validation_samples_loader,\n model=network,\n default_boxes_factory=default_boxes_factory,\n thresholds=[0, 0.5, 0.9],\n categories=config[\"categories\"]).get_thresholds_matched_data_map()\n\n net.analysis.log_precision_recall_analysis(\n logger=logger,\n thresholds_matching_data_map=thresholds_matching_data_map)\n\n net.analysis.log_mean_average_precision_analysis(\n logger=logger,\n thresholds_matching_data_map=thresholds_matching_data_map)\n\n net.analysis.log_performance_with_annotations_size_analysis(\n logger=logger,\n thresholds_matching_data_map=thresholds_matching_data_map)", "def _get_detections(args, generator, model, score_threshold=0.05, max_detections=100, save_path=None):\n all_detections = [[None for i in range(generator.num_classes()) if generator.has_label(i)] for j in range(generator.size())]\n\n detection_out = np.zeros([generator.size(),512,512,3])\n # detection_out = np.zeros([generator.size(),512,512])\n attention_out = np.zeros([generator.size(),512,512])\n mask_out = np.zeros([generator.size(),512,512])\n\n for i in tqdm(range(generator.size()), desc='Running network: '):\n raw_image = generator.load_image(i)\n # image = np.expand_dims(raw_image.copy(), axis=-1)\n # image = np.repeat(image, 3, axis=-1)\n # image = generator.preprocess_image(image)\n image = generator.preprocess_image(raw_image.copy())\n image, scale = generator.resize_image(image)\n\n if keras.backend.image_data_format() == 'channels_first':\n image = image.transpose((2, 0, 1))\n\n # run network\n # boxes, scores, labels = model.predict_on_batch(np.expand_dims(image, axis=0))[:3]\n boxes, scores, labels, masks, attention_map = model.predict_on_batch(np.expand_dims(image, axis=0))\n # print('scores:', scores.shape)\n # print('labels',labels.shape)\n\n # correct boxes for image scale\n boxes /= scale\n\n # select indices which have a score above the threshold\n indices = np.where(scores[0, :] > score_threshold)[0]\n # print('indices', indices)\n scores = scores.numpy()\n boxes = boxes.numpy()\n labels = labels.numpy()\n masks = masks.numpy()\n attention_map = attention_map.numpy()\n # select those scores\n scores = scores[0][indices]\n\n # find the order with which to sort the scores\n scores_sort = np.argsort(-scores)[:max_detections]\n # print(scores_sort)\n\n # select detections\n image_boxes = boxes[0, indices[scores_sort], :]\n image_scores = scores[scores_sort]\n image_labels = labels[0, indices[scores_sort]]\n image_detections = np.concatenate([image_boxes, np.expand_dims(image_scores, axis=1), np.expand_dims(image_labels, axis=1)], axis=1)\n\n if save_path is not None:\n draw_annotations(raw_image, generator.load_annotations(i), label_to_name=generator.label_to_name)\n draw_detections(raw_image, image_boxes, image_scores, image_labels, score_threshold=args.detection_threshold, label_to_name=generator.label_to_name)\n\n\n detection_out[i, :, :] = raw_image\n\n attention_map[np.where(attention_map < args.attention_threshold)] = 0\n # attention_out[i, :, :] = cv2.flip( cv2.resize(np.squeeze(np.uint8(attention_map * 255)), (origin_shape[1], origin_shape[0])), 0)\n attention_out[i, :, :] = cv2.resize(np.squeeze(np.uint8(attention_map * 255)), (512, 512))\n\n masks[masks < args.segmentation_threshold] = 0\n masks = cv2.resize(np.squeeze(np.uint8(masks * 255)), (512, 512))\n\n mask_out[i, :, :] = masks\n\n # copy detections to 
all_detections\n for label in range(generator.num_classes()):\n if not generator.has_label(label):\n continue\n\n all_detections[i][label] = image_detections[image_detections[:, -1] == label, :-1]\n if save_path is not None:\n detection_out = sitk.GetImageFromArray(detection_out)\n sitk.WriteImage(detection_out, os.path.join(save_path, 'detection_result.nii.gz'))\n\n attention_out = sitk.GetImageFromArray(attention_out)\n sitk.WriteImage(attention_out, os.path.join(save_path, 'attention_result.nii.gz'))\n\n mask_out = sitk.GetImageFromArray(mask_out)\n sitk.WriteImage(mask_out, os.path.join(save_path, 'masks_result.nii.gz'))\n\n return all_detections", "def get_people(self):\r\n\r\n logger.debug('Getting list of people detected in video')\r\n\r\n result = self.recognized_faces\r\n\r\n if len(result) == 0:\r\n\r\n # Try to load YAML files\r\n if os.path.exists(self.cluster_files_path):\r\n\r\n print 'Loading YAML files with clustering results'\r\n logger.debug('Loading YAML files with clustering results')\r\n\r\n rec_faces = []\r\n for yaml_file in os.listdir(self.cluster_files_path):\r\n yaml_file_path = os.path.join(\r\n self.cluster_files_path, yaml_file)\r\n with open(yaml_file_path) as f:\r\n rec_faces.append(yaml.load(f))\r\n\r\n if rec_faces:\r\n print 'YAML files with clustering results loaded'\r\n logger.debug('YAML files with clustering results loaded')\r\n result = rec_faces\r\n\r\n return result", "def get_all_results(pred_root, meta_results):\r\n results_all = {}\r\n for key in tqdm(meta_results, desc='Generating results ..'):\r\n persons = meta_results[key]\r\n\r\n global_seg = cv2.imread(pred_root + 'global_seg/{}.png'.format(key),\r\n cv2.IMREAD_UNCHANGED)\r\n global_tag = cv2.imread(pred_root + 'global_tag/{}.png'.format(key),\r\n cv2.IMREAD_UNCHANGED)\r\n\r\n results = {}\r\n dets, masks = [], []\r\n for p_id, score in persons:\r\n mask = (global_tag == p_id)\r\n if np.sum(mask) == 0:\r\n continue\r\n seg = mask * global_seg\r\n ys, xs = np.where(mask > 0)\r\n x1, y1, x2, y2 = xs.min(), ys.min(), xs.max(), ys.max()\r\n dets.append((x1, y1, x2, y2, score))\r\n masks.append(seg)\r\n\r\n # Reuiqred Field of each result: a list of masks,\r\n # each is a multi-class masks for one person.\r\n # It can also be sparsified to\r\n # [scipy.sparse.csr_matrix(mask) for mask in masks]\r\n # to save memory cost\r\n results['MASKS'] = masks if not Sparse \\\r\n else [scipy.sparse.csr_matrix(m) for m in masks]\r\n # Reuiqred Field of each result,\r\n # a list of detections corresponding to results['MASKS'].\r\n results['DETS'] = dets\r\n\r\n if cache_pkl:\r\n results_cache_add = cache_pkl_path + key + '.pklz'\r\n pickle.dump(results, gzip.open(results_cache_add, 'w'))\r\n results_all[key] = results_cache_add\r\n else:\r\n results_all[key] = results\r\n\r\n if PLOT:\r\n import pylab as plt\r\n plt.figure('seg')\r\n plt.imshow(global_seg)\r\n print('Seg unique:' + str(np.unique(global_seg)))\r\n plt.figure('tag')\r\n plt.imshow(global_tag)\r\n print('Tag unique:' + str(np.unique(global_tag)))\r\n plt.show()\r\n\r\n return results_all", "def evaluate_detections(self, all_boxes, output_dir=None):\n self._write_voc_results_file(all_boxes)\n aps,map = self._do_python_eval(output_dir)\n return aps,map", "def detect(self, image):\n\n # Load the demo image\n # Detect all object classes and regress object bounds\n image = image_transform_1_3(image)\n timer = Timer()\n timer.tic()\n scores, boxes = self.im_detect(image)\n timer.toc()\n print('rois--------------', scores)\n print ('Detection took 
{:.3f}s for '\n '{:d} object proposals'.format(timer.total_time, len(boxes)))\n\n CONF_THRESH = 0.3\n # print(scores)\n NMS_THRESH = 0.5\n dets = []\n for i in range(len(boxes)):\n # print('lll')\n cls_boxes = boxes[i]\n cls_scores = scores[i]\n dets_i_ = np.hstack([cls_boxes[:, 0:4], cls_scores])\n keep = nms(dets_i_, NMS_THRESH)\n dets_i = np.hstack([cls_boxes, cls_scores])\n dets_i = dets_i[keep, :]\n inds = np.where(dets_i[:, -1] >= CONF_THRESH)[0]\n dets_i = dets_i[inds, :]\n dets_i = dets_i[:, 0:5]\n dets.append(dets_i)\n return dets", "def annotate_image(self, image, results):\n predictedObjects = []\n for i in range(len(results)):\n objectParameters = []\n x = int(results[i][1])\n y = int(results[i][2])\n w = int(results[i][3])\n h = int(results[i][4])\n # print(x, y, w, h, results[i][0])\n imageHeight, imageWidth, _ = image.shape\n w = w // 2\n h = h // 2\n # change to truncate boxes which go outside the image\n xmin, xmax, ymin, ymax = 0, 0, 0, 0\n xmin = 3 if not max(x - w, 0) else (x - w)\n xmax = imageWidth - 3 if not min(x + w - imageWidth, 0) \\\n else (x + w)\n ymin = 1 if not max(y - h, 0) else (y - h)\n ymax = imageHeight - 3 if not min(y + h - imageHeight, 0) \\\n else (y + h)\n if self.verbose:\n print('Class : ' + results[i][0] + ', [x, y, w, h] [' +\n str(x) + ', ' + str(y) + ', ' + str(w) + ', ' + str(h) +\n '] Confidence : ' + str(results[i][5]))\n \n # Each class must have a unique color\n color = tuple([(j * (1+self.classes.index(results[i][0])) % 255) \\\n for j in self.seed])\n cv2.rectangle(image, (xmin, ymin), (xmax, ymax), color, 2)\n if ymin <= 20:\n cv2.rectangle(\n image, (xmin, ymin), (xmax, ymin + 20), color, -1\n )\n cv2.putText(\n image, results[i][0] + ': %.2f' % results[i][5],\n (xmin+5, ymin+15), cv2.FONT_HERSHEY_SIMPLEX, 0.5,\n (255, 255, 255), 2\n )\n else:\n cv2.rectangle(image, (xmin, ymin), (xmax, ymin-20), color, -1)\n cv2.putText(\n image, results[i][0] + ': %.2f' % results[i][5],\n (xmin+5, ymin-8), cv2.FONT_HERSHEY_SIMPLEX, 0.5,\n (255, 255, 255), 2\n )\n objectParameters = [\n results[i][0], xmin, ymin, xmax, ymax, results[i][5]\n ]\n predictedObjects.append(objectParameters)\n return image, predictedObjects\n # if self.outputFile:\n # cv2.imwrite(self.outputFile,image)", "def evaluate_detections(self, all_boxes, output_dir=None):\n raise NotImplementedError", "def getResults():", "def evaluate_detections(ground_truth, predictions, class_name, overlap_threshold=0.5,\n allow_multiple_matches_per_ignored=True,\n verbose=True):\n\n Detection = namedtuple('Detection', ['image', 'bbox', 'score', 'gt_match'])\n GT = namedtuple('GroundTruth', ['bbox', 'is_matched', 'is_ignored'])\n detections = [Detection(image=img_pred.image_path,\n bbox=np.array(obj_pred[\"bbox\"]),\n score=obj_pred.get(\"score\", 0.0),\n gt_match=-1)\n for img_pred in predictions\n for obj_pred in img_pred\n if obj_pred[\"type\"] == class_name]\n\n scores = np.array([detection.score for detection in detections])\n sorted_ind = np.argsort(-scores)\n detections = [detections[i] for i in sorted_ind]\n\n gts = {}\n for img_gt in ground_truth:\n gts[img_gt.image_path] = GT(\n bbox=np.vstack([np.array(obj_gt[\"bbox\"]) for obj_gt in img_gt]) if img_gt else np.empty(\n (0, 4)),\n is_matched=np.zeros(len(img_gt), dtype=bool),\n is_ignored=np.array([obj_gt.get(\"is_ignored\", False) for obj_gt in img_gt], dtype=bool))\n\n detections_num = len(detections)\n true_pos = np.zeros(detections_num)\n false_pos = np.zeros(detections_num)\n\n for i, detection in tqdm(enumerate(detections), 
desc=\"Processing detections\",\n disable=not verbose):\n image_path = detection.image\n bboxes_gt = gts[image_path].bbox\n bbox = detection.bbox\n max_overlap = -np.inf\n\n if bboxes_gt is not None and bboxes_gt.shape[0] > 0:\n intersection_xmin = np.maximum(bboxes_gt[:, 0], bbox[0])\n intersection_ymin = np.maximum(bboxes_gt[:, 1], bbox[1])\n intersection_xmax = np.minimum(bboxes_gt[:, 0] + bboxes_gt[:, 2], bbox[0] + bbox[2])\n intersection_ymax = np.minimum(bboxes_gt[:, 1] + bboxes_gt[:, 3], bbox[1] + bbox[3])\n intersection_width = np.maximum(intersection_xmax - intersection_xmin, 0.)\n intersection_height = np.maximum(intersection_ymax - intersection_ymin, 0.)\n intersection = intersection_width * intersection_height\n\n det_area = bbox[2] * bbox[3]\n gt_area = bboxes_gt[:, 2] * bboxes_gt[:, 3]\n union = (det_area + gt_area - intersection)\n ignored_mask = gts[image_path].is_ignored\n if allow_multiple_matches_per_ignored:\n if np.any(ignored_mask):\n union[ignored_mask] = det_area\n\n overlaps = intersection / union\n # Match not ignored ground truths first.\n if np.any(~ignored_mask):\n overlaps_filtered = np.copy(overlaps)\n overlaps_filtered[ignored_mask] = 0.0\n max_overlap = np.max(overlaps_filtered)\n argmax_overlap = np.argmax(overlaps_filtered)\n # If match with non-ignored ground truth is not good enough,\n # try to match with ignored ones.\n if max_overlap < overlap_threshold and np.any(ignored_mask):\n overlaps_filtered = np.copy(overlaps)\n overlaps_filtered[~ignored_mask] = 0.0\n max_overlap = np.max(overlaps_filtered)\n argmax_overlap = np.argmax(overlaps_filtered)\n detections[i] = detection._replace(gt_match=argmax_overlap)\n\n if max_overlap >= overlap_threshold:\n if not gts[image_path].is_ignored[argmax_overlap]:\n if not gts[image_path].is_matched[argmax_overlap]:\n true_pos[i] = 1.\n gts[image_path].is_matched[argmax_overlap] = True\n else:\n false_pos[i] = 1.\n elif not allow_multiple_matches_per_ignored:\n gts[image_path].is_matched[argmax_overlap] = True\n else:\n false_pos[i] = 1.\n\n false_pos = np.cumsum(false_pos)\n true_pos = np.cumsum(true_pos)\n\n debug_visualization = False\n if debug_visualization:\n for image_path, bboxes_gt in gts.items():\n\n print(image_path)\n image = cv2.imread(image_path)\n image_gt = np.copy(image)\n for bbox in bboxes_gt.bbox:\n cv2.rectangle(image_gt, tuple(bbox[:2]), tuple(bbox[2:] + bbox[:2]),\n color=(255, 255, 0), thickness=2)\n cv2.imshow(\"gt\", image_gt)\n for detection in detections:\n if detection.image != image_path:\n continue\n bbox = detection.bbox\n cv2.rectangle(image, tuple(bbox[:2]), tuple(bbox[2:] + bbox[:2]), color=(0, 255, 0),\n thickness=2)\n if detection.gt_match is not None:\n bbox = bboxes_gt.bbox[detection.gt_match]\n cv2.rectangle(image, tuple(bbox[:2]), tuple(bbox[2:] + bbox[:2]),\n color=(0, 0, 255), thickness=1)\n cv2.imshow(\"image\", image)\n cv2.waitKey(0)\n\n # Handle equal-score detections.\n # Get index of the last occurrence of a score.\n ind = len(scores) - np.unique(scores[sorted_ind[::-1]], return_index=True)[1] - 1\n ind = ind[::-1]\n # Though away redundant points.\n false_pos = false_pos[ind]\n true_pos = true_pos[ind]\n\n total_positives_num = np.sum([np.count_nonzero(~gt.is_ignored) for gt in gts.values()])\n recall = true_pos / float(total_positives_num)\n # Avoid divide by zero in case the first detection matches an ignored ground truth.\n precision = true_pos / np.maximum(true_pos + false_pos, np.finfo(np.float64).eps)\n miss_rate = 1.0 - recall\n fppi = false_pos / 
float(len(gts))\n\n return recall, precision, miss_rate, fppi", "def _check_intrusion(self, detections):\n width, height = self.frame_size\n\n results = []\n for i in range(len(detections)):\n (bboxes, scores, classes, num_candidates) = detections[i]\n\n in_roi_cands = {}\n for j in range(int(num_candidates[0])):\n # Check if score passes the threshold.\n if scores[0][j] < self.detect_threshold:\n continue\n # Check if the object in in the trigger list.\n # XXX: Is it posssible to generate index that is not in the\n # category_index list?\n try:\n label = self._category_index[int(classes[0][j])]\n except KeyError:\n continue\n else:\n if label not in self.triggers:\n continue\n # Check whether the object's bbox is in roi or not.\n ymin, xmin, ymax, xmax = bboxes[0][j]\n unnormalized_bbox = (xmin * width, ymin * height,\n xmax * width, ymax * height)\n if self._is_in_roi(unnormalized_bbox):\n if not bool(in_roi_cands):\n # This is the first detected object candidate\n in_roi_cands = {\"bboxes\": [], \"scores\": [], \"labels\": []}\n in_roi_cands[\"bboxes\"].append(bboxes[0][j].tolist())\n in_roi_cands[\"scores\"].append(scores[0][j].tolist())\n in_roi_cands[\"labels\"].append(label)\n results.append(in_roi_cands)\n return results", "def get_detections(self, sha256):\n #access undocumented detections API\n url = self.API_URL % ('apks/', sha256, '/detections')\n return requests.get(url=url, headers=self.headers, proxies=self.proxies, verify=self.verify_ssl)", "def get_detection_results(\n url,\n timeout,\n metadata=False,\n save_har=False,\n splash_url=\"\",\n):\n plugins = load_plugins()\n if not plugins:\n raise NoPluginsError(\"No plugins found\")\n\n logger.debug(\"[+] Starting detection with %(n)d plugins\", {\"n\": len(plugins)})\n\n response = get_response(url, plugins, timeout, splash_url)\n\n # Save HAR\n if save_har:\n fd, path = tempfile.mkstemp(suffix=\".har\")\n logger.info(f\"Saving HAR file to {path}\")\n\n with open(fd, \"w\") as f:\n json.dump(response[\"har\"], f)\n\n det = Detector(response, plugins, url)\n softwares = det.get_results(metadata=metadata)\n\n output = {\"url\": url, \"softwares\": softwares}\n\n return output", "def detect_image_objects(gray, detect_params, detect_type=\"all\", label=-1, verbose=False):\n if detect_type == \"all\":\n detected_rects = detect_all_objects(gray, verbose=verbose, **detect_params)\n elif detect_type == \"primary\":\n detected_rects = detect_primary_objects(gray, verbose=verbose, **detect_params)\n else:\n print(f\"Unrecongized input value for detect_type, {detect_type}, so no objects were detected!\")\n print(\"Please provide a string value for detect_type of either 1) 'all' or 2) 'primary'\")\n detected_rects = None\n if isinstance(detected_rects, np.ndarray):\n features_labels = get_detected_features_labels(gray, detected_rects, label=label, verbose=verbose)\n return features_labels", "def get_objects(self, image=None):\n output_dict = self.run_inference_for_single_image(image)\n return output_dict, self.category_index", "def get_detectors(self):\n kwargs = {\"timeout\": self.timeout}\n if self.auth_key:\n kwargs['headers'] = {'doods-auth-key': self.auth_key}\n response = requests.get(self.url + \"/detectors\", **kwargs)\n response.raise_for_status()\n return response.json()", "def evaluate_detections(self, all_boxes, output_dir=None):\n\n self._write_voc_results_file(all_boxes)\n aps, map = self._do_python_eval(output_dir)\n return aps, map", "def detection(self, model_infos, trained_images=None):\n # Index of the class in the 
list is its ID. For example, to get ID of\n class_names = ['BG', 'red_s', 'red_m', 'red_l', 'yellow_s', 'yellow_m', 'yellow_l', 'green_s', 'green_m',\n 'green_l', 'blue_s', 'blue_m', 'blue_l', 'orange_s', 'orange_m', 'orange_l']\n config = ShapesConfig()\n detect_model = modellib.MaskRCNN(mode=\"inference\", model_dir=MODEL_DIR, config=config, model_info=model_infos)\n # Load weights trained on current model\n cur_model_path = os.path.join(model_infos[0], model_infos[1]+'.h5')\n cur_model_weights = os.path.join(MODEL_DIR, cur_model_path)\n detect_model.load_weights(cur_model_weights, by_name=True)\n # Traverse all the packages(the pool)\n result_of_detection = {}\n for package in self.images_pool:\n image_dir = os.path.join(DATA_DIR, package)\n images_in_package = os.listdir(image_dir)\n # import ground truth to check out the detection result\n instance_nums_of_images = self.count_instances_in_images(package)\n for img in images_in_package:\n # Skip detection of those images that already used for training\n if trained_images:\n if img in trained_images:\n continue\n image = skimage.io.imread(os.path.join(image_dir, img), as_gray=False)\n # Run detection\n results = detect_model.detect([image], verbose=0)\n r = results[0]\n \"\"\"\n # average entropy model\n total_entropy = 0\n for prob in r['scores']:\n total_entropy -= prob * math.log2(prob) + (1 - prob) * math.log2(1 - prob)\n result_of_detection[img] = total_entropy / len(r['scores']) if r['scores'] != [] else total_entropy\n \"\"\"\n # use dict to save the info of the detected instances of each images\n # min detection model\n\n gt_instances = instance_nums_of_images[img.split('.')[0]]\n result_of_detection[img] = abs(len(r['scores']) - gt_instances)\n\n # print(result_of_detection)\n print(\"+++++++detection finished\")\n del detect_model\n del config\n return result_of_detection", "def detect(self, images, verbose=0):\n assert self.mode == \"inference\", \"Create model in inference mode.\"\n #assert len(images) == self.config.BATCH_SIZE, \"len(images) must be equal to BATCH_SIZE\"\n\n if verbose:\n log(\"Processing {} images\".format(len(images)))\n for image in images:\n log(\"image\", image)\n\n # Mold inputs to format expected by the neural network\n molded_images = []\n for img in images:\n molded_images.append(mold_image(img))\n\n # Validate image sizes\n # All images in a batch MUST be of the same size\n image_shape = molded_images[0].shape\n for g in molded_images[1:]:\n assert g.shape == image_shape,\\\n \"After resizing, all images must have the same size. 
Check IMAGE_RESIZE_MODE and image sizes.\"\n\n molded_images = np.asarray(molded_images)\n if verbose:\n log(\"molded_images\", molded_images)\n # Run object detection\n y = self.keras_model.predict([molded_images], verbose=0)\n # Process detections\n results = []\n for i, image in enumerate(images):\n results.append(y[i][0][0][1])\n return results", "def runClassifier(interpreter, image, threshold):\n set_input_tensor(interpreter, image)\n interpreter.invoke()\n\n # Get all output details\n boxes = get_output_tensor(interpreter, 0)\n classes = get_output_tensor(interpreter, 1)\n scores = get_output_tensor(interpreter, 2)\n count = int(get_output_tensor(interpreter, 3))\n\n results = []\n for i in range(count):\n if scores[i] >= threshold:\n result = {\n \"bounding_box\": boxes[i],\n \"class_id\": classes[i],\n \"score\": scores[i],\n }\n results.append(result)\n return results", "def im_list_detections(model, im_list):\n _t = Timer()\n num_images = len(im_list)\n im_list_boxes = [[] for _ in range(num_images)]\n im_list_scores = [[] for _ in range(num_images)]\n im_list_ids = [[] for _ in range(num_images)]\n im_list_classes = [[] for _ in range(num_images)]\n # create anchors for each level\n anchors = create_cell_anchors()\n for i in range(num_images):\n im_list_ids[i] = im_list[i]['id']\n im = cv2.imread(im_list[i]['image'])\n with c2_utils.NamedCudaScope(0):\n _t.tic()\n im_list_boxes[i], im_list_scores[i], im_list_classes[i] = \\\n im_detections(model, im, anchors)\n _t.toc()\n logger.info(\n 'im_detections: {:d}/{:d} {:.3f}s'.format(\n i + 1, num_images, _t.average_time))\n return im_list_boxes, im_list_scores, im_list_classes, im_list_ids", "def detect_all_objects(gray, haar_file, params, verbose=False):\n # Not the most performant to load haar_cascade for each image when params aren't changing...\n haar_cascade = cv.CascadeClassifier(haar_file)\n detected_objects = haar_cascade.detectMultiScale(gray, **params)\n if verbose:\n print(f\"# of Objects Detected = {len(detected_objects)}\")\n return detected_objects", "def object_detection(self):\r\n pass", "def detector(videoframe, facedetection, maskdetection):\n (h, w) = videoframe.shape[:2]\n blobimage = cv2.dnn.blobFromImage(videoframe, 1.0, (224, 224), (104.0, 177.0, 123.0))\n\n facedetection.setInput(blobimage)\n ffinding = facedetection.forward()\n\n face_list = []\n locations = []\n predictions = []\n\n for i in range(0, ffinding.shape[2]):\n credence = ffinding[0, 0, i, 2]\n if credence > 0.6:\n case = ffinding[0, 0, i, 3:7] * np.array([w, h, w, h])\n (x_start, y_start, x_end, y_end) = case.astype(\"int\")\n (x_start, y_start) = (max(0, x_start), max(0, y_start))\n (x_end, y_end) = (min(w - 1, x_end), min(h - 1, y_end))\n\n image = videoframe[y_start:y_end, x_start:x_end]\n image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n image = cv2.resize(image, (224, 224))\n image = img_to_array(image)\n image = preprocess_input(image)\n face_list.append(image)\n locations.append((x_start, y_start, x_end, y_end))\n\n if len(face_list) > 0:\n face_list = np.array(face_list, dtype=\"float32\")\n predictions = maskdetection.predict(face_list, batch_size=32)\n return (locations, predictions)", "def load_detections(det_dir):\n detections = []\n\n for curd in os.listdir(det_dir):\n sub_dir = os.path.join(det_dir, curd)\n if os.path.isdir(sub_dir):\n\n for f in os.listdir(sub_dir):\n filepath = os.path.join(sub_dir, f)\n\n if os.path.isfile(filepath) and filepath.endswith('txt'):\n # find detection and read it\n with open(filepath, 'r') as rf:\n # 
filename = rf.readline().strip('\\n')\n # dets2read = int(rf.readline())\n data = []\n\n for l in rf.readlines():\n # for i in range(dets2read):\n # x, y, w, h, c = rf.readline().split()\n x, y, w, h, c = l.split()\n data.append({'x':float(x), 'y':float(y), 'w':float(w)-float(x), 'h':float(h)-float(y), 'c':float(c)})\n #detections.append({'filename': filename, 'data': data})\n detections.append({'filename': os.path.splitext(f)[0], 'data': data})\n\n return detections", "def evaluate_detections(self, all_boxes, output_dir=None):\n raise NotImplementedError", "def visualize_detection(self, image):\n\t\tH, W, _ = image.shape\n\t\tpos_list = self.apply_detection(image)\n\t\tdetections = {}\n\t\thasDetection = False\n\t\tfor i, L in enumerate(pos_list):\n\t\t\ttext, coordinates = L[0], L[1]\n\t\t\tCOLOR = COLORS[text]\n\t\t\tfor x, y, w, h in coordinates:\n\t\t\t\t# prune bad homography points\n\t\t\t\tif x < 0 or y < 0 or x + w > W or \\\n\t\t\t\t y + h > H or w <= 1 or h <= 1:\n\t\t\t\t\tcontinue\n\t\t\t\t# add the detection to the dict for tracking\n\t\t\t\tdetections[self.num_detect] = (x, y, w, h)\n\t\t\t\tself.detection_index[self.num_detect] = (x, y, w, h, self.num_save, text)\n\t\t\t\tself.num_detect += 1\n\t\t\t\thasDetection = True\n\t\t\t\t# if the detection is human\n\t\t\t\tif text == 'face':\n\t\t\t\t\tgender = self.genderDetect.classify(image[y:y+h, x:x+w, :])\n\t\t\t\t\tgender = 'female' if gender[0] < 0.5 else 'male'\n\t\t\t\t\tcv2.putText(image, gender, (x + w // 2 -10, y + h + 15),\n\t\t\t\t\t\tcv2.FONT_HERSHEY_SIMPLEX, 0.6, COLOR, 2, cv2.LINE_AA)\n\n\t\t\t\timage = cv2.rectangle(image, (x, y), (x + w, y + h), COLOR, 2)\n\t\t\t\tcv2.putText(image, text, (x, y - 5),\n\t\t\t\t\tcv2.FONT_HERSHEY_SIMPLEX, 0.6, COLOR, 2, cv2.LINE_AA)\n\t\tif hasDetection:\n\t\t\tself.detection_frames[self.num_save] = detections\n\t\tself.num_save +=1\n\t\treturn image", "def _post_process(self, inputs, predictions):\n options = self._model_proto\n\n results = {}\n\n # Post process to get the final detections.\n\n proposals = predictions[DetectionResultFields.proposal_boxes]\n\n for i in range(1 + options.oicr_iterations):\n post_process_fn = self._midn_post_process_fn\n proposal_scores = predictions[NOD2Predictions.oicr_proposal_scores +\n '_at_{}'.format(i)]\n proposal_scores = tf.stop_gradient(proposal_scores)\n if i > 0:\n post_process_fn = self._oicr_post_process_fn\n proposal_scores = tf.nn.softmax(proposal_scores, axis=-1)[:, :, 1:]\n\n # Post process.\n\n (num_detections, detection_boxes, detection_scores, detection_classes,\n _) = post_process_fn(proposals, proposal_scores)\n\n model_utils.visl_detections(\n inputs,\n num_detections,\n detection_boxes,\n detection_scores,\n tf.gather(self._vocabulary_list, tf.to_int32(detection_classes - 1)),\n name='detection_{}'.format(i))\n\n results[DetectionResultFields.num_detections +\n '_at_{}'.format(i)] = num_detections\n results[DetectionResultFields.detection_boxes +\n '_at_{}'.format(i)] = detection_boxes\n results[DetectionResultFields.detection_scores +\n '_at_{}'.format(i)] = detection_scores\n results[DetectionResultFields.detection_classes +\n '_at_{}'.format(i)] = detection_classes\n return results", "def detectObjects(image):\n\tgrayscale = cvCreateImage(cvSize(image.width, image.height), 8, 1)\n\tcvCvtColor(image, grayscale, CV_BGR2GRAY)\n\n\tstorage = cvCreateMemStorage(0)\n\tcvClearMemStorage(storage)\n\tcvEqualizeHist(grayscale, grayscale)\n\tcascade = 
cvLoadHaarClassifierCascade(\n\t\t'/usr/share/opencv/haarcascades/haarcascade_frontalface_default.xml',\n\t\tcvSize(1,1))\n\n\tscalefactor = 1.1 # How much to increase window size each pass\n\tminsize = 50 # Smallest face to detect. Up this if you have small falsepositives\n\tfaces = cvHaarDetectObjects(grayscale, cascade, storage, scalefactor, 50,\n\t\t\t\tCV_HAAR_DO_CANNY_PRUNING, cvSize(minsize, minsize))\n\n\treturn [(f.x, f.y, f.x + f.width, f.y + f.height) for f in faces]", "def get_per_image_gts_and_detections(gt_db_indexed, detection_res):\n per_image_gts = {}\n per_image_detections = {}\n\n # iterate through each image in the gt file, not the detection file\n\n for image_id, annotations in gt_db_indexed.image_id_to_annotations.items():\n # ground truth\n image_obj = gt_db_indexed.image_id_to_image[image_id]\n im_h, im_w = image_obj['height'], image_obj['width']\n\n gt_boxes = []\n gt_labels = []\n\n for gt_anno in annotations:\n # convert gt box coordinates to TFODAPI format\n gt_box_x, gt_box_y, gt_box_w, gt_box_h = gt_anno['bbox']\n gt_y_min, gt_x_min = gt_box_y / im_h, gt_box_x / im_w\n gt_y_max, gt_x_max = (gt_box_y + gt_box_h) / im_h, (gt_box_x + gt_box_w) / im_w\n gt_boxes.append([gt_y_min, gt_x_min, gt_y_max, gt_x_max])\n\n gt_labels.append(gt_anno['category_id'])\n\n per_image_gts[image_id] = {\n 'gt_boxes': gt_boxes,\n 'gt_labels': gt_labels\n }\n\n # detections\n det_image_obj = detection_res[image_id]\n\n detection_boxes = []\n detection_scores = []\n detection_labels = []\n\n for det in det_image_obj['detections']:\n x_min, y_min, width_of_box, height_of_box = det['bbox']\n y_max = y_min + height_of_box\n x_max = x_min + width_of_box\n detection_boxes.append([y_min, x_min, y_max, x_max])\n\n detection_scores.append(det['conf'])\n detection_labels.append(int(det['category']))\n\n # only include a detection entry if that image had detections\n if len(detection_boxes) > 0:\n per_image_detections[image_id] = {\n 'boxes': detection_boxes,\n 'scores': detection_scores,\n 'labels': detection_labels\n }\n\n return per_image_gts, per_image_detections", "def detect(img, window_list, pipeline):\n #t = Timer()\n windows = []\n for bbox in window_list:\n window = extract_window(img, bbox)\n windows.append(window)\n windows = np.stack(windows)\n detections = pipeline.predict(windows)\n #print(\"Time to detect: {:.2f}\".format(t.tock()))\n return detections", "def _extract_results(self) -> None:\n metric_name = self.metric.name\n for inference_name in ['train', 'test', 'opt']:\n # TODO: Extract information from self.search_results\n data = getattr(self.search_results, f'{inference_name}_metric_dict')[metric_name]\n if all([d is None for d in data]):\n if inference_name not in OPTIONAL_INFERENCE_CHOICES:\n raise ValueError(f\"Expected {metric_name} score for {inference_name} set\"\n f\" to not be None, but got {data}\")\n else:\n continue\n self.data[f'single::{inference_name}::{metric_name}'] = np.array(data)\n\n if self.ensemble_results.empty() or inference_name == 'opt':\n continue\n\n data = getattr(self.ensemble_results, f'{inference_name}_scores')\n if all([d is None for d in data]):\n if inference_name not in OPTIONAL_INFERENCE_CHOICES:\n raise ValueError(f\"Expected {metric_name} score for {inference_name} set\"\n f\" to not be None, but got {data}\")\n else:\n continue\n self.data[f'ensemble::{inference_name}::{metric_name}'] = np.array(data)", "def getResults(self):\n return self.classifiers", "def detect(self, images, verbose=0):\n assert self.mode == \"inference\", 
\"Create model in inference mode.\"\n assert len(\n images) == self.config.BATCH_SIZE, \"len(images) must be equal to BATCH_SIZE\"\n\n if verbose:\n log(\"Processing {} images\".format(len(images)))\n for image in images:\n log(\"image\", image)\n\n # Mold inputs to format expected by the neural network\n molded_images, image_metas, windows = self.mold_inputs(images)\n\n # Validate image sizes\n # All images in a batch MUST be of the same size\n image_shape = molded_images[0].shape\n for g in molded_images[1:]:\n assert g.shape == image_shape,\\\n \"After resizing, all images must have the same size. Check IMAGE_RESIZE_MODE and image sizes.\"\n\n # Anchors\n anchors = self.get_anchors(image_shape)\n # Duplicate across the batch dimension because Keras requires it\n # TODO: can this be optimized to avoid duplicating the anchors?\n anchors = np.broadcast_to(anchors, (self.config.BATCH_SIZE,) + anchors.shape)\n\n if verbose:\n log(\"molded_images\", molded_images)\n log(\"image_metas\", image_metas)\n log(\"anchors\", anchors)\n # Run object detection\n detections, _, _, mrcnn_mask, _, _, _ =\\\n self.keras_model.predict([molded_images, image_metas, anchors], verbose=0)\n # Process detections\n results = []\n for i, image in enumerate(images):\n final_rois, final_class_ids, final_scores, final_masks =\\\n self.unmold_detections(detections[i], mrcnn_mask[i],\n image.shape, molded_images[i].shape,\n windows[i])\n results.append({\n \"rois\": final_rois,\n \"class_ids\": final_class_ids,\n \"scores\": final_scores,\n \"masks\": final_masks,\n })\n return results", "def results(self):\n return extract_results(self.model)", "def retinanet_object_detection(image_org,\n object_detection_graph, object_detection_session, \n object_detection_threshold=0.5,\n object_count_threshold=1,\n backbone='resnet',\n boundary_filter_size=0, \n output_detection_image=False,\n output_dir=None,\n img_name=None,\n patch_idx=-1):\n\n ret = 0\n\n img_height, img_width = image_org.shape[:2]\n\n if img_name != None:\n img_base_name = os.path.splitext(os.path.basename(img_name))[0]\n if patch_idx >= 0:\n img_base_name = img_base_name + '_' + str(patch_idx)\n\n # copy to draw on\n draw = image_org.copy()\n\n # preprocess each image for network\n if backbone == 'mobilenet' or backbone == 'densenet':\n img = retinanet_preprocess_image(image_org, mode='tf')\n else:\n img = retinanet_preprocess_image(image_org, mode='caffe')\n\n img, scale = retinanet_resize_image(img)\n\n #print(scale)\n\n # process image\n start = time.time()\n image_tensor = object_detection_graph.get_tensor_by_name('input_1:0')\n output_tensor_0 = object_detection_graph.get_tensor_by_name('filtered_detections/map/TensorArrayStack/TensorArrayGatherV3:0')\n output_tensor_1 = object_detection_graph.get_tensor_by_name('filtered_detections/map/TensorArrayStack_1/TensorArrayGatherV3:0')\n output_tensor_2 = object_detection_graph.get_tensor_by_name('filtered_detections/map/TensorArrayStack_2/TensorArrayGatherV3:0')\n boxes, scores, labels = object_detection_session.run([output_tensor_0, output_tensor_1, output_tensor_2], feed_dict={image_tensor: np.expand_dims(img, axis=0)})\n #print(\"processing time: \", time.time() - start)\n\n # correct for image scale\n boxes /= scale\n\n #print(scores[0])\n #print(labels[0])\n\n # visualize detections\n detected_bboxes = [] \n for box, score, label in zip(boxes[0], scores[0], labels[0]):\n # scores are sorted so we can break\n if score < object_detection_threshold:\n break\n\n # print(score, label)\n b = box.astype(int)\n 
\n detected_bboxes.append(b)\n \n if len(detected_bboxes) < object_count_threshold:\n ret = 100 \n\n #print(len(detected_bboxes))\n\n # Using the bounding box centers as object locations and filter out those objects too close to boundaries\n objects = []\n scores_filtered = []\n labels_filtered = []\n object_width_sum = 0\n object_height_sum = 0\n for i in range(len(detected_bboxes)):\n b = detected_bboxes[i]\n if object_too_close_to_boundary(b, img_width=img_width, img_height=img_height, boundary_filter_size=boundary_filter_size):\n continue \n objects.append(b)\n scores_filtered.append(scores[0][i])\n labels_filtered.append(labels[0][i])\n\n object_width_sum += (b[2] - b[0])\n object_height_sum += (b[3] - b[1])\n\n if len(objects) > 0:\n average_object_width = int(object_width_sum / len(objects))\n average_object_height = int(object_height_sum / len(objects))\n else:\n average_object_width = 0\n average_object_height = 0\n\n #print(average_object_width)\n #print(average_object_height) \n\n # Save the detection images\n if output_detection_image:\n if img_width < 1000 or img_height < 1000:\n thickness = 1\n elif img_width < 2000 or img_height < 2000:\n thickness = 2\n else:\n thickness = 3\n\n # draw detection boxes\n for i in range(len(detected_bboxes)):\n b = detected_bboxes[i]\n if object_too_close_to_boundary(b, img_width=img_width, img_height=img_height, boundary_filter_size=boundary_filter_size):\n draw_box(draw, b, color=(0, 0, 255), thickness=thickness) \n elif labels[0][i] >= 1:\n draw_box(draw, b, color=(255, 255, 0), thickness=thickness) \n else:\n draw_box(draw, b, color=(255, 0, 0), thickness=thickness)\n img_output_filename = img_base_name + '_d.jpg'\n cv2.imwrite(os.path.join(output_dir, img_output_filename), draw) \n\n return ret, objects, scores_filtered, labels_filtered, average_object_width, average_object_height", "def detect_objects(self, image, threshold=0.5, needs_preprocessing=False, temp_path='temp.jpg',\n version='default'):\n if self.transform_service:\n image = self.transform_service.predict_single(image, version)\n return self.model_service.detect_objects(image, threshold, needs_preprocessing, temp_path)", "def object_detect(filename):\n cv2.ocl.setUseOpenCL(False)\n just_fname = filename.split(\".\")[0]\n image = cv2.imread('./static/uploads/' + filename)\n bbox, label, conf = cv.detect_common_objects(image)\n output_image = draw_bbox(image, bbox, label, conf)\n plt.imshow(output_image)\n plt.savefig(os.path.join('./static/output/', just_fname + '.png'))\n d = Counter(label)\n if not label:\n return \"No objects detected\"\n labelstr = \", \".join('{} {}'.format(v, k) for k, v in d.items())\n return labelstr", "def detectAll(self):\n\t\trects = self.cascade.detectMultiScale(self.frameImage, 1.3, 4, cv2.cv.CV_HAAR_SCALE_IMAGE, (20,20))\n\t\treturn rects", "def process_camera():\n\n pic_array = take_picture()\n detections, shapes, descriptors = detect_faces(person_database,pic_array)\n\n names = []\n\n for desc in descriptors:\n name = find_match(person_database, desc)\n names.append(name)\n\n return pic_array, names, detections, shapes, descriptors", "def PostProcessing(image, resultList, threshold=0.6):\n\tnum_detections = resultList[0][0].astype(np.int)\n\tscores = resultList[2]\n\tboxes = resultList[3]\n\tbbox_num = 0\n\t\n\t# loop through all the detections and get the confidence and bbox coordinates\n\tfor i in range(num_detections):\n\t\tdet_conf = scores[0, i]\n\t\tdet_ymin = boxes[0, i, 0]\n\t\tdet_xmin = boxes[0, i, 1]\n\t\tdet_ymax = boxes[0, i, 
2]\n\t\tdet_xmax = boxes[0, i, 3]\n\n\t\tbbox_width = det_xmax - det_xmin\n\t\tbbox_height = det_ymax - det_ymin\n\t\t# the detection confidence and bbox dimensions must be greater than a minimum value to be a valid detection\n\t\tif threshold <= det_conf and 1 >= det_conf and bbox_width > 0 and bbox_height > 0:\n\t\t\tbbox_num += 1\n\t\t\txmin = int(round(det_xmin * image.shape[1]))\n\t\t\tymin = int(round(det_ymin * image.shape[0]))\n\t\t\txmax = int(round(det_xmax * image.shape[1]))\n\t\t\tymax = int(round(det_ymax * image.shape[0]))\n\t\t\t\n\t\t\tcv2.rectangle(image, (xmin, ymin), (xmax, ymax), (0, 255, 0), 2)\n\t\telse:\n\t\t\tcontinue\n\n\tprint(\"detected bbox num:\", bbox_num)\n\tSRC_PATH = os.path.realpath(__file__).rsplit(\"/\", 1)[0]\n\tOutput_PATH = os.path.join(SRC_PATH, \"../output/output.jpg\")\n\ttry:\n\t\tos.mkdir(os.path.join(SRC_PATH, \"../output/\"))\n\texcept Exception as e:\n\t\tprint(\"Output Path already exists\")\n\tcv2.imwrite(Output_PATH, image)", "def test_face_detector_measure():\n dict_results = {}\n video_capture = cv2.VideoCapture(config.CAM_SRC)\n success, frame = video_capture.read()\n while success:\n FaceDetector().run(frame, dict_results)\n print(dict_results)\n success, frame = video_capture.read()", "def detect_objects(robot):\n env = robot.GetEnv()\n with env:\n robot_in_world = robot.GetTransform()\n\n table_in_world = numpy.dot(robot_in_world, TABLE_IN_ROBOT)\n table = add_object(env, 'table', 'furniture/table.kinbody.xml',\n table_in_world)\n\n glass = add_object(env, 'glass', 'objects/plastic_glass.kinbody.xml',\n numpy.dot(table_in_world, GLASS_IN_TABLE))\n return table, glass", "def draw_all_result(self, image):\n for facebox, conf in self.detection_result:\n cv2.rectangle(image, (facebox[0], facebox[1]),\n (facebox[2], facebox[3]), (0, 255, 0))\n label = \"face: %.4f\" % conf\n label_size, base_line = cv2.getTextSize(\n label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)\n\n cv2.rectangle(image, (facebox[0], facebox[1] - label_size[1]),\n (facebox[0] + label_size[0],\n facebox[1] + base_line),\n (0, 255, 0), cv2.FILLED)\n cv2.putText(image, label, (facebox[0], facebox[1]),\n cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0))", "def get_results(self):\n if not self.sim_stop_flag.value:\n # we are only interested in entries of frames processed > 0\n max_index = np.sum(np.array(self.sim_results_struct.frames[0:50]) > 0)\n\n # convert the cstruct to dictionary\n return dict([(x, getattr(self.sim_results_struct, x)[0:max_index]) for (x,_) in self.sim_results_struct._fields_])\n else:\n return self.results", "def draw_all_result(self, image):\r\n for facebox, conf in self.detection_result:\r\n cv2.rectangle(image, (facebox[0], facebox[1]),\r\n (facebox[2], facebox[3]), (0, 255, 0))\r\n label = \"face: %.4f\" % conf\r\n label_size, base_line = cv2.getTextSize(\r\n label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)\r\n\r\n cv2.rectangle(image, (facebox[0], facebox[1] - label_size[1]),\r\n (facebox[0] + label_size[0],\r\n facebox[1] + base_line),\r\n (0, 255, 0), cv2.FILLED)\r\n cv2.putText(image, label, (facebox[0], facebox[1]),\r\n cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0))", "def update(self):\n self.detections = []\n return self.detections", "def detect_objects_on_image(image_path, detections_file='pickles/bounding_boxes.pickle'):\n image_name = os.path.basename(image_path)\n try:\n with open(detections_file, 'rb') as handle:\n detections = pickle.load(handle)\n except FileNotFoundError:\n print('Detections file not found!')\n detections = {}\n if image_name in detections:\n 
print(image_name, 'is already in detections file!')\n print('Bounding boxes from file', detections[image_name])\n return detections[image_name]\n else:\n print('Adding to detections file', image_name)\n _, _, bound_boxes = run_yolo_onpic(image_path)\n detections[image_name] = bound_boxes\n print('Bounding boxes', bound_boxes)\n fileObject = open(detections_file, 'wb')\n pickle.dump(detections, fileObject)\n fileObject.close()\n return bound_boxes", "def get_bounding_boxes(outputs, width: int, height: int):\n\n # detected bounding boxes, obtained confidences and class's number\n boxes = []\n scores = []\n classes = []\n\n # this is our threshold for keeping the bounding box\n probability_minimum = 0.5\n\n # iterating through all three outputs\n for result in outputs:\n # going through all bounding boxes from current output layer\n for detection in result:\n # getting class for current object\n scores_current = detection[5:]\n class_current = np.argmax(scores_current)\n\n # getting probability for current object\n probability_current = scores_current[class_current]\n\n # getting object confidence for current object\n object_confidence = detection[4]\n\n # eliminating weak predictions by minimum probability\n if probability_current > probability_minimum:\n # if probability_current*object_confidence > probability_minimum: # this is an alternative way\n\n # Scaling bounding box coordinates to the initial image size\n # by element-wise multiplying them with the width and height of the image\n box_current = np.array(detection[0:4]) * np.array([width, height, width, height])\n\n # YOLO data format keeps center of detected box and its width and height\n # here we reconstruct the top left and bottom right corner\n x_center, y_center, box_width, box_height = box_current.astype('int')\n x_min = int(x_center - (box_width / 2))\n y_min = int(y_center - (box_height / 2))\n x_max = int(x_center + (box_width / 2))\n y_max = int(y_center + (box_height / 2))\n\n # adding results into prepared lists\n boxes.append([x_min, y_min, x_max, y_max])\n scores.append(float(probability_current))\n classes.append(class_current)\n\n boxes = np.array(boxes)\n scores = np.array(scores)\n classes = np.array(classes)\n return boxes, scores, classes", "def run_detection(self, image_path, threshold=0.5):\n # Load the input shape required by the model\n _, input_height, input_width, _ = self.model.get_input_details()[0]['shape']\n\n # Load the input image and preprocess it\n preprocessed_image = self.preprocess_image(image_path, (input_height, input_width))\n\n # Run object detection on the input image\n results = self.detect_objects(preprocessed_image)\n return results", "def detect(self, images, verbose=0):\n assert self.mode == \"inference\", \"Create model in inference mode.\"\n assert len(images) == self.config.BATCH_SIZE, \"len(images) must be equal to BATCH_SIZE\"\n\n if verbose:\n log(f\"Processing {len(images)} images\")\n for image in images:\n log(\"image\", image)\n\n # Mold inputs to format expected by the neural network\n molded_images, image_metas, windows = self.mold_inputs(images)\n\n # Validate image sizes\n # All images in a batch MUST be of the same size\n image_shape = molded_images[0].shape\n for g in molded_images[1:]:\n assert g.shape == image_shape,\\\n \"After resizing, all images must have the same size. 
Check IMAGE_RESIZE_MODE and image sizes.\"\n\n # Anchors\n anchors = self.get_anchors(image_shape)\n # Duplicate across the batch dimension because Keras requires it\n # TODO: can this be optimized to avoid duplicating the anchors?\n anchors = np.broadcast_to(anchors, (self.config.BATCH_SIZE,) + anchors.shape)\n\n if verbose:\n log(\"molded_images\", molded_images)\n log(\"image_metas\", image_metas)\n log(\"anchors\", anchors)\n # Run object detection\n # ************************* NOTE for 2 label dataset \n\n predict = self.keras_model.predict([molded_images, image_metas, anchors], verbose=0)\n detections,mrcnn_mask = predict[:2]\n # Process detections\n results = []\n for i, image in enumerate(images):\n result = self.unmold_detections(detections[i], mrcnn_mask[i],\n image.shape, molded_images[i].shape,\n windows[i])\n results.append(result)\n return results", "def detect_fn(image):\r\n\r\n image, shapes = model.preprocess(image)\r\n prediction_dict = model.predict(image, shapes)\r\n detections = model.postprocess(prediction_dict, shapes)\r\n\r\n return detections, prediction_dict, tf.reshape(shapes, [-1])", "def demo(net, data_dir, imgfile, out_dir):\n\n # Load the demo image\n im_file = os.path.join(data_dir, imgfile)\n im = cv2.imread(im_file)\n\n timer = Timer()\n timer.tic()\n scores, boxes = im_detect(net, im)\n scores = np.squeeze(scores)\n timer.toc()\n print ('Detection took {:.3f}s for '\n '{:d} object proposals').format(timer.total_time, boxes.shape[0])\n\n # Visualize detections for each class\n CONF_THRESH = 0.12\n NMS_THRESH = 0.3\n color_white = (0, 0, 0)\n for cls_ind, cls in enumerate(CLASSES[1:]):\n cls_ind += 1 \n cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]\n cls_scores = scores[:, cls_ind]\n dets = np.hstack((cls_boxes,\n cls_scores[:, np.newaxis])).astype(np.float32)\n keep = nms(dets, NMS_THRESH)\n dets = dets[keep, :]\n color = (random.randint(0, 256), random.randint(0, 256), random.randint(0, 256))\n inds = np.where(dets[:, -1] >= CONF_THRESH)[0]\n for i in inds:\n bbox = dets[i, :4]\n score = dets[i, -1]\n bbox = map(int, bbox)\n cv2.rectangle(im, (bbox[0], bbox[1]), (bbox[2], bbox[3]), color=color, thickness=4)\n cv2.putText(im, '%s %.3f' % (cls, score), (bbox[0], bbox[1] + 15),\n color=color_white, fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.5)\n return im", "def get_files_data_from_results(results):\n files = []\n if results:\n for result in results:\n raw_file = get_fields_from_hit_object(result, 'file_indicators')\n file_data = filter_object_entries_by_dict_values(raw_file, 'file_indicators')\n files.append(file_data)\n return files", "def vis_detections(im, class_name, dets, thresh=0.8):\n\n dict = {'HolderA': 'Holder', 'WheelA': 'WheelA', 'WheelB': 'WheelB', 'BrakeA': 'Brake', 'SpringA': 'Spring',\n 'BuckleA': 'BuckleA', 'BuckleB': 'BuckleB', 'TubeA': 'Tube', 'NutA': 'NutA', 'ScrewA': 'ScrewA',\n 'NutB': 'NutB', 'ScrewB': 'ScrewB',\n 'WireA': 'Wire', 'PlateA': 'PlateA', 'PlateB': 'PlateB', 'PlateD': 'PlateC', 'PlateE': 'PlateD',\n 'BoltA': 'Bolt', 'LoopB': 'Loop', 'JointA': 'JointA', 'JointB': 'JointB', 'FixatorA': 'Fixator',\n 'BearingA': 'Bearing', 'PlugA': 'Plug'}\n\n for i in range(np.minimum(10, dets.shape[0])):\n bbox = tuple(int(np.round(x)) for x in dets[i, :4])\n score = dets[i, -1]\n if score > thresh:\n # Color site: http://www.wahart.com.hk/rgb.htm\n if class_name == 'HolderA':\n color = (255, 255, 0) # Cyan\n elif class_name == 'WheelA':\n color = (212, 255, 127) # Aquamarina\n elif class_name == 'WheelB':\n color = (99, 99, 238) # 
IndianRed2\n elif class_name == 'BrakeA':\n color = (99, 99, 238) # IndianRed2\n elif class_name == 'SpringA':\n color = (180, 130, 70) # SteelBlue\n elif class_name == 'BuckleA':\n color = (205, 0, 0) # MediumBlue\n elif class_name == 'BuckleB':\n color = (170, 205, 102) # MediumAquamarine\n elif class_name == 'BuckleC':\n color = (0, 252, 124) # LawnGreen\n elif class_name == 'BuckleD':\n color = (50, 205, 50) # LimeGreen\n elif class_name == 'TubeA':\n color = (147, 112, 219) # PaleVioletRed\n elif class_name == 'ScrewA':\n color = (240, 32, 160) # Purple\n elif class_name == 'ScrewB':\n color = (0, 165, 255) # Orange1\n elif class_name == 'ScrewC':\n color = (48, 48, 255) # Firebrick1\n elif class_name == 'NutA':\n color = (0, 255, 255) # Yellow\n elif class_name == 'NutB':\n color = (255, 144, 30) # DodgerBlue\n elif class_name == 'NutC':\n color = (180, 238, 180) # DarkSeaGreen2\n elif class_name == 'WireA':\n color = (255, 255, 255) # White\n elif class_name == 'PlateA':\n color = (0, 69, 255) # OrangeRed\n elif class_name == 'PlateB':\n color = (102, 205, 0) # SpringGreen3\n elif class_name == 'PlateD':\n color = (0, 255, 0) # Green\n elif class_name == 'PlateE':\n color = (0, 140, 250) # DarkOrange\n elif class_name == 'BoltA':\n color = (255, 255, 0) # Cyan\n elif class_name == 'LoopB':\n color = (180, 105, 255) # HotPink\n elif class_name == 'JointA':\n color = (105, 140, 255) # Salmon1\n elif class_name == 'JointB':\n color = (255, 0, 255) # Magenta3\n elif class_name == 'FixatorA':\n color = (0, 205, 102) # Chartreuse3\n elif class_name == 'BearingA':\n color = (185, 218, 255) # PeachPuff\n elif class_name == 'PlugA':\n color = (193, 193, 255) # RosyBrown1\n else:\n color = (139, 0, 139) # DarkMagenta\n cv2.rectangle(im, bbox[0:2], bbox[2:4], color, 2)\n # cv2.putText(im, '%s: %.3f' % (class_name, score), (bbox[0], bbox[1] + 15), cv2.FONT_HERSHEY_COMPLEX,\n # 0.5, color, thickness=1)\n cv2.putText(im, '%s: %.3f' % (dict[class_name], score), (bbox[0], bbox[1] + 15), cv2.FONT_HERSHEY_COMPLEX,\n 0.5, color, thickness=1)\n return im", "def detect_fn(image):\n\n image, shapes = model.preprocess(image)\n prediction_dict = model.predict(image, shapes)\n detections = model.postprocess(prediction_dict, shapes)\n\n return detections, prediction_dict, tf.reshape(shapes, [-1])", "def _extract_epoch_results(\n self, epoch: int = -1\n ) -> Dict[str, Dict[str, DLTypes.TrackableType]]:\n return {\n \"Static hyperparameters\": self._static_hyperparameters,\n \"Dynamic hyperparameters\": {\n name: value[epoch]\n for name, value in self._dynamic_hyperparameters.items()\n },\n \"Training results\": {\n name: value[epoch] for name, value in self._training_summaries.items()\n },\n \"Validation results\": {\n name: value[epoch] for name, value in self._validation_summaries.items()\n },\n }", "def load_predictions(self,\n detection_results,\n include_mask,\n is_image_mask=False):\n predictions = []\n num_detections = detection_results['detection_scores'].size\n current_index = 0\n for i, image_id in enumerate(detection_results['source_ids']):\n\n if include_mask:\n box_coorindates_in_image = detection_results['detection_boxes'][i]\n segments = generate_segmentation_from_masks(\n detection_results['detection_masks'][i],\n box_coorindates_in_image,\n int(detection_results['image_info'][i][3]),\n int(detection_results['image_info'][i][4]),\n is_image_mask=is_image_mask\n )\n\n # Convert the mask to uint8 and then to fortranarray for RLE encoder.\n encoded_masks = [\n 
maskUtils.encode(np.asfortranarray(instance_mask.astype(np.uint8)))\n for instance_mask in segments\n ]\n\n for box_index in range(int(detection_results['num_detections'][i])):\n if current_index % 1000 == 0:\n logging.info('{}/{}'.format(current_index, num_detections))\n\n current_index += 1\n\n prediction = {\n 'image_id': int(image_id),\n 'bbox': detection_results['detection_boxes'][i][box_index].tolist(),\n 'score': detection_results['detection_scores'][i][box_index],\n 'category_id': int(\n detection_results['detection_classes'][i][box_index]),\n }\n\n if include_mask:\n prediction['segmentation'] = encoded_masks[box_index]\n\n predictions.append(prediction)\n\n return predictions", "def create_from_pb2(cls, pb2_obj: _DetectionListProto) -> 'DetectionResult':\n return DetectionResult(detections=[\n Detection.create_from_pb2(detection) for detection in pb2_obj.detection\n ])", "def load_detection_dir_as_results(root,\n annotations,\n detections_format='pickle',\n include_masks=False,\n score_threshold=None,\n max_dets_per_image=None,\n show_progress=False):\n if not isinstance(root, Path):\n root = Path(root)\n ext = {\n 'pickle': '.pickle',\n 'pkl': '.pkl',\n 'mat': '.mat'\n }[detections_format]\n bbox_annotations = []\n if include_masks:\n segmentation_annotations = []\n\n for image in tqdm(annotations['images'],\n desc='Collecting annotations',\n disable=not show_progress):\n path = (root / f'{image[\"file_name\"]}').with_suffix(ext)\n if not path.exists():\n logging.warn(f'Could not find detections for image '\n f'{image[\"file_name\"]} at {path}; skipping...')\n continue\n if detections_format in ('pickle', 'pkl'):\n with open(path, 'rb') as f:\n detections = pickle.load(f)\n else:\n detections = misc.load_detection_mat(path)\n\n num_detections = len(detections['instances']['scores'])\n indices = sorted(range(num_detections),\n key=lambda i: detections['instances']['scores'][i],\n reverse=True)\n\n if max_dets_per_image is not None:\n indices = indices[:max_dets_per_image]\n\n for idx in indices:\n entry = detections['instances']['pred_boxes'][idx]\n x1 = entry[0]\n y1 = entry[1]\n x2 = entry[2]\n y2 = entry[3]\n bbox = [int(x1), int(y1), int(x2-x1), int(y2-y1)]\n\n category = int(detections['instances']['pred_classes'][idx] + 1)\n score = detections['instances']['scores'][idx]\n if score_threshold is not None and score < score_threshold:\n continue\n\n try:\n score = score.item()\n except AttributeError:\n pass\n\n bbox_annotations.append({\n 'image_id': image['id'],\n 'category_id': category,\n 'bbox': bbox,\n 'score': score,\n })\n if include_masks:\n segmentation_annotations.append({\n 'image_id': image['id'],\n 'category_id': category,\n 'segmentation': detections['instances']['pred_masks'][idx],\n 'score': score\n })\n if include_masks:\n return bbox_annotations, segmentation_annotations\n else:\n return bbox_annotations", "def get_results(self):\n summary = self.handle.get_summary_data(self.group_name)\n results = {'template': {'status': 'no data'},\n 'complement': {'status': 'no data'},\n '2d': {'status': 'no data'}}\n if 'genome_mapping_template' in summary:\n results['template'] = self._get_results(summary['genome_mapping_template'])\n if 'genome_mapping_complement' in summary:\n results['complement'] = self._get_results(summary['genome_mapping_complement'])\n if 'genome_mapping_2d' in summary:\n results['2d'] = self._get_results(summary['genome_mapping_2d'])\n return results", "def postprocess(image: np.ndarray, results_list: list, threshold_confidence: float, 
threshold_nms: float) -> list:\n frameHeight = image.shape[0]\n frameWidth = image.shape[1]\n\n # Scan through all the bounding boxes output from the network and..\n # 1. keep only the ones with high confidence scores.\n # 2. assign the box class label as the class with the highest score.\n # 3. construct a list of bounding boxes, class labels and confidence scores\n\n classIds = []\n confidences = []\n boxes = []\n for result in results_list:\n for detection in result:\n scores = detection[5:]\n classId = np.argmax(scores)\n confidence = scores[classId]\n if confidence > threshold_confidence:\n center_x = int(detection[0] * frameWidth)\n center_y = int(detection[1] * frameHeight)\n width = int(detection[2] * frameWidth)\n height = int(detection[3] * frameHeight)\n left = max(0, int(center_x - width / 2))\n top = max(0, int(center_y - height / 2))\n classIds.append(classId)\n confidences.append(float(confidence))\n boxes.append([left, top, width, height])\n\n # Perform non maximum suppression to eliminate redundant overlapping boxes with\n # lower confidences\n list_of_tuples = []\n\n indices = cv2.dnn.NMSBoxes(boxes, confidences, threshold_confidence, threshold_nms)\n for i in indices:\n i = i[0]\n list_of_tuples.append((classIds[i], confidences[i], boxes[i]))\n # return post processed lists of classIds, confidences and bounding boxes\n return list_of_tuples", "def detect_fn(image) :\n image, shapes = detection_model.preprocess(image)\n prediction_dict = detection_model.predict(image, shapes)\n detections = detection_model.postprocess(prediction_dict, shapes)\n\n return detections, prediction_dict, tf.reshape(shapes, [-1])", "def test_measurement_on_images(file_list):\n test_details_list = []\n for idx, file in enumerate(file_list):\n dict_results = {}\n image = cv2.imread(file)\n FaceDetector().run(image, dict_results)\n file_name = ntpath.basename(file)\n is_there_face = \"True\" in file_name\n test_details_list.append([file_name, is_there_face, dict_results[\"FaceDetector\"],\n is_there_face == dict_results[\"FaceDetector\"]])\n\n # print test results in a readable table format\n headers = ['File Name', 'Face Exist', 'Measurement Result', 'Test Result']\n print(tabulate(test_details_list, headers))", "def detect_fn(image):\n\n image, shapes = model.preprocess(image)\n prediction_dict = model.predict(image, shapes)\n detections = model.postprocess(prediction_dict, shapes)\n\n return detections, prediction_dict, tf.reshape(shapes, [-1])", "def inference(self, image, score_threshold=None):\n h, w, c = image.shape\n image_batch = np.expand_dims(image, axis=0)\n # get operators from graph\n image_tensor = self.graph.get_tensor_by_name('image_tensor:0')\n detection_boxes = self.graph.get_tensor_by_name('detection_boxes:0')\n detection_scores = self.graph.get_tensor_by_name('detection_scores:0')\n detection_classes = self.graph.get_tensor_by_name('detection_classes:0')\n num_detections = self.graph.get_tensor_by_name('num_detections:0')\n # run inference\n with self.graph.as_default():\n t0 = datetime.now()\n (boxes, scores, classes, num) = self.sess.run(\n [detection_boxes, detection_scores, detection_classes, num_detections],\n feed_dict={image_tensor: image_batch})\n t1 = datetime.now()\n num = int(num)\n self._log_info('*TF Detection*: {}'.format(get_tdiff(t0, t1)))\n # post processing ...\n # purge useless dimension \n boxes, scores, classes = np.squeeze(boxes), np.squeeze(scores), np.squeeze(classes)\n # take only valid results\n boxes, scores, classes = boxes[:num,:], scores[:num], 
classes[:num]\n # score threshold\n if score_threshold is None:\n score_threshold = self.score_threshold\n boxes = boxes[scores>score_threshold,:]\n classes = classes[scores>score_threshold]\n scores = scores[scores>score_threshold]\n num = scores.shape[0]\n self._log_info('{} objects found'.format(num))\n # x-y reorder\n boxes = boxes[:,np.array([1,0,3,2])]\n # transform from 0-1 to 0-w and 0-h\n boxes = np.multiply(boxes, np.array([w,h,w,h])).astype(np.int32)\n return boxes, scores, classes", "def match_results(self):\n return np.array(list(self._match_result_dict.values()))", "def detect(self):\n # process the input video and get the attributes:\n self.process_video()\n\n # build a rcnn/ yolov5 predictor:\n self.build_predictor()\n\n \n # assert not os.path.isfile(args.output_file), \"File with the name %s already exists\"%args.output_file\n # build the writer with same attributes:\n self.vid_writer = cv2.VideoWriter(self.output, self.fourcc, self.fps, (self.w, self.h))\n\n # inference time:\n start = time.time()\n print(\"Started inference\\n\")\n \n # progress bar using tqdm:\n pbar = tqdm(total=self.nframes)\n\n while(self.cap.isOpened()):\n ret, frame = self.cap.read()\n if ret == False:\n break # when the last frame is read \n\n # different formats of results:\n if self.library == \"yolov5\":\n # predict and bring the outputs to cpu:\n results = self.predictor(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)) # convert to RGB\n predictions = results.xyxy[0].cpu()\n # find the instance indices with person:\n person_idx = predictions[:,5] == self.label_dict[\"person\"]\n # extract the corresponding boxes and scores:\n boxes = predictions[person_idx,:4].numpy()\n probs = predictions[person_idx,4].numpy()\n\n if self.library == \"detectron2\":\n # predict and bring the outputs to cpu:\n results = self.predictor(frame) # RGB conversion done automatically in detectron\n predictions = results[\"instances\"].to(\"cpu\")\n # find the instance indices with person:\n person_idx = [predictions.pred_classes == self.label_dict[\"person\"]]\n # extract the corresponding boxes and scores:\n boxes = predictions.pred_boxes[person_idx].tensor.numpy()\n probs = predictions.scores[person_idx].numpy()\n\n # draw boxes and write the frame to the video:\n if len(boxes): # check whether there are predictions\n box_frame = self.draw_person_boxes(frame, boxes, probs)\n else:\n box_frame = frame\n self.vid_writer.write(box_frame)\n\n pbar.update(1)\n pbar.close()\n\n # release the video capture object and write object:\n self.cap.release()\n self.vid_writer.release()\n\n print(\"Inferene on the video file took %0.3f seconds\"%(time.time()-start))", "def analyze(self):\n result = []\n for frame_no, frame in enumerate(self.work.files('frames')):\n areas = self.blur.check_image(frame, self.work.files('templates'))\n for area in areas:\n index = find_area(result, area)\n if index == -1:\n result.append({'area': area, 'frames': [frame_no]})\n else:\n result[index]['frames'].append(frame_no)\n\n for values in result:\n sectors = [[values['frames'][0], values['frames'][0]]]\n for index in range(1, len(values['frames'])):\n if (values['frames'][index] - sectors[-1][1]) == 1:\n sectors[-1][1] = values['frames'][index]\n else:\n sectors.append([values['frames'][index], values['frames'][index]])\n values['sectors'] = sectors\n return result", "def classifier(self):\n\n print \"Starting Classification\"\n self.detections.rotationClass = [ self.detections.rotationTimeTags[index] for index, theta in 
enumerate(self.detections.rotations) if theta > 30]\n if len(self.detections.rotationClass) < 1:\n print \"Too little rotation hits\"\n self.detections.classification = \"Too little rotation hits\"\n\n else:\n \n for attribute, value in classIterator(self.detections):\n print value[1]\n if 'crease' in attribute:\n \n if value[1] > self.detections.rotationClass[0] and value[1] < self.detections.rotationClass[-1]:\n print \"direct hit\", attribute, value[1]\n self.detections.classification = \"Direct hit\"\n #if self.detections.\n else:\n for angleStamp in self.detections.rotationClass:\n if secondsCount(value[1],angleStamp).total_seconds < 10:\n self.detections.classification = \"Near miss\"\n \n else:\n self.detections.classification = \"Nothing impressive\"\n print \"Ending Classification\"", "def func(frame):\n nonlocal net\n\t\n prevh, prevw, _= frame.shape\n\n wscale = prevw / 480\n hscale = prevh / 320\n\n frame = cv2.resize(frame, (480, 320))\n frame = jetson.utils.cudaFromNumpy(frame)\n detections = net.Detect(frame)\n ret = [(d.ClassID, d.Top*hscale, d.Left*wscale, d.Right*wscale, d.Bottom*hscale) for d in detections]\n print(ret)\n return ret", "def detect_and_visualize(self, im_list, root_dir=None, extension=None,\n classes=[], thresh=0.6, show_timer=False):\n dets = self.im_detect(im_list, root_dir, extension, show_timer=show_timer)\n if not isinstance(im_list, list):\n im_list = [im_list]\n assert len(dets) == len(im_list)\n for k, det in enumerate(dets):\n img = cv2.imread(im_list[k])\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n self.visualize_detection(img, det, classes, thresh)", "def generate_detection_results(self, rois, gt_class_ids, gt_boxes, image_meta):\n # Trim zeros.\n zero_ix = np.where(rois[:, 4] == 0)[0]\n N = zero_ix[0] if zero_ix.shape[0] > 0 else rois.shape[0]\n rois = rois[:N, :4]\n non_zeros = np.sum(np.abs(gt_boxes), axis=1).astype(np.bool)\n gt_boxes = gt_boxes[non_zeros]\n gt_class_ids = gt_class_ids[non_zeros]\n # Assign class ids for each roi.\n overlaps = utils.compute_overlaps(rois, gt_boxes)\n roi_iou_max = np.max(overlaps, axis=1)\n pos_roi_bool = roi_iou_max >= 0.5\n rois = rois[pos_roi_bool]\n overlaps = overlaps[pos_roi_bool]\n roi_gt_box_assignment = np.argmax(overlaps, axis=1)\n roi_gt_classes = gt_class_ids[roi_gt_box_assignment]\n # Unnormalize and formalize outputs.\n m = modellib.parse_image_meta(image_meta)\n original_image_shape = m['original_image_shape'][0, :]\n image_shape = m['image_shape'][0, :]\n window = m['window'][0, :]\n window = utils.norm_boxes(window, image_shape[:2])\n wy1, wx1, wy2, wx2 = window\n shift = np.array([wy1, wx1, wy1, wx1])\n wh = wy2 - wy1 # window height\n ww = wx2 - wx1 # window width\n scale = np.array([wh, ww, wh, ww])\n boxes = np.divide(rois - shift, scale)\n boxes = utils.denorm_boxes(boxes, original_image_shape[:2])\n # Filter out detections with zero area. 
Happens in early training when\n # network weights are still random\n exclude_ix = np.where(\n (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1]) <= 0)[0]\n if exclude_ix.shape[0] > 0:\n boxes = np.delete(boxes, exclude_ix, axis=0)\n roi_gt_classes = np.delete(roi_gt_classes, exclude_ix, axis=0)\n N = boxes.shape[0]\n masks = np.zeros(original_image_shape[:2] + (N,))\n scores = np.ones(N).astype(np.float32)\n return boxes, roi_gt_classes, scores, masks", "def parse_results(self, result):\n\n interesting = []\n for item in result[\"hits\"][\"hits\"]:\n source = item[\"_source\"]\n meta = source.get(\"meta\")\n\n title = \"No title found\"\n descr = None\n os_path = None\n highlight = None\n\n if meta is not None:\n title = meta.get(\"title\") or \"No title found\"\n if meta.get(\"raw\") is not None:\n descr = meta.get(\"raw\").get(\"description\")\n\n path = source.get(\"path\")\n if path is not None:\n os_path = path.get(\"real\")\n\n highlight = \" \".join(item[\"highlight\"][\"content\"][0].split())\n\n temp = {\n \"id\": item[\"_id\"],\n \"title\": title,\n \"description\": descr,\n \"path\": os_path,\n \"highlight\": highlight,\n }\n interesting.append(temp)\n self.interesting = interesting\n return interesting", "def visualize_detections(detection_result,\n label_map,\n font_path=None,\n font_size=10,\n line_width=1,\n score_thresh=.5,\n max_num_viz=None,\n color_per_instance_mask=True):\n image = np.copy(detection_result['image'])\n height, width = image.shape[:2]\n\n scores = detection_result['scores']\n boxes = detection_result['boxes']\n classes = detection_result['classes']\n\n detection_indices = scores >= score_thresh\n scores = scores[detection_indices]\n boxes = boxes[detection_indices]\n classes = classes[detection_indices]\n\n masks = None\n if 'masks' in detection_result:\n masks = detection_result['masks']\n masks = masks[detection_indices]\n\n num_detections = scores.shape[0]\n if max_num_viz is not None:\n num_detections = np.minimum(num_detections, max_num_viz)\n \n color_map = get_color_map(len(label_map) + 1)\n\n for i in range(num_detections):\n ymin, xmin, ymax, xmax = boxes[i].astype(np.int32)\n ymin = np.maximum(0, ymin)\n xmin = np.maximum(0, xmin)\n ymax = np.minimum(height - 1, ymax)\n xmax = np.minimum(width - 1, xmax)\n color = color_map[classes[i]]\n if color_per_instance_mask:\n mask_color = (color_map[i] + 10) % color_map.shape[0]\n if (mask_color[0] < 50).all(): \n mask_color = (color_map[i] + 15) % color_map.shape[0]\n else:\n mask_color = color\n\n image[ymin : ymax, \n np.maximum(xmin - line_width // 2, 0) : \n np.minimum(xmin + line_width - line_width // 2, width)] = color\n image[ymin : ymax, \n np.maximum(xmax - line_width // 2, 0) : \n np.minimum(xmax + line_width - line_width // 2, width)] = color\n image[np.maximum(ymin - line_width // 2, 0) : \n np.minimum(ymin + line_width - line_width // 2, height), \n xmin : xmax] = color\n image[np.maximum(ymax - line_width // 2, 0) : \n np.minimum(ymax + line_width - line_width // 2, height), \n xmin : xmax] = color\n\n detection_label_text = '%s: %.2f' % (\n label_map[classes[i]], int(scores[i] * 100) / 100)\n\n if font_path is not None: \n font = ImageFont.truetype(font_path, size=font_size)\n else:\n font = ImageFont.load_default()\n text_width, text_height = font.getsize(detection_label_text)\n\n x = xmin\n y = np.maximum(ymin - text_height, 0)\n \n image[y : np.minimum(y + text_height, height), \n x : np.minimum(x + text_width, width)] = color \n\n img_obj = Image.fromarray(image)\n draw = 
ImageDraw.Draw(img_obj)\n draw.text((x, y), detection_label_text, TEXT_COLOR, font=font)\n image = np.array(img_obj)\n\n if masks is not None:\n draw_mask(image, masks[i], color=mask_color)\n\n return image", "def find_objects(image, **kw):\n # This OpenCV based approach is MUCH slower:\n # objects = list()\n # for label in range(max_label+1 if max_label else amax(image)):\n # mask = array(image==(label+1), uint8)\n # if mask.any():\n # x, y, w, h = cv2.boundingRect(mask)\n # objects.append(sl.box(y,y+h,x,x+w))\n # else:\n # objects.append(None)\n # return objects\n try: return measurements.find_objects(image,**kw)\n except: pass\n types = [\"int32\",\"uint32\",\"int64\",\"uint64\",\"int16\",\"uint16\"]\n for t in types:\n try: return measurements.find_objects(array(image,dtype=t),**kw)\n except: pass\n # let it raise the same exception as before\n return measurements.find_objects(image,**kw)", "def _merge_results(detected_objects, localized_objects, tracked_objects):\n localized_map = {}\n for obj in localized_objects.objects_in_boxes:\n localized_map[Roi(obj.roi)] = obj\n\n detected_map = {}\n for obj in detected_objects.objects_vector:\n key = Roi(obj.roi)\n if key not in localized_map:\n continue\n detected_map[Roi(obj.roi)] = obj\n\n merged = []\n for obj in tracked_objects.tracked_objects:\n key = Roi(obj.roi)\n if key not in localized_map:\n continue\n merged.append(ObjectItem(tracked_objects.header, obj.roi, obj.id, detected_map[key].object,\n localized_map[key].min, localized_map[key].max))\n return merged", "def findFaces(self):\n\t\trects = self.detectAll()\n\t\tif len(rects)==0:\n\t\t\trects = []\n\t\telse:\n\t\t\trects[:, 2:] += rects[:, :2]\n\t\tself.analyzeFrame(rects)", "def detect(self, features):\n pass # TODO", "def results(self):\n \n results = {}\n if self.descriptors is not None:\n for i in range(len(self.items)):\n results[self.items[i]] = {'I': self.ivalue(i),\n 'J': self.jvalue(i)}\n else:\n for i in range(len(self.items)):\n results[self.items[i]] = {'I': self.ivalue(i)}\n return results" ]
[ "0.7317474", "0.71720695", "0.7067133", "0.70630217", "0.7045426", "0.7042128", "0.66765565", "0.662646", "0.6616235", "0.65515685", "0.65083957", "0.6459783", "0.6371822", "0.63698006", "0.6324095", "0.6281179", "0.6168086", "0.6153466", "0.61346966", "0.612045", "0.61051345", "0.6049953", "0.6044423", "0.6024107", "0.5908537", "0.58879673", "0.5883988", "0.5881097", "0.5857407", "0.58442765", "0.58229095", "0.5808425", "0.58061147", "0.5781495", "0.57753515", "0.57400036", "0.57386726", "0.57370967", "0.57364804", "0.573325", "0.5715215", "0.57109344", "0.56964016", "0.5674665", "0.5673416", "0.5635394", "0.5605685", "0.5593924", "0.5590143", "0.5588592", "0.558396", "0.5580234", "0.5578793", "0.5566511", "0.5538962", "0.553645", "0.55334914", "0.55317485", "0.55299443", "0.5504807", "0.549814", "0.5497286", "0.5482122", "0.54790086", "0.5471282", "0.54573995", "0.5457069", "0.54547787", "0.54148954", "0.54071707", "0.5394625", "0.53936607", "0.5392329", "0.5381664", "0.53810936", "0.5380865", "0.53804564", "0.53728443", "0.5366161", "0.5363926", "0.5363269", "0.53587973", "0.5357435", "0.53572786", "0.53567225", "0.534509", "0.5343152", "0.5333714", "0.5319029", "0.53179324", "0.5314429", "0.5314058", "0.5312706", "0.53072137", "0.53064024", "0.52953875", "0.5288239", "0.5278433", "0.5275595", "0.52743214" ]
0.7075441
2
Return a distance matrix.
def distance_matrix(n_row, n_col):
    n_pop = int(n_row * n_col)
    center = int(n_row/2*(n_col+1))
    pop_idx = np.arange(n_pop)
    pop_idx_col = np.remainder(pop_idx, n_col)
    pop_idx_row = pop_idx // n_row
    pos = np.vstack((pop_idx_col, pop_idx_row)).T
    distance = spa.distance.cdist([pos[center]], pos)[0]
    return distance
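A minimal usage sketch of the distance_matrix function above, assuming numpy is imported as np and scipy.spatial as spa (the aliases its body relies on) and an illustrative 10 x 10 grid; note that it returns a flat vector of distances from a central reference cell rather than a full pairwise matrix:

import numpy as np
import scipy.spatial as spa

# Illustrative grid size (assumption): 10 rows x 10 columns, cells numbered row-major.
n_row, n_col = 10, 10
dist = distance_matrix(n_row, n_col)

# Entry i is the Euclidean distance from the central cell to cell i.
print(dist.shape)   # (100,)
print(dist.min())   # 0.0 -- the central cell's distance to itself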
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getDistanceMatrix(self):\n return self.distmat.as_matrix()", "def getDistanceMatrix(self):\n v = self.getVectors()\n vLis = v.keys()\n N = len(v.keys())\n D = np.zeros([N, N], dtype=np.float32)\n print(N)\n for i in range(N):\n print(\"%d/%d\" %(i, N))\n D[i, i] = 1\n for j in range(i + 1, N):\n dist = self.cosin_sim_pairs(v[vLis[i]], v[vLis[j]])\n D[i, j] = dist\n D[j, i] = dist\n return D", "def _distance_matrix(self):\n\n # Log the type of metric being used in Sequencing\n logger.info('Using {} Distance'.format(self.measure))\n\n # Convert the nodal coordinate tuples to a np.array\n coords = np.vstack(map(np.array, self.coords.values()))\n \n if self.measure == 'haversine':\n # Partially applied haversine function that takes a coord and computes the vector distances for all coords\n haversine = lambda coord: get_hav_distance(coords[:, 0], coords[:, 1], *coord) \n # Map the partially applied function over all coordinates, and stack to a matrix\n return np.vstack(map(haversine, coords))\n\n # Partially applied haversine function that takes a coord and computes the vector distances for all coords\n euclidean = lambda coord: get_euclidean_dist(coords, coord)\n # Map the partially applied function over all coordinates, and stack to a matrix\n return np.vstack(map(euclidean, coords))", "def get_distance_matrix(self):\n names = self.get_named_leaves()\n num_names = len(names)\n dist_mat = np.zeros((num_names, num_names), dtype='float')\n for i, j in itertools.combinations(range(num_names), 2):\n node1, node2 = self.node_names[names[i]], self.node_names[names[j]]\n dist = self.node_distance(node1, node2)\n dist_mat[i,j] = dist\n dist_mat[j,i] = dist\n return names, dist_mat", "def dist_matrix(self):\n return self.__dist_matrix", "def nm_dist_mat(self):\n mat = np.zeros([self.N, self.M])\n for n in range(self.N):\n for m in range(self.M):\n mat[n, m] = distance(self.N_coords[n], self.M_coords[m])\n return mat", "def calc_dist_matrix(self):\n\n self.dist_matrix = spatial.distance.squareform(spatial.distance.pdist(self.data_vector,metric=\"hamming\"))\n\n self.dist_frame = pd.DataFrame(self.dist_matrix,\n index = self.seq_strings,\n columns = self.seq_strings)", "def get_distance_matrix(self, points):\n return points[:, :, np.newaxis, :]-points[:, np.newaxis, :, :]", "def DistanceMatrices(self):\r\n return self._dms", "def __build_distance_matrix(self):\n for i in range(0, len(self.__corpus)):\n doc_i = self.__corpus[i]\n for j in range(i + 1, len(self.__corpus)):\n doc_j = self.__corpus[j]\n distance = doc_i.calc_distance(doc_j)\n self.__distance_matrix.append(distance)", "def distance_matrix(data):\n D = numpy.zeros( (data.shape[0], data.shape[0]) )\n for i in xrange(data.shape[0]):\n for j in xrange(i):\n D[i,j] = numpy.linalg.norm(data[i,:]-data[j,:])\n D[j,i] = D[i,j]\n\n return D", "def getDistanceMatrix(self):\n return self.pointcloud.distmat", "def get_distances(self):\n N = len(self.cells) # Number of cells\n distances = np.zeros([N, N]) # distances between cells\n positions = self.position_matrix() # positions of cells \n \n # get distances between cells (exploit symmetry between upper and lower triangular form)\n for i, position in enumerate(positions[:-1, :]): # Iterate matrix except the last one\n directions = positions[i+1:, :] - position # direction from i to j > i\n distances[i, i+1:] = np.linalg.norm(directions, axis=1) # length of directions\n \n return distances + distances.T # Add lower triangle of matrix to upper ", "def _generate_distance_kernel_matrix(self):\n with 
self._rw_lock.read_lock():\n # Create matrix whose elements are the distances between all row\n # permutations\n fmat = self._feature_mat # shorter name\n num_rows = fmat.shape[0]\n\n # distance kernel is a square matrix based on feature samples\n dist_kernel = np.mat(np.ndarray((num_rows,)*2))\n self._log.info(\"Creating distance kernel with shape %s\",\n dist_kernel.shape)\n\n timer_log = logging.getLogger('.'.join((self.__module__,\n self.__class__.__name__,\n \"SimpleTimer\")))\n\n for i in xrange(num_rows - 1):\n with SimpleTimer('computing distances from row %d to [%d-%d]'\n % (i, i+1, num_rows-1), timer_log):\n dist_kernel[i, i] = 1.0\n for j in xrange(i + 1, num_rows):\n dist = self._histogram_intersection_distance(fmat[i],\n fmat[j])\n dist_kernel[i, j] = dist_kernel[j, i] = dist\n dist_kernel[-1, -1] = 1.0\n return dist_kernel", "def _freespace_matrix(distance):\n\n return np.array([[1., distance], [0., 1.]])", "def get_dist_mat(self):\n n_site = self.status.give(keyword=\"n_site\")\n sites = self.status.give(keyword=\"sites\")\n dist_mat = [[0.0 for j in xrange(n_site)] for i in xrange(n_site)]\n for i in xrange(n_site):\n for j in xrange(n_site):\n ri = sites[i].pos\n rj = sites[j].pos\n dist_mat[i][j] = np.linalg.norm(ri-rj)\n # print ri, rj\n return dist_mat", "def compute_dist_matrix(X1, X2, distance):\n N, M = X1.shape[0], X2.shape[0]\n dist_matrix = np.zeros((N, M))\n for i in range(N):\n for j in range(M):\n dist_matrix[i][j] = dist(X1[i], X2[j], distance=distance)\n return dist_matrix", "def compute_distance(self, transpose=False):\n\n # Calculate distance matrix\n if transpose:\n distance_matrix = pdist(self.matrix.T, self.distance)\n else:\n distance_matrix = pdist(self.matrix, self.distance)\n\n # Remove NaNs\n distance_matrix[np.isnan(distance_matrix)] = 1.0\n\n return distance_matrix", "def Distmatrix(self):\n self.Dismatrix = np.zeros((self.nodenum, self.nodenum))\n for i in range(len(self.Dismatrix)):\n for j in range(len(self.Dismatrix)):\n self.Dismatrix[i, j] = sf.dist(self.y[i], self.x[i], self.y[j], self.x[j])\n self.Dismatrix[j, i] = self.Dismatrix[i, j]", "def distance_matrix(d1, d2=None):\n if d2 is None:\n dists = np.zeros(shape=(d1.shape[0], d1.shape[0]))\n for i in range(dists.shape[0]):\n dists[i] = (((d1 - d1[i]) ** 2).sum(axis=1)) ** 0.5\n else:\n dists = np.zeros(shape=(d1.shape[0], d2.shape[0]))\n for i in range(d1.shape[0]):\n dists[i] = (((d2 - d1[i]) ** 2).sum(axis=1)) ** 0.5\n return dists", "def im_dist_mat(self):\n mat = np.zeros([self.I, self.M])\n for i in range(self.I):\n for m in range(self.M):\n mat[i, m] = distance(self.I_coords[i], self.M_coords[m])\n return mat", "def create_dist_matrix(matrix):\n #Convert input data matrix to numpy matrix\n matrix = np.array(matrix)\n n = matrix.shape[0]\n \n #Iterate through number of samples to create distance matrix\n for i in range(n):\n dist_array = euclidean_distance(matrix[i,:], matrix)\n if i == 0:\n dist_matrix = dist_array\n else:\n dist_matrix = np.concatenate((dist_matrix, dist_array), axis = 1)\n return dist_matrix", "def calc_dist_matrix(self,verbose=False):\n\n print(\"Calculating distance matrix.\"); sys.stdout.flush()\n\n nrow = self.data_vector.shape[0]\n self.dist_matrix = np.zeros((nrow, nrow),dtype=float)\n for i in range(nrow):\n if verbose:\n if i % 1000 == 0:\n print(\"Row\",i,\"of\",nrow)\n sys.stdout.flush()\n\n for j in range(i + 1, nrow):\n self.dist_matrix[i,j] = self._pairwise_dist(self.data_vector[i],self.data_vector[j])\n self.dist_matrix[j,i] = self.dist_matrix[i,j]\n 
\n self.dist_frame = pd.DataFrame(self.dist_matrix,\n index = self.seq_strings,\n columns = self.seq_strings)", "def get_distance_matrix():\n df_afstandn2 = get_dataframe(\"\"\"SELECT *\n FROM proj_afval_netwerk.afv_poi_afstand\n WHERE afstand < 1000\n \"\"\")\n return df_afstandn2", "def distance_matrix(dnas: Collection[str], metric=hamming_distance, relative=True, as_ndarray=False):\n n = len(dnas)\n result = [[0] * n for _ in range(n)]\n for pair in itertools.combinations(zip(range(n), dnas), r=2):\n (idx1, dna1), (idx2, dna2) = pair\n distance = metric(dna1, dna2)\n distance = distance / max(len(dna1), len(dna2)) if relative else distance\n result[idx1][idx2] = distance\n result[idx2][idx1] = distance\n if as_ndarray:\n result = np.asarray(result)\n return result", "def format_distance_matrix(labels, data):\r\n return format_matrix(data, labels, labels)", "def build_distance_matrix(path_to_embeddings):\n\n embed_df = pd.read_csv(path_to_embeddings)\n print (\"length is: \", len(embed_df))\n columns = list(embed_df)\n\n \n distances = euclidean_distances(embed_df.iloc[:, 1:], embed_df.iloc[:, 1:])\n embed_df = embed_df.set_index([columns[0]])\n # format distance matrix\n distances_df = pd.DataFrame(distances)\n distances_df.columns = list(embed_df.index)\n distances_df.index = list(embed_df.index)\n\n print (\"finished building the distance matrix ...\")\n\n print (\"///////////////////\")\n print (len(distances_df))\n\n return distances_df", "def _derive_euclidean_dm(self, cat_mat, dim):\r\n res_mat = []\r\n\r\n for i in range(dim):\r\n res_mat.append([0 for k in range(dim)])\r\n for j in range(i):\r\n res_mat[i][j] = self._vector_dist(cat_mat[i], cat_mat[j])\r\n res_mat[j][i] = res_mat[i][j]\r\n\r\n return DistanceMatrix(res_mat, self.DistanceMatrices[0].ids)", "def get_distance_matrix(visits: List[str], distances: Dict[Tuple[str, str], float]) -> List[List[float]]:\n\n return [[distances[i,j] for j in visits] for i in visits]", "def flatten_distance_matrix(dist):\n inds = np.triu_indices(dist.shape[0])\n return dist[inds]", "def ni_dist_mat(self):\n mat = np.zeros([self.N, self.I])\n for n in range(self.N):\n for i in range(self.I):\n mat[n, i] = distance(self.N_coords[n], self.I_coords[i])\n return mat", "def distances(self) -> ndarray:\n return self._distances", "def _create_distance_matrix(mesh):\n l = len(mesh.faces)\n\n faces = polygons(mesh.faces, mesh.vertices, mesh.face_normals, mesh.area_faces)\n # map from edge-key to adjacent faces\n adj_faces_map = {}\n # find adjacent faces by iterating edges\n for index, face in enumerate(faces):\n for edge in face.edge_keys:\n if (edge[0] > edge[1]):\n new_edge = (edge[1], edge[0])\n else:\n new_edge = (edge[0], edge[1])\n if new_edge in adj_faces_map:\n adj_faces_map[new_edge].append(index) # 一对多\n else:\n adj_faces_map[new_edge] = [index]\n\n # helping vectors to create sparse matrix later on\n row_indices = []\n col_indices = []\n Gval = [] # values for matrix of angular distances\n Aval = [] # values for matrix of geodesic distances\n # iterate adjacent faces and calculate distances\n for edge, adj_faces in adj_faces_map.items():\n if len(adj_faces) == 2:\n i = adj_faces[0]\n j = adj_faces[1]\n # 一条边连接的两个面\n Gtemp = _geodesic_distance(mesh, faces[i], faces[j], edge) # 测地距离\n Atemp = _angular_distance(mesh, faces[i], faces[j]) # 角距离 # 其实是余弦距离\n Gval.append(Gtemp)\n Aval.append(Atemp)\n row_indices.append(i)\n col_indices.append(j)\n # add symmetric entry\n Gval.append(Gtemp)\n Aval.append(Atemp)\n row_indices.append(j)\n 
col_indices.append(i)\n\n elif len(adj_faces) > 2:\n print(\"Edge with more than 2 adjacent faces: \" + str(adj_faces) + \"!\")\n\n Gval = numpy.array(Gval)\n Aval = numpy.array(Aval)\n # delta是去全局变量,外部传入的\n values = delta * Gval / numpy.mean(Gval) + \\\n (1.0 - delta) * Aval / numpy.mean(Aval)\n\n # create sparse matrix\n distance_matrix = scipy.sparse.csr_matrix(\n (values, (row_indices, col_indices)), shape=(l, l))\n return distance_matrix", "def distancematrix(vec1, vec2):\n v1, v2 = np.meshgrid(vec1, vec2)\n return np.abs(v1 - v2)", "def distance_matrix(X, Y, metric):\n distance = np.zeros((len(X), len(Y)))\n for i in range(len(X)):\n for j in range(len(Y)):\n m = metric(X[i], Y[j])\n if np.isnan(m):\n pdb.set_trace()\n distance[i, j] = m\n return distance", "def get_distance_matrix(grouped_distance):\n return grouped_distance.groupby(\n F.col(\n \"category_a\"\n ).alias(\n \"category\"\n )\n ).pivot(\n \"category_b\"\n ).agg(\n F.expr(\n \"coalesce(min(distance), 10000.00)\"\n )\n ).orderBy(\n \"category\"\n )", "def get_correct_distance_matrix(L):\n n = len(L)\n D = np.zeros((n,n))\n for i in range(n):\n for j in range(n):\n if i != j:\n D[i][j] = get_minor(L, [i, j], [i, j]) / get_minor(L, [i], [i])\n return D", "def distance_matrix(cities):\n\n return [[city1.distance(city2) for city2 in cities]\n for city1 in cities]", "def compute_matrix(self):\n\n fac = self.a / self.dx ** 2\n\n diagonal = np.ones(self.nx) * 2 * fac\n lower = np.ones(self.nx - 1) * -fac\n upper = np.ones(self.nx - 1) * -fac\n\n matrix = sp.diags(\n diagonals=[diagonal, lower, upper],\n offsets=[0, -1, 1], shape=(self.nx, self.nx),\n format='csr')\n\n return matrix", "def matrix_dist(self):\n matrix_dic = {}\n for clus in self.clusters:\n for other_clus in self.clusters:\n if clus.samples[0].s_id > other_clus.samples[0].s_id: # avoid duplicates\n matrix_dic[(clus.samples[0].s_id, other_clus.samples[0].s_id)] = clus.samples[0]\\\n .compute_euclidean_distance(other_clus.samples[0])\n return matrix_dic", "def distance_matrix(sunspots1, sunspots2):\n \n N1 = len(sunspots1)\n N2 = len(sunspots2)\n\n distance_matrix = np.zeros((N1, N2))\n\n for i in list(range(N1)):\n for j in list(range(N2)):\n\n distance_matrix[i, j] = euclidean_dist(sunspots1[i], sunspots2[j])\n\n return distance_matrix", "def get_distance_matrix(df, distance_measure, feat_col_ix=1):\n n = len(df)\n dist_matrix = np.zeros((n,n))\n for i in range(n):\n for j in range(j):\n si = df.iloc[i, feat_col_ix:]\n sj = df.iloc[j, feat_col_ix:]\n dist_matrix[i,j] = distance_measure(si, sj)[0]\n return dist_matrix", "def calculate_dist_mat(embeddings: np.ndarray, norm: int) -> np.ndarray:\n kwargs = {'p': norm}\n condensed_dist = pdist(embeddings, metric='minkowski', **kwargs)\n dist_mat = squareform(condensed_dist)\n return dist_mat", "def create_dist_mat(predims, postdims, dist_metric):\n\n filename = './data/dist_matrix_' + dim2str(predims) + '_' + dim2str(postdims) + '_' + dist_metric.__name__ + '.npz'\n if os.path.isfile(filename):\n print('file existst: loading dist mat from file: ', filename)\n with np.load(filename) as loaded:\n dist_mat = loaded['dist_mat']\n else:\n xbins_pre = eq_dist_linspace(predims[0])\n ybins_pre = eq_dist_linspace(predims[1])\n x_pre, y_pre = np.meshgrid(xbins_pre, ybins_pre)\n\n xbins_post = eq_dist_linspace(postdims[0])\n ybins_post = eq_dist_linspace(postdims[1])\n x_post, y_post = np.meshgrid(xbins_post, ybins_post)\n\n pre_coordinates = np.asarray(list(zip(flatten(x_pre), flatten(y_pre))))\n post_coordinates = 
np.asarray(list(zip(flatten(x_post), flatten(y_post))))\n\n dist_metric = partial(dist_metric, weights=postdims)\n dist_mat = distance.cdist(post_coordinates, pre_coordinates, metric=dist_metric)\n # dist_mat = distance_matrix(pre_coordinates,post_coordinates)\n\n if not os.path.exists(os.path.dirname(filename)):\n os.makedirs(os.path.dirname(filename))\n print('created directory', os.path.dirname(filename), 'in', os.path.abspath(os.path.dirname(filename)))\n np.savez(filename, dist_mat=dist_mat)\n\n return dist_mat", "def distance(self, features, targets):\n cost_matrix = np.zeros((len(targets), len(features)))\n for i, target in enumerate(targets):\n cost_matrix[i, :] = self._metric(self.samples[target], features)\n return cost_matrix", "def distance(self, features, targets):\n cost_matrix = np.zeros((len(targets), len(features)))\n for i, target in enumerate(targets):\n cost_matrix[i, :] = self._metric(self.samples[target], features)\n return cost_matrix", "def Dmat(numpts, delta=1):\n a = 0.5 / delta * ones(numpts)\n a[0] = 0\n a[-2] = 0\n #b=-2./delta**2*ones(numpts); b[0]=0;b[-1]=0\n c = -0.5 / delta * ones(numpts)\n c[1] = 0\n c[-1] = 0\n return sparse.spdiags([a, c], [-1, 1], numpts, numpts)", "def distance_matrix(sequences, substitution_mat):\n distance_mat = numpy.empty((len(sequences), len(sequences)), dtype='float')\n\n print(\"Building distance matrix\")\n # Get similarity score\n for i, seqA in enumerate(sequences):\n sys.stdout.write(\"\\r%.f%%\" % (float(i+1)/len(sequences)*100))\n sys.stdout.flush()\n for j, seqB in enumerate(sequences[i:], start=i):\n score = substitution_score(substitution_mat, seqA, seqB)\n distance_mat[i, j] = score\n distance_mat[j, i] = score\n print(\"\")\n # Set equal the diagonal\n diag_mini = numpy.min(distance_mat.diagonal())\n for i in range(len(sequences)):\n distance_mat[i, i] = diag_mini\n # Convert similarity score into a distance\n mini = numpy.min(distance_mat)\n maxi = numpy.max(distance_mat)\n return 1 - (distance_mat + abs(mini))/(maxi - mini)", "def compute_euclidean_distance_matrix(locations):\n distances = {}\n distances_df=get_times(locations)\n print(distances_df)\n print(distances_df.iloc[0,0])\n print(distances_df.iloc[0,1])\n print(distances_df.iloc[0,2])\n for from_counter, from_node in enumerate(locations):\n distances[from_counter] = {}\n for to_counter, to_node in enumerate(locations):\n distances[from_counter][to_counter] = (int(\n distances_df.iloc[from_counter,to_counter]))\n return distances", "def init_Dist_Matrix(length):\r\n dist_matrix = []\r\n \r\n while len(dist_matrix) < length:\r\n dist_matrix.append([])\r\n while len(dist_matrix[-1]) < length:\r\n dist_matrix[-1].append(float(0))\r\n \r\n # print_matrix(dist_matrix) #just for the visuals can be removed later\r\n return(dist_matrix)", "def _get_node_distance_matrix(\n self, datapoint: np.ndarray, som_array: np.ndarray\n ) -> np.ndarray:\n # algorithms on the full matrix\n if self.distance_metric == \"euclidean\":\n return np.linalg.norm(som_array - datapoint, axis=2)\n\n # node-by-node algorithms\n distmat = np.zeros((self.n_rows, self.n_columns))\n if self.distance_metric == \"manhattan\":\n for node in self.node_list_:\n distmat[node] = dist.cityblock(\n som_array[node[0], node[1]], datapoint\n )\n\n elif self.distance_metric == \"mahalanobis\":\n for node in self.node_list_:\n som_node = som_array[node[0], node[1]]\n cov = np.cov(\n np.stack((datapoint, som_node), axis=0), rowvar=False\n )\n cov_pinv = np.linalg.pinv(cov) # pseudo-inverse\n distmat[node] = 
dist.mahalanobis(datapoint, som_node, cov_pinv)\n\n elif self.distance_metric == \"tanimoto\":\n # Note that this is a binary distance measure.\n # Therefore, the vectors have to be converted.\n # Source: Melssen 2006, Supervised Kohonen networks for\n # classification problems\n # VERY SLOW ALGORITHM!!!\n threshold = 0.5\n for node in self.node_list_:\n som_node = som_array[node[0], node[1]]\n distmat[node] = dist.rogerstanimoto(\n binarize(\n datapoint.reshape(1, -1),\n threshold=threshold,\n copy=True,\n ).ravel(),\n binarize(\n som_node.reshape(1, -1), threshold=threshold, copy=True\n ).ravel(),\n )\n\n elif self.distance_metric == \"spectralangle\":\n for node in self.node_list_:\n distmat[node] = np.arccos(\n np.divide(\n np.dot(som_array[node[0], node[1]], datapoint),\n np.multiply(\n # TODO check if an axis needs to be set here\n np.linalg.norm(som_array),\n np.linalg.norm(datapoint),\n ),\n )\n )\n\n return distmat", "def expansion_matrix_d(self):\n row = self._base_nlp._d_map\n nnz = len(self._base_nlp._d_map)\n col = np.arange(nnz, dtype=np.int)\n data = np.ones(nnz)\n return csr_matrix((data, (row, col)), shape=(self.ng, nnz))", "def get_alphabet_similarity_matrix(self):\n distance_matrix = numpy.zeros((len(self.alphabet), len(self.alphabet)))\n numpy.fill_diagonal(distance_matrix, 0)\n for index_one, descriptor_one in enumerate(self.descriptors):\n for index_two, descriptor_two in enumerate(self.descriptors):\n distance = descriptor_one - descriptor_two\n squared_distance = numpy.dot(distance, distance)\n distance_matrix[index_one, index_two] = squared_distance\n distance_matrix /= 2. * (self.sigma_amino_acid ** 2)\n return numpy.exp(-distance_matrix)", "def make_distance_matrix(city):\n distance_map = {}\n with open(spider_data_path + os.sep + 'station_list' + os.sep + 'list_{}.csv'.format(city)) as f:\n reader = csv.reader(f)\n for line in reader:\n distance_map[int(line[0])] = (float(line[1]), float(line[2]))\n\n numpy_file = np.empty((station_count[city], station_count[city]))\n for i in range(0, station_count[city]):\n for j in range(0, station_count[city]):\n if i == j:\n numpy_file[i, j] = 1\n else:\n distance = geodesic(distance_map[i], distance_map[j]).km\n numpy_file[i, j] = 1.0 / distance if distance > 1 else 1\n\n file_name = exp_data_path + os.sep + 'similarity' + os.sep + 'similarity_distance_{}_numpy'.format(city)\n if os.path.exists(file_name):\n os.remove(file_name)\n np.save(file_name, numpy_file)\n pass", "def squared_distance_matrix(X, augmented=False):\n XX = np.dot(X,X.T)\n D = np.outer(np.diag(XX), np.ones(len(X)))-2*XX+np.outer(np.ones(len(X)),\n np.diag(XX))\n if augmented == True:\n n = len(D)\n zeros_v = np.zeros((n,1))\n zeros_h = np.zeros((1,n+1))\n D = np.bmat('D zeros_v; zeros_h')\n return D", "def compute_l2_distance_matrix(features_queries, features_dataset):\n sx = np.sum(features_queries ** 2, axis=1, keepdims=True)\n sy = np.sum(features_dataset ** 2, axis=1, keepdims=True)\n\n return np.sqrt(-2 * features_queries.dot(features_dataset.T) + sx + sy.T)", "def mass_matrix(self):\n if not self._fr or not self._frstar:\n raise ValueError('Need to compute Fr, Fr* first.')\n return Matrix([self._k_d, self._k_dnh])", "def get_direction_matrix(self) -> int:", "def dist_matrix(self, group1, group2):\n \n tmps = []\n for i in group2:\n tmps.append([])\n for j in group1:\n mi, label = self.distance(i, j)\n tmps[-1].append(mi)\n return tmps", "def density_matrix(wires) -> \"DensityMatrixMP\":\n wires = Wires(wires)\n return DensityMatrixMP(wires=wires)", 
"def matrix(self):\n return np.matrix(list(self._columns.values()))", "def calcDistance(self):\n # Initialize the distance matrix\n arr = np.repeat(0, self.num_col)\n result_mat = np.repeat(arr, self.num_col)\n result_mat = np.reshape(result_mat, (self.num_col, self.num_col))\n trinary_mat = self.df_trinary.values\n for left_val in TRINARY_VALUES:\n left_func = lambda v: 1 if v==left_val else 0\n left_mat = np.transpose(np.vectorize(left_func)(trinary_mat))\n for right_val in TRINARY_VALUES:\n if left_val == right_val:\n continue\n right_func = lambda v: 1 if v==right_val else 0\n right_mat = np.vectorize(right_func)(trinary_mat)\n # Count the number of occurrences of this combination of values\n # by doing a matrix multiply\n new_mat = np.matmul(left_mat, right_mat)\n # Multiply by the squared distance between the values\n squared_distance = (left_val - right_val)**2\n new_mat = new_mat*squared_distance\n # Accumulate the result\n result_mat = result_mat + new_mat\n # Convert to dataframe\n result_mat = np.vectorize(lambda v: np.sqrt(v)) (result_mat)\n self.df_distance = pd.DataFrame(result_mat, columns=self.columns,\n index=self.columns)", "def assemble_distance_matrix(dm_components):\r\n print \"I get called.\"\r\n data = {}\r\n # iterate over compenents\r\n for c in dm_components:\r\n # create a blank list to store the column ids\r\n col_ids = []\r\n # iterate over lines\r\n for line in c:\r\n # split on tabs remove leading and trailing whitespace\r\n fields = line.strip().split()\r\n if fields:\r\n # if no column ids seen yet, these are them\r\n if not col_ids:\r\n col_ids = fields\r\n # otherwise this is a data row so add it to data\r\n else:\r\n sid = fields[0]\r\n data[sid] = dict(zip(col_ids, fields[1:]))\r\n\r\n # grab the col/row ids as a list so it's ordered\r\n labels = data.keys()\r\n # create an empty list to build the dm\r\n dm = []\r\n # construct the dm one row at a time\r\n for l1 in labels:\r\n dm.append([data[l1][l2] for l2 in labels])\r\n # create the dm string and return it\r\n dm = format_distance_matrix(labels, dm)\r\n return dm", "def compute_distmat(data, distfn):\n out = np.zeros((data.shape[0], data.shape[0]))\n for i in xrange(data.shape[0]):\n for j in xrange(data.shape[0]):\n if i == j: continue\n out[i,j] = distfn(data[i,:,:], data[j,:,:])\n return out", "def cal_distances(embeddings):\n # calculate\n dist = np.zeros([len(embeddings), len(embeddings)], dtype=float)\n for ii in xrange(len(embeddings)):\n for jj in xrange(ii + 1, len(embeddings)):\n dist[ii, jj] = np.linalg.norm(embeddings[ii] - embeddings[jj])\n dist[jj, ii] = dist[ii, jj] \n \n # return\n return dist", "def distance(self, method=\"euclidean\", **kwargs):\n return Adjacency(\n pairwise_distances(self, metric=method, **kwargs), matrix_type=\"Distance\"\n )", "def compute_distances(self, X):\n #print(X.shape, self.Xtr.shape)\n dists = np.zeros((X.shape[0], self.Xtr.shape[0]))\n for i in range(X.shape[0]):\n X_r = np.tile(X[i], (self.Xtr.shape[0], 1))\n dists[i] = np.sqrt(np.sum(np.square(self.Xtr - X_r), axis = 1))\n #print(dists.shape)\n return dists", "def calculate_distance_matrix(played_decks: Union[List[FuzzyDeck], List[Deck]], measure: str):\n deck_data = np.array(played_decks).reshape(len(played_decks), 1)\n if measure == \"jaccard\":\n dist = pdist(deck_data, lambda u, v: u[0].jaccard_distance(v[0]))\n elif measure == \"euclidean\":\n dist = pdist(deck_data, lambda u, v: u[0].euclidean_distance(v[0]))\n else:\n raise ValueError(\"Unknown distance measure {}. 
\".format(measure) +\n \"Please choose one of the following distance measures ['euclidean','jaccard']\")\n\n return dist", "def makeMMatrix(distances, r):\n M = copy.deepcopy(distances)\n\n for key in M.keys():\n for subkey in M[key].keys():\n M[key][subkey] -= (r[key] + r[subkey])\n\n return M", "def get_distances(self):\n return DistanceSensors(*self.bot_client.send_command(_Command.GetDistances))", "def cosineDistanceMatrix():\n\n\tmatrix = movieMatrix()\n\tsimilarity = np.dot(matrix, matrix.T)\n\tsquareMag = np.diag(similarity)\n\tinvSquareMag = 1/squareMag\n\tinvSquareMag[np.isinf(invSquareMag)]=0\n\tinvMag = np.sqrt(invSquareMag)\n\tcosine = similarity * invMag\n\tcosine = cosine.T * invMag\n\treturn cosine", "def dhMatrix(self):\n row1 = np.array([np.cos(self.theta), -np.sin(self.theta)*np.cos(self.alpha), np.sin(self.theta)*np.sin(self.alpha), self.a*np.cos(self.theta)])\n row2 = np.array([np.sin(self.theta), np.cos(self.theta)*np.cos(self.alpha), -np.cos(self.theta)*np.sin(self.alpha), self.a*np.sin(self.theta)])\n row3 = np.array([0.0, np.sin(self.alpha), np.cos(self.alpha), self.d])\n row4 = np.array([0.0, 0.0, 0.0, 1.0])\n T = np.array([row1, row2, row3, row4])\n return T", "def _get_tsp_matrix(graph: networkx.Graph) -> np.ndarray:\n number_of_nodes = len(graph)\n matrix = np.zeros((number_of_nodes, number_of_nodes))\n for i in nx.all_pairs_dijkstra_path_length(graph, weight=\"weight\"):\n distance_dist = i[1]\n for j in distance_dist.items():\n matrix[i[0] - 1][j[0] - 1] = j[1]\n matrix[j[0] - 1][i[0] - 1] = matrix[i[0] - 1][j[0] - 1]\n return matrix", "def compute_distance_matrix(input1, input2, metric='euclidean'):\n # check input\n assert isinstance(input1, torch.Tensor)\n assert isinstance(input2, torch.Tensor)\n assert input1.dim() == 2, 'Expected 2-D tensor, but got {}-D'.format(\n input1.dim()\n )\n assert input2.dim() == 2, 'Expected 2-D tensor, but got {}-D'.format(\n input2.dim()\n )\n assert input1.size(1) == input2.size(1)\n\n if metric == 'euclidean':\n distmat = euclidean_squared_distance(input1, input2)\n elif metric == 'cosine':\n distmat = cosine_distance(input1, input2)\n else:\n raise ValueError(\n 'Unknown distance metric: {}. 
'\n 'Please choose either \"euclidean\" or \"cosine\"'.format(metric)\n )\n\n return distmat", "def calculate_distance_matrix(atomlist):\n distlist = []\n for atom in atomlist:\n atomdict = {}\n for partner in atomlist:\n if not str(int(partner[0][1])) in atomdict.keys():\n atomdict[str(int(partner[0][1]))] = []\n atomdict[str(int(partner[0][1]))].append(np.linalg.norm(atom[1] - partner[1]))\n else:\n atomdict[str(int(partner[0][1]))].append(np.linalg.norm(atom[1] - partner[1]))\n atomdict[str(int(partner[0][1]))].sort()\n\n distlist.append(atomdict)\n\n return distlist", "def expansion_matrix_dl(self):\n\n row = self._base_nlp._lower_d_map\n nnz = len(self._base_nlp._lower_d_map)\n col = np.arange(nnz, dtype=np.int)\n data = np.ones(nnz)\n return csr_matrix((data, (row, col)), shape=(self.nd, nnz))", "def get_distances(self, crds):\n self.all_dist = np.zeros((self.natom, self.natom))\n # Loop over upper triangle of atom pairs\n for iat in range(self.natom-1):\n # Get the atom indices\n at_inds = np.arange(len(crds))\n\n # Calc distances between atoms (only upper triangle though)\n at_msk = at_inds > iat\n all_ut_dist = crds[at_msk] - crds[iat]\n all_ut_dist = np.linalg.norm(all_ut_dist, axis=1)\n\n self.all_dist[iat, iat+1:] = all_ut_dist\n\n # Get lower triangle indices\n self.all_dist = self.all_dist + self.all_dist.T", "def test_matrix_distance(self):\n # note that the score matrix must contain 'diagonal' elements m[i][i]\n # to avoid failure when the sequences match.\n m = {\"U\": {\"U\": 0, \"C\": 1, \"A\": 5}, \"C\": {\"C\": 0, \"A\": 2, \"G\": 4}}\n self.assertEqual(self.RNA(\"UUUCCC\").matrix_distance(\"UCACGG\", m), 14)\n self.assertEqual(self.RNA(\"UUUCCC\").matrix_distance(\"\", m), 0)\n self.assertEqual(self.RNA(\"UUU\").matrix_distance(\"CAC\", m), 7)\n self.assertRaises(KeyError, self.RNA(\"UUU\").matrix_distance, \"CAG\", m)", "def D2mat(numpts, delta=1, periodic=True, q=0):\n\n a = 1. / delta ** 2 * ones(numpts)\n b = -2. / delta ** 2 * ones(numpts)\n c = 1. / delta ** 2 * ones(numpts)\n #print \"delta = %f\" % (delta)\n if periodic:\n if q == 0:\n return sparse.spdiags([c, a, b, c, c], [-numpts + 1, -1, 0, 1, numpts - 1], numpts, numpts)\n else:\n return sparse.spdiags([exp(-(0. + 1.j) * q) * c, a, b, c, exp((0. 
+ 1.j) * q) * c],\n [-numpts + 1, -1, 0, 1, numpts - 1], numpts, numpts)\n else:\n return sparse.spdiags([a, b, c], [-1, 0, 1], numpts, numpts)", "def get_incorrect_distance_matrix(L):\n n = len(L)\n D = np.zeros((n,n))\n for i in range(n):\n for j in range(n):\n if i != j:\n D[i][j] = get_minor(L, [i], [j]) / get_minor(L, [i], [i])\n return D", "def distance_dataframe(self, x, y, keyboard_weight=None):\r\n dist_matrix = self.distance_matrix(x, y, keyboard_weight)\r\n dist_df = pd.DataFrame(dist_matrix, index=[\"\", *list(x)], \r\n columns=[\"\", *list(y)])\r\n return dist_df", "def _get_edit_distance_matrix(x: str, y: str) -> list:\n matrix = [[-1 for _ in range(len(y) + 1)] for _ in range(len(x) + 1)]\n\n for j in range(len(matrix[0])):\n matrix[0][j] = j\n\n for i, _ in enumerate(matrix):\n matrix[i][0] = i\n\n return matrix", "def Mat_dis(x):\n x = np.mat(x) #构建矩阵\n aa = np.sum(np.multiply(x, x), 1) #哈达玛乘积\n ab = x * x.T\n dist_mat = aa + aa.T - 2 * ab\n dist_mat[dist_mat < 0] = 0\n #dist_mat = np.sqrt(dist_mat)\n dist_mat = np.maximum(dist_mat, dist_mat.T)\n\n return dist_mat", "def kinase_distance_matrix(\n structure_distances, by=\"minimum\", fill_diagonal=True, coverage_min=0.0\n):\n\n if by == \"size\":\n fill_diagonal = False\n\n # Data for upper half of the matrix\n pairs_upper = kinase_distances(structure_distances, by, coverage_min).reset_index()[\n [\"kinase.1\", \"kinase.2\", \"distance\"]\n ]\n # Data for lower half of the matrix\n pairs_lower = pairs_upper.rename(columns={\"kinase.1\": \"kinase.2\", \"kinase.2\": \"kinase.1\"})\n\n # Concatenate upper and lower matrix data\n pairs = (\n pd.concat([pairs_upper, pairs_lower])\n .sort_values([\"kinase.1\", \"kinase.2\"])\n .drop_duplicates()\n .reset_index(drop=True)\n )\n\n # Convert to matrix\n matrix = pairs.pivot(columns=\"kinase.2\", index=\"kinase.1\", values=\"distance\")\n\n if fill_diagonal:\n np.fill_diagonal(matrix.values, 0)\n\n # If matrix contains number of structure pairs: NaN > 0, cast to int\n if by == \"size\":\n matrix = matrix.fillna(0)\n matrix = matrix.astype(\"int64\")\n\n return matrix", "def distance_matrix(self, x, y, keyboard_weight=None):\r\n # create distance matrix\r\n size_x = len(x) + 1\r\n size_y = len(y) + 1\r\n dist_matrix = np.zeros((size_x, size_y))\r\n for i in range(size_x):\r\n dist_matrix[i, 0] = i\r\n for j in range(size_y):\r\n dist_matrix[0, j] = j\r\n\r\n ## fill distance matrix\r\n # no keyboard weight\r\n if not keyboard_weight:\r\n for i in range(1, size_x):\r\n for j in range(1, size_y):\r\n # if letters are same\r\n if x[i-1] == y[j-1]:\r\n dist_matrix[i, j] = dist_matrix[i-1, j-1]\r\n # if letters are different\r\n else:\r\n subs = dist_matrix[i-1, j-1] + 1\r\n delete = dist_matrix[i-1, j] + 1\r\n insert = dist_matrix[i, j-1] + 1 \r\n dist_matrix[i, j] = min(subs, delete, insert)\r\n # manhattan keyboard weight\r\n elif keyboard_weight == \"manhattan\":\r\n for i in range(1, size_x):\r\n for j in range(1, size_y):\r\n # if letters are same\r\n if x[i-1] == y[j-1]:\r\n dist_matrix[i, j] = dist_matrix[i-1, j-1]\r\n # if letters are different\r\n else:\r\n dist = self.key_distance(x[i-1], y[j-1], keyboard_weight)\r\n subs_weight = dist * self.manhattan_coef\r\n subs = dist_matrix[i-1, j-1] + subs_weight\r\n delete = dist_matrix[i-1, j] + 1\r\n insert = dist_matrix[i, j-1] + 1 \r\n dist_matrix[i, j] = min(subs, delete, insert)\r\n # euclidean keyboard weight\r\n elif keyboard_weight == \"euclidean\":\r\n for i in range(1, size_x):\r\n for j in range(1, size_y):\r\n # if letters 
are same\r\n if x[i-1] == y[j-1]:\r\n dist_matrix[i, j] = dist_matrix[i-1, j-1]\r\n # if letters are different\r\n else:\r\n dist = self.key_distance(x[i-1], y[j-1], keyboard_weight)\r\n subs_weight = dist * self.euclidean_coef\r\n subs = dist_matrix[i-1, j-1] + subs_weight\r\n delete = dist_matrix[i-1, j] + 1\r\n insert = dist_matrix[i, j-1] + 1 \r\n dist_matrix[i, j] = min(subs, delete, insert)\r\n \r\n return dist_matrix", "def get_euclidean_matrix(df):\n df.reset_index(drop=True, inplace=True)\n\n # foods = df['food_names']\n # food_examples = []\n # indices = list(range(0, len(foods)))\n # for i in indices:\n # food_examples.append(str(foods[i]) + str(i))\n # food_examples = pd.Series(food_examples)\n food_examples = df['food_names']\n\n df = df.drop(['food_names', 'height', 'weight', 'above_range', 'BMI', 'age', 'gender',\n 'glucose_tolerance_category','90-percentile_of_2h-iAUC', 'average_carbs_ratio',\n 'average_daily_carbs','average_meals_per_day', 'average_sleep_hours',\n 'average_glucose', 'baseline', 'coefficient_of_variation', 'max_2-hours_iAUC',\n 'median_fasting_glucose_level','median_of_2h-iAUC', 'night_baseline'], axis='columns')\n\n df = df.replace([-np.inf], 0).dropna(axis=1)\n\n num_examples = df.shape[0]\n\n distances = pdist(df.values, metric='euclidean')\n print(distance)\n dis_array = squareform(distances)\n print(dis_array)\n dis_df = pd.DataFrame(data = dis_array, index=food_examples, columns=food_examples)\n print(dis_df)\n writer = pd.ExcelWriter('Euclidean_distance_icarbonx.xlsx', engine='xlsxwriter')\n dis_df.to_excel(writer, sheet_name='Sheet1')\n writer.save()", "def get5x5matrix(self): #modified from nxvasc get3x3matrix()\n try:\n i = na.identity(3)\n \n self.d124 = i.copy()\n self.ds124 = na.zeros(124,na.float64)\n \n for k in range(1,124):\n self.d124 = na.concatenate((self.d124,i))\n# print len(self.d124)\n count = 0\n a = []\n for k in range(-2,3):\n for j in range(-2,3):\n for i in range(-2,3):\n if( i != 0 or j != 0 or k != 0 ):\n self.ds124[count] = math.sqrt(i**2+j**2+k**2)\n count += 1\n a.append(i)\n a.append(j)\n a.append(k)\n# print len(a)\n a = na.reshape(na.array(a),(372,1))\n# print len(self.d124)\n self.d124 = na.concatenate((self.d124,a),axis=1)\n except Exception as error:\n print(\"failed in get5x5matrix(): \", error)", "def getDistanceMatrix(self, alignedSequences):\n\t\tif not alignedSequences:\n\t\t\traise ValueError(\"alignedSequences must not be empty\")\n\t\tdominantAlignedSequence = alignedSequences[0]\n\t\tsubdominantAlignedSequences = alignedSequences[1:]\n\t\tdistanceMatrix = []\n\t\tfor seq in subdominantAlignedSequences:\n\t\t\tdistanceMatrix.append(len(seq) - self._getNumberOfSpaces(seq) - self._getNumberOfAlignedNucleotides(dominantAlignedSequence, seq))\n\t\treturn distanceMatrix", "def _matrix_(self, R=None):\n return self.adjacency_matrix()", "def structure_distance_matrix(structure_distances, coverage_min=0.0):\n\n data = structure_distances\n\n # Filter by coverage\n data = data[data[\"bit_coverage\"] >= coverage_min]\n # Data for upper half of the matrix\n pairs_upper = data[[\"structure.1\", \"structure.2\", \"distance\"]]\n # Data for lower half of the matrix\n pairs_lower = pairs_upper.rename(\n columns={\"structure.1\": \"structure.2\", \"structure.2\": \"structure.1\"}\n )\n\n # Concatenate upper and lower matrix data\n pairs = pd.concat([pairs_upper, pairs_lower]).sort_values([\"structure.1\", \"structure.2\"])\n # Convert to matrix\n matrix = pairs.pivot(columns=\"structure.2\", index=\"structure.1\", 
values=\"distance\")\n # Matrix diagonal is NaN > set to 0.0\n np.fill_diagonal(matrix.values, 0)\n\n return matrix", "def return_adjacencyMatrix(self):\n return self.__mat", "def test_compute_distance_matrix_from_metadata_int(self):\r\n exp_out = array(\r\n [[0, 0, 92, 9096, 9992, 9894, 18898, 18898, 18898], [0, 0, 92, 9096, 9992, 9894, 18898, 18898, 18898],\r\n [92, 92, 0, 9188, 10084, 9986, 18990, 18990, 18990], [9096,\r\n 9096, 9188, 0, 896, 798, 9802, 9802, 9802],\r\n [9992, 9992, 10084, 896, 0, 98, 8906, 8906, 8906], [9894,\r\n 9894, 9986, 798, 98, 0, 9004, 9004, 9004],\r\n [18898, 18898, 18990, 9802, 8906, 9004, 0, 0,\r\n 0], [18898, 18898, 18990, 9802, 8906, 9004, 0, 0, 0],\r\n [18898, 18898, 18990, 9802, 8906, 9004, 0, 0, 0]])\r\n\r\n res_out = compute_distance_matrix_from_metadata(self.DOB)\r\n assert_almost_equal(exp_out, res_out)", "def distances(self):\n self._sort_measurements()\n return self._distances", "def distances(self):", "def Dmat(self):\n return self._Dmat_cache", "def diagonal(self):\n M = self.rep\n m, n = self.shape\n return [M[i, i] for i in range(min(m, n))]", "def as_matrix(self) -> types.Matrix:", "def to_matrix(self):\n return numpy.array([[1, 1],\n [1, -1]], dtype=complex) / numpy.sqrt(2)", "def _build_dist(self):\n lamb = self.params['lamb']\n p = self.params['p']\n\n jac = self.jacobian\n # build D on grids\n xg, yg, mask = self._mask_grid()\n r_max = self._r_max(xg, yg, mask)\n d_mat = self._psf_grid(xg, yg, r_max=r_max)\n # E[yy^T]\n j_j_w = np.dot(jac, jac.transpose())\n r_mat = np.diag(np.diag(j_j_w) ** p)\n jac_inv = la.inv(j_j_w + lamb*r_mat)\n # RM = E[xx^T] / E[yy^T]\n h_mat = np.dot(np.dot(d_mat, jac.transpose()), jac_inv)\n return h_mat", "def getMatrix(self) -> CMatrix4:\n ..." ]
[ "0.8100148", "0.773013", "0.76817036", "0.76695836", "0.7526487", "0.7413217", "0.72635025", "0.72520757", "0.7237255", "0.720597", "0.71226275", "0.7120114", "0.7059747", "0.7056286", "0.7044589", "0.70375097", "0.69336534", "0.69194335", "0.6913005", "0.6888755", "0.6846131", "0.68238544", "0.68214476", "0.6819796", "0.6752289", "0.670792", "0.6683438", "0.66548693", "0.6539676", "0.65321434", "0.65298116", "0.65251076", "0.650694", "0.65028405", "0.6474988", "0.64099514", "0.6409759", "0.6391257", "0.6387647", "0.6351737", "0.63502353", "0.63303196", "0.6326515", "0.63079584", "0.6271257", "0.6271257", "0.6237769", "0.62234926", "0.62099785", "0.61975193", "0.6179958", "0.6167546", "0.6164013", "0.6144201", "0.6142588", "0.6074274", "0.60671973", "0.6060963", "0.60547554", "0.60370743", "0.6018981", "0.6017826", "0.6010634", "0.6006136", "0.60020715", "0.59867674", "0.5984807", "0.59773815", "0.5970043", "0.59561235", "0.5948627", "0.594734", "0.5945343", "0.59372514", "0.5924023", "0.59089965", "0.5905494", "0.589023", "0.58884174", "0.58841527", "0.58760136", "0.58737314", "0.5872207", "0.5871799", "0.5851732", "0.58284426", "0.5828123", "0.5828048", "0.5825658", "0.5822754", "0.58116776", "0.5802992", "0.57956743", "0.5783578", "0.57670325", "0.57546544", "0.57391423", "0.5713734", "0.5705524", "0.57047236" ]
0.6237683
47
transform tags for solr
def transform_tags(self, instance):
    return instance.tags.split(',')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _preprocess(self, tagged: List[Tuple]) -> Tuple:\n ori = \" \".join([tag[0] for tag in tagged])\n tags = [tag[1] for tag in tagged]\n # Mapping into general tagset\n tags = [self._map[tag] if tag in self._map else \"X\" for tag in tags]\n return \" \".join(tags), ori", "def normalize_tags(tags):\n return {normalize(tag) for tag in tags}", "def tags():", "def canonicalize(tags):\n # test format \n r = random.randint(0, len(tags)-1)\n\n # in multilabel format? each tag is in the form of [e1, e2, ...]\n isMultiLabel = True if hasattr(tags[r], '__iter__') else False\n\n if isMultiLabel: # i.e. each label is a list\n print('TDocTag.canonicalize> input labels in multilabel format.')\n docTags = []\n for i, tag in enumerate(tags): \n \n # docId = TDocTag.getDocID(i)\n docId = i # set docId here\n if tag[0] == docId: \n # do nothing, first element is already the intended docId\n pass \n else: \n tag.insert(0, docId)\n docTags.append(tag)\n else: \n docTags = []\n for i, tag in enumerate(tags): \n if i < 3: assert isinstance(tag, str)\n docId = i # docId = TDocTag.getDocID(i) \n docTags.append([docId, tag, ]) \n return docTags", "def tagger():", "def ref_tag_preprocess(inp_tag_text):\n\n # some words in references\n inp_tag_text = inp_tag_text.replace(\"до римлян\", \"Римл\")\n inp_tag_text = inp_tag_text.replace(\" и \", \"; \")\n inp_tag_text = inp_tag_text.replace(\" і \", \"; \")\n inp_tag_text = inp_tag_text.replace(\"–\", \"-\")\n # replacing \"'\" to \"’\", it is similar in Ukrainian\n inp_tag_text = inp_tag_text.replace(\"'\", \"’\")\n return inp_tag_text", "def _get_norm_tags(self, tags):\n norm_tags = []\n for tag in tags:\n lang = tag[0:2]\n norm_tags.append(lang + ':' + self.tag_manager.normalize_tag_wtokenization(tag, self.tries[lang]))\n return norm_tags", "def _postprocess(self, tags: List[str], words: List[str], pos: List[str]):\n result = list()\n\n i = 0\n for tag in tags:\n if (\"<\" not in tag) and (\">\" not in tag):\n if pos:\n result.append(f\"{words[i]}/{pos[i]}\")\n else:\n result.append(words[i])\n i += 1\n else:\n result.append(tag)\n\n return \" \".join(result)", "def tags_tocl(d, tag_list, title):\r\n filtered_anchors = []\r\n for anc in sorted(d.keys(), key=str.lower):\r\n entry = d[anc]\r\n if not \"tags\" in entry: continue\r\n found = [t for t in tag_list if t in entry[\"tags\"]]\r\n if not found: continue\r\n filtered_anchors.append(anc)\r\n return TemplateData(t=title, e=filtered_anchors)", "def _postprocess(\n self,\n tags: List[str],\n words: List[str],\n pos: bool = False,\n ):\n result = list()\n\n i = 0\n for tag in tags:\n if (\"<\" not in tag) and (\">\" not in tag):\n if pos:\n result.append(f\"{words[i]}/{pos[i]}\")\n else:\n result.append(words[i])\n i += 1\n else:\n result.append(tag)\n\n return \" \".join(result)", "def transform():", "def process_tags(tags=list):\n new_tag_list = list()\n for tag in tags:\n new_tag = tag.replace(\"<\", \" \")\n new_tag = new_tag.replace(\">\", \" \")\n new_tag = new_tag.split()\n # sort elements by string length (this to avoid 'c' being checked before 'c++', etc)\n new_tag.sort(key=len, reverse=True)\n new_tag_list.append(new_tag)\n return new_tag_list", "def preprocess(docs):\r\n # stop = set(stopwords.words('english'))\r\n tags = {'NN', 'NNS', 'NNP', 'NNP', 'NNPS', 'JJ', 'JJR', 'JJS'}\r\n for i in range(len(docs)):\r\n docs[i] = [(word.lower(), convert(tag)) for (word, tag) in nltk.pos_tag(nltk.word_tokenize(docs[i])) if tag in tags]\r\n return lemmatize_docs(docs)", "def xml2tokens(xml_tagged_sent, 
tokenized_sent, raw_sent):\n raw, entities = get_entities(xml_tagged_sent)\n if re.search(r\"ENAMEX\", raw):\n print(xml_tagged_sent)\n print(raw)\n # count += 1\n\n tokens, syllables = word_tokenize(tokenized_sent, raw_sent)\n level1_syl_tags = [\"O\" for i in range(len(syllables))]\n level2_syl_tags = [\"O\" for i in range(len(syllables))]\n level3_syl_tags = [\"O\" for i in range(len(syllables))]\n\n level1_token_tags = [\"O\" for i in range(len(tokens))]\n level2_token_tags = [\"O\" for i in range(len(tokens))]\n level3_token_tags = [\"O\" for i in range(len(tokens))]\n\n flag = False\n for entity in entities:\n value = entity[\"value\"]\n start = entity[\"start\"]\n end = entity[\"end\"]\n entity_type = entity[\"type\"]\n start_syl_id, end_syl_id = find_syl_index(start, end, syllables)\n start_tok_id, end_tok_id = find_tok_index(start_syl_id, end_syl_id, tokens)\n\n if start_syl_id != None and end_syl_id != None:\n if entity[\"level\"] == 1:\n level1_syl_tags[start_syl_id] = \"B-\" + entity_type\n for i in range(start_syl_id + 1, end_syl_id):\n level1_syl_tags[i] = \"I-\" + entity_type\n elif entity[\"level\"] == 2:\n level2_syl_tags[start_syl_id] = \"B-\" + entity_type\n for i in range(start_syl_id + 1, end_syl_id):\n level2_syl_tags[i] = \"I-\" + entity_type\n else:\n level3_syl_tags[start_syl_id] = \"B-\" + entity_type\n for i in range(start_syl_id + 1, end_syl_id):\n level3_syl_tags[i] = \"I-\" + entity_type\n else:\n print(\"{},{},\\\"{}\\\" in '{}' ({})\".format(start,end,value,raw,xml_tagged_sent))\n flag = True\n\n if start_tok_id != None and end_tok_id != None:\n if entity[\"level\"] == 1:\n level1_token_tags[start_tok_id] = \"B-\" + entity_type\n for i in range(start_tok_id+1, end_tok_id):\n level1_token_tags[i] = \"I-\" + entity_type\n elif entity[\"level\"] == 2:\n level2_token_tags[start_tok_id] = \"B-\" + entity_type\n for i in range(start_tok_id + 1, end_tok_id):\n level2_token_tags[i] = \"I-\" + entity_type\n else:\n level3_token_tags[start_tok_id] = \"B-\" + entity_type\n for i in range(start_tok_id + 1, end_tok_id):\n level3_token_tags[i] = \"I-\" + entity_type\n else:\n pass\n # print(\"{},{},\\\"{}\\\" in '{}' ({})\".format(start_syl_id, end_syl_id, value, raw, xml_tagged_sent))\n\n ret_syllables = list(zip([ s.text for s in syllables], level1_syl_tags, level2_syl_tags, level3_syl_tags))\n ret_tokens = list(zip( [tk.text for tk in tokens], level1_token_tags, level2_token_tags, level3_token_tags))\n return ret_syllables, ret_tokens, raw, flag", "def convert_all_tags(self):\n self.ratings = self.tag_converter.convert_ratings()\n self.categories = self.tag_converter.convert_categories()\n self.classes = self.tag_converter.convert_classes()\n\n old_characters = self.sql.read_table_to_dict(self.working_original, \"characters\")\n self.characters = self._convert_characters(old_characters)", "def apply_tags(self, tags):\n for tag_name in tags:\n tag = tag_name.strip().lower()\n self.tags.append(DBSession.merge(Tag(tag)))", "def build_taglist(tags):\n taglist = []\n for tag in tags:\n taglist.append(tag['value'].lower())\n return taglist", "def _process_tags(tags: dict):\n\n def process_val(value):\n if isinstance(value, (list, tuple)):\n # Array type of json\n return [process_val(item) for item in value]\n elif isinstance(value, dict):\n # Object type of json\n return {k: process_val(v) for k, v in value.items()}\n elif isinstance(value, (str, int, float, bool)) or value is None:\n # Other supported type of json\n return value\n elif isinstance(value, (torch.Tensor, 
np.ndarray)):\n return value.tolist()\n # Drop unsupported values.\n\n processed_tags = OrderedDict(process_val(tags))\n\n return processed_tags", "def transform(self):", "def tag(self):\n \n tag = super(self.__class__, self).tag();\n tag = als.tag_join(tag, als.stra(self.strain));\n tag = als.tag_join(tag, als.stra(self.dtype));\n tag = als.tag_join(tag, 'w=%s' % als.stra(self.wid)); \n tag = als.tag_join(tag, 's=%s' % als.stra(self.stage));\n #tag = analysis.tag_join(tag, 'l=%s' % analysis.stra(self.label)); \n\n return tag;", "def tag_sents(self, sents):\n # WORK HERE!!", "def prepare_tags(self, obj):\n return [tag.name for tag in obj.tags.all()]", "def _transform(self, document):\n pass", "def transform(self, entry):\r\n res = ''\r\n doc = self.nlp(entry, disable=['tagger', 'parser'])\r\n ents = doc.ents\r\n start_indices = {}\r\n end_indices = {}\r\n\r\n for ent in ents:\r\n start, end, label = ent.start, ent.end, ent.label_\r\n if label in ['PERSON', 'ORG', 'LOC', 'PRODUCT', 'DATE', 'QUANTITY', 'TIME']:\r\n start_indices[start] = label\r\n end_indices[end] = label\r\n\r\n for idx, token in enumerate(doc):\r\n if idx in start_indices:\r\n res += start_indices[idx] + ' '\r\n\r\n # normalizing the numbers\r\n if token.like_num:\r\n try:\r\n val = float(token.text)\r\n if val == round(val):\r\n res += '%d ' % (int(val))\r\n else:\r\n res += '%.2f ' % (val)\r\n except:\r\n res += token.text + ' '\r\n elif len(token.text) >= 7 and \\\r\n any([ch.isdigit() for ch in token.text]):\r\n res += 'ID ' + token.text + ' '\r\n else:\r\n res += token.text + ' '\r\n return res.strip()", "def transform(self, entry):\r\n res = ''\r\n doc = self.nlp(entry, disable=['tagger', 'parser'])\r\n ents = doc.ents\r\n start_indices = {}\r\n end_indices = {}\r\n\r\n for ent in ents:\r\n start, end, label = ent.start, ent.end, ent.label_\r\n if label in ['NORP', 'GPE', 'LOC', 'PERSON', 'PRODUCT']:\r\n start_indices[start] = 'PRODUCT'\r\n end_indices[end] = 'PRODUCT'\r\n if label in ['DATE', 'QUANTITY', 'TIME', 'PERCENT', 'MONEY']:\r\n start_indices[start] = 'NUM'\r\n end_indices[end] = 'NUM'\r\n\r\n for idx, token in enumerate(doc):\r\n if idx in start_indices:\r\n res += start_indices[idx] + ' '\r\n\r\n # normalizing the numbers\r\n if token.like_num:\r\n try:\r\n val = float(token.text)\r\n if val == round(val):\r\n res += '%d ' % (int(val))\r\n else:\r\n res += '%.2f ' % (val)\r\n except:\r\n res += token.text + ' '\r\n elif len(token.text) >= 7 and \\\r\n any([ch.isdigit() for ch in token.text]):\r\n res += 'ID ' + token.text + ' '\r\n else:\r\n res += token.text + ' '\r\n return res.strip()", "def replace_all_tags(tags):\n\twith postgres, postgres.cursor() as cur:\n\t\tcur.execute(\"truncate mustard.tags\");\n\t\tpsycopg2.extras.execute_values(cur,\n\t\t\t\"insert into mustard.tags (id, english_name, english_desc) values %s\",\n\t\t\ttags)\n\t\tcur.execute(\"update mustard.status set tags_updated = now()\")", "def transform(self, tags, values_to_sub):\n for tag, properties in tags.items():\n val = values_to_sub.get(tag)\n values_to_sub[tag] = self.transform_val(properties, val)\n if properties.get(\"children\") is not None:\n children = properties.get(\"children\")\n for child_tag, child_properties in children.items():\n child_val = self.transform_val(child_properties, val)\n values_to_sub[child_tag] = child_val\n return values_to_sub", "def preprocess_query(query):\r\n # stop = set(stopwords.words('english'))\r\n tags = {'NN', 'NNS', 'NNP', 'NNP', 'NNPS', 'JJ', 'JJR', 'JJS'}\r\n wordnet_lemmatizer = 
WordNetLemmatizer()\r\n # for i in range(len(query)):\r\n query = [(word.lower(), convert(tag)) for (word, tag) in nltk.pos_tag(nltk.word_tokenize(query)) if tag in tags]\r\n query = [wordnet_lemmatizer.lemmatize(w, t) for (w, t) in query ]\r\n return query", "def transform(self, query):\n query = preprocessing(query)\n return lookup(query, self.model.wv)", "def transform(self, data):", "def encode_tags(taglist, lang_name):\n tagvec = [None]*len(UNIMORPH_CATEGORIES)\n for tag in taglist:\n if tag in UNIMORPH_TAGTYPES:\n tagtype = UNIMORPH_TAGTYPES[tag]\n set_tagtype(tagvec, tagtype, tag, lang_name)\n\n if tag in UNIMORPH_POS_MAP:\n pos = UNIMORPH_POS_MAP[tag]\n set_tagtype(tagvec, \"POS\", pos, lang_name)\n\n return tagvec", "def retag_string(self, string, tags):\r\n for (i, tag) in enumerate(tags):\r\n p = '<%s>' % i\r\n string = re.sub(p, tag, string, 1)\r\n return string", "def _add_tags(self):\n\n if self.version != 'live':\n return\n\n tags = [t.strip() for t in self.tags_text.split(',')]\n tags = list(set(tags))\n\n for tag_name in tags:\n tag_slug = slugify(tag_name)\n if tag_slug:\n try:\n tag = Tag.objects.get(blog=self.blog, slug=tag_slug)\n except Tag.DoesNotExist:\n tag = Tag( blog = self.blog,\n name = tag_name,\n slug = tag_slug)\n\n tag.increment()\n tag.save()\n\n self.tags.add(tag)", "def get_entities(tags):\n pass", "def make_tags(tag, word):\n tag1 = \"<{}>\".format(tag)\n tag2 = \"</{}>\".format(tag)\n final = tag1 + word + tag2\n return final", "def _clean_up_tag_dict_tags(tag_dict):\n \n # Make the tag label all lowercase\n # and remove any underscores from the beginning\n for key in tag_dict.keys():\n tag_dict[key][u'tag'] = tag_dict[key][u'tag'].str.lower()\n tag_dict[key][u'tag'].replace(r\"\"\"^_\"\"\", u'', inplace=True, regex=True)\n \n return tag_dict", "def aggregate_tags(terms=terms, enableWarnning=False):\n ordered_tags = load_map()\n\n tmp_tags = collections.OrderedDict() # All keys to lowe cases\n for k in ordered_tags.keys():\n tag = k.lower()\n tmp_tags[tag] = k\n\n for term, tags in terms.iteritems():\n for tag in tags:\n try:\n ind = tag.rindex('/')\n tmp = tag[ind+1:]\n except ValueError as e:\n tmp = tag\n\n tmp = tmp.lower()\n if tmp in tmp_tags:\n tmp_tags[tmp] = tag\n else:\n if enableWarnning:\n print('Unknown tag ignored:[%s] for term [%s].' 
% (tag, term))\n\n return tmp_tags", "def create_tags():\n\n INPUT = \"\"\"\n \"Python general\",Python\n R,\"Other Programming Languages\"\n Java,\"Other Programming Languages\"\n C-Languages,\"Other Programming Languages\"\n Analytics,\"Data Science\"\n Visualization,\"Data Science\"\n \"Big Data\",\"Data Science\"\n Predictions,\"Data Science\"\n MongoDB,Databases\n \"Web Servers and MicroFWs (Flask/Tornado/Nginx/...)\",Web\n Ipython,Python\n \"Web General\",Web\n Socket,DevOps\n Django,\"Application Frameworks\"\n Docker,DevOps\n Security,Security\n Privacy,Security\n Odoo,\"Application Frameworks\"\n \"Scientific Libraries (Numpy/Pandas/SciKit/...)\",\"Data Science\"\n Pyramid,\"Application Frameworks\"\n Plone,\"Application Frameworks\"\n \"Data Science\",\"Data Science\"\n Machine-Learning,\"Data Science\"\n PostgreSQL,Databases\n Django-Girls,Community\n Agile,\"Development Methods\"\n Documentation,Programming\n \"DevOps general\",DevOps\n Community,Community\n \"Natural Language Processing\",\"Data Science\"\n PyPy,Python\n Open-Source,\"Open Source\"\n Linux,\"Operating Systems\"\n \"SQL Alchemy\",Databases\n Communication,Community\n Tooling,Programming\n \"Test Libraries (pyTest/node/...)\",Testing\n MySQL,Databases\n Packaging,Python\n \"JavaScript Web Frameworks (AngularJS/ReactJS/...)\",Web\n \"Internet of Things (IoT)\",Hardware\n Performance,Programming\n Saltstack,DevOps\n Management,\"Development Methods\"\n Scrum,\"Development Methods\"\n Kanban,\"Development Methods\"\n Internationalization,Programming\n \"Behavior Driven Development (BDD)\",\"Development Methods\"\n HTML5,Web\n NoSQL,Databases\n OpenGL,Web\n \"Test Driven Development (TDD)\",Testing\n Education,Educational\n CPython,Python\n APIs,Web\n \"Python 3\",Python\n \"Best Practice\",\"Best Practice and Use Cases\"\n Development,Programming\n Testing,Testing\n Beginners,Educational\n Programming,Programming\n Cython,Python\n \"Deep Learning\",\"Data Science\"\n Unix,\"Operating Systems\"\n \"Case Study\",\"Case Study\"\n E-Commerce,Web\n \"Distributed Systems\",DevOps\n \"Functional Programming\",Programming\n Architecture,Programming\n OpenStack,DevOps\n \"Raspberry PI\",Hardware\n Teaching,\"Everything Else\"\n \"Meta Classes\",Programming\n \"Public Cloud (AWS/Google/...)\",DevOps\n \"Augmented Reality\",\"Everything Else\"\n Engineering,\"Everything Else\"\n Physics,Sciences\n \"Clean Code\",Educational\n \"System Administration\",DevOps\n Mix-Ins,Programming\n \"Static Analysis\",\"Everything Else\"\n \"Compiler and Interpreters\",Python\n Type-Hinting,Programming\n \"Web Crawling\",Web\n JavaScript,\"Other Programming Languages\"\n NodeJS,Web\n \"Conferences and Meet-Ups\",Community\n Databases,Databases\n Infrastructure,DevOps\n \"Elastic Search\",Databases\n Go-Lang,\"Other Programming Languages\"\n HTTP,Web\n Operations,DevOps\n \"Configuration Management (Ansible/Fabric/Chef/...)\",DevOps\n \"Deployment/Continuous Integration and Delivery\",DevOps\n Jenkins,Testing\n Science,Sciences\n Authentication,Security\n 3D,\"Everything Else\"\n Blender,\"Everything Else\"\n Diversity,Community\n Robotics,Hardware\n Human-Machine-Interaction,Hardware\n Debugging,Testing\n \"Euro Python and EPS\",Community\n LaTeX,\"Other Programming Languages\"\n Game-Development,\"Everything Else\"\n Kivy,Python\n Cross-Platform-Development,Python\n Git,DevOps\n PyQt,Programming\n Virtualization,DevOps\n \"Software Design\",Programming\n Multi-Processing,Programming\n Multi-Threading,Programming\n Windows,\"Operating 
Systems\"\n \"Messaging and Job Queues (RabbitMQ/Redis/...)\",DevOps\n \"Fun and Humor\",\"Everything Else\"\n Command-Line,Programming\n CMS,Web\n \"GEO and GIS\",\"Everything Else\"\n \"Graph Databases\",Databases\n Abstractions,\"Everything Else\"\n \"Code Analysis\",Programming\n Wearables,Hardware\n Mobile,Web\n \"Jupyter/iPython Notebook\",Python\n RESTful,Web\n Cryptography,Security\n OpenCV,Hardware\n \"ASYNC / Concurreny\",Programming\n \"Virtual Env\",Programming\n PyPi,Python\n Micro-Computers,Hardware\n Microservices,Programming\n Scaling,DevOps\n \"Python Software Foundation (PSF)\",Community\n workforce,Business\n DIY,\"Everything Else\"\n \"Image Processing\",\"Everything Else\"\n \"Mac OS X\",\"Operating Systems\"\n \"Data Structures\",Programming\n \"System Architecture\",DevOps\n Algorithms,\"Data Science\"\n PyLadies,Community\n \"The Answer to Life the Universe and Everything Else\",\"Everything Else\"\n Gadgets,Hardware\n \"All Other Programming Languages\",\"Other Programming Languages\"\n \"Use Case\",\"Best Practice and Use Cases\"\n Sensors,Hardware\n \"Other Hardware\",Hardware\n failures/mistakes,\"Best Practice and Use Cases\"\n clients,Business\n freelancing,Business\n \"Mind Bending\",\"Everything Else\"\n Templating,Web\n legacy-code,Programming\n MicroPython,Python\n \"Python 2\",Python\n python,Python\n Data,\"Data Science\"\n Structures,\"Data Science\"\n Web,Web\n Business,Business\n Notebook,\"Data Science\"\n Jupyter/iPython,\"Data Science\"\n Life,Community\n Universe,Sciences\n Deep,\"Data Science\"\n Learning,\"Data Science\"\n Internet,Web\n \"Internet of Things\",DevOps\n EPS,Community\n EuroPython,Community\n \"Open Stack\",DevOps\n finance,\"\"\n Trading,\"\"\n \"\"\".strip()\n\n buffer = StringIO(INPUT)\n\n reader = csv.reader(buffer)\n for line in reader:\n ConferenceTag.objects.create(\n name=line[0].strip(), category=line[1].strip()\n )\n print(\"Created tag\", line[0].strip())", "def _convert_tags_to_wordpiece_tags(tags: List[str], offsets: List[int]) -> List[str]:\n new_tags = []\n j = 0\n for i, offset in enumerate(offsets):\n tag = tags[i]\n is_o = tag == \"O\"\n is_start = True\n while j < offset:\n if is_o:\n new_tags.append(\"O\")\n\n elif tag.startswith(\"I\"):\n new_tags.append(tag)\n\n elif is_start and tag.startswith(\"B\"):\n new_tags.append(tag)\n is_start = False\n\n elif tag.startswith(\"B\"):\n _, label = tag.split(\"-\", 1)\n new_tags.append(\"I-\" + label)\n j += 1\n\n # Add O tags for cls and sep tokens.\n return [\"O\"] + new_tags + [\"O\"]", "def tag_word (lx,wd):\n\n resultSet = {tag for (word, tag) in function_words_tags if (word == wd)}\n\n nS = noun_stem(wd)\n vS = verb_stem(wd)\n\n for x in lx.getAll('A'):\n if (x == wd):\n resultSet.add('A')\n\n for x in lx.getAll('P'):\n if (x == wd):\n resultSet.add('P')\n\n for x in lx.getAll('N'):\n if (x == nS):\n resultSet.add('Np')\n elif (x == wd):\n resultSet.add('Ns')\n\n for x in lx.getAll('I'):\n if (x == vS):\n resultSet.add('Ip')\n elif (x == wd):\n resultSet.add('Is')\n\n for x in lx.getAll('T'):\n if (x == vS):\n resultSet.add('Tp')\n elif (x == wd):\n resultSet.add('Ts')\n\n return list(resultSet)", "def tidy_tags(self, tags):\n tags = tags.split()\n # add target tag if not a calibrator\n if not any(\"cal\" in tag for tag in tags):\n if \"target\" not in tags:\n tags.append(\"target\")\n return \" \".join(tags)", "def split_corpus_tags(self, corpus):\n logging.info('Dividindo texto das tags')\n sentences = []\n tags = []\n dict_tags = {}\n for sentence in corpus:\n 
sentence_tmp = sentence.replace(\"\\n\", '')\n words_tmp = []\n tags_tmp = []\n words = sentence_tmp.split(\" \")\n for word in words:\n tag_word = word.split(\"_\")\n if tag_word[0] == \"\": pass\n else:\n words_tmp.append(tag_word[0])\n tags_tmp.append(tag_word[1])\n if not tag_word[1] in dict_tags.keys(): \n dict_tags[tag_word[1]] = {}\n dict_tags[tag_word[1]]['right'] = 0\n dict_tags[tag_word[1]]['pred'] = 0\n dict_tags[tag_word[1]]['pres'] = 1\n else: dict_tags[tag_word[1]]['pres'] += 1\n sentences.append(words_tmp)\n tags.append(tags_tmp)\n return sentences, tags, dict_tags", "def _convert_tags_to_wordpiece_tags(tags: List[str], offsets: List[int]) -> List[str]:\r\n new_tags = []\r\n j = 0\r\n for i, offset in enumerate(offsets):\r\n tag = tags[i]\r\n is_o = tag == \"O\"\r\n is_start = True\r\n while j < offset:\r\n if is_o:\r\n new_tags.append(\"O\")\r\n\r\n elif tag.startswith(\"I\"):\r\n new_tags.append(tag)\r\n\r\n elif is_start and tag.startswith(\"B\"):\r\n new_tags.append(tag)\r\n is_start = False\r\n\r\n elif tag.startswith(\"B\"):\r\n _, label = tag.split(\"-\", 1)\r\n new_tags.append(\"I-\" + label)\r\n j += 1\r\n\r\n # Add O tags for cls and sep tokens.\r\n return [\"O\"] + new_tags + [\"O\"]", "def strip_tags(tagged_sentences):\n untagged_sentences = []\n for taggedsent in tagged_sentences:\n untaggedsent = ''\n\tfor taggedword in taggedsent.split():\n\t word = re.split('(?<!\\\\\\)\\/', taggedword)[0]\n untaggedsent += word + ' '\n #print untaggedsent\n untagged_sentences.append(untaggedsent)\n return untagged_sentences", "def tag(text, pos_tagger):\n features = [get_crf_features([word for word in sent]) for sent in text]\n tags = pos_tagger.predict(features)\n tagged_text = []\n for i in range(len(text)):\n tagged_sent = []\n for j in range(len(text[i])):\n tagged_sent.append((text[i][j], tags[i][j]))\n tagged_text.append(tagged_sent)\n #print(tags)\n return tags, tagged_text", "def update_tag(str_tag):\n str_key, str_value = tp_key_value(str_tag)\n var_result = phrase_to_datetime(str_value)\n\n if var_result is not None:\n str_trans = var_result\n else:\n str_trans = str_value\n\n if str_trans != '':\n lst = [' @', str_key, \"(\", str_trans, \")\"]\n else:\n lst = [' @', str_key]\n\n return ''.join(lst)", "def _make_natural_type(self):\n for tag in self.tags:\n if self.tags[tag] is None or str(self.tags[tag]).strip() == \"\":\n self.tags[tag] = None\n else:\n if tag.lower() in VASP_TAG_INT_LIST:\n try:\n self.tags[tag] = int(self.tags[tag])\n except ValueError:\n raise IncarError(\"Could not convert '\" + tag + \"' : '\" + self.tags[tag] + \"' to int\")\n elif tag.lower() in VASP_TAG_FLOAT_LIST:\n try:\n self.tags[tag] = float(self.tags[tag].lower().replace('d','e'))\n except ValueError:\n raise IncarError(\"Could not convert '\" + tag + \"' : '\" + self.tags[tag] + \"' to float\")\n elif tag.lower() in VASP_TAG_BOOL_LIST:\n if not self.tags[tag].lower() in ['.true.','.false.']:\n raise IncarError(\"Could not find '\" + tag + \"' : '\" + self.tags[tag].lower() + \"' in ['.true.','.false.']\")\n else:\n self.tags[tag] = (self.tags[tag].lower() == '.true.')\n elif tag.lower() in VASP_TAG_SITEF_LIST + VASP_TAG_SPECF_LIST:\n temp = []\n for value in self.tags[tag].split():\n try:\n item=value.split('*')\n if len(item)==1:\n temp.append(float(value))\n else:\n if item[0] != 0:\n temp.append(str(item[0])+'*'+str(float(item[1])))\n except ValueError:\n raise IncarError(\"Could not convert '\" + tag + \"' : '\" + self.tags[tag] + \"' to float list\")\n self.tags[tag] = temp\n 
elif tag.lower() in VASP_TAG_SPECI_LIST:\n temp = []\n for value in self.tags[tag].split():\n try:\n temp.append(int(value))\n except ValueError:\n raise IncarError(\"Could not convert '\" + tag + \"' : '\" + self.tags[tag] + \"' to int list\")\n self.tags[tag] = temp\n elif tag.lower() in VASP_TAG_STRING_LIST:\n self._check_string_tag(tag,self.tags[tag])", "def process_subtags(element, node):\n \n for tag in element.iter(\"tag\"):\n tag_key = tag.attrib['k']\n tag_val = tag.attrib['v']\n \n # Check for problem characters\n if problemchars.match(tag_key):\n continue\n \n # fix tag 'v' attribute of streetname and postcode\n elif tag_key.startswith(\"addr:\"):\n if not \"address\" in node.keys():\n node[\"address\"] = {}\n addr_key = tag.attrib['k'][len(\"addr:\") : ]\n if lower_colon.match(addr_key):\n continue\n else:\n if tag.attrib['k'] == \"addr:street\":\n fixed_v, change = correct_street_type(tag_val)\n elif tag.attrib['k'] == \"addr:postcode\":\n fixed_v, change = correct_postcode(tag.attrib['v'])\n else:\n fixed_v = tag_val\n if fixed_v != None:\n node[\"address\"][addr_key] = fixed_v\n \n # fix fax and phone number\n elif tag_key == \"fax\" or tag_key == \"phone\":\n fixed_v, chang = correct_number(tag_val)\n node[tag_key] = fixed_v\n \n #fix multiple tag_key confusing. These two tag_key in the list have same meaing, \n #so just keep the latter one in the list and change the former to the latter\n elif tag_key in [ u'应急避难场所疏散人数万人',u'应急避难场所疏散人口万人']:\n node[u'应急避难场所疏散人口万人'] = tag_val\n \n # '疏散人数' and '疏散人数(万)' are two similar tag_key. Inthis way below, we change '疏散人数' to '疏散人数(万)'\n # by doing some math.\n elif tag_key == u'疏散人数':\n node[u'疏散人数(万)'] = str(round(float(tag_val.split()[0].replace(',',''))/10000,2))\n elif tag_val != None:\n node[tag_key] = tag_val\n \n return node", "def tag_mapping(sentences):\n tags = [[word[-1] for word in s] for s in sentences]\n dico = create_dico(tags)\n tag_to_id, id_to_tag = create_mapping(dico)\n print(\"Found %i unique named entity tags\" % len(dico))\n return dico, tag_to_id, id_to_tag", "def get_simplified_tags(self) -> Dict:\n return self.orthanc.get_instance_simplified_tags(self.identifier)", "def set_tags(self, tags):\n self.tags = []\n for tag in [t.strip() for t in tags.split(', ')]:\n self.tags.append(Tag(title=tag))", "def _transform_known_tags(self):\n self.missing_known_tags = []\n\n for k, tf in self._known_tags.items():\n v = self.tags.get(k, [])\n if not v:\n self.missing_known_tags.append(k)\n continue\n\n if len(v) > 1:\n raise Exception(f\"multiple instances of tag {k}\")\n\n setattr(self, k, v[0])", "def _apply_filters(self, text, tag):\n\n # The order of the filters below is important\n # and should not be changed\n\n # intial_quotes needs to happen at this point so that\n # attribute values introduced later on do not get affected\n text = self.initial_quotes(text)\n text = self.smarty_pants(text)\n text = self.amp(text)\n text = self.caps(text)\n\n return text", "def tags_with(self, word):\n return tags_with_word", "def tag_mapping(sentences):\n tags = [[char[-1] for char in s] for s in sentences]\n dico = create_dico(tags)\n tag_to_id, id_to_tag = create_mapping(dico)\n print(\"Found %i unique named entity tags\" % len(dico))\n return dico, tag_to_id, id_to_tag", "def find_named_entities(pos_tags):\n contains_proper_noun = False\n tokens = list()\n for tags in pos_tags:\n if tags['tag'] == '^':\n contains_proper_noun = True\n\n if contains_proper_noun:\n for tags in pos_tags:\n if len(tags['token']) == 1:\n tags['token'] 
= NLPUtils.character_to_unicode(tags['token'])\n tokens.append(tags['token'])\n try:\n text = ' '.join(tokens)\n headers = {\n 'Accept': 'application/json',\n }\n # print(text)\n data = [\n ('text', text),\n ('confidence', '0.25'),\n ('support', '20')\n ]\n\n r = requests.post('http://model.dbpedia-spotlight.org/en/annotate', headers=headers, data=data,\n timeout=10)\n # print(str(r.content.decode()))\n res = r.json()\n\n entities = list()\n if 'Resources' in res:\n for i in res['Resources']:\n # res_str = str(i).replace(',','\\n')\n # print(res_str)\n\n if i['@types'] is not None:\n original = i['@surfaceForm']\n entity_tmp = i['@URI']\n entity_tmp = re.sub('.*/', '', entity_tmp)\n entity_tmp = re.sub('\\(.*\\)', '', entity_tmp)\n entity = re.sub('_', ' ', entity_tmp).strip()\n\n if entity.lower() in text.lower() and ' ' in entity:\n entities.append((entity, int(i['@offset'])))\n # print(entities)\n new_pos_tags = list()\n curr_pos = 0\n tokens_to_omit = 0\n for tags in pos_tags:\n # if re.match(\"U\\+[a-zA-Z0-9]{1,5}\",tags['token']):\n # print(tags['token'])\n # tags['token'] = NLPUtils.unicode_to_character(tags['token'])\n # print(tags['token'])\n\n token = tags['token']\n for e in entities:\n curr_dict = dict()\n if curr_pos == e[1]:\n tokens_to_omit = len(re.split(' ', e[0]))\n curr_dict['token'] = e[0]\n curr_dict['tag'] = '^'\n new_pos_tags.append(curr_dict)\n # +1 for whitespace\n curr_pos += len(token) + 1\n if tokens_to_omit == 0:\n new_pos_tags.append(tags)\n else:\n tokens_to_omit -= 1\n\n # decode unicode sequence\n new_pos_tags = NLPUtils.unicode_to_character_pos_tagged(new_pos_tags)\n return new_pos_tags\n # decode uniocde character\n pos_tags = NLPUtils.unicode_to_character_pos_tagged(pos_tags)\n except Exception as e:\n print(e)\n return None\n\n return pos_tags", "def _fix_treetags(self, tree):\n for element in tree:\n element.tag = element.tag.split('}')[1]\n if len(element.getchildren()) > 0:\n self._fix_treetags(element)\n return tree", "def post_process_result(self, result: np.ndarray) -> np.ndarray:\n to_cut = len(\"_tag\")\n return np.asarray([[tag[:-to_cut] for tag in list_of_tags] for list_of_tags in result])", "def filter_tag(tags=None):\n tagdict = defaultdict(list)\n Besarkecil = lambda f: ' '.join(re.findall('[A-Z][^A-Z]*', f))\n for obj in list(tags):\n if len(obj.split(':')) == 2:\n k, v = obj.split(':')\n # filtering key Besarkecil, lowercase\n k = str(Besarkecil(k)).lower()\n # print(k)\n if k in ['cari', 'jadwal', 'keberangkatan', 'maskapai', 'type', 'ibadah', 'jumlah hari', 'rute', 'tour']:\n res = re.findall(r\"(^[A-Z][^A-Z]+)|([^\\W\\d_]+|[\\d+]+)\", v)\n arres = []\n for resple in res:\n arres.append(filter(None, resple)[0])\n # print([e for e in resple])\n # print(' '.join(arres))\n tagdict[k].append(' '.join(arres))\n return tagdict", "def normalize(doc):\n return {'content': u' '.join(map(itemgetter(1), doc))}", "def transform():\n pass", "def transform4Doc2Vec(docs):\n\n # transform documents to be used by doc2Vec\n documents = []\n analyzedDocument = namedtuple('AnalyzedDocument', 'words tags')\n for i, doc in enumerate(docs):\n # use first line if documents are not tokenized, otherwise next line\n # words = text.lower().split()\n tags = [i]\n documents.append(analyzedDocument(doc, tags))\n\n return documents", "def find_usefull_tags(tags, tagmodel, tag_count_vect):\n\n final_tags = []\n for tag in tags:\n if tag == None:\n continue\n else:\n tagpd = pd.Series(tag)\n tag_feature = tag_count_vect.transform(tagpd)\n result = 
tagmodel.predict(tag_feature)\n\n result = result.tolist() \n result = str(result)\n if result == '[1]':\n final_tags.append(tag)\n final_tags = list(dict.fromkeys(final_tags))\n return(final_tags)", "def testTagr(self):\n\t\ttags = (\n\t\t\t \t('bob', 'bo<strong>b</strong>'),\n\t\t\t \t('Jack', 'Jac<strong>k</strong>'),\n\t\t\t \t)\n\t\t\n\t\tf = Flickr()\n\t\tfor tag, tagr in tags:\n\t\t\tresult = f.make_tagr(tag)\n\t\t\tself.assertEqual(result, tagr)", "def change_tags_format(page_tags):\n\treturn [tags.replace('\\n', ', ') if not tags == None else None for tags in page_tags]", "def tag_bioes(tags, match_index, term_length):\n\n if term_length == 1:\n tags[match_index] = \"S\"\n else:\n for i in range(term_length):\n if i == 0:\n tags[match_index + i] = \"B\"\n elif i == term_length - 1:\n tags[match_index + i] = \"E\"\n else:\n tags[match_index + i] = \"I\"\n return tags", "def _parse_tags(self):\n tokens = self.tags_str[1:].split(\";\")\n self._tags = {\n k.strip(): v\n for token in tokens\n for k, v in [token.split(\"=\")]\n }", "def _replace_tags(codes, key, value, tag_dict, i):\n\n # if reaching the end of the tag dictionary\n if i == len(tag_dict):\n codes[key] = value\n return\n\n # check if the i-th <tag> is used in the key\n tag = list(tag_dict)[i]\n if tag in key:\n for tag_key, tag_attrs in tag_dict[tag].items():\n _key = key.replace(tag, tag_key)\n _value = replace_tag_attributes(value, tag, tag_attrs)\n _replace_tags(codes, _key, _value, tag_dict, i + 1)\n else:\n _replace_tags(codes, key, value, tag_dict, i + 1)", "def tag(word: str, tags: list):\n open_tags = ['<' + tag + '>' for tag in tags]\n close_tags = ['</' + tag + '>' for tag in reversed(tags)]\n logger.debug('*************** %s ' %\n\n word)\n return ''.join(open_tags) + word + ''.join(close_tags)", "def linkify_tags_and_mentions(value):\n value = find_hashtags_re.sub(tag_match_to_url, sanitize(value))\n value = find_mentions_re.sub(mention_match_to_url, value)\n # value = link_tags_parse(value)\n return mark_safe(value)", "def filter_tag(tags=None):\n tagdict = defaultdict(list)\n Besarkecil = lambda f: ' '.join(re.findall('[A-Z][^A-Z]*', f))\n for obj in list(tags):\n if len(obj.split(':')) == 2:\n k, v = obj.split(':')\n # filtering key Besarkecil, lowercase\n k = str(Besarkecil(k)).lower()\n # print(k)\n if k in ['cari', 'jadwal', 'keberangkatan', 'maskapai', 'type', 'ibadah', 'jumlah hari', 'rute',\n 'tour']:\n res = re.findall(r\"(^[A-Z][^A-Z]+)|([^\\W\\d_]+|[\\d+]+)\", v)\n arres = []\n for resple in res:\n arres.append(filter(None, resple)[0])\n # print([e for e in resple])\n # print(' '.join(arres))\n tagdict[k].append(' '.join(arres))\n return tagdict", "def manipulate_xml_tag(label, tag, manipulator):\r\n lines = label.splitlines()\r\n output_lines = []\r\n for line in lines:\r\n if tag in line:\r\n value = next(inside_the_tag(line))\r\n output_lines.append(manipulator(line, value))\r\n else:\r\n output_lines.append(line)\r\n return \"\\n\".join(output_lines)", "def __pos_tag(self, title, text):\n text_words = nltk.word_tokenize(text)\n stop = nltk.corpus.stopwords.words(\"english\")\n text_words = list(filter(lambda x: x.lower() not in stop and x.lower() not in string.punctuation, text_words))\n tagged_text = [\" \".join(x[1] for x in nltk.pos_tag(text_words))]\n title_words = nltk.word_tokenize(title)\n title_words = list(filter(lambda x: x.lower() not in stop and x.lower() not in string.punctuation, title_words))\n tagged_title = [\" \".join(x[1] for x in nltk.pos_tag(title_words))]\n return 
self.pos_vectorizer.transform(tagged_title), self.pos_vectorizer.transform(tagged_text)", "def preprocess(text):\n text = text.translate(None, string.punctuation)\n words = filter(None, re.split('\\s+', text))\n words = nltk.pos_tag(words)\n words = [(word.lower(), nltk.simplify_wsj_tag(tag)) for word, tag in words]\n words = [(word, 'V') if tag.startswith('V') else (word, tag)\n for word, tag in words]\n return words", "def rep(self,tag,nstr):\n tmp = []\n for line in self.content: \n if tag in line:\n tmp.append(line.replace(tag,nstr))\n else:\n tmp.append(line)\n self.content = tmp", "def encode_tags(self, text):\n text = re.sub(self.patterns['html_open_tag'], r' __\\1_START ', text)\n text = re.sub(self.patterns['html_close_tag'], r' __\\1_END ', text)\n return text", "def ts(region, tags, reset):", "def transform(self, docs):\n return [doc for doc in docs]", "def clean_tags(f: mutagen.FileType) -> mutagen.FileType:\n _awful_tags = (\n \"comment\",\n \"genre\",\n \"isrc\",\n \"upc\",\n \"barcode\",\n \"organization\",\n \"copyright\",\n \"bpm\",\n \"length\",\n \"website\",\n \"www\"\n )\n _awful_categories = (\n \"musicbrainz_.*\",\n \"replaygain_.*\",\n \"catalog.*\",\n \"beatport.*\",\n \".*label.*\",\n \"encod.*\",\n \".*key.*\",\n \"itunes.*\"\n )\n for key in f.keys():\n if key.lower() in _awful_tags or any(re.search(p, key.lower())\n is not None\n for p in _awful_categories):\n f.pop(key)\n # fix date tag\n if len(f.get(\"date\", [\"0000\"])[0]) > 4:\n date = f.pop(\"date\")\n date[0] = date[0][:4]\n f[\"date\"] = date\n # fix title\n if \"title\" in f:\n title = f.pop(\"title\")[0]\n # remove \"original mix\"\n title = re.sub(\"\\s*(-\\s*|\\()[Oo]riginal( [Mm]ix)?\\)?\\s*$\", \"\", title)\n # split out featured artist\n tmp_featured = re.split(\"\\s*\\(feat(\\.|uring)?\\s*\", title)\n if len(tmp_featured) == 2:\n title = tmp_featured[0]\n featured = re.sub(\"\\s*\\)\\s*$\", \"\", tmp_featured[1])\n if \"performer\" in f:\n f[\"performer\"].append(featured)\n else:\n f[\"performer\"] = [featured]\n f[\"title\"] = [title]\n return f", "def pos_tag(self,sentence):\n tagged = self.brill_tagger.tag(sentence.split())\n tagged_sentence = \" \".join([nltk.tag.tuple2str(tok) for tok in tagged])\n print tagged_sentence\n\n tag_list = [(each.split(\"/\")[0],each.split(\"/\")[1]) for each in tagged_sentence.split()]\n return tag_list", "def clean_tag(tag):\n tmp0 = tag.strip()\n tmp1 = tmp0.lower()\n return tmp1", "def validateTags(self, tags):\n\t\treturn tags.replace(', ',' ')", "def tags(*new_tags: str) -> Set[str]:\n current_tags = TagsContext.get().current_tags\n new_tags = current_tags.union(new_tags)\n with TagsContext(current_tags=new_tags):\n yield new_tags", "def tag(self, postagged_sentences):\n return [self.tag_sentence(sentence) for sentence in postagged_sentences]", "def tags_2_vec(tags, w2v_model=None):\n if len(tags) == 0 or len([tag for tag in tags if tag in w2v_model]) == 0:\n return np.zeros(200)\n else:\n output = np.sum([w2v_model[tag] for tag in tags if tag in w2v_model], axis=0)\n return output / np.linalg.norm(output)", "def update_tag_scheme(sentences, tag_scheme):\n for i, s in enumerate(sentences):\n tags = [w[-1] for w in s]\n # Check that tags are given in the IOB format\n if not NER_utils.iob2(tags):\n s_str = '\\n'.join(' '.join(w) for w in s)\n raise Exception('Sentences should be given in IOB format! 
' +\n 'Please check sentence %i:\\n%s' % (i, s_str))\n if tag_scheme == 'iob':\n # If format was IOB1, we convert to IOB2\n for word, new_tag in zip(s, tags):\n word[-1] = new_tag\n elif tag_scheme == 'iobes':\n new_tags = NER_utils.iob_iobes(tags)\n for word, new_tag in zip(s, new_tags):\n word[-1] = new_tag\n else:\n raise Exception('Unknown tagging scheme!')", "def add_tagging(self, task_instance):", "def removeTags(self, words):\n\t\treturn re.sub(r'<.*?>', '', words)", "def remove_Tags(self,text):\n cleaned_text = re.sub('<[^<]+?>', '', text)", "def tags(catalog,lista,tag):\n final=lt.newList(datastructure='ARRAY_LIST')\n i=it.newIterator(lista)\n while it.hasNext(i):\n vid=it.next(i)\n if tag in vid['tags']:\n lt.addLast(final,vid)\n return final", "def tokenize(doc):\n text = doc\n doc = doc.lower()\n doc = re.sub('[,;]', ' ', doc)\n doc = re.split('\\s+', doc)\n doc = sorted(list(filter(None, doc)))\n ent = le.stanfordTagger(text)\n print(ent)\n l = []\n for item in ent:\n if ent[item] in ['LOCATION', 'GPE','PERSON']:\n l.append(item)\n ent = l#ent = sorted(list(le.stanfordTagger(text).keys()))\n #print(ent)\n #ent = [e.lower() for e in ent]\n crime_type = fileCrimeClassify.extractCrimeWord(text, returnOnlyLabels=True)\n crime_type = [c.lower() for c in crime_type]\n #print(crime_type + ent)\n #print(doc)\n return doc, ent + crime_type", "def add_tags(event):\n\n add_tags_from_presets()", "def fix_tags(entity):\n request = entity.request_key.get(use_cache=False, use_memcache=False)\n # Compare the two lists of tags.\n if entity.tags != request.tags:\n entity.tags = request.tags\n logging.info('Fixed %s', entity.task_id)\n yield operation.db.Put(entity)", "def update_tag_scheme(sentences, tag_scheme):\n for i, s in enumerate(sentences):\n tags = [w[-1] for w in s]\n # Check that tags are given in the IOB format\n if not iob2(tags):\n s_str = '\\n'.join(' '.join(w) for w in s)\n raise Exception('Sentences should be given in IOB format! 
' +\n 'Please check sentence %i:\\n%s' % (i, s_str))\n if tag_scheme == 'iob':\n # If format was IOB1, we convert to IOB2\n for word, new_tag in zip(s, tags):\n word[-1] = new_tag\n elif tag_scheme == 'iobes':\n new_tags = iob_iobes(tags)\n for word, new_tag in zip(s, new_tags):\n word[-1] = new_tag\n else:\n raise Exception('Unknown tagging scheme!')", "def map_postags(treebank_tag):\n\n if treebank_tag.startswith('J'):\n return \"a\"\n elif treebank_tag.startswith('V'):\n return \"v\"\n elif treebank_tag.startswith('N'):\n return \"n\"\n elif treebank_tag.startswith('R'):\n return \"r\"\n else:\n return 'n'", "def mutate_tag_seq(words, seq1, seq2):\n if len(seq1) > len(words):\n return None\n seq_start = index_tag_seq(words, seq1)\n if seq_start > -1:\n pre = words[:seq_start]\n post = words[seq_start+len(seq1):]\n mutated = []\n for x in seq2:\n for j in range(len(seq1)): \n if x == words[seq_start+j].tag:\n mutated.append(words[seq_start+j])\n return pre + mutated + post\n return None", "def decompose(self):\r\n contents = [i for i in self.contents]\r\n for i in contents:\r\n if isinstance(i, Tag):\r\n i.decompose()\r\n else:\r\n i.extract()\r\n self.extract()", "def preprocess_sent(self, sent, sent_id):\n ids_x = []\n ids_y = []\n for word, tag in sent:\n tag = tag.lower()\n if tag not in self.mapping:\n # Add unk tags to mapping dict\n self.mapping[tag] = \"noun\"\n universal_tag = self.mapping[tag]\n word_id = self.dictionary.x_dict.add(word)\n tag_id = self.dictionary.y_dict.add(universal_tag)\n ids_x.append(word_id)\n ids_y.append(tag_id)\n return Sequence(self.dictionary, ids_x, ids_y, sent_id)", "def normalize_tags(tags):\n ret = []\n dupes = NormalizedDict({'': 1})\n for tag in tags:\n if not dupes.has_key(tag):\n ret.append(tag)\n dupes[tag] = 1\n ret.sort(lambda x, y: cmp(normalize(x), normalize(y)))\n return ret", "def searchbrown_phrase(tags):\n l = len(tags)\n brown_tagged_words = brown.tagged_words(categories='news')\n hitwords = []\n for i in range(len(brown_tagged_words)-l+1):\n searchtags = [tag for _,tag in brown_tagged_words[i:i+l]]\n if tags == searchtags:\n hitwords.append(tuple([w.lower()\n for w,_ in brown_tagged_words[i:i+l]]))\n return hitwords" ]
[ "0.6342739", "0.62034404", "0.61902505", "0.61189127", "0.61091095", "0.60965765", "0.6026633", "0.59895706", "0.5947233", "0.5945939", "0.59207904", "0.58725727", "0.5865535", "0.5798255", "0.57310975", "0.5727268", "0.5669075", "0.56652087", "0.5649698", "0.5608626", "0.55813783", "0.5565579", "0.5557909", "0.5546394", "0.5527269", "0.54794276", "0.54765725", "0.54739624", "0.5465698", "0.5436435", "0.54219955", "0.54212815", "0.54138327", "0.5401242", "0.53959113", "0.5387077", "0.5377522", "0.5356396", "0.53560305", "0.53382325", "0.53310263", "0.53125584", "0.529292", "0.52806616", "0.52683884", "0.525647", "0.52357334", "0.5233654", "0.5229625", "0.5207561", "0.52045596", "0.52040327", "0.5200548", "0.51870924", "0.5182511", "0.5181125", "0.5150942", "0.51426125", "0.51315665", "0.5126774", "0.51065546", "0.50964355", "0.5091461", "0.5088328", "0.50871325", "0.50642604", "0.50573736", "0.50555664", "0.5055103", "0.5052444", "0.5039175", "0.50337327", "0.5032444", "0.5028389", "0.50243676", "0.50236154", "0.5019509", "0.50191754", "0.5019152", "0.50134796", "0.5011372", "0.5010728", "0.50062984", "0.50048673", "0.49942648", "0.49939466", "0.49936527", "0.49887455", "0.49848267", "0.49833947", "0.49831036", "0.49822995", "0.49792823", "0.49783272", "0.4974794", "0.49736091", "0.49724856", "0.49692193", "0.49683565", "0.49618465" ]
0.6025879
7
get a model for solr
def transform_entity(self, instance, what):
    whats = []
    for _what in instance.get_entity(what):
        whats.append(str(_what))
    return whats
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_model(self):\n raise NotImplementedError(\n \"You must provide a 'get_model' method for the '%r' index.\" % self\n )", "def get_model(self):\n return QueryS", "def get_model():\n return UNISAL", "def get_model(self):\n url = self.resource()\n params = {'data': ''}\n resp = self._client.get(url, params=params)\n\n return resp.text", "def get_model(self):\n return Doc()", "def _getModel(self):\r\n \r\n return self._model", "def get_model(model_name):\n module_name = 'strain.models.strain_' + model_name.lower()\n model_module = importlib.import_module(module_name)\n obj = getattr(model_module, model_name)\n return obj", "def model(self):", "def model(self):", "def model(self):", "def model(self):", "def model(self):", "def getModel(self, *args):\n return _libsbml.CompSBMLDocumentPlugin_getModel(self, *args)", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def get_model(self):\n return self.model", "def getModel(self, *args):\n return _libsbml.SBMLDocument_getModel(self, *args)", "def get_model(model=gin.REQUIRED):\n return model", "def get_search_dao(self):\n if hasattr(self, 'search_model'):\n return self.search_model()\n raise NotImplementedError()", "def get_model(*args):\n return Model()", "def get_model(source: str, supported_model_name: str, model_id: str):\n connector = __get_connector(source)\n supported_model = __get_supported_model(supported_model_name)\n\n try:\n model = connector.get(supported_model, model_id)\n return __parse_response(source, model)\n except Exception as e:\n abort(500, e)", "def model(self) -> Type[Model]:", "def getModel(self):\n return _libsbml.SBase_getModel(self)", "def load_model(self) -> Any:", "def get_model(self):\n\t\treturn self.object.__class__", "def get_object(self):\n queryset = self.get_queryset()\n\n model = self.get_model()\n obj = queryset.get(get_primary_keys(model, self.kwargs))\n\n if not obj:\n raise Http404('No %s matches the given query.' 
% model.__name__)\n\n return obj", "def getModel(self):\n return self._l[1]", "def model(self):\n return Product", "def solrsearch(self, **kwargs):\n return self.request.get('/@solrsearch', params=kwargs).json()", "def abstract_get(self, model, id=False):\n return self.env[model].sudo().browse(id) if id else self.env[model].search([])", "def model(self) -> str:\n ...", "def getModel(self):\n return self.model", "def model(self):\n return self.model_", "def model(self):\n return self.__model", "def _request_model(self, instance, success, get_embedded=True):\n coll = self.get_collection('_model')\n if get_embedded:\n callback = partial(self._get_embedded_model_names,\n instance=instance,\n success=success)\n else:\n callback = success\n\n try:\n instance['_model']\n except KeyError:\n raise tornado.web.HTTPError(400, 'Missing model key')\n coll.find_one({'_id': instance['_model']},\n callback=callback)", "def get_model(self):\n return self._model", "def get_model(self):\n return self._model", "def _get_model_by_name(self):\n \n # Interpret the request data based on the expected row and column structure\n row_template = ['strData']\n col_headers = ['model_name']\n \n # Create a Pandas Data Frame for the request data\n self.request_df = utils.request_df(self.request, row_template, col_headers)\n \n # Initialize the persistent model\n self.model = PersistentModel()\n \n # Get the model name from the request dataframe\n self.model.name = self.request_df.loc[0, 'model_name']\n \n # Get the model from cache or disk\n self._get_model()\n \n # Debug information is printed to the terminal and logs if the paramater debug = true\n if self.model.debug:\n self._print_log(3)", "def get_by_id(id: str) -> MLModel:\n model_data = _collection.find_one(filter={'_id': ObjectId(id)})\n if model_data is not None:\n return MLModel.parse_obj(model_data)\n else:\n raise ServiceException(f'Model with id={id} does not exist.')", "def get_model(model):\n all_models = cmd.get_object_list()\n\n if len(all_models) == 0:\n logging.parser_error('No models are opened.')\n return\n\n model = model.lower()\n\n if model and (model in all_models):\n return model\n\n if len(all_models) > 1:\n logging.parser_error(\"Please specify which model you want to use. 
{}\".format(all_models))\n return\n\n return all_models[0]", "def search_model(source: str, supported_model_name: str):\n if not request.data:\n abort(400, 'Request is missing the search terms (json) data')\n\n connector = __get_connector(source)\n supported_model = __get_supported_model(supported_model_name)\n\n try:\n search_terms = json.loads(request.data)\n models = connector.search(supported_model, search_terms)\n json_models = []\n for model in models:\n parsed_model = __parse_response(source, model)\n json_models.append(parsed_model)\n\n return {'results': json_models}\n except Exception as e:\n abort(500, e)", "def get_model(self):\n # just return the first model, since all replicas are the same\n return self.call_async(0, '_async_get_model').gen()", "def get_model(name, disable_logging=False):\n return PluginLoader._import(\"train.model\", name, disable_logging)", "def model() -> Model:\n return Model()", "def get_model_definition(request):\n modelname = request.matchdict['modelname']\n results = db_model_definition(request.db)[modelname]\n for result in results:\n return result.value\n raise NotFound(\"Unknown model %s\" % modelname)", "def model(self):\n return self._model", "def model(self):\n return self._model", "def model(self):\n return self._model", "def model(self):\n return self._model", "def model(self):\n return self._model", "def get(self, id=None):\n self.reset(id)\n url = ASSEMBLYAI_URL + '/model/' + str(self.id)\n response = requests.get(url, headers=self.headers)\n self.warning = handle_warnings(response, 'model')\n response = response.json()['model']\n # self.phrases = response['phrases']\n self.dict = response\n self.status = response['status']\n logging.debug('Model %s %s' % (self.id, self.status))\n return self", "def test_get_model(self) -> None:\n get_model()", "def GetModel(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def get(self) -> CrosswalkModel | None:\n self._refresh_model_terms()\n return self.model", "def search_model():\n search_condition = request.stream.read()\n try:\n search_condition = json.loads(search_condition if search_condition else \"{}\")\n except Exception:\n raise ParamValueError(\"Json data parse failed.\")\n\n model_lineage_info = _get_lineage_info(\n lineage_type=\"model\",\n search_condition=search_condition\n )\n\n return jsonify(model_lineage_info)", "def get_model(self, key: str = None, **kwargs) -> Dict:\n raise NotImplementedError", "def create_model(self):\n model = solph.Model(self.es)\n return model", "def get_model(self, name):\n bundle_name, model_name = name.split(\".\")\n bundle = self.bundles[bundle_name]\n model = bundle.models[name]\n return model", "def get_model(params):\r\n module_name, class_name = params.model.name.rsplit('.', 1)\r\n i = importlib.import_module(module_name)\r\n return getattr(i, class_name)", "def load_model_from_catalog(name, as_builder=False):\n return catalog.get_model_from_catalog(name, as_builder=as_builder)", "def get_result_model(cls):\n raise NotImplementedError()", "def model(self) -> Model:\n return self.software_system.get_model()", "def model(self) -> 'outputs.ModelDefinitionResponse':\n return pulumi.get(self, \"model\")", "def getModel(self, *args):\n return _libsbml.SBMLValidator_getModel(self, *args)", "def search_doc_type(self):\n return self._meta.model_name", "def model(self):\n return MODELS.get(self._model,self._model)", "def get_model(*, name: str) -> typing.Optional[typing.Type]:\n return getattr(open_alchemy.models, name, None)", 
"def _get_card_model(self, model: str) -> Any:\n return self.collection.models.byName(model)", "def get_model(self):\n return self.chain.model", "def real_model(request):\n return request.config.option.real_model", "def Model(self):\n return self._model", "def get_model():\n global model\n if model is None:\n model = AppModel()\n model.load_resources()\n return model", "def load_model(self):\n pass", "def modelClass(self):\n raise NotImplementedError", "def initialize_model(self):\n model = self.model_class()\n return model", "def get_model_by_name(cls, name):\n model_name = inflection.camelize(name) # class name of the model to use\n model = cls.models[model_name]\n return model", "def get_model_reference(self, model_name):\n\n print_debug(\"Geting model :\" + model_name)\n model = ModelsFactory.get(model_name=model_name)\n return model", "def get_model(self, model_id):\n if self.model_dict.has_key(model_id):\n return self.model_dict[model_id]\n return None", "def solr_dict(self):\n raise NotImplementedError", "def retrieve_model(self, model_name):\n\t\tmodel_detail = dbop.get_model(self, model_name)\n\t\t#since the 'owner' field of model_detail is only owner's username,\n\t\t#we have to change it to a User object\n\t\t#In this case, the owner of this model is the user itself\n\t\tmodel_detail['owner'] = self\n\t\tif model_detail['model_type'] == 'SPSS Predictive Model':\n\t\t\treturn model.SPSSModel(**model_detail)\n\t\telif model_detail['model_type'] == 'DashDB In-database Model':\n\t\t\treturn model.DashdbModel(**model_detail)", "def get_model_by_name(self, model_name):\n models = ModelDirectory.get_model_by_name(model_name, pipeline=self)\n return models", "def get_model(self, id):\n if id not in self._models:\n raise Exception(f'model with the id {id} does not exist')\n return self._models[id]", "async def get_one(self, where: t.Mapping[str, t.Any]) -> t.Optional[Model]:\n\n data = await self.collection.find_one(where)\n return self.model_class(**data) if data else None", "def model(self) -> Optional[str]:\n return pulumi.get(self, \"model\")", "def get_model(cls):\n if cls.model == None:\n with open(os.path.join(model_path, 'vdok3_rf.pkl'), 'rb') as inp:\n cls.model = pickle.load(inp)\n return cls.model", "def build_model():", "def _get(self, _model, **kwargs):\n return get_object_or_404(_model, **kwargs)", "def get_model(self):\n return get_object_or_404(Order, user=self.request.user, number=self.kwargs['order_number'])", "def _get_model(self, user_id, model_name, data_reviews):\n\t\treturn MAPPING_MODEL[self.model](\n\t\t\tuser_id=user_id,\n\t\t\tmodel_name=model_name,\n\t\t\tdata_reviews=data_reviews\n\t\t\t)", "def _get_model():\n with open('models/catapp_gp_model.pickle', 'rb') as modelfile:\n model = pickle.load(modelfile)\n return model", "def _get_model(self):\n if self.model == None:\n model_path = self.model_path\n w2v_model = gensim.models.Word2Vec.load(model_path)\n # Keep only the normalized vectors.\n # This saves memory but makes the model untrainable (read-only).\n w2v_model.init_sims(replace=True)\n self.model = w2v_model\n return self.model", "def model(self) -> Model:\n return self._model", "def get_content(self):\n if \"query\" in self.query:\n q = self.query[\"query\"]\n else:\n q = self.query\n search = custom_search_model(Content, q, field_map={\n \"feature-type\": \"feature_type.slug\",\n \"tag\": \"tags.slug\",\n \"content-type\": \"_type\"\n })\n return search", "def get_model(self):\n return self.model.module if isinstance(self.model, DDP) else self.model" 
]
[ "0.6687158", "0.64133453", "0.63109964", "0.62503785", "0.62141037", "0.61795294", "0.6167323", "0.61600643", "0.61600643", "0.61600643", "0.61600643", "0.61600643", "0.6062293", "0.6059364", "0.6059364", "0.6059364", "0.6059364", "0.6059364", "0.6059364", "0.6059364", "0.6059364", "0.6059364", "0.6059364", "0.6047944", "0.6041127", "0.603784", "0.6030016", "0.60170305", "0.59668136", "0.59630257", "0.5944948", "0.59140617", "0.5904948", "0.5898001", "0.5896936", "0.589291", "0.58863544", "0.58724284", "0.5857973", "0.58569837", "0.5844866", "0.5830906", "0.5828391", "0.5828391", "0.579959", "0.57901573", "0.57787895", "0.57784677", "0.57684946", "0.5766717", "0.5764301", "0.5763886", "0.575726", "0.575726", "0.575726", "0.575726", "0.575726", "0.57515025", "0.5748793", "0.5745248", "0.5735784", "0.5732196", "0.5722812", "0.5713409", "0.5706978", "0.570434", "0.5700827", "0.5690564", "0.5661456", "0.56600046", "0.5645948", "0.5619513", "0.56183535", "0.5612262", "0.5610929", "0.5601371", "0.55995256", "0.55570513", "0.554821", "0.55467516", "0.5533502", "0.551493", "0.548831", "0.5486147", "0.5482658", "0.54689735", "0.5455565", "0.54537195", "0.54478186", "0.54441303", "0.5429713", "0.54222095", "0.5408876", "0.54066527", "0.54064924", "0.5403664", "0.53979284", "0.5389557", "0.5384537", "0.53781104", "0.5377959" ]
0.0
-1
get people for solr
def transform_people(self, instance): return self.transform_entity(instance, 'Person')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def search_professors(search_term):\n print(\"Professor to search\", search_term)\n if search_term == \"\" or search_term is None:\n return json.dumps([])\n else:\n # pandas_index_list = elastic_dash.search_professors(search_term)\n pandas_index_list = elastic_dash.search_personnel(search_term)\n print(\"pandas index list \", pandas_index_list)\n return json.dumps(pandas_index_list)", "def get_person_text(self, uid):\n words = \"\"\n\n query = \"\"\"\nSELECT ?overview ?researchO ?label\nWHERE\n{\n <%s> <http://vivoweb.org/ontology/core#overview> ?overview .\n <%s> <http://vivoweb.org/ontology/core#researchOverview> ?researchO .\n <%s> <http://www.w3.org/2000/01/rdf-schema#label> ?label .\n}\n \"\"\" % (uid, uid, uid)\n self.setQuery(query)\n try:\n rval = self.query()\n try:\n g = rval.convert()\n except:\n pass\n words = \"%s %s %s\" % (g['results']['bindings'][0]['overview']['value'], g['results']['bindings'][0]['researchO']['value'], g['results']['bindings'][0]['label']['value'])\n except:\n print \"Select failed: %s\" % query\n\n self.setQuery(\"\"\"\nPREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\nPREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\nPREFIX vivo: <http://vivoweb.org/ontology/core#>\nPREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\nSELECT ?name\nWHERE\n{\n ?auth vivo:relates <%s> .\n ?auth rdf:type vivo:Authorship .\n ?auth vivo:relates ?art .\n filter (?art!=<%s>) .\n ?art <http://vivoweb.org/ontology/core#dateTimeValue> ?date .\n ?date <http://vivoweb.org/ontology/core#dateTime> ?year .\n filter (?year>\"2009-01-01T00:00:00Z\"^^xsd:dateTime) .\n ?art rdfs:label ?name .\n}\nLIMIT 20\n\"\"\" % (uid, uid))\n try:\n rval = self.query()\n try:\n g = rval.convert()\n except:\n pass\n for t in g['results']['bindings']:\n words = words + \" \" + t['name']['value']\n\n except:\n print \"Select failed\"\n traceback.print_exc(file=sys.stdout)\n\n self.setQuery(\"\"\"\nPREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\nPREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\nPREFIX vivo: <http://vivoweb.org/ontology/core#>\nPREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n\nSELECT ?name\nWHERE\n{\n ?grant vivo:relates <%s> .\n ?grant rdf:type vivo:Grant .\n ?grant <http://vivoweb.org/ontology/core#dateTimeInterval> ?date .\n ?date <http://vivoweb.org/ontology/core#end> ?end .\n ?end <http://vivoweb.org/ontology/core#dateTime> ?year .\n filter (?year>\"2009-01-01T00:00:00Z\"^^xsd:dateTime) .\n ?grant rdfs:label ?name .\n}\n\n \"\"\" % (uid))\n try:\n rval = self.query()\n try:\n g = rval.convert()\n except:\n pass\n\n for t in g['results']['bindings']:\n words = words + \" \" + t['name']['value']\n\n except:\n print \"Select failed\"\n traceback.print_exc(file=sys.stdout)\n\n\n\n\n return words", "def solrsearch(self, **kwargs):\n return self.request.get('/@solrsearch', params=kwargs).json()", "def get_people(team):", "def get_queryset(self):\n\n return person_search_qs(self.request)", "def get_people(self, **kwargs):\n\n self.url = f\"{self.base_url}{self.PEOPLE_URL}\"\n self.method = \"get\"\n self.params = self._prepare_query_params(kwargs)\n\n self._make_request()\n\n return self.response.json()", "def find_people(self, name=''):\n ## fixme -- can this query be combined?\n ## like this: db.inventory.find( { $or: [ { qty: { $lt: 20 } }, { sale: true } ] } )\n\n cursor = self.people.find({\"first_name\": {'$regex' : '.*' + name + '.*',\n '$options':'i'}})\n results = [Person.from_dict(p) for p in cursor]\n\n cursor = self.people.find({\"last_name\": {'$regex' 
: '.*' + name + '.*',\n '$options':'i'}})\n\n return results + [Person.from_dict(p) for p in cursor]", "def search_person_get(self, request):\n try:\n search_result = self.get_result_for_activitystream()\n for entry in search_result[\"result\"]:\n obj = entry.get(\"self\", None)\n if obj:\n # not provide object itself, but app specific data\n entry[\"self\"] = None\n entry[\"cs_activitystream_data\"] = _get_person_view(request, obj)\n entry[\"date\"] = dump_datetime(entry[\"date\"])\n return {\"result\": search_result}\n except ESException, e:\n misc.log_traceback(\"Enterprise Search: %s\" % e)\n res = request.ResponseClass(json.dumps(unicode(e)),\n status=HTTPInternalServerError.code,\n content_type=\"application/json\")\n return res", "def _do_get_persons(args):\n u_context = UserContext(user_session, current_user, request)\n if args.get(\"pg\") == \"search\":\n # No scope\n u_context.set_scope_from_request()\n if args.get(\"rule\", \"start\") == \"start\" or args.get(\"key\", \"\") == \"\":\n return {\"rule\": \"start\", \"status\": Status.NOT_STARTED}, u_context\n else: # pg:'all'\n u_context.set_scope_from_request(request, \"person_scope\")\n args[\"rule\"] = \"all\"\n u_context.count = request.args.get(\"c\", 100, type=int)\n\n with PersonReaderTx(\"read_tx\", u_context) as service:\n res = service.get_person_search(args)\n\n return res, u_context", "def researchbyname():\n if request.method == 'GET':\n user = request.args.get('newb')\n data = {}\n data = Beers.find({\"Nom\":user}, {\"_id\":0})\n return fct.returning(data)", "def query_by_person_many(self, names: list): #-> cursor object\n if not self.client:\n self.connect()\n query = templates.query_titles_by_person_many(names)\n return self.db.find(query).limit(25)", "def query_for_people(search_params):\n qry = build_query_people(search_params)\n try:\n endpoint = SPARQLWrapper(\"http://dbpedia.org/sparql\")\n endpoint.setQuery(qry)\n endpoint.setReturnFormat(JSON)\n results = endpoint.queryAndConvert()\n return HttpResponse(dumps(results['results']['bindings']))\n except (EndPointNotFound, EndPointInternalError) as e:\n return HttpResponse(dumps({'err_msg' : str(e)}))", "def get_all_companies_and_people():", "def persons(self, start=None, limit=None):\r\n params = base.get_params(None, locals())\r\n url = '{0}/persons'.format(self.get_url())\r\n return http.Request('GET', url, params), parsers.parse_json", "def search(request):\n\tif request.method == 'GET':\n\t\ttitle = request.GET.get('title')\n\t\tname = request.GET.get('person')\n\t\tif title:\n\t\t\treturn search_by_title(title)\n\t\telif name:\n\t\t\treturn search_by_person(name)\n\t\telse:\n\t\t\treturn JSONResponse({})", "def getInterestedUsers():", "def get_people(self):\n cursor = self.cur()\n cursor.execute('SELECT * FROM {tn} '.format(tn=\"person\"))\n all_people = cursor.fetchall()\n return all_people", "def solr_query(config, solr_host, fq, solr_collection_name):\n # solr_collection_name = config['solr_collection_name']\n\n getVars = {'q': '*:*',\n 'fq': fq,\n 'rows': 300000}\n\n url = f'{solr_host}{solr_collection_name}/select?'\n response = requests.get(url, params=getVars)\n return response.json()['response']['docs']", "def get_persons(self):\n return self.person_list.model().get_person_list()", "def search_unified():\n result_types = flask.request.args.get('result_types').split(',')\n\n # TODO(david): Cache this.\n course_dicts = []\n if 'courses' in result_types:\n courses = sorted(list(m.Course.objects().only('id', 'name',\n '_keywords', 'department_id', 
'number')),\n key=lambda c: c.id)\n course_dicts = [{\n 'label': c.id,\n 'name': c.name,\n 'type': 'course',\n 'tokens': c._keywords,\n 'department_id': c.department_id,\n 'number': c.number\n } for c in courses]\n\n friend_dicts = []\n if 'friends' in result_types:\n user = view_helpers.get_current_user()\n if user:\n friends = user.get_friends()\n friend_dicts = [{\n 'label': f.name,\n 'program': f.short_program_name,\n 'type': 'friend',\n 'id': f.id,\n 'pic': f.profile_pic_urls['square'],\n 'tokens': [f.first_name, f.last_name]\n } for f in friends]\n\n prof_dicts = []\n if 'professors' in result_types:\n professors = m.Professor.objects().only('id',\n 'first_name',\n 'last_name',\n 'departments_taught')\n prof_dicts = [{\n 'label': p.name,\n 'departments_taught': p.departments_taught,\n 'type': 'prof',\n 'prof_id': p.id,\n 'name': p.name,\n 'tokens': [p.first_name, p.last_name, 'professor']\n } for p in professors]\n\n return api_util.jsonify({\n 'friends': friend_dicts,\n 'courses': course_dicts,\n 'professors': prof_dicts\n })", "def get_persons():\n resp = requests.get(API_URL).content\n persons = json.loads(resp)\n return persons", "def search(self, query):", "def API_companysearch(request):\n company = request.GET.get(\"search\")\n company = str(company).strip()\n results = models.Company.objects.filter(name__icontains = company)\n results = [[company.pk,company.name] for company in results]\n return django.http.JsonResponse({\"success\":True,\"results\":results})", "def get_people(self):\n return self._people", "def query_person_titles(self, name: str): #-> cursor object\n if not self.client:\n self.connect()\n query = templates.query_titles_by_person(name)\n return self.db.find(query).limit(30)", "def search():\n\n candidate_ids_only = False\n get_percentage_match = True\n search_limit = 15\n page = int(request.vars.page) if request.vars.page else 1\n search_results = search_candidates(auth.user.domainId, request.vars, search_limit=search_limit,\n candidate_ids_only=candidate_ids_only, get_percentage_match=get_percentage_match)\n session.forget(response)\n\n area_of_interest_query = db(db.area_of_interest.domainId == auth.user.domainId).select(db.area_of_interest.id,\n db.area_of_interest.description,\n db.area_of_interest.parentId)\n custom_fields = get_search_form_data(auth.user)[\"custom_fields\"]\n\n # area_of_interest_objects_list = []\n # for row in area_of_interest_query:\n # area_of_interest_objects_list.append(dict(\n # id=row.id,\n # name=row.description,\n # parent=row.parentId\n # ))\n\n custom_fields_list = dict()\n for row in custom_fields:\n custom_fields_list[row] = [[x.name, x.id] for x in custom_fields[row]]\n\n return response.json(dict(\n facets=search_results['search_data']['facets'],\n talents=search_results['search_data']['descriptions'],\n total=search_results['total_found'],\n page=page,\n total_pages=search_results['max_pages'],\n mode=search_results['search_data'].get('mode'),\n percentage_matches=search_results['percentage_matches'],\n max_score=float(search_results['max_score']),\n # area_of_interests=area_of_interest_objects_list,\n custom_fields=custom_fields_list\n ))", "def do_search(self, *args, **kwargs):\n return [{}]", "def query_jql_people(self, user_selectors=None, output_properties=None, format='json'):\n return self._query_jql_items('people', user_selectors=user_selectors, output_properties=output_properties,\n format=format)", "def getResults():", "def PersonSearch(self, searchtext=None, **kw):\n\t\thtml = 
\"<ul><h4>Results:</h4>\"\n\t\tif searchtext:\n\t\t\tsearch = model.Person.select(OR (model.Person.q.NameFirst.contains(str(searchtext)), \n\t\t\t\t\t\t\tmodel.Person.q.NameLast.contains(str(searchtext))))\n\t\t\tif search.count() > 0:\n\t\t\t\tfor person in search:\n\t\t\t\t\thtml += '<li><a href=\"?PersonID=%s\">%s</a></li>' % (str(person.id), person.DisplayName())\n\t\t#log.debug(html)\n\t\thtml += \"</ul>\"\n\t\treturn html", "def get_companies_and_people(team):", "def get(self, request, search_string=None):\n query = SearchQuery(search_string)\n\n username_vector = SearchVector('username', weight='A')\n first_name_vector = SearchVector('first_name', weight='B')\n last_name_vector = SearchVector('last_name', weight='B')\n email_vector = SearchVector('email', weight='B')\n vectors = username_vector + first_name_vector + last_name_vector + email_vector\n qs = User.objects\n qs = qs.annotate(search=vectors).filter(search=query)\n qs = qs.annotate(rank=SearchRank(vectors, query)).order_by('-rank')\n print(qs)\n return Response(UserSerializer(qs, many=True).data)", "def test_05_get_person_by_name(self):\n p1 = Person.query.first()\n p1_data = p1.wrap()\n p1_f_name = p1_data[\"first_name\"]\n # find by first name only\n # get part of name and search\n q_string = \"?first_name={}\".format(p1_f_name[:3]) # TODO - verify the length\n rv = self.app.get('persons', query_string=q_string)\n data = json.loads(rv.data)\n self.assertEqual(data[\"count\"], 1)\n\n # find by first name and last name\n p1_l_name = p1_data[\"last_name\"]\n q_string = \"?first_name={}&last_name={}\".format(p1_f_name[:3], p1_l_name)\n rv = self.app.get('persons', query_string=q_string)\n data = json.loads(rv.data)\n self.assertEqual(data[\"count\"], 1)\n\n # find by first name and non-existing last name\n q_string = \"?first_name={}&last_name={}\".format(p1_f_name[:3], \"iAmNotThere\")\n rv = self.app.get('persons', query_string=q_string)\n data = json.loads(rv.data)\n self.assertEqual(data[\"count\"], 0)", "def get_results_for(t_client, search_q):\n results = t_client.search(q=\"#\"+search_q)\n\n # This can be refactored\n return [\n {\n \"author\": \"@%s\" % t.from_user,\n \"text\": t.text,\n \"id\": t.id,\n \"date_h\": t.created_at.strftime(\"%H:%M:%S %d/%m/%Y\"),\n \"date\": time.mktime(t.created_at.timetuple()),\n } for t in results\n ]", "def search_by_name(self, request, **kwargs):\n self.method_check(request, allowed=['get'])\n self.throttle_check(request)\n\n keyword = request.GET['keyword']\n members = Member.objects.filter(Q(first_name__icontains=keyword) | Q(last_name__icontains=keyword))\n\n bundles = []\n\n for member in members:\n bundle = self.build_bundle(obj=member, request=request)\n bundles.append(self.full_dehydrate(bundle, for_list=True))\n\n return self.create_response(request, bundles)", "def documents_search(request):\n\tif request.method == 'GET':\n\t\tparams = request.GET\n\telif request.method == 'POST':\n\t\tparams = request.POST\n\telse:\n\t\treturn HttpResponse(status=405)\n\n\t# grab params directly to implement defaults as\n\t# opposed to panicy django forms behavior.\n\tquery = params.get('q', '')\n\ttry:\n\t\tstart = int(params.get('start', '0'))\n\texcept:\n\t\tstart = 0\n\ttry:\n\t\tlimit = min(int(params.get('limit', DEFAULT_MAPS_SEARCH_BATCH_SIZE)),\n\t\t\t\t\tMAX_MAPS_SEARCH_BATCH_SIZE)\n\texcept: \n\t\tlimit = DEFAULT_MAPS_SEARCH_BATCH_SIZE\n\n\ttry:\n\t\trelated_id = int(params.get('related_id', None))\n\texcept: \n\t\trelated_id = None\n\n\trelated_type = params.get('related_type', 
None)\n\n\tsort_field = params.get('sort', u'')\n\tsort_field = unicodedata.normalize('NFKD', sort_field).encode('ascii','ignore')\t \n\tsort_dir = params.get('dir', 'ASC')\n\tresult = _documents_search(query, start, limit, sort_field, sort_dir, related_id, related_type, request.user)\n\n\tresult['success'] = True\n\treturn HttpResponse(json.dumps(result), mimetype=\"application/json\")", "def get():\n data = []\n start = request.args.get('start', 0, type=int)\n rows = request.args.get('rows', 10, type=int)\n query = request.args.get('query', '')\n if not query:\n return jsonify(data), 200\n\n try:\n solr_query, nr_number, nr_name = SolrQueries.get_parsed_query_name_nr_search(query)\n condition = ''\n if nr_number:\n condition = f\"requests.nr_num ILIKE '%{nr_number}%'\"\n if nr_name:\n if condition:\n condition += ' OR '\n name_condition = \"requests.name_search ILIKE '%\"\n name_condition += \"%' AND requests.name_search ILIKE '%\".join(nr_name.split())\n name_condition += \"%'\"\n\n condition += f'({name_condition})'\n\n results = RequestDAO.query.filter(\n RequestDAO.stateCd.in_([State.DRAFT, State.INPROGRESS, State.REFUND_REQUESTED]),\n text(f'({condition})')\n ).options(\n lazyload('*'),\n eagerload(RequestDAO.names).load_only(Name.name),\n load_only(\n RequestDAO.id,\n RequestDAO.nrNum\n )\n ).order_by(RequestDAO.submittedDate.desc()).limit(rows).all()\n\n data.extend([{\n # 'id': nr.id,\n 'nrNum': nr.nrNum,\n 'names': [n.name for n in nr.names]\n } for nr in results])\n\n while len(data) < rows:\n nr_data, have_more_data = RequestSearch._get_next_set_from_solr(solr_query, start, rows)\n nr_data = nr_data[:(rows - len(data))]\n data.extend([{\n # 'id': nr.id,\n 'nrNum': nr.nrNum,\n 'names': [n.name for n in nr.names]\n } for nr in nr_data])\n\n if not have_more_data:\n break # no more data in solr\n start += rows\n\n return jsonify(data), 200\n except Exception:\n return jsonify({'message': 'Internal server error'}), 500", "def search():\n # response = request.json['search']\n jsonObj = request.get_json()\n query = str(jsonObj['query'])\n regex = re.compile(query, re.IGNORECASE)\n results = []\n # Build a pymongo command to search the document by query term. 
Only executes if active is set to True.\n # Only matches _id\n active = True\n client = MongoClient(db_config)\n if active == True:\n # Search Publications\n db = client['Publications']\n publications = db['Publications']\n pcount = publications.find({\"_id\": regex}).count()\n p = publications.find({\"_id\": regex})\n # Search Corpus\n db = client['Corpus']\n corpus = db['Corpus']\n ccount = corpus.find({\"_id\": regex}).count()\n c = corpus.find({\"_id\": regex})\n\n htmlResult = \"\"\n if pcount == 0:\n htmlResult = \"<h4>No publications found.</h4>\"\n else:\n htmlResult = \"<h4>Publications: \" + str(pcount) + \"</h4>\"\n htmlResult += \"<ul>\"\n for item in p:\n args = '?_id=' + item[\"_id\"] + '&amp;path=' + item[\"path\"]\n htmlResult += '<li><a href=\"/publications/edit' + args + '\">' + item[\"_id\"] + '</a></li>'\n htmlResult += \"</ul>\"\n\n htmlResult += \"<hr>\"\n\n if ccount == 0:\n htmlResult += \"<h4>No corpus items found.</h4>\"\n else:\n htmlResult += \"<h4>Corpus: \" + str(ccount) + \"</h4>\"\n htmlResult += \"<ul>\"\n for item in c:\n args = '?_id=' + item[\"_id\"] + '&amp;path=' + item[\"path\"]\n htmlResult += '<li><a href=\"/corpus/collection/edit' + args + '\">' + item[\"_id\"] + '</a></li>'\n htmlResult += \"</ul>\"\n\n # Return the Ajax response\n return htmlResult", "def get(self):\n queries = {\"wildcard_properties\": []}\n\n fullname_query = request.args.get(\"fullName\", None)\n email_query = request.args.get(\"email\", None)\n\n if fullname_query:\n queries[\"fullName\"] = f\"TextP.startingWith('{fullname_query}')\"\n queries[\"wildcard_properties\"].append(\"fullName\")\n if email_query:\n queries[\"fullName\"] = f\"TextP.startingWith('{email_query}')\"\n queries[\"wildcard_properties\"].append(\"email\")\n\n users = User.filter(limit=10, **queries)\n response = UserListSchema(many=True).dumps(users).data\n\n return jsonify_response(json.loads(response), 200)", "def users(self, predicate=None):\n \n if predicate is None:\n return self._get(\"users\").json()\n else:\n return self._get(\"users/search\", params={\"predicate\":predicate}).json()", "def fetch_full_name_from_people(self):\n url = 'https://people.djangoproject.com/search/?q={0}'.format(self.full_name.replace(\" \", \"+\"))\n request = requests.get(url)\n soup = BeautifulSoup(request.content)\n vcards = soup.findAll(\"li\", { \"class\" : \"vcard\" })\n if len(vcards) == 1:\n for vcard in soup.findAll(\"li\", { \"class\" : \"vcard\" }):\n people_username = vcard.findAll(\"a\", { \"class\" : \"url fn n\" })[0].attrs['href'].strip(\"/\")\n if self.get_existing_speaker_by_people(people_username):\n self = self.get_existing_speaker_by_people(people_username)\n self.people = people_username\n self.photo = soup.findAll(\"img\", { \"class\" : \"main photo\" })[0].attrs['src']\n self.prenom = soup.findAll(\"span\", { \"class\" : \"given-name\" })[0].renderContents()\n self.save()\n elif len(vcards) == 0:\n return False\n elif len(vcards) > 1:\n raise Exception(\"{0} results found! 
No records created.\"\n \"\".format(len(vcards)))", "def get_all_persons(self):\r\n return self.__person_repository.elements", "def get_items(id_name, request, client):\n result = client.quick_search(request)\n \n items_pages = []\n limit_to_x_pages = None\n for page in result.iter(limit_to_x_pages):\n items_pages.append(page.get())\n\n items = [item for page in items_pages for item in page['features']]\n \n \n return (id_name, items)", "def search(request, template_name='congregation/search_results.html'):\n context = {}\n if request.GET:\n stop_word_list = re.compile(STOP_WORDS, re.IGNORECASE)\n search_term = '%s' % request.GET['findperson']\n cleaned_search_term = stop_word_list.sub('', search_term)\n cleaned_search_term = cleaned_search_term.strip()\n terms = cleaned_search_term.split(' ')\n if len(cleaned_search_term) != 0:\n people_list = Person.objects.filter(\n Q(first_name__icontains=cleaned_search_term) | \n Q(last_name__icontains=cleaned_search_term) | \n Q(preferred_first_name__icontains=cleaned_search_term) \n )\n if len(people_list) < 1:\n people_list = Person.objects.filter(\n Q(first_name__iregex=r'(' + '|'.join(terms) + ')') | Q(preferred_first_name__iregex=r'(' + '|'.join(terms) + ')')\n ).filter(last_name__iregex=r'(' + '|'.join(terms) + ')')\n if len(people_list)<1:\n message = 'no match found for '\n else:\n message = 'matches found for '\n else:\n message = 'matches found for '\n\n people_list = people_list.exclude(deceased__isnull=False).exclude(active=False).exclude(opt_in_directory=False)\n context = {'object_list': people_list, 'message': message, 'search_term':search_term, }\n else:\n message = 'Search term was too vague. Please try again.'\n context = {'message':message}\n return render_to_response(template_name, context, context_instance=RequestContext(request))", "def user_search_partial():\n username = request.args.get('search') or ''\n\n ret = []\n for user in User.query.filter(User.name.ilike(username + \"%\")):\n ret.append({\n \"id\": user.id,\n \"name\": user.name\n })\n return json.dumps(ret)", "def search_companies(request):\n search = request.data.get('search', None)\n if search:\n companies = Company.objects.filter(name__search=search)\n else:\n companies = Company.objects.all()\n \n context={'user_id': request.user.id}\n serializer = CompanySerializers(companies, context=context)\n print(search)\n return Response(serializer.data)", "def build_query_people(search_params):\n occupation = search_params.GET['occ']\n sort_by = search_params.GET['sort_by']\n language = search_params.GET['language']\n sort_field = \"?name\" if(sort_by == 'other') else \"?name_en\"\n people_qry = \"\"\"\n select distinct ?person ?name ?abstract ?name_en ?abstract_en\n where{\n ?person <http://dbpedia.org/ontology/abstract> ?abstract_en .\n ?person <http://www.w3.org/2000/01/rdf-schema#label> ?name_en .\n FILTER(LANG(?abstract_en) = 'en')\n FILTER(LANG(?name_en) = 'en')\n {select distinct ?person ?name ?abstract {\n ?person <http://dbpedia.org/ontology/occupation> <\"\"\" + occupation + \"\"\"> .\n ?person <http://dbpedia.org/ontology/abstract> ?abstract .\n ?person <http://www.w3.org/2000/01/rdf-schema#label> ?name .\n ?person a <http://dbpedia.org/ontology/Person> .\n FILTER(LANG(?abstract) = '\"\"\" + language + \"\"\"')\n FILTER(LANG(?name) = '\"\"\" + language + \"\"\"')\n }}} ORDER BY \"\"\" + sort_field\n return people_qry", "def get_people(self, letter = None):\n if letter:\n people = Person.objects.filter(member_of__entity__in = self.get_descendants(include_self = 
True), surname__istartswith = letter).distinct().order_by('surname', 'given_name', 'middle_names')\n else: \n people = Person.objects.filter(member_of__entity__in = self.get_descendants(include_self = True)).distinct().order_by('surname', 'given_name', 'middle_names')\n return people", "def wikidata_search(request, str):\n url_head = 'https://www.wikidata.org/w/api.php?action=wbsearchentities&search='\n url_tail = '&language=en&format=json'\n if request.method == 'GET':\n r = requests.get(url_head+str+url_tail);\n return Response(r.json()['search'])\n #print r", "def list_people():\n\n person_list = []\n for person in person_database:\n person_list.append(person)\n return person_list", "def query(self):\n query_url = self.get_query_url()\n logging.info('Querying: ' + query_url)\n json_data = request.urlopen(query_url).read().decode()\n logging.debug('Retrieved the following ' + json_data)\n response = json.loads(json_data)\n\n return self.get_docs_from_response(response)", "def persons(self):\r\n return persons.Persons(self)", "def participant_list(request):\n if request.method == 'GET':\n if request.GET.get('search'):\n request_terms = request.GET.get('search')\n search_terms_array = request_terms.split()\n\n initial_term = search_terms_array[0]\n participant_list = Participant.objects.filter(\n Q(fullname__icontains=initial_term) |\n Q(email__icontains=initial_term))\n\n if len(search_terms_array) > 1:\n for term in range(1, len(search_terms_array)):\n participant_list = participant_list.filter(Q(fullname__icontains=search_terms_array[term]) |\n Q(email__icontains=search_terms_array[term]))\n else:\n participant_list = get_list_or_404(Participant)\n paginator = PageNumberPagination()\n results = paginator.paginate_queryset(participant_list, request)\n serializer = ParticipantSerializer(results, many=True)\n return paginator.get_paginated_response(serializer.data)", "def do_search(arg):\n result = {'count': 0, 'time': 0, 'records': []}\n try:\n uri, q, k, m = arg\n dqp = Pyro.core.getProxyForURI(uri)\n scoresLen,results,indocids,exdocids = dqp.search(q, k, m)\n result=(scoresLen,results,indocids,exdocids)\n except Exception as e:\n print \"Exception:\", e\n return result", "def search_contact(request, **kwargs):\n limit = int(request.GET.get('limit', constants.DEFAULT_LIMIT))\n offset = int(request.GET.get('offset', constants.DEFAULT_OFFSET))\n search_term = request.GET.get('search_term')\n contact = private.Contact()\n data = contact.fetch_list(limit, offset, search_term)\n return JsonResponse({'objects': data})", "def papersearch(category, search_me, userid):\n\n user_id = userid\n search_me = search_me.strip()\n research_papers = {}\n all_papers = []\n # get arxiv results\n try:\n papers = wisdomaiengine.getarxivresults(search_me.lower())\n for paper in papers:\n all_papers.append(paper[1])\n research_papers[\"arxiv\"] = papers\n except:\n research_papers[\"arxiv\"] = \"\"\n # get google scholar results\n try:\n google_scholar = wisdomaiengine.getgooglescholar(search_me.lower())\n research_papers[\"google scholar\"] = google_scholar\n for paper in google_scholar:\n all_papers.append(paper[-1])\n except:\n research_papers[\"google scholar\"] = \"\"\n # get DOAJ articles\n try:\n doaj = wisdomaiengine.getdoajarticles(search_me.lower())\n research_papers[\"DOAJ\"] = doaj\n for article in doaj:\n all_papers.append(article[2])\n except:\n research_papers[\"DOAJ\"] = \"\"\n # get wordcloud of all papers\n try:\n all_papers_text = \" \".join(a for a in all_papers)\n wordcloud = 
wisdomaiengine.wordcloud(search_me, all_papers_text)\n except:\n wordcloud = \"No topics found!...\"\n # return json object\n jsonob = jsonify(papers=research_papers,\n wordcloud=wordcloud)\n return jsonob", "def person_search_qs(request):\n query = request.GET\n qs = Person.objects.filter(expiry_date__gt=timezone.now())\n\n # base registration and application querysets\n registrations_qs = Register.objects.all()\n applications_qs = RegistriesApplication.objects.all()\n\n person_filters = Q()\n reg_filters = Q()\n appl_filters = Q()\n\n\n # Search for cities (split list and return all matches)\n # search comes in as a comma-separated querystring param e.g: ?city=Atlin,Lake Windermere,Duncan\n cities = query.get('city', None)\n if cities:\n cities = cities.split(',')\n person_filters = person_filters & Q(registrations__organization__city__in=cities)\n reg_filters = reg_filters & Q(organization__city__in=cities)\n \n # regional areas\n region_guids = query.get('region', None)\n if region_guids:\n region_guids = region_guids.split(',')\n regional_areas = RegionalArea.objects.filter(regional_area_guid__in=region_guids)\n person_filters &= Q(registrations__organization__regional_areas__in=regional_areas)\n reg_filters &= Q(organization__regional_areas__in=regional_areas)\n\n #bbox\n sw_long = query.get('sw_long')\n sw_lat = query.get('sw_lat')\n ne_long = query.get('ne_long')\n ne_lat = query.get('ne_lat')\n if sw_long and sw_lat and ne_long and ne_lat:\n try:\n bbox = Polygon.from_bbox((sw_long, sw_lat, ne_long, ne_lat))\n bbox.srid = 4326\n person_filters = person_filters & Q(registrations__organization__geom__bboverlaps=bbox)\n reg_filters = reg_filters & Q(organization__geom__bboverlaps=bbox)\n except (ValueError, GEOSException):\n pass\n\n #Subactivities param comes as a csv list\n subactivities = query.get('subactivities')\n if subactivities is not None: \n subactivities = subactivities.split(\",\")\n person_filters = person_filters & \\\n Q(registrations__applications__subactivity__registries_subactivity_code__in=subactivities)\n reg_filters = reg_filters & \\\n Q(applications__subactivity__registries_subactivity_code__in=subactivities)\n appl_filters = appl_filters & \\\n Q(subactivity__registries_subactivity_code__in=subactivities)\n\n activity = query.get('activity', None)\n status = query.get('status', None)\n user_is_staff = request.user.groups.filter(name=REGISTRIES_VIEWER_ROLE).exists()\n\n if activity:\n if (status == 'P' or not status) and user_is_staff:\n # We only allow staff to filter on status\n # For pending, or all, we also return search where there is no registration.\n person_filters = person_filters & \\\n (\n Q(registrations__registries_activity__registries_activity_code=activity) |\n Q(registrations__isnull=True)\n )\n reg_filters = reg_filters & \\\n Q(registries_activity__registries_activity_code=activity)\n else:\n # For all other searches, we strictly filter on activity.\n person_filters = person_filters & \\\n Q(registrations__registries_activity__registries_activity_code=activity)\n reg_filters = reg_filters & \\\n Q(registries_activity__registries_activity_code=activity)\n\n if user_is_staff:\n # User is logged in\n if status:\n if status == 'Removed':\n # Things are a bit more complicated if we're looking for removed, as the current\n # status doesn't come in to play.\n person_filters = person_filters & \\\n Q(registrations__applications__removal_date__isnull=False)\n reg_filters = reg_filters & \\\n Q(applications__removal_date__isnull=False)\n appl_filters = 
appl_filters & \\\n Q(removal_date__isnull=False)\n else:\n if status == 'P':\n # If the status is pending, we also pull in any people without registrations\n # or applications.\n person_filters = person_filters & \\\n (\n Q(registrations__applications__current_status__code=status) |\n Q(registrations__isnull=True) |\n Q(registrations__applications__isnull=True) \n ) & \\\n Q(registrations__applications__removal_date__isnull=True)\n reg_filters = reg_filters & \\\n (\n Q(applications__current_status__code=status) | \n Q(applications__isnull=True) \n ) & \\\n Q(applications__removal_date__isnull=True) \n appl_filters = appl_filters & \\\n (\n Q(current_status__code=status) \n #Q(isnull=True) \n ) & \\\n Q(removal_date__isnull=True) \n else:\n person_filters = person_filters & \\\n (\n Q(registrations__applications__current_status__code=status) &\n Q(registrations__applications__removal_date__isnull=True)\n )\n reg_filters = reg_filters & \\\n (\n Q(applications__current_status__code=status) &\n Q(applications__removal_date__isnull=True)\n )\n appl_filters = appl_filters & \\\n (\n Q(current_status__code=status) &\n Q(removal_date__isnull=True)\n )\n else:\n # User is not logged in\n # Only show active drillers to non-admin users and public\n person_filters = person_filters & \\\n (\n Q(registrations__applications__current_status__code='A') &\n Q(registrations__applications__removal_date__isnull=True)\n )\n\n reg_filters = reg_filters & \\\n (\n Q(applications__current_status__code='A') &\n Q(applications__removal_date__isnull=True)\n )\n appl_filters= appl_filters & \\\n (\n Q(current_status='A') & \n Q(removal_date__isnull=True)\n )\n\n #apply all the \"main\" and \"registration\" filters that were chosen above\n qs = qs.filter(person_filters)\n registrations_qs = registrations_qs.filter(reg_filters)\n applications_qs = applications_qs.filter(appl_filters)\n\n\n # generate applications queryset\n applications_qs = applications_qs \\\n .select_related(\n 'current_status',\n 'primary_certificate',\n 'primary_certificate__cert_auth',\n 'subactivity',\n ) \\\n .prefetch_related(\n 'subactivity__qualification_set',\n 'subactivity__qualification_set__well_class'\n ).distinct() \n\n # generate registrations queryset, inserting filtered applications queryset defined above\n registrations_qs = registrations_qs \\\n .select_related(\n 'registries_activity',\n 'organization',\n 'organization__province_state',\n ) \\\n .prefetch_related(\n Prefetch('applications', queryset=applications_qs)\n ).distinct() \n\n # insert filtered registrations set\n qs = qs \\\n .prefetch_related(\n Prefetch('registrations', queryset=registrations_qs)\n )\n\n return qs.distinct()", "def search_results(request):\n #key\n\n user_input = request.GET['q']\n\n people_objs = Person.objects.filter(Q(last__contains=user_input) | Q(\n first__contains=user_input))\n document_objs = Document.objects.filter(title__contains=user_input)\n folder_objs = Folder.objects.filter(full__contains=user_input)\n organization_objs = Organization.objects.filter(Q(name__contains=user_input)|Q(\n location__contains=user_input))\n obj_dict = {\n 'people_objs': people_objs,\n 'document_objs': document_objs,\n 'folder_objs': folder_objs,\n 'organization_objs': organization_objs,\n 'query': user_input,\n }\n response = render(request, 'search_results.jinja2', obj_dict)\n return response", "def test_get_with_filter_person(mockclient_cl1):\n r = mockclient_cl1.get(TEST_URL + \"?size=100&p=P00022\")\n assert r.status_code == 200\n assert 
len(r.json[\"statements\"]) == 6", "def get_candidates(data):\n return data.groups[\"Candidates\"].objects", "def search_service(self, name_filter):\n rs=search_service(name_filter)\n for el in rs:\n print(el)", "def listSearches(self, authenticationToken):\r\n pass", "def __ui_search_persons_by_name(self):\n searched_name = input(\"Introduce the name: \").strip().lower()\n if searched_name == \"\":\n print(\"You cannot search persons by an empty name!\\n\")\n return\n\n searched_persons = self.__person_service.find_persons_by_name(searched_name)\n\n if len(searched_persons) == 0:\n print('There is no person whose name contains \"{}\"!\\n'.format(searched_name))\n else:\n print(\"\")\n for person in searched_persons:\n print(person)\n print(\"\")", "def search():\n results = []\n for row in db.session.query(DBcorpus):\n serialized = fix_corpus_format(CorpusSchema().dump(row).data)\n results.append(serialized)\n return results, 200", "def read_people():\n try:\n conn = sqlite3.connect(settings.database_name)\n conn.row_factory = sqlite3.Row\n c = conn.cursor()\n c.execute(\"PRAGMA foreign_keys = ON\")\n c.execute(\"SELECT * FROM person LIMIT {0};\".format(settings.search_result_row_limit))\n p = []\n for row in c:\n _person = Person()\n _person.person_id = row[\"personid\"]\n _person.first_name = row[\"firstname\"]\n _person.last_name = row[\"lastname\"]\n _person.middle_initial = row[\"middleinitial\"]\n _person.nick_name = row[\"nickname\"]\n _person.date_of_birth = row[\"dateofbirth\"]\n _person.date_of_death = row[\"dateofdeath\"]\n p.append(_person)\n conn.close()\n return p\n except:\n return []", "def processSearchResult(self):", "def get_users_by_name(name_):\n users = User.es.search(name_)\n return users", "def search(self, name=None):\r\n params = base.get_params(('name', ), locals())\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def search_users(self, q):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/User/SearchUsers/\"))", "def list_people():\n conn = get_db()\n try:\n cur = conn.cursor()\n try:\n # Note: don't use prefixes like \"oktatas.\" above for tables\n # within your own schema, as it ruins portability.\n # This table has 10k rows, so we intentionally limit the result set to 50\n # (Oracle note: not the first 50 rows by name, but rather\n # the first 50 rows of the table, which are then ordered by name).\n # Also, long queries can be broken into two shorter lines like this\n cur.execute('''SELECT szemelyi_szam, nev FROM oktatas.szemelyek\n WHERE ROWNUM < 50 ORDER BY nev ASC''')\n # there's a better way, but outside the scope of this lab:\n # http://docs.python.org/2/tutorial/datastructures.html#list-comprehensions\n results = []\n # we make use of the fact that\n # - cursors are iterable and\n # - `for` can unpack objects returned by each iteration\n for szemelyi_szam, nev in cur:\n results.append({'szemelyi_szam': szemelyi_szam, 'nev': nev})\n return jsonify(szemelyek=results)\n finally:\n cur.close()\n finally:\n # this is also a naive implementation, a more Pythonic solution:\n # http://docs.python.org/2/library/contextlib.html#contextlib.closing\n conn.close()", "def list_people():\n conn = get_db()\n try:\n cur = conn.cursor()\n try:\n # Note: don't use prefixes like \"oktatas.\" above for tables\n # within your own schema, as it ruins portability.\n # This table has 10k rows, so we 
intentionally limit the result set to 50\n # (Oracle note: not the first 50 rows by name, but rather\n # the first 50 rows of the table, which are then ordered by name).\n # Also, long queries can be broken into two shorter lines like this\n cur.execute('''SELECT szemelyi_szam, nev FROM oktatas.szemelyek\n WHERE ROWNUM < 50 ORDER BY nev ASC''')\n # there's a better way, but outside the scope of this lab:\n # http://docs.python.org/2/tutorial/datastructures.html#list-comprehensions\n results = []\n # we make use of the fact that\n # - cursors are iterable and\n # - `for` can unpack objects returned by each iteration\n for szemelyi_szam, nev in cur:\n results.append({'szemelyi_szam': szemelyi_szam, 'nev': nev})\n return jsonify(szemelyek=results)\n finally:\n cur.close()\n finally:\n # this is also a naive implementation, a more Pythonic solution:\n # http://docs.python.org/2/library/contextlib.html#contextlib.closing\n conn.close()", "def get_people(self):\n cur = self.conn.cursor(pymysql.cursors.DictCursor)\n\n cur.execute('SELECT name FROM Client;')\n\n return CursorIterator(cur)", "def search(self, query, maxhits=100):", "def get_people(self):\n url = self.base_url + 'memberships'\n\n req = requests.get(headers=self.headers, url=url)\n\n return req.json()", "def get_persons(self):\n response = self.do_request('/misc/user/export/json')\n if response:\n return response.json()", "def search(request):\n # Get data form request\n name = request.DATA.get('first_name', \"''\")\n if name == \"\":\n\n first_names = request.DATA.get('first_name', \"''\")\n last_names = request.DATA.get('last_name', \"''\")\n display_names = request.DATA.get('display_name', \"''\")\n else:\n first_names = name\n last_names = name\n display_names = name\n genders = request.DATA.get('gender', \"''\")\n\n handicaps = request.DATA.get('handicap', \"''\")\n business_area = request.DATA.get('business_area', \"''\")\n city = request.DATA.get('city', \"''\")\n district = request.DATA.get('district', \"''\")\n age = request.POST.get('age', 0)\n dob_year = int(datetime.now().year) - int(age)\n min_dob = datetime.strptime(str(dob_year) + '-01-1', '%Y-%m-%d')\n max_dob = datetime.strptime(str(dob_year) + '-12-30', '%Y-%m-%d')\n\n # Search by single properties\n results = SearchQuerySet().filter(first_name=first_names).filter(last_name=last_names\n ).filter(gender=genders).filter(\n display_name=display_names).filter(handicap_us=handicaps\n ).filter(handicap_36=handicaps\n ).filter(business_area=business_area).filter(city=city\n ).filter(\n district=district)\n\n if age is not 0:\n results.filter(dob__gte=min_dob, dob__lte=max_dob)\n # Get List user\n queryset = User.objects.all()\n # Create result list\n results_list = []\n # Get User to list by id\n max_loop = results.count()\n for x in range(0, max_loop):\n user = get_object_or_404(queryset, pk=results[x].object.id)\n results_list.append(user)\n # Convert to serializer\n serializer = UserSerializer(results_list, many=True)\n if serializer.is_valid:\n return Response({'status': '200', 'code': 'OK_SEARCH',\n 'detail': serializer.data}, status=200)\n else:\n return Response({'status': '400', 'code': 'E_INVALID_PARAMETER_VALUES',\n 'detail': serializer.errors}, status=400)", "def _get_persons(self):\n if not hasattr(self, 'persons'):\n url = \"http://www.kongehuset.no/program.html?tid=27511&sek=26946\"\n r = requests.get(url)\n soup = BeautifulSoup(r.text, \"html.parser\")\n options = soup.find(\"select\", { \"name\": \"person\" })\\\n .find_all(\"option\")\n self.persons = 
zip(\n [x.text for x in options],\n [x[\"value\"] for x in options]\n )\n return self.persons[2:]", "def search():\n # q is the name of the http parameter\n request.args.get(\"q\")\n\n #check for missing arguments\n if not(request.args.get(\"q\")):\n raise RuntimeError(\"Missing geo!\")\n\n #\"%\":match any number of characters\n q=request.args.get(\"q\") + \"%\"\n\n #retrieve data from database\n rows=db.execute(\"SELECT * from places WHERE postal_code LIKE :pc OR place_name LIKE :city OR admin_name1 LIKE :state\", pc=q,city=q,state=q)\n\n return jsonify(rows)", "def get_searchdata(user, filter=None):\n # note to Jonas, useful python debuging tool. \n #import pdb\n maps = get_maps(user)\n searchdata = []\n for the_map in maps.values():\n topic = the_map.main_topic\n for subtopic in the_map.subtopics.values():\n \n #pdb.set_trace()\n for url in subtopic.urls.values():\n searchdata.append([topic, subtopic.text, url])\n return searchdata", "def fetch_all(): \n client, index_name = connection_es()\n res = client.search(index = index_name+\"*\")\n return res", "def query(self, *args, **kwargs) -> List[str]:\r\n self.logger.info(\"Returning Manual Users\")\r\n\r\n return kwargs['users']", "def articles():\n\n #The URL parameters are available in request.args, which is a MultiDict that has a get method\n request.args.get(\"geo\")\n\n #check for missing arguments\n if not(request.args.get(\"geo\")):\n raise RuntimeError(\"Missing geo!\")\n\n articles= lookup(request.args.get(\"geo\"))\n\n #server response\n return jsonify(articles)", "def people(self):\r\n return pp.People(self)", "def autocomplete():\n value = str(request.args.get('q'))\n result = s.query(Genes).filter(Genes.name.like(\"%\" + value + \"%\")).all()\n data = [i.name for i in result]\n return jsonify(matching_results=data)", "def get_users(self):\n fields = ['name', ]\n return self.get_data(\"myUsers\", fields)", "def get_persons(self):\n response = self.do_request('/management/persons/export/json/')\n if response:\n return response.json()", "def api_plain_user_search(request):\n if request.GET.get('query'):\n users = search_for_plain_users(request.GET.get('query'))\n return JsonResponse(users, safe=False) \n return render_json(error=u'Mangler søkestreng')", "def get_users(filter, api_site_parameter, page = 1, pagesize = 30, sort = 'reputation'):\n path = \"users\"\n results = __fetch_results(path, api_site_parameter, inname= filter, page = page, pagesize = pagesize, sort = sort)\n return results", "def list(self, request, *args, **kwargs):\n if request.query_params.get('filter[fullName]'):\n name_filter = request.query_params.get('filter[fullName]')\n\n # List of all guests who are non-plus-ones\n people_dict = {\n p.__str__(): p.id\n for p in Person.objects.all() if not p.is_plus_one\n }\n people = list(people_dict)\n match_list = extract(name_filter, people)\n\n # If top two matched names are similar enough (within a match\n # rating of 10), we return both names and have the user pick the\n # correct one.\n if match_list[0][1] - match_list[1][1] < 10:\n\n # Using extract() above, it just returns the best two name\n # matches, but both those matches don't necessarily have to be\n # *close* matches (if every other match is even worse, for\n # example).\n #\n # So if we have two names that match closely with each other,\n # but both are bad matches to the original, we don't want to\n # return either of them. We check here if the best name match\n # has an over 70% accuracy ratio with the `name_filter`. 
If it\n # doesn't, we return nothing. We transform everything to\n # lower() because the ratio method cares about case, and we\n # don't.\n if ratio(name_filter.lower(), match_list[0][0].lower()) > 70:\n queryset = Person.objects.filter(\n Q(id=people_dict[match_list[0][0]])\n | Q(id=people_dict[match_list[1][0]]))\n else:\n queryset = Person.objects.none()\n else:\n # Return the queryset filtered to a single person, but only if\n # the filtered name matches the returned name with a greater\n # than 70% degree of accuracy.\n queryset = Person.objects.filter(\n id=people_dict[match_list[0][0]]) if ratio(\n name_filter.lower(), match_list[0][0]\n .lower()) > 70 else Person.objects.none()\n\n try:\n queryset\n except NameError:\n queryset = self.filter_queryset(self.get_queryset())\n\n # Boilerplate code taken from parent's list method\n page = self.paginate_queryset(queryset)\n if page is not None:\n serializer = self.get_serializer(page, many=True)\n return self.get_paginated_response(serializer.data)\n\n serializer = self.get_serializer(queryset, many=True)\n return Response(serializer.data)", "def getMembers():", "def getMembers():", "def getMembers():", "def getMembers():", "def getSearch(self, authenticationToken, guid):\r\n pass", "def list(self, request, **kwargs):\n queryset = self.get_queryset()\n filtered_queryset = self.filter_queryset(queryset)\n filtered_queryset = exclude_persons_without_registrations(request, filtered_queryset) \n\n page = self.paginate_queryset(filtered_queryset)\n if page is not None:\n serializer = PersonListSerializer(page, many=True)\n return self.get_paginated_response(serializer.data)\n\n serializer = PersonListSerializer(filtered_queryset, many=True)\n return Response(serializer.data)", "def queryuser(q, limit=10):\n _, idx1 = idquery.query(q)\n _, idx2 = nicknamequery.query(q)\n idx = list(set(idx1 + idx2))\n if len(idx)>999:\n idx = idx[:999]\n rst = db_session.query(User.id, User.nickname).filter(User.index.in_(idx)).\\\n order_by(User.score.desc(), User.active.asc()).limit(limit).all()\n return [{'id':itm[0], 'name':itm[1]} for itm in rst]", "def get_all_users():", "def search(self, term):", "def researchbytype():\n if request.method == 'GET':\n user2 = request.args.get('type')\n data2 = {}\n data2 = Beers.find({\"Type\":user2}, {\"_id\":0})\n return fct.returning(data2)", "def f1results():\n\n FIELDS = {'_id': False, }\n\n with MongoClient(MONGO_URI) as conn:\n collection = conn[DBS_NAME][COLLECTION_NAME]\n results = collection.find(projection=FIELDS)\n return json.dumps(list(results))", "def search(request):\n\n term = \"\"\n organizations = None\n memberships = None\n events = None\n persons = None\n airports = None\n training_requests = None\n comments = None\n only_result = None\n\n if request.method == \"GET\" and \"term\" in request.GET:\n form = SearchForm(request.GET)\n if form.is_valid():\n term = form.cleaned_data.get(\"term\", \"\")\n tokens = re.split(r\"\\s+\", term)\n\n organizations = Organization.objects.filter(\n Q(domain__icontains=term) | Q(fullname__icontains=term)\n ).order_by(\"fullname\")\n if len(organizations) == 1 and not only_result:\n only_result = organizations[0]\n\n memberships = Membership.objects.filter(\n registration_code__icontains=term\n ).order_by(\"-agreement_start\")\n if len(memberships) == 1 and not only_result:\n only_result = memberships[0]\n\n events = Event.objects.filter(\n Q(slug__icontains=term)\n | Q(host__domain__icontains=term)\n | Q(host__fullname__icontains=term)\n | Q(url__icontains=term)\n | 
Q(contact__icontains=term)\n | Q(venue__icontains=term)\n | Q(address__icontains=term)\n ).order_by(\"-slug\")\n if len(events) == 1 and not only_result:\n only_result = events[0]\n\n # if user searches for two words, assume they mean a person\n # name\n if len(tokens) == 2:\n name1, name2 = tokens\n complex_q = (\n (Q(personal__icontains=name1) & Q(family__icontains=name2))\n | (Q(personal__icontains=name2) & Q(family__icontains=name1))\n | Q(email__icontains=term)\n | Q(secondary_email__icontains=term)\n | Q(github__icontains=term)\n )\n persons = Person.objects.filter(complex_q)\n else:\n persons = Person.objects.filter(\n Q(personal__icontains=term)\n | Q(family__icontains=term)\n | Q(email__icontains=term)\n | Q(secondary_email__icontains=term)\n | Q(github__icontains=term)\n ).order_by(\"family\")\n\n if len(persons) == 1 and not only_result:\n only_result = persons[0]\n\n airports = Airport.objects.filter(\n Q(iata__icontains=term) | Q(fullname__icontains=term)\n ).order_by(\"iata\")\n if len(airports) == 1 and not only_result:\n only_result = airports[0]\n\n training_requests = TrainingRequest.objects.filter(\n Q(group_name__icontains=term)\n | Q(family__icontains=term)\n | Q(email__icontains=term)\n | Q(github__icontains=term)\n | Q(affiliation__icontains=term)\n | Q(location__icontains=term)\n | Q(user_notes__icontains=term)\n )\n if len(training_requests) == 1 and not only_result:\n only_result = training_requests[0]\n\n comments = Comment.objects.filter(\n Q(comment__icontains=term)\n | Q(user_name__icontains=term)\n | Q(user_email__icontains=term)\n | Q(user__personal__icontains=term)\n | Q(user__family__icontains=term)\n | Q(user__email__icontains=term)\n | Q(user__github__icontains=term)\n ).prefetch_related(\"content_object\")\n if len(comments) == 1 and not only_result:\n only_result = comments[0]\n\n # only 1 record found? Let's move to it immediately\n if only_result and not form.cleaned_data[\"no_redirect\"]:\n msg = format_html(\n \"You were moved to this page, because your search <i>{}</i> \"\n \"yields only this result.\",\n term,\n )\n if isinstance(only_result, Comment):\n messages.success(request, msg)\n return redirect(\n only_result.content_object.get_absolute_url()\n + \"#c{}\".format(only_result.id)\n )\n elif hasattr(only_result, \"get_absolute_url\"):\n messages.success(request, msg)\n return redirect(only_result.get_absolute_url())\n\n else:\n messages.error(request, \"Fix errors below.\")\n\n # if empty GET, we'll create a blank form\n else:\n form = SearchForm()\n\n context = {\n \"title\": \"Search\",\n \"form\": form,\n \"term\": term,\n \"organisations\": organizations,\n \"memberships\": memberships,\n \"events\": events,\n \"persons\": persons,\n \"airports\": airports,\n \"comments\": comments,\n \"training_requests\": training_requests,\n }\n return render(request, \"dashboard/search.html\", context)" ]
[ "0.64752334", "0.6471866", "0.6469839", "0.6443082", "0.63948816", "0.63601685", "0.62068886", "0.61978406", "0.6176863", "0.61702245", "0.60960656", "0.6083975", "0.60412025", "0.602639", "0.60190237", "0.59710723", "0.59653676", "0.5906777", "0.58994013", "0.5892216", "0.5872057", "0.5869428", "0.5832438", "0.58284485", "0.5819416", "0.5804897", "0.57713866", "0.5763683", "0.57485986", "0.57379526", "0.57155925", "0.5712031", "0.57112473", "0.570885", "0.5708598", "0.5702822", "0.56916684", "0.5681967", "0.56781423", "0.56762546", "0.5671847", "0.56714916", "0.5662859", "0.565845", "0.5657404", "0.5653055", "0.563784", "0.56282884", "0.56269884", "0.56204456", "0.5594319", "0.5575733", "0.55740434", "0.55682486", "0.5558577", "0.5556371", "0.5538899", "0.55319774", "0.5526882", "0.5519926", "0.55135685", "0.5507179", "0.5505986", "0.5497954", "0.54912925", "0.5490061", "0.5488887", "0.54835635", "0.5482918", "0.5479211", "0.5479211", "0.5473625", "0.5472605", "0.5468914", "0.54646605", "0.54627264", "0.54447776", "0.54305357", "0.5427456", "0.5425114", "0.5417429", "0.5409204", "0.54079443", "0.54072714", "0.5401605", "0.5401425", "0.53968215", "0.5394911", "0.53789896", "0.5373673", "0.5373673", "0.5373673", "0.5373673", "0.5373466", "0.5368069", "0.5359269", "0.5353088", "0.53513867", "0.53487176", "0.5345829", "0.53428584" ]
0.0
-1
get organisations for solr
def transform_organisations(self, instance):
    return self.transform_entity(instance, 'Organisation')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def organizations(self):\n return self.get('{}/orgs'.format(ApiVersion.A1.value))", "def get_org_list():\r\n\r\n resp = requests.get(''.join([Kegg.BASE_URL, 'list/organism']))\r\n return resp.text", "def organizations(self):\n self.elements('organizations')", "def list(self) -> List[Organisation]:\n ...", "def test_retrieve_l_organizations(self):\n pass", "def organizations(self):\r\n return organizations.Organizations(self)", "async def get_organizations(request: Request):\n redis = request.app.state.redis\n organizations_obj = orjson.loads(await redis.get_key(\"influxdb_organizations\"))\n return [org for org in organizations_obj]", "def test_getorgs(self):\n pass", "def listOrganizations(self, name='', type=''):\n return self.get_json('/organization', {'name': name, 'type': type})", "def myorgs(request):\n context = RequestContext(request)\n \n user = request.user\n orgs = user.orgusers.get_query_set()\n \n context['orgs'] = orgs\n return render_to_response('myorgs.html', context)", "def test_retrieve_l_organization_locations(self):\n pass", "def get_all_orgs():\r\n org_filter_set = set()\r\n if not has_configuration_set():\r\n return org_filter_set\r\n\r\n for value in settings.MICROSITE_CONFIGURATION.values():\r\n org_filter = value.get('course_org_filter')\r\n if org_filter:\r\n org_filter_set.add(org_filter)\r\n\r\n return org_filter_set", "def get_orgs():\n \n url = \"https://api.github.com/user/orgs\"\n \n org_urls = []\n orgs = utils.get_json(url)\n \n for org in orgs:\n org_urls.append(org[\"url\"])\n \n return org_urls", "def test_retrieve_l_organization(self):\n pass", "def organizations(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"organizations\")", "def atlas_organizations():\n pass", "def list_all_organizations(ctx):\n pprint(ctx.obj.orgs.get().data)", "def get_all_companies_and_people():", "def organizations(self):\n return sorted(set([team.org for team in self.teams]), key=lambda o: o.title)", "def test_organizations_list(self):\n pass", "def list_orgs(self):\n orgs = list(self.orgs.keys())\n orgs.sort()\n return orgs", "def organizations(self):\r\n return Organizations(self)", "def test_get_organization(self):\n pass", "def _createOrganizationsCollections(folder):\n collections = [\n {'id': 'all_orgs', 'tit': _('all_orgs'), 'subj': (u'search', ), 'query': [\n {'i': 'portal_type',\n 'o': 'plone.app.querystring.operation.selection.is',\n 'v': ['organization']}],\n 'cond': u\"\", 'bypass': [],\n 'flds': (u'select_row', u'org_pretty_link_with_additional_infos',\n u'SelectedInPlonegroupColumn', u'PloneGroupUsersGroupsColumn',\n u'review_state', u'CreationDate', u'actions'),\n 'sort': u'sortable_title', 'rev': False, 'count': False},\n ]\n _createDashboardCollections(folder, collections)", "def get_coauthors(self):\n # Get number of authors to search for\n res = download(url=self.coauthor_link, accept='json')\n data = loads(res.text)['search-results']\n N = int(data.get('opensearch:totalResults', 0))\n # Store information in namedtuples\n fields = 'surname given_name id areas affiliation_id name city country'\n coauth = namedtuple('Coauthor', fields)\n coauthors = []\n # Iterate over search results in chunks of 25 results\n count = 0\n while count < N:\n params = {'start': count, 'count': 25}\n res = download(url=self.coauthor_link, params=params, accept='json')\n data = loads(res.text)['search-results'].get('entry', [])\n # Extract information for each coauthor\n for entry in data:\n aff = entry.get('affiliation-current', {})\n try:\n areas 
= [a['$'] for a in entry.get('subject-area', [])]\n except TypeError: # Only one subject area given\n areas = [entry['subject-area']['$']]\n new = coauth(surname=entry['preferred-name']['surname'],\n given_name=entry['preferred-name'].get('given-name'),\n id=entry['dc:identifier'].split(':')[-1],\n areas='; '.join(areas),\n affiliation_id=aff.get('affiliation-id'),\n name=aff.get('affiliation-name'),\n city=aff.get('affiliation-city'),\n country=aff.get('affiliation-country'))\n coauthors.append(new)\n count += 25\n return coauthors", "def test_success_with_all_orgs(self):\n auth_client = self.create_auth_client()\n all_orgs = ListOrgSerializer(Org.objects.all(), many=True)\n response = auth_client.get(self.search_org_api)\n self.assertEqual(response.data, all_orgs.data)", "def test_getorganizations_item(self):\n pass", "def get_companies_and_people(team):", "def _get_org_repos(self):\n url = f\"{BASE_URL}/orgs/{ORG}/repos\"\n return self.fetch_all_pages(url, flatten=True, query_params={\"per_page\": 100})", "def organizations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"organizations\")", "def organizations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"organizations\")", "def ListOrganizations(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def get_queryset(self, request):\n qs = super().get_queryset(request)\n if not request.user.is_superuser and request.user.approved_organisations.exists():\n qs = qs.filter(organisation__in=request.user.approved_organisations.all()).distinct()\n return qs", "def getCatalogs():", "def query_repositories():\n return buildapi.query_repositories()", "def organizations_at_location(self, location):\n if location is None:\n queryset = self.filter(location=None)\n elif location.region is None:\n queryset = self.filter(Q(location=None) | Q(location=location))\n elif location.tik is None:\n queryset = self.filter(Q(location=None) | Q(location__id__in=[location.region_id, location.id]))\n else:\n queryset = self.filter(Q(location=None) | Q(location__id__in=[location.tik_id, location.region_id, location.id]))\n\n organization_ids = set(queryset.values_list('organization_id', flat=True))\n\n organizations = Organization.objects.filter(id__in=organization_ids).order_by('title')\n\n for representative in OrganizationRepresentative.objects.filter(organization__in=organization_ids):\n organization = (filter(lambda org: org.id==representative.organization_id, organizations) or [None])[0]\n if organization:\n organization.representative = True\n\n return organizations", "def get_organization_links(self):\n yield from self.get_resource_by_item(\"/orgs\")", "def get_available_companies_and_people(team):", "def get_organization_links_by_page(self):\n return self.get_resource_by_page(\"/orgs\")", "def get_organizations(self, language=None):\n return self.get_direct_related_page_extensions(\n Organization, OrganizationPluginModel, language=language\n )", "def filter_organisation(self, org_name):\n return self.form.set_value(\"organisation search\", org_name)", "def query_repos(self):\n return [self.config[\"repo\"]]", "def test_organizations_read(self):\n pass", "def list_members_of_organisation(\n self, organisation_id: OrganisationId\n ) -> List[Publisher]:\n ...", "def organization_list(request):\n return [o.slug for o in Organization.objects.all()]", 
"def get_available_companies(team):", "def exportOrgs ( c ) :\n assert str(type(c)) == \"<type '_mysql.connection'>\"\n xml = \"\"\n o = sqlQuery ( c, \"select * from Organizations;\" )\n for i in o:\n oL = sqlQuery ( c, \"select * from OrganizationLocations where orgID = '\"+i[0]+\"';\" )\n oER = sqlQuery ( c, \"select * from OrganizationExternalResources where orgID = '\"+i[0]+\"';\" )\n oTC = sqlQuery ( c, \"select * from OrganizationsToCrises where orgID = '\"+i[0]+\"';\" )\n pTO = sqlQuery ( c, \"select * from PeopleToOrganizations where orgID = '\"+i[0]+\"';\" )\n xml += openTagAtt ( \"Organization\", \"organizationIdent\", i[0])\n xml += openCloseTag ( \"Name\", i[1])\n xml += closeTagAtt ( \"Kind\", \"organizationKindIdent\", i[2])\n for j in oL :\n xml += openTag ( \"Location\" )\n xml += openCloseTag ( \"Locality\", j [ 1 ] )\n xml += openCloseTag ( \"Region\", j [ 2 ] )\n xml += openCloseTag ( \"Country\", j [ 3 ] )\n xml += closeTag ( \"Location\" )\n xml += openCloseTag (\"History\", i[3])\n xml += openTag ( \"ContactInfo\" )\n xml += openCloseTag (\"Telephone\", i[4])\n xml += openCloseTag (\"Fax\", i[5])\n xml += openCloseTag (\"Email\", i[6])\n xml += openTag (\"PostalAddress\")\n xml += openCloseTag (\"StreetAddress\", i[7])\n xml += openCloseTag ( \"Locality\", i[8])\n xml += openCloseTag ( \"Region\", i[9])\n xml += openCloseTag ( \"PostalCode\", i[10])\n xml += openCloseTag ( \"Country\", i[11])\n xml += closeTag ( \"PostalAddress\" )\n xml += closeTag ( \"ContactInfo\" )\n xml += openTag (\"ExternalResources\")\n for j in oER:\n xml += openCloseTag ( j[1], j[2])\n xml += closeTag (\"ExternalResources\")\n xml += openTag (\"RelatedCrises\")\n for j in oTC:\n xml += closeTagAtt (\"RelatedCrisis\", \"crisisIdent\", j[1])\n xml += closeTag (\"RelatedCrises\")\n xml += openTag (\"RelatedPersons\")\n for j in pTO:\n xml += closeTagAtt (\"RelatedPerson\", \"personIdent\", j[0])\n xml += closeTag (\"RelatedPersons\")\n xml += closeTag (\"Organization\")\n assert str ( type ( xml ) ) == \"<type 'str'>\"\n return xml", "def fetch_education(self):\r\n # intialize storage vars\r\n organizations = []\r\n education = set()\r\n\r\n ## 1. 
first get all the organization names using nltk\r\n \r\n # go through every sentence\r\n for sent in nltk.sent_tokenize(self.stringtext):\r\n # the through every POS-tagged chunk \r\n for chunk in nltk.ne_chunk(nltk.pos_tag(nltk.word_tokenize(sent))):\r\n # filter organizations \r\n if hasattr(chunk, 'label') and chunk.label() == 'ORGANIZATION':\r\n # append the matches to the result \r\n organizations.append(' '.join(c[0] for c in chunk.leaves()))\r\n \r\n # we search for each bigram and trigram for reserved words\r\n # (college, university etc...)\r\n for org in organizations:\r\n for word in SCHOOLWORDS:\r\n # append if it appears in the organization \r\n if org.lower().find(word) >= 0:\r\n education.add(org)\r\n \r\n return list(education)", "def get(self, org_name=None): \n if org_name is None: # Return a list of all orgs\n filter = '%s=*' % self.org_attr\n scope = 1\n trueorfalse = False\n else:\n filter = '%s=%s' % (self.org_attr, org_name)\n scope = self.search_scope\n trueorfalse = True \n result = self._get_object(self.base_dn, scope, filter, \\\n unique=trueorfalse)\n self.log.debug('Result: %s' % result)\n return result", "def get_organizations(\n self, *, params: Optional[dict] = None\n ) -> \"resource_types.Organizations\":\n\n return communicator.Organizations(self.__requester).fetch(parameters=params)", "def _list_orgs(self, context):\r\n try:\r\n rtn = {'context': context,\r\n 'orgs': sorted(list(self._bbreader.cache[context].keys()))}\r\n except KeyError:\r\n raise RequestError('Context {} not found'.format(context))\r\n return rtn", "def getOrganisation(self):\n return _libsbml.ModelCreator_getOrganisation(self)", "def get_organisation_with_role(user, rolecode):\n return get_organisations_with_role(user, rolecode).get()", "def collect_org_repos(self):\n log.info(\"GHUB\", \"Collecting org repos.\")\n raw_repos = self._get_org_repos()\n preprocessed_repos = self._preprocess_repos(raw_repos)\n parsed_repos = json_reducer.reduce(REPOS_SCHEMA, preprocessed_repos)\n result = []\n for repo in parsed_repos:\n result.append(repo)\n return result", "def organizations_owned(self):\n return sorted(set([team.org for team in self.teams if team.org.owners == team]),\n key=lambda o: o.title)", "def get_all_locations(self):", "def get_all_authors():\n try:\n authors = g.projects.distinct('authors')\n all_authors = sorted(authors, key=lambda k: str(k).lower()) if authors else []\n return jsonify(all_authors)\n except Exception as err:\n raise ApiException(str(err), 500)", "def _get_org(self, org_name):\n org = SpokeOrg()\n result = org.get(org_name)\n if result == []:\n msg = \"Can't find org %s\" % org_name\n self.log.error(msg)\n raise error.NotFound(msg) \n return result", "def get_organism_names(results):\r\n\r\n organism_names = []\r\n\r\n for result in results:\r\n organism_names.append(result)\r\n\r\n return organism_names", "def org():\n\n settings = current.deployment_settings\n ADMIN = current.session.s3.system_roles.ADMIN\n SECTORS = \"Clusters\" if settings.get_ui_label_cluster() \\\n else \"Sectors\"\n stats = lambda i: settings.has_module(\"stats\")\n\n return M(c=\"org\")(\n M(\"Organizations MSW\", f=\"organisation\")(\n M(\"Create\", m=\"create\"),\n M(\"Import\", m=\"import\"),\n M(\"TestSpiegel\", c=\"org\",f=\"spiegel\")\n ),\n M(\"Offices\", f=\"office\")(\n M(\"Create\", m=\"create\"),\n M(\"Map\", m=\"map\"),\n M(\"Import\", m=\"import\")\n ),\n M(\"Facilities\", f=\"facility\")(\n M(\"Create\", m=\"create\"),\n M(\"Import\", m=\"import\"),\n ),\n M(\"Resources\", 
f=\"resource\", m=\"summary\",\n check=stats)(\n M(\"Create\", m=\"create\"),\n M(\"Import\", m=\"import\")\n ),\n M(\"Organization Types\", f=\"organisation_type\",\n restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n M(\"Office Types\", f=\"office_type\",\n restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n M(\"Facility Types\", f=\"facility_type\",\n restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n M(SECTORS, f=\"sector\", restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n )", "def get_queryset(self):\n return self.request.user.setting_set.get().companies", "def get_queryset(self):\n return self.request.user.setting_set.get().companies", "def get_queryset(self):\n return self.request.user.setting_set.get().companies", "def get_queryset(self):\n return self.request.user.setting_set.get().companies", "def lookup_organisation(formvars):\n\n orgname = formvars.get(\"organisation\")\n if not orgname:\n return None\n\n db = current.db\n s3db = current.s3db\n\n otable = s3db.org_organisation\n ftable = s3db.org_facility\n ltable = s3db.gis_location\n gtable = s3db.org_group\n mtable = s3db.org_group_membership\n\n # Search by name among test stations\n query = (otable.name == orgname) & \\\n (otable.deleted == False)\n join = [mtable.on(mtable.organisation_id == otable.id),\n gtable.on((gtable.id == mtable.group_id) & \\\n (gtable.name == TESTSTATIONS)),\n ftable.on(ftable.organisation_id == otable.id),\n ]\n\n # Do we have a selected location (should have since mandatory)\n location = formvars.get(\"location\")\n if isinstance(location, str):\n try:\n location = json.loads(location)\n except JSONERRORS:\n location = None\n\n if location:\n # Include the Lx ancestor in the lookup\n ancestor = None\n for level in (\"L4\", \"L3\", \"L2\"):\n ancestor = location.get(level)\n if ancestor:\n break\n if ancestor:\n join.append(ltable.on(ltable.id == ftable.location_id))\n query &= ((ltable.level == None) & (ltable.parent == ancestor)) | \\\n (ltable.id == ancestor)\n\n rows = db(query).select(otable.id, join = join)\n organisation_id = None\n if len(rows) > 1:\n # Multiple matches => try using facility email to reduce\n facility_email = formvars.get(\"facility_email\")\n if facility_email:\n candidates = {row.id for row in rows}\n query = (ftable.organisation_id.belongs(candidates)) & \\\n (ftable.email == facility_email) & \\\n (ftable.deleted == False)\n match = db(query).select(ftable.organisation_id,\n limitby = (0, 2),\n )\n if len(match) == 1:\n organisation_id = match.first().organisation_id\n elif rows:\n # Single match - this organisation already exists\n organisation_id = rows.first().id\n\n return organisation_id", "def test_get_all_for_organization(self):\n org = Organization.create(name='foo', program_id=self.program.uid)\n org.put()\n user = User.create(name='foo', email='[email protected]',\n owned_organizations=[org.uid])\n user.put()\n response = self.testapp.get(\n '/api/organizations/{}/users'.format(org.uid),\n headers=self.login_headers(user),\n )\n response_list = json.loads(response.body)\n self.assertEqual(len(response_list), 1)", "def get_organisation_metadata() -> pd.DataFrame:\n return GETTER.organisationmetadata", "def get_main_organization(self):\n return (\n self.get_organizations()\n .order_by(\"extended_object__organization_plugins__cmsplugin_ptr__position\")\n .first()\n )", "def get_doc_prov(j, gcis_url, refList, orgList):\n doc = ProvEsDocument()\n \n org = requests.get(j['href']).json()\n \n doc_attrs = [\n (\"prov:type\", 
'gcis:organization'),\n (\"prov:label\", j['name']),\n (\"prov:location\", \"%s%s\"%(gcis_url, j['uri'])),\n (\"gcis:organization_type_identifier\", j['organization_type_identifier']),\n (\"gcis:country_code\", j['country_code']),\n ]\n orgID = 'bibo:%s' % j['identifier']\n doc.agent(orgID, doc_attrs)\n\n for child in org['children']:\n cOrgURI = child['organization']\n rel = child['relationship']\n\n cOrg = next(o for o in orgList if o['uri'] == cOrgURI)\n cOrgID = 'bibo:%s'%cOrg['identifier']\n\n #cOrgAttrs = [\n # (\"prov:type\", 'gcis:organization'),\n # (\"prov:label\", cOrg['name']),\n # (\"prov:location\", cOrg['uri']),\n # (\"gcis:organization_type_identifier\", cOrg['organization_type_identifier']),\n # (\"gcis:country_code\", cOrg['country_code']),\n # ]\n #doc.entity(cOrgID, cOrgAttrs)\n #doc.hadMember(orgID, cOrgID)\n #for parent in org['parents']:\n # pOrgURI = parent['organization']\n # rel = parent['relationship']\n # pOrg = next(o for o in orgList if o['uri'] == pOrgURI)\n # pOrgID = 'bibo:%s'%pOrg['identifier']\n # doc.hadMember(pOrgID, orgID)\n\n prov_json = json.loads(doc.serialize())\n\n return prov_json", "def get_university(doc = None, cursor = None):\n\tif cursor is None and doc is not None:\n\t\treturn doc['details']['university']\n\telif doc is None and cursor is not None:\n\t\tallunivs = list()\n\t\tfor thisdoc in cursor:\n\t\t\tallunivs.append(thisdoc['details']['university'])\n\t\treturn allcoms\n\telse:\n\t\tprint \"Supply any one argument only!\"", "def get_organizations_list_with_links(year_link):\n response = get_response(year_link)\n if response.ok:\n soup = BeautifulSoup(response.text, 'html.parser')\n orgs_li = soup.find_all(\n 'li', attrs={'class': 'organization-card__container'})\n orgs_dict = {}\n for orgs_html in orgs_li:\n org_name = orgs_html.select('h4')[0].text.replace('\\n', '')\n relative_link = orgs_html.select('a')[0].get('href')\n full_link = HOME_PAGE + relative_link\n orgs_dict[org_name] = full_link\n return orgs_dict\n else:\n print('Something Went Wrong')\n print(f'Status Code: {response.status_code}')\n sys.exit(1)", "def _get_repo_list(self, *args, **kwargs): \r\n repo_list = kwargs['repositories'] if kwargs.get('repositories', None) else self.get_list(\r\n api_endpoint=settings.GITHUB_SETTINGS['GITHUB_USER_REPO_API'].format(**kwargs), **kwargs\r\n )\r\n for r in repo_list:\r\n if isinstance(r, dict):\r\n yield r['name']\r\n else:\r\n yield r", "def getUsersByOrganisation(SID, organisation_id, start, max, orderby, asc):\n return call(\"getUsersByOrganisation\", SID, organisation_id, start, max, orderby, asc)", "async def All_orgs():\n\n links_13 = []\n links_14 = []\n valid_url = \"/?archive/?gsoc/\\d+[0-9]/orgs/[a-zA-Z]+\"\n for year in range(2009, 2016):\n year_url = melange + \"/archive/gsoc/{}\".format(year)\n soup = await get_page(year_url)\n\n for url in soup.find_all('a'):\n if re.match(valid_url, url.get(\"href\")):\n if year <= 2013:\n links_13.append(join(melange, url.get(\"href\")[1:]))\n else:\n links_14.append(join(melange, url.get(\"href\")[1:]))\n return links_13, links_14", "def users_organizations(user):\n if not user or not user.is_authenticated():\n return None\n else:\n return get_users_organizations(user)", "def get(self):\n authenticated_user_id = token_auth.current_user()\n orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto(\n authenticated_user_id\n )\n if len(orgs_dto.organisations) < 1:\n return {\n \"Error\": \"User is not a manager of the project\",\n \"SubCode\": 
\"UserPermissionError\",\n }, 403\n\n search_dto = self.setup_search_dto()\n admin_projects = ProjectAdminService.get_projects_for_admin(\n authenticated_user_id,\n request.environ.get(\"HTTP_ACCEPT_LANGUAGE\"),\n search_dto,\n )\n return admin_projects.to_primitive(), 200", "def test_get_all_organization(self):\n self.client.force_authenticate(user=self.inventory_manager)\n response = self.client.get(\"/organization/\")\n self.assertEqual(response.status_code,\n status.HTTP_403_FORBIDDEN)", "def get_org_admins(self, dataset: Dict) -> List[User]:\n organization_id = dataset[\"organization_id\"]\n orgadmins = list()\n organization = self.organizations[organization_id]\n if \"admin\" in organization:\n for userid in self.organizations[organization_id][\"admin\"]:\n user = self.users.get(userid)\n if user:\n orgadmins.append(user)\n return orgadmins", "def addOrganisation(SID, name):\n return call(\"addOrganisation\", SID, name)", "def org():\n\n ADMIN = current.session.s3.system_roles.ADMIN\n SECTORS = \"Clusters\" if current.deployment_settings.get_ui_label_cluster() \\\n else \"Sectors\"\n\n return M(c=\"org\")(\n M(\"Organizations\", f=\"organisation\")(\n M(\"Create\", m=\"create\"),\n M(\"Import\", m=\"import\")\n ),\n M(\"Facilities\", f=\"facility\", m=\"summary\")(\n M(\"Create\", m=\"create\"),\n M(\"Map\", m=\"map\"),\n M(\"Import\", m=\"import\"),\n ),\n M(\"Offices\", f=\"office\")(\n M(\"Create\", m=\"create\"),\n M(\"Map\", m=\"map\"),\n M(\"Import\", m=\"import\")\n ),\n M(\"Resources\", f=\"resource\", m=\"summary\")(\n M(\"Create\", m=\"create\"),\n M(\"Import\", m=\"import\")\n ),\n M(\"Organization Types\", f=\"organisation_type\",\n restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n M(\"Service Types\", f=\"service\",\n restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n M(\"Office Types\", f=\"office_type\",\n restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n M(\"Facility Types\", f=\"facility_type\",\n restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n M(SECTORS, f=\"sector\", restrict=[ADMIN])(\n M(\"Create\", m=\"create\"),\n ),\n )", "def get_organization(self):\n return self.reference[REF_ORGANIZATION][REF_VALUE]", "def test_client_get_organizations(mocker, client_all_orgs_input):\n mocker.patch(\"tracker_client.client.get_auth_token\")\n mocker.patch(\"tracker_client.client.create_client\")\n test_client = Client()\n test_client.execute_query = mocker.MagicMock(return_value=client_all_orgs_input)\n\n org_list = test_client.get_organizations()\n\n test_client.execute_query.assert_called_once_with(\n queries.GET_ALL_ORGS, {\"after\": \"abc\", \"search\": \"\"}\n )\n assert org_list[0].acronym == \"FOO\"\n assert org_list[1].name == \"Fizz Bang\"\n assert org_list[0].domain_count == 10\n assert org_list[1].verified", "def fetch_organization(organization):\n return fetch_json(organization_url, organization)", "def getProjectsForOrgs(org_keys, limit=1000):\n q = getProjectsQueryForOrgs(org_keys)\n return q.fetch(limit)", "def test_companies(self, setup_data):\n term = 'abc defg'\n\n url = reverse('api-v3:search:basic')\n response = self.api_client.get(\n url,\n data={\n 'term': term,\n 'entity': 'company',\n },\n )\n\n assert response.status_code == status.HTTP_200_OK\n assert response.data['count'] == 2\n assert response.data['results'][0]['name'].startswith(term)\n assert [{'count': 2, 'entity': 'company'}] == response.data['aggregations']", "def get_repos(self):\n\n if self.url == 'test':\n repos = ['feature', 'dev', 'int']\n else:\n repos = 
[]\n\n return repos", "def getInterestedUsers():", "def get_organisation_description() -> pd.DataFrame:\n return GETTER.organisationdescription", "def _get_org_members(self):\n url = f\"{BASE_URL}/orgs/{ORG}/members\"\n return self.fetch_all_pages(url, flatten=True, query_params={\"per_page\": 100})", "def repositories(self, user_name=None):\n user_name = user_name if user_name else self._auth[0]\n data = self._request('GET', 'users', user_name)\n return data.repositories\n #ret_val = []\n #for repository in data.repositories:\n # ret_val.append(repository.name)\n # #print 'repo', repository['name'] # can use as dict or as object\n #return ret_val", "def organization_arns(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"organization_arns\")", "def get_repos():\n response = requests.get('https://quay.io/api/v1/repository?public=true&namespace=ucsc_cgl')\n repo_data = json.loads(response.text)\n assert response.status_code == 200, 'Quay.io API request to view repositories failed.'\n repos = {str(x[u'name']) for x in repo_data['repositories']}\n return repos", "def corporate_authors(self, key, value):\n _corporate_authors = self.get(\"authors\", [])\n\n for v in force_list(value):\n if key == \"710__\":\n if \"a\" in v:\n _corporate_authors.append(\n {\n \"full_name\": clean_val(\"a\", v, str),\n \"type\": \"ORGANISATION\",\n }\n )\n else:\n self[\"authors\"] = collaborations(self, key, value)\n raise IgnoreKey(\"corporate_authors\")\n else:\n _corporate_authors.append(\n {\"full_name\": clean_val(\"a\", v, str), \"type\": \"ORGANISATION\"}\n )\n return _corporate_authors", "def districts(self):\n catalog = getToolByName(self.context, 'portal_catalog')\n d = [dict(url=district.getURL(), title=district.Title,\n address=district.Description) for district in\n catalog({'object_provides': IDistrict.__identifier__,\n 'path': dict(query='/'.join(self.context.getPhysicalPath()),\n depth=1), 'sort_on': 'sortable_title'})]\n print d\n return d", "def fetch_gh_org_collaborators(self):\n for config in self.config.get('org.permissions.org_integrity.orgs'):\n host, org = config['url'].rsplit('/', 1)\n for aff in config.get('collaborator_types', GH_ALL_COLLABORATORS):\n url_hash = get_sha256_hash([config['url']], 10)\n json_file = f'gh_{aff}_collaborators_{url_hash}.json'\n path = ['permissions', json_file]\n description = (\n f'{aff.title()} collaborators of the {org} GH org'\n )\n self.config.add_evidences(\n [RawEvidence(path[1], path[0], DAY, description)]\n )\n with raw_evidence(self.locker, '/'.join(path)) as evidence:\n if evidence:\n if host not in self.gh_pool:\n self.gh_pool[host] = Github(base_url=host)\n if not config.get('repos'):\n repos = self.gh_pool[host].paginate_api(\n f'orgs/{org}/repos'\n )\n config['repos'] = [repo['name'] for repo in repos]\n collabs = {}\n for repo in config['repos']:\n collabs_url = f'repos/{org}/{repo}/collaborators'\n collabs[repo] = self.gh_pool[host].paginate_api(\n collabs_url, affiliation=aff\n )\n evidence.set_content(json.dumps(collabs))", "def get_organisations_with_role(user, rolecode):\n return Organisation.objects.filter(\n user_organisation_roles__user=user,\n user_organisation_roles__role__code=rolecode)", "def export_organizations(self):\n print('\\n=== Exporting all organization data...')\n\n for organization in self.client.organizations:\n print('- Exporting organizations:', organization.name)\n\n json = {\n 'id': self.get_id(organization),\n 'href': organization.href,\n 'name': organization.name,\n 'nameKey': organization.name_key,\n 
'description': organization.description,\n 'status': organization.status,\n 'createdAt': organization.created_at.isoformat(),\n 'modifiedAt': organization.modified_at.isoformat(),\n 'customData': self.get_custom_data(organization),\n 'default_account_store_mapping': None,\n 'default_group_store_mapping': None,\n 'account_store_mappings': [],\n }\n\n default_account_store_mapping = organization.default_account_store_mapping\n default_group_store_mapping = organization.default_group_store_mapping\n\n if default_account_store_mapping:\n json['default_account_store_mapping'] = {\n 'id': organization.default_account_store_mapping.href.split('/')[-1],\n 'href': organization.default_account_store_mapping.href,\n 'type': organization.default_account_store_mapping.account_store.__class__.__name__,\n 'name': organization.default_account_store_mapping.account_store.name,\n 'list_index': organization.default_account_store_mapping.list_index,\n }\n\n if default_group_store_mapping:\n json['default_group_store_mapping'] = {\n 'id': organization.default_group_store_mapping.href.split('/')[-1],\n 'href': organization.default_group_store_mapping.href,\n 'type': organization.default_group_store_mapping.account_store.__class__.__name__,\n 'name': organization.default_group_store_mapping.account_store.name,\n 'list_index': organization.default_group_store_mapping.list_index,\n }\n\n for account_store_mapping in organization.account_store_mappings:\n json['account_store_mappings'].append({\n 'id': self.get_id(account_store_mapping),\n 'href': account_store_mapping.href,\n 'account_store': {\n 'type': account_store_mapping.account_store.__class__.__name__,\n 'id': self.get_id(account_store_mapping.account_store),\n 'href': account_store_mapping.account_store.href,\n 'name': account_store_mapping.account_store.name,\n 'description': account_store_mapping.account_store.description,\n 'status': account_store_mapping.account_store.status,\n },\n 'list_index': account_store_mapping.list_index,\n 'is_default_account_store': account_store_mapping.is_default_account_store,\n 'is_default_group_store': account_store_mapping.is_default_group_store,\n })\n\n tenant = self.get_id(organization.tenant)\n self.write('%s/%s/organizations/%s' % (self.location, tenant, json['id']), json)\n\n print('=== Done!\\n')", "def test_search_organizations_post(self):\n pass", "def get_members_and_supervisors(organization):\n members = []\n supervisors = []\n if organization:\n if organization.members:\n members = organization.members.reporters.all()\n if organization.supervisors:\n supervisors = organization.supervisors.reporters.all() \n return (members, supervisors)", "def findCoAuthorsMultiLevel(request, level, name):\n try:\n root = findCoAuthorsMultiLevel_(level, name)\n except DoesNotExist as e:\n return JsonResponse({'error': \"Can't find Author: \" + name})\n return JsonResponse({'coauthors': simplejson.dumps(root.toDict())})" ]
[ "0.70180196", "0.6930935", "0.68222815", "0.67474467", "0.66747344", "0.64471346", "0.6437008", "0.64310277", "0.63073766", "0.62133086", "0.6173378", "0.61321056", "0.61253846", "0.61062926", "0.6087762", "0.6074626", "0.60670793", "0.60509616", "0.60316247", "0.6012012", "0.6007616", "0.60016155", "0.59924805", "0.5938018", "0.58998865", "0.5840614", "0.5833405", "0.58186895", "0.5796284", "0.5773584", "0.5773584", "0.577293", "0.56786525", "0.567486", "0.5674401", "0.5655577", "0.55811095", "0.5576214", "0.55699795", "0.5567561", "0.5556471", "0.5548918", "0.5538453", "0.5537592", "0.5537419", "0.5527268", "0.55232084", "0.5505993", "0.5500089", "0.54764086", "0.5453737", "0.5451591", "0.54487044", "0.5428748", "0.5414091", "0.5412599", "0.53943074", "0.53937244", "0.5388012", "0.53857464", "0.53760815", "0.53760815", "0.53760815", "0.53760815", "0.5358548", "0.53517616", "0.533408", "0.53237545", "0.5309597", "0.5305573", "0.529751", "0.528953", "0.5280885", "0.5266784", "0.5265071", "0.5262156", "0.5254977", "0.52499485", "0.5242276", "0.523902", "0.52375525", "0.5228073", "0.5226082", "0.5223691", "0.5213425", "0.5206088", "0.51925194", "0.5192049", "0.5185702", "0.51787114", "0.51771754", "0.51765305", "0.51750857", "0.5170313", "0.51681024", "0.5164562", "0.5164432", "0.5153692", "0.5150168", "0.5147756" ]
0.59292954
24
get places for solr
def transform_places(self, instance):
    return self.transform_entity(instance, 'Place')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_places():\n global app_id, rest_api_key, places\n\n if not places:\n connection = httplib.HTTPSConnection(PARSE_API_URL, PARSE_API_PORT)\n connection.connect()\n connection.request(\n method='GET',\n url=PLACES_ENDPOINT,\n headers={\"X-Parse-Application-Id\": app_id, \"X-Parse-REST-API-Key\": rest_api_key}\n )\n places = json.loads(connection.getresponse().read())\n\n return places", "def getPlaces(self):\n return self.wrapper.getPlaces()", "def get_places(location, keyword):\n if location is None or keyword is None:\n return None\n\n api_key = ''\n\n search_term = '%s %s' % (location, keyword)\n places_url = 'https://maps.googleapis.com/maps/api/place/textsearch/json?' \\\n 'query=%s&key=%s' % (search_term, api_key)\n\n places_response = requests.get(places_url)\n if not places_response.ok:\n return None\n\n else:\n data = json.loads(places_response.text)\n if data['status'] != 'OK':\n return None\n\n else:\n # Store all place information as a list of dictionaries.\n places_list = []\n for place in data['results']:\n address = place['formatted_address']\n name = place['name']\n try:\n open_bool = place['opening_hours']['open_now']\n except KeyError:\n open_bool = 'n/a'\n try:\n rating = place['rating']\n rating_total = place['user_ratings_total']\n except KeyError:\n rating = 'n/a'\n rating_total = 'n/a'\n\n p_dict = {'address': address, 'name': name, 'open': open_bool,\n 'rating': rating, 'total': rating_total}\n\n places_list.append(p_dict)\n\n return places_list", "def get_all_places(self):\n self.cursor.execute(\"select * from places\")\n self.connection.commit()\n return self.cursor.fetchall()", "def get(self):\n # TODO: catch ValueError and raise proper 400 exception with data about error instead of 500\n lon = float(self.get_query_argument(\"lon\"))\n lat = float(self.get_query_argument(\"lat\"))\n distance = int(self.get_query_argument(\"distance\", 5000))\n limit = int(self.get_query_argument(\"limit\", 20))\n offset = int(self.get_query_argument(\"offset\", 0))\n\n cursor = self.db_conn.places.find({\n \"loc\": {\n \"$near\": {\n \"$geometry\": {\n \"type\": \"Point\",\n \"coordinates\": [lon, lat]\n },\n \"$maxDistance\": distance\n }\n },\n \"box\": {\"$exists\": True}\n },\n {\"_id\": 0, \"box\": 0}).skip(offset).limit(limit)\n\n res = yield cursor.to_list(length=limit)\n\n raise gen.Return(res)", "def get_all_locations(self):", "def places_get_all():\n if request.method == 'GET':\n data = get_all_places()\n if len(data['places']) == 0:\n return jsonify({'response': 'Not have places.'}), 404\n return jsonify(data), 200", "def places_search():\n new_obj = request.get_json()\n if new_obj is None:\n abort(400, \"Not a JSON\")\n\n city_list = []\n if \"cities\" in new_obj.keys():\n city_list = new_obj[\"cities\"]\n\n if \"states\" in new_obj.keys():\n states_list = new_obj[\"states\"]\n for id in states_list:\n state = storage.get(State, id)\n if state is None:\n pass\n for city in state.cities:\n if city.id not in city_list:\n city_list.append(city.id)\n\n amenities_list = []\n if \"amenities\" in new_obj.keys():\n amenities_list = new_obj[\"amenities\"]\n\n list_res = []\n\n if len(city_list):\n for city_id in city_list:\n city = storage.get(City, city_id)\n if city is None:\n continue\n places = city.places\n for place in places:\n ame_in_place = []\n for ame in place.amenities:\n ame_in_place.append(ame.id)\n del place.amenities\n list_res.append(place.to_dict())\n for amenity_id in amenities_list:\n if amenity_id not in ame_in_place:\n 
list_res.remove(place.to_dict())\n return jsonify(list_res)\n\n if len(new_obj) == 0 or len(city_list) == 0:\n all_places = storage.all(Place)\n for place in all_places.values():\n ame_in_place = []\n for ame in place.amenities:\n ame_in_place.append(ame.id)\n del place.amenities\n list_res.append(place.to_dict())\n for amenity_id in amenities_list:\n if amenity_id not in ame_in_place:\n list_res.remove(place.to_dict())\n return jsonify(list_res)", "def get_places_autocomplete(q: str = None, **params) -> JsonResponse:\n if params.get('page') == 'all':\n places = PlaceAutocompletePaginator(q=q, **params).all()\n else:\n places = get(f'{API_V1}/places/autocomplete', q=q, **params).json()\n\n places['results'] = convert_all_coordinates(places['results'])\n return places", "def show_places():\n t0 = time.time()\n print(f\"--- {request}\")\n print(f\"--- {user_session}\")\n # Set context by owner and the data selections\n u_context = UserContext(user_session, current_user, request)\n # Which range of data is shown\n u_context.set_scope_from_request(request, \"place_scope\")\n u_context.count = request.args.get(\"c\", 50, type=int)\n\n with PlaceReader(\"read\", u_context) as service:\n # reader = PlaceReader(readservice, u_context)\n # The 'items' list has Place objects, which include also the lists of\n # nearest upper and lower Places as place[i].upper[] and place[i].lower[]\n res = service.get_place_list()\n\n if res[\"status\"] == Status.NOT_FOUND:\n print(f'bp.scene.routes.show_places: {_(\"No places found\")}')\n elif res[\"status\"] != Status.OK:\n print(\n f'bp.scene.routes.show_places: {_(\"Could not get places\")}: {res.get(\"statustext\")}'\n )\n\n elapsed = time.time() - t0\n stk_logger(\n u_context,\n f\"-> bp.scene.routes.show_places n={len(res.get('items'))} e={elapsed:.3f}\",\n )\n return render_template(\n \"/scene/places.html\",\n places=res[\"items\"],\n menuno=4,\n user_context=u_context,\n elapsed=elapsed,\n )", "def features_search(df, type_, keywords):\n PLACES_KEY = os.environ[\"PLACES_KEY\"]\n output_file = \"json\"\n radius = \"1500\"\n lst = []\n\n for i in range(len(df)):\n coor = df[\"latitude\"][i].astype(str) + \", \" + df[\"longitude\"][i].astype(str)\n url = \"https://maps.googleapis.com/maps/api/place/nearbysearch/\"+ output_file +\"?location=\"+coor +\"&radius=\" +radius+ \"&type=\"+type_+\"&keyword=\"+keywords + \"&key=\"+ PLACES_KEY\n res = requests.get(url)\n data = res.json()\n lst.append(len(data))\n \n return lst", "def all_places_get(city_id=None):\n lista = []\n flag = 0\n for v in storage.all(City).values():\n if v.id == city_id:\n for place in v.places:\n lista.append(place.to_dict())\n flag = 1\n if flag == 0:\n abort(404)\n else:\n return (jsonify(lista))", "def query_google(lat='38.890762', lon='-77.084755', radius='400', keywords=['coffee', 'cafe', 'brunch']):\n base_url = \"https://maps.googleapis.com/maps/api/place/nearbysearch/json\"\n location = f\"{lat}, {lon}\"\n for kw in keywords:\n params = {\n \"key\": codecs.decode(config['google']['api_key'], 'rot-13'),\n \"type\": 'food',\n \"rankby\": 'prominence',\n \"location\": location,\n \"radius\": radius,\n \"keyword\": kw\n }\n\n try:\n response = requests.get(base_url, params=params).json()\n key_results_list = response['results']\n except Exception as e:\n print(f'error in query_google {e}')\n\n #passes to this point\n print(f'query_google - key_results_list: {key_results_list}')\n\n if \"next_page_token\" in response:\n params = {\n \"key\": codecs.decode(config['google']['api_key'], 
'rot-13'),\n \"type\": 'food',\n \"rankby\": 'prominence',\n \"location\": location,\n \"radius\": radius,\n \"keyword\": kw,\n \"pagetoken\": response[\"next_page_token\"]\n }\n\n response_next_page = requests.get(base_url, params=params).json()\n key_results_list = key_results_list + response_next_page['results']\n print(response_next_page)\n\n else:\n print(\"no next page\")\n\n for kr in key_results_list:\n kr[\"keyword\"] = kw\n print(f'key results list length is: {len(kr)}')\n\n #db.get_collection(\"google_places\").delete_many({}) # This needs to be moved into Flask to aggregate results\n db.get_collection(\"google_places\").insert_many(key_results_list)", "def get_places(place_id=None, city_id=None):\n if city_id:\n city_obj = st.get(City, city_id)\n if city_obj:\n if request.method == \"GET\":\n my_list = []\n for place in city_obj.places:\n my_list.append(place.to_dict())\n return jsonify(my_list)\n elif request.method == \"POST\":\n data = request.get_json()\n if not data:\n return jsonify(error=\"Not a JSON\"), 400\n elif not data.get(\"user_id\"):\n return jsonify(error=\"Missing user_id\"), 400\n elif not data.get(\"name\"):\n return jsonify(error=\"Missing name\"), 400\n user_obj = st.get(User, data[\"user_id\"])\n if user_obj:\n data[\"city_id\"] = city_id\n new_place = Place(**data)\n new_place.save()\n return jsonify(new_place.to_dict()), 201\n return abort(404)\n elif place_id:\n place_obj = st.get(Place, place_id)\n if place_obj:\n if request.method == \"GET\":\n return jsonify(place_obj.to_dict())\n elif request.method == \"DELETE\":\n st.delete(place_obj)\n st.save()\n return jsonify({}), 200\n elif request.method == \"PUT\":\n data = request.get_json()\n if not data:\n return jsonify(error=\"Not a JSON\"), 400\n for key, value in data.items():\n if key not in [\"id\", \"created_at\", \"updated_at\",\n \"city_id\", \"user_id\"]:\n setattr(place_obj, key, value)\n st.save()\n return jsonify(place_obj.to_dict()), 200\n return abort(404)", "def search_nearby(self, fields: dict) -> list[dict]:\r\n results: list = []\r\n\r\n if \"location\" not in fields.keys():\r\n geolocate: dict = self.get_current_locate()\r\n fields[\"location\"] = geolocate[\"location\"]\r\n\r\n if \"radius\" not in fields.keys():\r\n fields[\"radius\"] = 1000\r\n\r\n fields[\"type\"] = \"restaurant\"\r\n\r\n for i in range(1):\r\n places = self.gmaps.places_nearby(**fields)\r\n if places[\"status\"] != \"OK\":\r\n continue\r\n results.extend(places[\"results\"])\r\n try:\r\n # Update attribute to get next 20 places.\r\n fields = {\r\n \"page_token\": places[\"next_page_token\"]\r\n }\r\n # 連続実行するとエラー(Google側の仕様)\r\n time.sleep(2)\r\n except KeyError:\r\n # 最大で60件まで それ以上検索すると next_page_token がなくなる\r\n break\r\n\r\n return results", "def solrsearch(self, **kwargs):\n return self.request.get('/@solrsearch', params=kwargs).json()", "def get(self):\n parser = reqparse.RequestParser()\n parser.add_argument('lat', type=float)\n parser.add_argument('lng', type=float)\n parser.add_argument('radius', type=int)\n args = parser.parse_args()\n lat = args.get('lat', None)\n lng = args.get('lng', None)\n radius = args.get('radius', None)\n\n if lat and lng and radius:\n trucks = db.trucks.find({\"coordinates\": {\"$geoWithin\": \\\n {\"$centerSphere\": [[lng, lat], meter_to_radian(radius)]}}})\n else:\n trucks = db.trucks.find({})\n return list(trucks)", "def search():\n\n # Store the 'q' part of the URL as a string called 'q'. Check 'q' loaded, and produce runtime error if not.\n # e.g. 
'12589'\n q = request.args.get(\"q\")\n if not q:\n raise RuntimeError(\"missing location\")\n\n # Rewrites user input as lowercase\n q = str.lower(q)\n\n # Select the entire row from database 'places' that at least contains the value of 'q' in one of the 'postal_code', 'place_name', or 'admin_name1' fields.\n # e.g. [{'country_code':'US','postal_code':'12589'}]\n q_info = db.execute(\"SELECT * FROM places WHERE postal_code LIKE :q OR LOWER(place_name) LIKE :q OR LOWER(admin_name1) LIKE :q LIMIT 10\", q='%'+q+'%')\n\n # Run 'q_info' dict through 'jsonify()' function to convert some elements to JSON compatible(?)\n return jsonify(q_info)", "def search_places(self, search, country=\"True\", city=\"True\"):\n params = {}\n params[\"query\"] = search\n params[\"includeCities\"] = city\n params[\"includeCountries\"] = country\n placeRequestPath = \"/apiservices/autosuggest/v1.0/\"\n browsePlacesURL = self.rootURL + placeRequestPath + self.originCountry + \"/\" + self.currency + \"/\" + self.locale + \"/\"\n response = self.session.get(browsePlacesURL, params=params)\n resultJSON = json.loads(response.text)\n return resultJSON", "def get_places(city_id):\n city_obj = storage.get(\"City\", city_id)\n if city_obj is None:\n abort(404)\n places_list = [place.to_dict() for place in city_obj.places]\n return jsonify(places_list)", "def search():\n # q is the name of the http parameter\n request.args.get(\"q\")\n\n #check for missing arguments\n if not(request.args.get(\"q\")):\n raise RuntimeError(\"Missing geo!\")\n\n #\"%\":match any number of characters\n q=request.args.get(\"q\") + \"%\"\n\n #retrieve data from database\n rows=db.execute(\"SELECT * from places WHERE postal_code LIKE :pc OR place_name LIKE :city OR admin_name1 LIKE :state\", pc=q,city=q,state=q)\n\n return jsonify(rows)", "def all_places(city_id):\n city = storage.get(\"City\", city_id)\n if not city:\n abort(404)\n place_list = [place.to_dict() for place in city.places]\n return jsonify(place_list)", "def all_places(city_id=None):\n list_places = []\n city = storage.get(City, city_id)\n if city is None:\n abort(404)\n for place in city.places:\n list_places.append(place.to_dict())\n return jsonify(list_places)", "def show_places(city_id):\n places_list = []\n city = storage.get(City, city_id)\n if city is None:\n abort(404)\n place = storage.all('Place')\n for obj in place.values():\n objdict = obj.to_dict()\n if objdict['city_id'] == city_id:\n places_list.append(objdict)\n return jsonify(places_list)", "def get_places_by_id(place_id: MultiInt, **params) -> JsonResponse:\n response = get(f'{API_V1}/places', ids=place_id, **params)\n\n # Convert coordinates to floats\n places = response.json()\n places['results'] = convert_all_coordinates(places['results'])\n return places", "def place_by_name(place, API_KEY=API_KEY, FIND_PLACE_API_URL=FIND_PLACE_API_URL):\n params = {\n 'input': '{}'.format(place),\n 'fields':'name,geometry,formatted_address',\n 'inputtype':'textquery',\n 'key': API_KEY\n }\n\n # Do the request and get the response data\n response = requests.get(FIND_PLACE_API_URL, params=params)\n\n response = response.json()['candidates'][0]\n\n geodata = dict()\n geodata['lat'] = response['geometry']['location']['lat']\n geodata['lng'] = response['geometry']['location']['lng']\n geodata['address'] = response['formatted_address']\n\n return geodata", "def find_places(query):\n parts = str(query).split(' ')\n for i, p in enumerate(parts):\n p = p.replace('-', ' ').strip()\n try:\n postal_code = int(p)\n if len(postal_code) == 
4:\n print(postal_code, parts[i+1])\n # Check \n #response = get_osm_location(\"{postal_code} {name}\")\n #lon = response['lon']\n #lat = response['lat']\n #poly = \n except Exception as e:\n continue", "def places():\n # number of recipes per page\n per_page = 8\n page = int(request.args.get('page', 1))\n # count total number of recipes\n total = mongo.db.places.count_documents({})\n # logic for what recipes to return\n all_places = mongo.db.places.find().skip((page - 1)*per_page).limit(per_page)\n pages = range(1, int(math.ceil(total / per_page)) + 1)\n return render_template('restaurants.html', places=all_places, page=page, pages=pages, total=total)", "def api_interesting_places(dcid):\n return dc.get_interesting_places([dcid])", "def find(self, request):\n try:\n lat = float(request.GET.get('lat', ''))\n lon = float(request.GET.get('lon', ''))\n except ValueError:\n return Response({'detail': 'wrong latitude or longitude value'},\n status.HTTP_400_BAD_REQUEST)\n point = Point(lon, lat)\n areas = ServiceArea.objects.filter(area__bbcontains=point)\n serializer = SearchServiceAreaSerializer(areas, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)", "def getAllPlaces(city_id):\n place_list = []\n all_places = storage.all('Place')\n\n get_city = storage.get(\"City\", city_id)\n if get_city is None:\n abort(404)\n\n for item in all_places.values():\n if item.city_id == city_id:\n place_list.append(item.to_dict())\n\n return jsonify(place_list)", "def get_nearby_location(request):\n latitude, longitude = latlang(request)\n point = Point(float(longitude), float(latitude), srid=4326)\n locations = Location.objects.filter(point__distance_lte=(point, D(km=100)))\n return JsonResponse(json.dumps([serializer(location) for location in locations]), safe=False)", "def search():\n\n # no search query retrieved\n if not request.args.get(\"q\"):\n raise RuntimeError(\"missing search parameter q\")\n\n # store search query\n q = request.args.get(\"q\")\n\n # remove any punctuation\n for punc in string.punctuation:\n q = q.replace(punc, '')\n\n # prevents http 500 error when string started with punctuation\n if q == \"\":\n q = \"xyz\"\n\n # split multi-word query\n elements = []\n for word in q.split():\n # add to array, concat with SQL wildcard\n elements.append(word + '%')\n\n if len(elements) == 1:\n # assuming: city // state\n station_list = Station.query.join(Place).\\\n filter(db.or_(Place.city.like(elements[0]), Place.state.like(elements[0]))).all()\n\n # assuming: name // call\n station_list += Station.query.\\\n filter(db.or_(Station.name.like(elements[0]), Station.call.like(elements[0]))).all()\n\n elif len(elements) == 2:\n # assuming: city city\n station_list = Station.query.join(Place).\\\n filter(Place.city.like(elements[0]+elements[1])).all()\n\n # assuming: city, state\n station_list += Station.query.join(Place).\\\n filter(db.and_(Place.city.like(elements[0]), Place.state.like(elements[1]))).all()\n\n # assuming: name / call, city / state\n station_list += Station.query.join(Place).\\\n filter(db.and_(\n db.or_(Station.name.like(elements[0]), Station.call.like(elements[0])),\n db.or_(Place.city.like(elements[1]), Place.state.like(elements[1])))).all()\n\n elif len(elements) == 3:\n # assuming: city city, state\n station_list = Station.query.join(Place).\\\n filter(db.and_(Place.city.like(elements[0]+elements[1]), Place.state.like(elements[2]))).all()\n\n # assuming: name / call, city city\n station_list += Station.query.join(Place).\\\n filter(db.and_(\n 
db.or_(Station.name.like(elements[0]), Station.call.like(elements[0])),\n Place.city.like(elements[1]+elements[2]))).all()\n\n # assuming: name / call, city, state\n station_list += Station.query.join(Place).\\\n filter(db.and_(\n db.or_(Station.name.like(elements[0]), Station.call.like(elements[0])),\n db.and_(Place.city.like(elements[1]), Place.state.like(elements[2])))).all()\n\n elif len(elements) == 4:\n # assuming: name / call, city city, state\n station_list = Station.query.join(Place).\\\n filter(db.and_(\n db.or_(Station.name.like(elements[0]), Station.call.like(elements[0])),\n db.and_(Place.city.like(elements[1]+elements[2]), Place.state.like(elements[3])))).all()\n\n # serialize thequery set\n result = geo_stations.dump(station_list)\n\n return jsonify(result.data)", "def __init__(self):\n # For the sake of simplicity, we presently look for\n # \"name\", \"geometry\", \"icon\", \"vicinity\" tags only in the\n # response and ignore the others.\n self._reqd_tags = [\"name\", \"geometry\", \"icon\", \"vicinity\"]\n\n self._f_name = \"Custom POI Search.\"\n self._no_results = \"No search results found.\"\n\n self._kml = \"\"\"<kml xmlns=\"http://www.opengis.net/kml/2.2\"\n xmlns:gx=\"http://www.google.com/kml/ext/2.2\"\n xmlns:kml=\"http://www.opengis.net/kml/2.2\"\n xmlns:atom=\"http://www.w3.org/2005/Atom\">\n <Folder>\n <name>${foldername}</name>\n <open>1</open>\n <Style id=\"placemark_label\">\\\n ${style}\\\n </Style>\\\n ${placemark}\n </Folder>\n </kml>\n \"\"\"\n\n self._iconstyle = \"\"\"\n <IconStyle>\n <scale>1</scale>\n </IconStyle>\\\n \"\"\"\n self._linestyle = \"\"\"\n <LineStyle>\n <color>7fffff00</color>\n <width>5</width>\n </LineStyle>\\\n \"\"\"\n self._polystyle = \"\"\"\n <PolyStyle>\n <color>7f66ffff</color>\n <colorMode>normal</colorMode>\n <fill>1</fill>\n <outline>1</outline>\n </PolyStyle>\n \"\"\"\n self._style = (\n \"%s %s %s\"\n % (self._iconstyle, self._linestyle, self._polystyle))\n\n self._json = \"\"\" {\\\n \"Folder\": {\\\n \"name\": \"${name}\",\\\n \"Placemark\":${placemark}\\\n }\\\n }\\\n \"\"\"\n self._json_template = Template(self._json)\n self._kml_template = Template(self._kml)\n\n # URL to access Google Places database.\n # output type(xml or json), location, radius and server key are\n # mandatory parameters required to perform the search.\n self._baseurl = \"https://maps.googleapis.com/maps/api/place/nearbysearch\"\n self._places_api_url = self._baseurl + \"/%s?location=%s&radius=%s&key=%s\"\n\n self.logger = logging.getLogger(\"ge_search\")\n self._content_type = \"Content-type, %s\"\n\n self.utils = utils.SearchUtils()", "def search(self):\n return self.key.geocode(self.cleanplace)", "def get_all_places(city_id):\n city = storage.get('City', city_id)\n if city is None:\n abort(404)\n places = []\n for place in storage.all('Place').values():\n if place.city_id == city_id:\n places.append(place.to_json())\n return jsonify(places)", "def places_in_city(city_id):\n list_res = []\n city = storage.get(City, city_id)\n if city is None:\n abort(404)\n\n places = city.places\n for place in places:\n list_res.append(place.to_dict())\n return jsonify(list_res)", "def get_cities_sorted_location(request):\n latitude, longitude = latlang(request)\n point = Point(float(longitude), float(latitude), srid=4326)\n locations = Location.objects.filter(point__distance_lte=(point, D(km=200))).annotate(distance=Distance(\"point\", point)).order_by(\"distance\")[:10]\n return JsonResponse(json.dumps([serializer_distance(location) for location in 
locations]), safe=False)", "def get_places_nearby(\n nelat: float, nelng: float, swlat: float, swlng: float, **params\n) -> JsonResponse:\n response = get(\n f'{API_V1}/places/nearby', nelat=nelat, nelng=nelng, swlat=swlat, swlng=swlng, **params\n )\n return convert_all_place_coordinates(response.json())", "def get_place_details(self):\n self.google_api_url = 'https://maps.googleapis.com/maps/api/place/details/json?placeid={}&key={}'.format(self.place_id, api_key)\n self.r = requests.get(url=self.google_api_url)\n self.data = self.r.json()\n self.address_components = self.data['result']['address_components']\n\n for i in self.address_components:\n if i['types'][0] == 'locality':\n self.city = (i['long_name'])\n return (self.city)\n else:\n pass", "def places(request):\n places = []\n for h in models.SpecialPlace.objects.filter(visible = True):\n places.append({\n 'id': h.id,\n 'name': h.name,\n 'address': h.address,\n 'type': h.type,\n 'url': h.url,\n 'email': h.email,\n 'telephone': h.telephone,\n 'note': h.note,\n 'lng': h.lng,\n 'lat': h.lat,\n 'html': render_to_string('conference/render_place.html', {'p': h}),\n })\n for h in models.Hotel.objects.filter(visible = True):\n places.append({\n 'id': h.id,\n 'name': h.name,\n 'type': 'hotel',\n 'telephone': h.telephone,\n 'url': h.url,\n 'email': h.email,\n 'availability': h.availability,\n 'price': h.price,\n 'note': h.note,\n 'affiliated': h.affiliated,\n 'lng': h.lng,\n 'lat': h.lat,\n 'modified': h.modified.isoformat(),\n 'html': render_to_string('conference/render_place.html', {'p': h}),\n })\n\n return places", "def all_places(place_id=None):\n flag = 0\n for v in storage.all(Place).values():\n if v.id == place_id:\n attr = (v.to_dict())\n flag = 1\n if flag == 0:\n abort(404)\n else:\n return (jsonify(attr))", "def browse(self, lat, lon):\n places = self.filter(active=True).order_by('-id')[:10]\n items = []\n for item in places:\n item.distance = item.compute_distance(lat, lon)\n item.orientation = self.orientation(int(item.compute_orientation(lat,lon)))\n items.append(item)\n return items", "def search(lat, lng, distance, query):\n\n url = SEARCH_URL.format(lat, lng, distance,\n query, F_CLIENT_ID, F_CLIENT_SECRET,\n time.strftime(\"%Y%m%d\"))\n venue_list = []\n\n data = requests.get(url).json()\n for i in range(0, len(data['response']['groups'][0]['items'])):\n try:\n item = data['response']['groups'][0]['items'][i]\n venue = item['venue']\n venue_list.append(Business(venue['name'],\n venue['location']['address'],\n venue['rating'],\n venue['ratingSignals'],\n (venue['location']['lat'], venue['location']['lng'])))\n except:\n pass\n\n return venue_list", "def _RetrievePlacemarks(self, xml_data):\n\n xmlstr = \"\"\n total_results = 0\n # Perform XML parsing using cElementTree.\n root = ET.parse(xml_data).getroot()\n\n for element in root:\n if element.tag == \"result\":\n # Rename \"result\" tags as \"Placemark\" as per KML(XML) requirements.\n element.tag = \"Placemark\"\n\n for subelement in element[:]:\n # For the sake of simplicity, we presently look for\n # \"name\", \"geometry\", \"icon\", \"vicinity\" tags only in the\n # response and ignore the others.\n if subelement.tag not in self._reqd_tags:\n element.remove(subelement)\n continue\n\n if subelement.tag == \"geometry\":\n # Extract latitude and longitude coordinates.\n lat = subelement.find(\"location\").find(\"lat\").text\n lng = subelement.find(\"location\").find(\"lng\").text\n\n # Add \"Point\" and \"coordinates\" tags to element.\n point = ET.SubElement(element, 
\"Point\")\n coords = ET.SubElement(point, \"coordinates\")\n coords.text = \"%s, %s\" %(lng, lat)\n element.remove(subelement)\n\n # Rename \"vicinity\" and \"icon\" tags to\n # \"snippet\" and \"description\" as per naming convention\n # being followed in existing Search Services.\n elif subelement.tag == \"vicinity\":\n subelement.tag = \"snippet\"\n elif subelement.tag == \"icon\":\n subelement.tag = \"description\"\n\n xmlstr += ET.tostring(element, method=\"xml\")\n total_results += 1\n\n return (xmlstr, total_results)", "def search(api_key, term, location, categories, offset, price):\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': int(params['limit']),\n 'offset': offset,\n 'categories': categories,\n 'price':price\n }\n \n find_locs = request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)\n \n return json_normalize(find_locs['businesses'])", "def get_restaurants(term, lat=\"37.788744\", lon=\"-122.411587\", radius=\"805\"):\n\n # Create OAuth2 token and store in session (we don't need to get a new one\n # for every API request)\n\n access_token = get_access_token()\n\n if not SEEDING:\n if \"access_token\" not in session:\n session[\"access_token\"] = access_token\n\n base_url = \"https://api.yelp.com/v3/businesses/search\"\n\n # Create a Unix timestamp for current day at 1:00 PM\n year = datetime.now().year\n day = datetime.now().day\n month = datetime.now().month\n open_time = datetime(year, month, day, 13, 0, 0)\n\n unix_time = time.mktime(open_time.timetuple())\n unix_time_trunc = int(unix_time)\n\n # Set parameters for our request to the business search API.\n parameters = {\n \"latitude\": lat,\n \"longitude\": lon,\n \"radius\": radius,\n \"term\": term,\n \"categories\": \"restaurants\",\n \"limit\": 24,\n \"price\": \"1,2,3\",\n \"sort_by\": \"distance\",\n \"open_at\": unix_time_trunc,\n }\n\n # FIXME: Store resulting JSON data in database...\n\n # Fetch all restaurants that fit these parameters and capture the response.\n response = requests.get(url=base_url,\n params=parameters,\n headers={\n 'Authorization': 'Bearer {token}'.format(\n token=access_token)\n })\n\n # Extract just the business info.\n return response.json()['businesses']", "def get_data(query):\n par = {\"key\": str(GOOGLE_KEY), \"query\": query}\n url = \"https://maps.googleapis.com/maps/api/place/textsearch/json\"\n req = requests.get(url, params=par)\n return req.json()", "def solr_query(config, solr_host, fq, solr_collection_name):\n # solr_collection_name = config['solr_collection_name']\n\n getVars = {'q': '*:*',\n 'fq': fq,\n 'rows': 300000}\n\n url = f'{solr_host}{solr_collection_name}/select?'\n response = requests.get(url, params=getVars)\n return response.json()['response']['docs']", "def search(self, query, num_results=10, starting_at=1):\n\t service = build(\"customsearch\", \"v1\", developerKey=self.license)\n\t result = service.cse().list( q = query, \n\t\t\t\t\t gl = self.geolocation, \n\t\t\t\t\t num = num_results,\n\t\t\t\t\t start = starting_at,\n\t\t\t\t\t cx\t= cx_key,).execute()\n\t return result", "def search_geoloc_range(request):\n\n distance = float(request.POST['distance'])\n\n latlng = (request.POST['latlng']).replace(\"(\",'').replace(\")\",'').split(', ')\n latitude = float(latlng[0])\n longitude = float(latlng[1])\n print distance\n print latitude\n print longitude\n\n # count range of nowa latlng\n radius_lat = (distance/(69.172)) #count latitude range\n min_lat = latitude - radius_lat\n max_lat = latitude + 
radius_lat\n print min_lat\n print max_lat\n\n radius_lng = (math.fabs(distance/(math.cos(longitude) * 69.172))) #count longitude range\n min_lng = longitude - radius_lng\n max_lng = longitude + radius_lng\n print min_lng\n print max_lng\n\n # if sys.version_info < (2, 7):\n # min_lat = decimal.Decimal(str(min_lat))\n # max_lat = decimal.Decimal(str(max_lat))\n # min_lng = decimal.Decimal(str(min_lng))\n # max_lng = decimal.Decimal(str(max_lng))\n\n # query db to match the range of dentist work place in db\n total = WorkPlace.objects.filter(latitude__gte=min_lat, latitude__lte=max_lat,\n longitude__gte=min_lng, longitude__lte=max_lng).count()\n\n result = []\n\n # step for how many lines separate per page. then count nowa page's start line no. and end line no.\n if 'page' in request.POST:\n page = request.POST['page']\n else:\n page = 1\n\n step = 10\n end = step * int(page)\n start = step * (int(page)-1)\n is_end = False\n\n if (end - total) < step:\n is_end = False\n WorkPlaceDict = WorkPlace.objects.filter(latitude__gte=min_lat, latitude__lte=max_lat,\n longitude__gte=min_lng, longitude__lte=max_lng).order_by('id')[start:end]\n\n for i in WorkPlaceDict:\n\n dentist_profile = i.dentistid\n did = dentist_profile.user.user.id\n\n latitude = str(i.latitude)\n longitude = str(i.longitude)\n latlng = \"(\"+latitude+\", \"+longitude+\")\"\n\n counts = _relation_counts(request,did,request.user.id)\n\n i_wrap = {\n \"clinic\": i.clinic_name,\n \"work_location\": i.location,\n \"latlng\": latlng,\n \"business_hour\": str(i.business_hour),\n \"dentistid\": did,\n \"dentistname\": _show_obj_name(did),\n \"summary\": dentist_profile.user.summary,\n \"avatar\": settings.MEDIA_URL + str(dentist_profile.user.imagesmall),\n \"patient_count\": counts[\"patient_count\"],\n \"follower_count\": counts[\"follower_count\"],\n \"status\": counts[\"status\"],\n \"is_end\": is_end\n }\n\n result.append(i_wrap)\n\n else:\n is_end = True\n i_wrap = {\n \"is_end\": is_end\n }\n\n result.append(i_wrap)\n\n template_var = {\n \"searchresult\": result\n }\n\n return JsonResponse(template_var)", "def search():\n query = request.args['query']\n # find instances of the entered word in title, tags or ingredients\n results = mongo.db.places.find({\n '$or': [\n {'name': {'$regex': query, '$options': 'i'}},\n {'tags': {'$regex': query, '$options': 'i'}},\n {'city': {'$regex': query, '$options': 'i'}},\n ]\n })\n return render_template('search.html', query=query, results=results)", "def search(latit, longit, dist, num_results):\n API_PRIVATE = os.environ.get(\"TOM_TOM_PRIVATE\")\n apiParameters = {\n 'key': API_PRIVATE,\n 'typeahead': True,\n 'limit': num_results,\n 'ofs': 0,\n 'countrySet': 'US',\n 'lat': latit,\n 'lon': longit,\n 'radius': dist,\n 'categorySet': '9361023, 7332005, 9361066, 9361051, 9361009'\n }\n apiQuery = str('https://api.tomtom.com/search/2/categorySearch/.json');\n\n response = requests.get(apiQuery, params=apiParameters)\n while True:\n try:\n jsonResponse = response.json()\n break\n except:\n response = requests.get(apiQuery, params=apiParameters)\n\n latitude_lst = []\n longitude_lst = []\n for eachStore in jsonResponse['results']:\n latitude_lst.append(eachStore['position']['lat'])\n longitude_lst.append(eachStore['position']['lon'])\n final_lat = []\n final_lon = []\n for i in range(len(latitude_lst)):\n repeat = False\n for j in range(len(final_lat)):\n if final_lat[j] == latitude_lst[i] and final_lon[j] == longitude_lst[i]:\n repeat = True\n break\n if repeat == False:\n 
final_lat.append(latitude_lst[i])\n final_lon.append(longitude_lst[i])\n return final_lat, final_lon", "def search(self, case_numbers=[], **kwargs):\n site = Site(self.place_id)\n logger.info(\n \"Executing search for {}\".format(self.place_id)\n )\n data = site.search(case_numbers=case_numbers)\n return data", "def get_cities(self, city_name: str = None):", "def search(bearer_token, price, location, categories, radius, openat):\n\n RESTAURANT_LIMIT = 3\n\n url_params = {\n 'term': 'restaurants',\n 'location': location.replace(' ', '+'),\n 'limit': RESTAURANT_LIMIT,\n 'open_at': openat,\n 'price': price,\n 'categories': categories,\n 'radius': radius\n }\n return request(API_HOST, SEARCH_PATH, bearer_token, url_params=url_params)", "def search(lat, lng, distance):\r\n\r\n url = 'https://api.foursquare.com/v2/venues/explore?ll=%s,%s&intent=browse&radius=%s&limit=50&categoryId=%s&client_id=%s&client_secret=%s&v=%s' % (lat, lng, distance, CATEGORY_ID, CLIENT_ID, CLIENT_SECRET, time.strftime(\"%Y%m%d\"))\r\n venue_list = []\r\n\r\n try:\r\n data = make_request(url)\r\n\r\n for item in data['response']['groups'][0]['items']:\r\n venue = item['venue']\r\n venue_list.append(Business(venue['name'],\r\n venue['location']['address'],\r\n venue['rating'],\r\n venue['ratingSignals'],\r\n venue['stats']['checkinsCount']))\r\n except Exception, e:\r\n print e\r\n\r\n return venue_list", "def find_facility_google_place(\n retriever: RetrieverGoogleMaps,\n facility: Facility\n):\n\n msg_fmt = \"Performing place-search for facility '{}'.\".format(facility)\n logger.info(msg_fmt)\n\n # Define a list of facility location components that can be used to identify\n # it in the Google Places API in order of decreasing granularity.\n search_input_components = [\n facility.name,\n facility.city,\n facility.state,\n facility.country,\n ]\n\n response = None\n # Perform iterative requests against the Google Places API gradually\n # decreasing granularity until a place if found.\n for i in range(len(search_input_components)):\n # Assemble a search query string by joining components that aren't\n # `None`.\n query = \" \".join(list(filter(\n lambda x: x is not None,\n search_input_components[i:],\n )))\n\n # If the remaining query components yield an empty string then we cant\n # perform a search so we're returning `None`.\n if not query:\n return None\n\n msg = \"Performing place-search for facility '{}' with query '{}'.\"\n msg_fmt = msg.format(facility, query)\n logger.debug(msg_fmt)\n\n # Perform the request against the Google Places API.\n response = retriever.search_place(query=query)\n\n if not response:\n return None\n\n # If the response has a `ZERO_RESULTS` status then repeat the request\n # gradually decreasing granularity.\n if response[\"status\"] == \"ZERO_RESULTS\":\n msg_fmt = \"No results found for query '{}'.\".format(query)\n logger.debug(msg_fmt)\n continue\n # If the response has a `OVER_QUERY_LIMIT` status then throw the\n # corresponding exception.\n elif response[\"status\"] == \"OVER_QUERY_LIMIT\":\n msg_fmt = \"Query limit exceeded.\"\n raise GooglePlacesApiQueryLimitError(msg_fmt)\n # If the request succeeded and a place was found then return the\n # response.\n elif response[\"status\"] == \"OK\":\n msg = \"Results '{}' found for query '{}'.\"\n msg_fmt = msg.format(response, query)\n logger.info(msg_fmt)\n return response\n\n return response", "def get_map_locs(self, CalSwimView):\n # Initialize query list\n query_build = []\n \n if (CalSwimView.lat and CalSwimView.lng): \n # Search query 
has a specified location thus check against intersection of points and polygons in database\n self.cursor.execute(\"SET @center = GeomFromText('POINT(%s %s)');\",(float(CalSwimView.lat), float(CalSwimView.lng)))\n self.cursor.execute(\"SET @radius = %s;\",(CalSwimView.radius))\n self.cursor.execute(\"\"\"\n SET @bbox = CONCAT('POLYGON((',\n X(@center) - @radius, ' ', Y(@center) - @radius, ',',\n X(@center) + @radius, ' ', Y(@center) - @radius, ',',\n X(@center) + @radius, ' ', Y(@center) + @radius, ',',\n X(@center) - @radius, ' ', Y(@center) + @radius, ',',\n X(@center) - @radius, ' ', Y(@center) - @radius, '))'\n );\n \"\"\")\n query_build.append(\"\"\"\n SELECT gd_id, organization, project_name_short, project_name, project_description, data_type, data_target, AsText(location)\n FROM GeoData\n WHERE Intersects( location, GeomFromText(@bbox) )\n AND\n CASE geometrytype(location)\n WHEN 'POINT' THEN\n SQRT(POW( ABS( X(location) - X(@center)), 2) + POW( ABS(Y(location) - Y(@center)), 2 )) < @radius\n ELSE\n TRUE\n END\n \"\"\")\n # Search query has at least 1 keyword\n if len(CalSwimView.keywords) > 0:\n # Just a few MySQL notes:\n # Default MySQL operation executes an \"OR\" search among terms\n # To make sure all terms are in a given result, \"AND\" search among terms, then just add prefix \"+\" before each term\n # To exclude results with a given term, just add prefix \"-\" before the term\n keyword_query = \"*, \".join(CalSwimView.keywords) +\"*\" \n query_build.append(\"\"\" \n AND\n MATCH (organization, contact, project_name, project_description, project_funder, data_target, location_description, data_collector, data_type, keyword, other)\n AGAINST ('%(KeywordQuery)s' IN BOOLEAN MODE)\n \"\"\" % {\"KeywordQuery\":keyword_query})\n else:\n # Search query does not have a specified location\n query_build.append(\"\"\"\n SELECT gd_id, organization, project_name_short, project_name, project_description, data_type, data_target, AsText(location)\n FROM GeoData\n \"\"\")\n # Search query has at least 1 keyword\n if len(CalSwimView.keywords) > 0:\n # Just a few MySQL notes:\n # Default MySQL operation executes an \"OR\" search among terms\n # To make sure all terms are in a given result, \"AND\" search among terms, then just add prefix \"+\" before each term\n # To exclude results with a given term, just add prefix \"-\" before the term\n keyword_query = \"*, \".join(CalSwimView.keywords) +\"*\" \n query_build.append(\"\"\" \n WHERE\n MATCH (organization, contact, project_name, project_description, project_funder, data_target, location_description, data_collector, data_type, keyword, other)\n AGAINST ('%(KeywordQuery)s' IN BOOLEAN MODE)\n \"\"\" % {\"KeywordQuery\":keyword_query})\n select_query = \"\\n\".join(query_build)\n #print >> CalSwimView.errors, select_query\n \n # execute SQL query using execute() method.\n self.cursor.execute(select_query)\n\n # Fetch a single row using fetchone() method.\n rows = [] \n table_data = {}\n coordinates = []\n while(1):\n row=self.cursor.fetchone()\n if row == None:\n break \n coordinates.append( str(row[7]).replace('POINT(','').replace('POLYGON((','').replace(')','') )\n rows.append( {\"c\":[{\"v\":row[0]}, {\"v\":row[1]}, {\"v\":row[2]}, {\"v\":row[3]}, {\"v\":row[4]}, {\"v\":row[5]}, {\"v\":row[6]}]} )\n \n # Return search values as json\n cols = [{\"id\":'gd_id', \"label\":'gd_id', \"type\":'string'},\n {\"id\":'organization', \"label\":'Organization', \"type\":'string'},\n {\"id\":'project_short', \"label\":'Project Short', \"type\":'string'},\n 
{\"id\":'project', \"label\":'Project', \"type\":'string'},\n {\"id\":'description', \"label\":'Description', \"type\":'string'}, \n {\"id\":'target', \"label\":'Target', \"type\":'string'}]\n table_data[\"cols\"] = cols\n table_data[\"rows\"] = rows\n # Assign table data to json table data container\n json_data = {}\n json_data[\"table_data\"] = table_data\n json_data[\"coordinates\"] = coordinates\n \n # Close DB connections \n self.cursor.close()\n \n # Return results\n return json.dumps(json_data)", "def location_search(self, latitude, longitude, radius_km, system_ids=None):\n\n\t\tpath = f'{self.BIKE_ENDPOINT}location?latitude={latitude}&longitude={longitude}&radius_km={radius_km}&{self.secret_key}'\n\t\tresponse = requests.get(path).json()\n\t\tself.check_api_key(response)\n\n\t\treturn response", "def api_nearby_places(dcid):\n req_json = {'dcids': [dcid],\n 'property': 'nearbyPlaces', 'direction': 'out'}\n url = dc.API_ROOT + dc.API_ENDPOINTS['get_property_values']\n payload = dc.send_request(url, req_json=req_json)\n prop_values = payload[dcid].get('out')\n if not prop_values:\n return json.dumps([])\n places = []\n for prop_value in prop_values:\n places.append(prop_value['value'].split('@'))\n places.sort(key=lambda x: x[1])\n dcids = [place[0] for place in places]\n data = dc.get_property_values(dcids, 'typeOf', True)\n return json.dumps(data)", "def get_locations_by_ids(self, id_list):", "def place_objects(city_id):\n city = models.storage.get('City', city_id)\n if city is None:\n abort(404)\n\n if req.method == 'GET':\n places = [obj.to_dict() for obj in city.places]\n return jsonify(places)\n\n if req.method == 'POST':\n body = req.get_json()\n if body is None:\n abort(400, 'Not a JSON')\n if body.get('name', None) is None:\n abort(400, 'Missing name')\n if body.get('user_id', None) is None:\n abort(400, 'Missing user_id')\n\n user = models.storage.get('User', body.get('user_id'))\n if user is None:\n abort(404)\n\n place = Place(**body)\n place.city_id = city_id\n place.save()\n return jsonify(place.to_dict()), 201", "def get_places_table(\n self, place_type: str, district_name: str\n ) -> List[Dict[str, Any]]:\n query = f\"{place_type} near {district_name}\"\n places = self.gmaps.places(query=query)\n\n places_table = []\n while \"next_page_token\" in places:\n\n for place in places[\"results\"]:\n place_record = self._create_place_record(\n place, place_type, district_name\n )\n places_table.append(place_record)\n\n time.sleep(2)\n places = self.gmaps.places(\n query=query, page_token=places[\"next_page_token\"]\n )\n\n places_table = self._add_distances_from_center(places_table, district_name)\n\n return places_table", "def scrapping():\r\n\r\n data_cust = {}\r\n #token, latitude, longitude, name, place_id, types_places, vicinity = [],[],[],[],[],[], []\r\n\r\n apik = 'AIzaSyDiFSOQvPbWVh3voJPSSORT9TSfKAXMy7E'\r\n urls = 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?location={},{}&radius={}&key={}&type={}&keyword={}'.format(\r\n lat_ori, long_ori, radius, apik, types_user, keyword_user)\r\n r = requests.get(urls)\r\n data_cust['0'] = r.json()\r\n\r\n \"\"\"\r\n /////////////////////////////////////////////////////////////////////////////\r\n\r\n CODE FOR NEXT PAGE TOKEN\r\n\r\n /////////////////////////////////////////////////////////////////////////////\r\n \"\"\"\r\n\r\n for number in range(10):\r\n\r\n content = str(number)\r\n if 'next_page_token' in data_cust[content].keys():\r\n sleep(5)\r\n pagetoken = data_cust[content]['next_page_token']\r\n apik 
= 'AIzaSyDiFSOQvPbWVh3voJPSSORT9TSfKAXMy7E'\r\n urls = 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?location={},{}&radius={}&type={}&keyword={}&key={}{pagetoken}'.format(\r\n lat_ori, long_ori, radius, types_user, keyword_user, apik, pagetoken=\"&pagetoken=\"+pagetoken if pagetoken else \"\")\r\n r = requests.get(urls)\r\n get = requests.post(urls)\r\n print(get)\r\n new_id = str(number+1)\r\n data_cust[new_id] = r.json()\r\n else:\r\n print(\"Done\")\r\n break\r\n\r\n latitude, longitude, name, place_id, types_places, vicinity = [], [], [], [], [], []\r\n for i in range(number+1):\r\n content = str(i)\r\n for numbers in range(len(data_cust[content]['results'])):\r\n latitude.append(data_cust[content]['results']\r\n [numbers]['geometry']['location']['lat'])\r\n longitude.append(data_cust[content]['results']\r\n [numbers]['geometry']['location']['lng'])\r\n name.append(data_cust[content]['results'][numbers]['name'])\r\n place_id.append(data_cust[content]['results'][numbers]['place_id'])\r\n types_places.append(\r\n data_cust[content]['results'][numbers]['types'][0])\r\n vicinity.append(data_cust[content]['results'][numbers]['vicinity'])\r\n\r\n datacustype = pd.DataFrame({'customer_name': name, 'customer_type': types_places, 'place_id': place_id,\r\n 'keyword': keyword_user, 'radius': radius, 'latitude_origin': lat_ori, 'longitude_origin': long_ori, 'latitude_destination': latitude,\r\n 'longitude_destination': longitude})\r\n datacustype\r\n\r\n \"\"\"\r\n /////////////////////////////////////////////////////////////////////////////\r\n\r\n PHONE NUMBER\r\n\r\n /////////////////////////////////////////////////////////////////////////////\r\n \"\"\"\r\n\r\n data_number = {}\r\n for number in datacustype['place_id'].values:\r\n apik = 'AIzaSyDiFSOQvPbWVh3voJPSSORT9TSfKAXMy7E'\r\n urls = 'https://maps.googleapis.com/maps/api/place/details/json?place_id={}&fields=name,formatted_address,rating,formatted_phone_number&key={}'.format(\r\n number, apik)\r\n r = requests.get(urls)\r\n data_number[number] = r.json()\r\n\r\n data_number\r\n\r\n datanumb = pd.DataFrame.from_dict(data_number).T.reset_index()\r\n datanumb.columns = ['place_id', 'html_attributions', 'result', 'status']\r\n datanumb\r\n\r\n name, phone, alamat = [], [], []\r\n\r\n for number in range(len(datanumb)):\r\n if datanumb['status'][number] == 'NOT_FOUND':\r\n name.append('Unknown')\r\n phone.append(0)\r\n alamat.append('-')\r\n else:\r\n name.append(datanumb['result'][number]['name'])\r\n alamat.append(datanumb['result'][number]['formatted_address'])\r\n if 'formatted_phone_number' in (datanumb['result'][number].keys()):\r\n phone.append(datanumb['result'][number]\r\n ['formatted_phone_number'])\r\n else:\r\n phone.append(0)\r\n\r\n datanumb2 = pd.DataFrame(\r\n {'customer_name': name, 'customer_address': alamat, 'phone_number': phone})\r\n datanumb2['place_id'] = datanumb['place_id']\r\n datanumb2\r\n\r\n \"\"\"\r\n /////////////////////////////////////////////////////////////////////////////\r\n\r\n DATA MERGE\r\n\r\n /////////////////////////////////////////////////////////////////////////////\r\n \"\"\"\r\n\r\n datamerge = datacustype.merge(datanumb2, how='left', on='place_id')\r\n datamerge\r\n\r\n \"\"\"\r\n /////////////////////////////////////////////////////////////////////////////\r\n\r\n DUMMY\r\n\r\n /////////////////////////////////////////////////////////////////////////////\r\n \"\"\"\r\n\r\n datadummy = datamerge.copy()\r\n datadummy\r\n\r\n datadummydrop = datadummy.drop(['customer_name_y'], 
axis=1)\r\n datadummydrop.rename(\r\n columns={'customer_name_x': 'customer_name'}, inplace=True)\r\n datadummydrop2 = datadummydrop[['customer_name', 'customer_address', 'customer_type', 'keyword', 'radius',\r\n 'place_id', 'latitude_origin', 'longitude_origin', 'latitude_destination', 'longitude_destination', 'phone_number']]\r\n datadummydrop2\r\n\r\n \"\"\"\r\n /////////////////////////////////////////////////////////////////////////////\r\n\r\n DISTANCE MATRIX\r\n\r\n /////////////////////////////////////////////////////////////////////////////\r\n \"\"\"\r\n\r\n API_key = 'AIzaSyDiFSOQvPbWVh3voJPSSORT9TSfKAXMy7E' # enter Google Maps API key\r\n gmaps = googlemaps.Client(key=API_key)\r\n\r\n distancedrive, distancewalks = [], []\r\n\r\n # Loop through each row in the data frame using pairwise\r\n for number in range(datadummydrop2.shape[0]):\r\n # Assign latitude and longitude as origin/departure points\r\n LatOrigin = datadummydrop2['latitude_origin'][number]\r\n LongOrigin = datadummydrop2['longitude_origin'][number]\r\n origins = (LatOrigin, LongOrigin)\r\n\r\n # Assign latitude and longitude from the next row as the destination point\r\n # Save value as lat\r\n LatDest = datadummydrop2['latitude_destination'][number]\r\n # Save value as lat\r\n LongDest = datadummydrop2['longitude_destination'][number]\r\n destination = (LatDest, LongDest)\r\n\r\n # pass origin and destination variables to distance_matrix function# output in meters\r\n result = gmaps.distance_matrix(origins, destination, mode='driving', avoid='tolls',\r\n units='metric', departure_time=1703981100)[\"rows\"][0][\"elements\"][0][\"distance\"][\"value\"]\r\n # 1703981100 #1606867500\r\n # append result to list\r\n distancedrive.append(result)\r\n\r\n datadummydrop2['distance_driving'] = distancedrive\r\n datadummydrop3 = datadummydrop2.sort_values(\r\n by=['distance_driving'], ascending=True, ignore_index=True)\r\n datadummydrop3\r\n\r\n \"\"\"\r\n /////////////////////////////////////////////////////////////////////////////\r\n\r\n DATAFRAME TO POSTGRE\r\n\r\n /////////////////////////////////////////////////////////////////////////////\r\n \"\"\"\r\n\r\n database = psycopg2.connect(database=\"customerDB\",\r\n user=\"postgres\",\r\n password=\"1234\",\r\n host=\"localhost\")\r\n\r\n cursor = database.cursor()\r\n\r\n for i in datadummydrop3.index:\r\n c1 = datadummydrop3['customer_name'][i]\r\n c2 = datadummydrop3['customer_address'][i]\r\n c3 = datadummydrop3['customer_type'][i]\r\n c4 = datadummydrop3['keyword'][i]\r\n c5 = datadummydrop3['radius'][i]\r\n c6 = datadummydrop3['place_id'][i]\r\n c7 = datadummydrop3['latitude_origin'][i]\r\n c8 = datadummydrop3['longitude_origin'][i]\r\n c9 = datadummydrop3['latitude_destination'][i]\r\n c10 = datadummydrop3['longitude_destination'][i]\r\n c11 = datadummydrop3['phone_number'][i]\r\n c12 = datadummydrop3['distance_driving'][i]\r\n query = \"\"\"\r\n Insert into customertarget_customerpotential(customer_name, customer_address, customer_type, keyword, radius, place_id, latitude_origin, longitude_origin, latitude_destination, longitude_destination, phone_number, distance_driving) VALUES('%s','%s','%s','%s','%s','%s',%s,%s,%s,%s,'%s',%s);\r\n \"\"\" % (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12)\r\n cursor.execute(query)\r\n cursor.close()\r\n\r\n database.commit()\r\n database.close()\r\n\r\n print(\"Data berhasil di upload\")", "def lookup():\n \"\"\" OR station info for current selection\"\"\"\n\n # check which arguements are present\n if 
request.args.get(\"city\") and request.args.get(\"state\"):\n # get all stations for a location\n\n city = request.args.get(\"city\")\n state = request.args.get(\"state\")\n\n station_list = Station.query.join(Place).\\\n filter(Place.city == city, Place.state == state).all()\n\n result = geo_stations.dump(station_list)\n\n if request.args.get(\"stream\"):\n # get station for specified url\n\n url = request.args.get(\"stream\")\n\n station_list = Station.query.join(Place).\\\n filter(Station.url_stream == url).all()\n\n result = geo_stations.dump(station_list)\n\n return jsonify(result.data)", "def geo_collect_tweets(search_term,latitude,longitude,radius):\n i = None\n tweets = []\n rep = 1\n for n in range(2): #can only search 100 tweets at a time, so run search multiple times\n \tresults = api.GetSearch(term = search_term, \n \t\tcount = 100, \n \t\tresult_type = 'recent', \n \t\tmax_id = i, #start a search from the most recent tweet id, working backwards\n \t\tgeocode =(latitude, longitude, radius))\n for tweet in results:\n tweets.append(tweet.text)\n i = tweet.id - 1 #want it to start at the tweet after the last tweet\n rep += 1\n return list(set(tweets)) #set gets rid of repititve tweets, but need to return a list", "def shortsearch(term,location):\n results = search(term,location)['listings']\n result = []\n for business in results:\n result.append([business['id'],business['name'],\"Yellow Pages\"])\n return result", "def query_api(term, location):\n response = search(term, location)\n\n businesses = response.get('businesses')\n\n if not businesses:\n print 'No businesses for {0} in {1} found.'.format(term, location)\n return\n\n business_id = businesses[0]['id']\n \n print '{0} businesses found, querying business info for the top result \"{1}\" ...'.format(\n len(businesses),\n business_id\n )\n \n response=[]\n for biz in range(len(businesses)):\n response.append(get_business(businesses[biz]['id']))\n #response = get_business(business_id)\n return response", "def geosearch(self, place):\n url = 'https://geocode.search.hereapi.com/v1/geocode'\n params = {'q': place, 'apiKey': self._key}\n try:\n response = requests.get(url, params=params)\n response.raise_for_status()\n except requests.RequestException:\n raise HereApiError(\"Request to Here.com API failed\")\n data = response.json()\n item = data.get('items')[0]\n return Position(\n address=item.get('address'),\n latitude=item.get('position', {}).get('lat'),\n longitude=item.get('position', {}).get('lng'),\n )", "def test_foodtrucks_searchByLocation(self):\n\t\tprint 'API Test: retrieving foodtrucks nearby'\n\t\turl = reverse('foodtruck_list')\n\t\tdata = [{'objectid': 0, 'latitude':37.7841781516735 , 'longitude':-122.394064145441 },\\\n\t\t\t\t{'objectid': 1, 'latitude':37.7862060821039 , 'longitude':-122.402532491346 }, \t#ft2 is closest to ft0\\\n\t\t\t\t{'objectid': 2, 'latitude':37.7800057026855 , 'longitude':-122.390270961311 },\t#ft1 is further to ft0\\\n\t\t\t\t{'objectid': 3, 'latitude':32 , 'longitude': -100}]\t\t\t\t\t\t\t\t#ft3 is far far away\n\t\tfor d in data:\n\t\t\tresponse = self.client.post(url, d, format='json')\n\t\t\tself.assertEqual(response.status_code, status.HTTP_201_CREATED)\n\t\t\n\t\tlat = data[0]['latitude'] #given latitude\n\t\tlon = data[0]['longitude']\t#given longitude\n\t\trad = 3 \t\t\t\t\t#given radius of search\n\t\tlim = 2 \t\t\t\t\t#given limit of results\n\t\t\"\"\" test both urls: w/ or w/out limit \"\"\"\n\t\tresponse = 
self.client.get('/foodtrucks/bylocation?latitude=%f&longitude=%f&radius=%f' % (lat, lon, rad), format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(len(response.data), 3)\n\t\tself.assertEqual(response.data[0]['objectid'], 0)\n\t\tself.assertEqual(response.data[1]['objectid'], 2)\t\t\n\t\tself.assertEqual(response.data[2]['objectid'], 1)\t\t\n\n\t\tresponse = self.client.get('/foodtrucks/bylocation?latitude=%f&longitude=%f' % (lat, lon), format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(len(response.data), 3)\n\n\t\tresponse = self.client.get('/foodtrucks/bylocation?latitude=%f&longitude=%f&radius=%f&limit=%d' % (lat, lon, rad, lim), format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(len(response.data), 2)\n\n\t\t\"\"\" test the radius: shrink the radius such that only ft0 will be found \"\"\"\n\t\trad = 0.01\n\t\tresponse = self.client.get('/foodtrucks/bylocation?latitude=%f&longitude=%f&radius=%f' % (lat, lon, rad), format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(len(response.data), 1)\n\t\tprint 'pass'", "def get_features_near_me(self,collection,point,radius,earth_radius=3963.2): #km = 6371\n x,y = point\n res = self.client['rephie'][collection].find( { 'geometry': { '$geoWithin': { '$centerSphere': [ [x, y ] , radius/earth_radius ] } }} )\n \n return self._make_result_list(res)", "def ExtractPlaces(places, name_field):\n ok_places = {\n 'New York': 'New York City',\n 'San Francisco': 'San Francisco',\n 'New York County': 'Manhattan',\n 'Queens County': 'Queens',\n 'Kings County': 'Brooklyn',\n 'Bronx County': 'The Bronx',\n 'Richmond County': 'Staten Island'\n }\n ret = {}\n for i, rec in enumerate(places.shapeRecords()):\n name = rec.record[name_field]\n if name not in ok_places: continue\n \n real_name = ok_places[name]\n\n sys.stderr.write('%6d %s: %s\\n' % (i, real_name, rec.shape))\n ret[real_name] = rec\n return ret", "def get_lat_lng(self):\n self.input_api = '%20'.join(self.parsed_question)\n self.input_api = ' '.join(self.parsed_question)\n self.google_api_url = 'https://maps.googleapis.com/maps/api/place/findplacefromtext/json?input={}&inputtype=textquery&fields=geometry,name,place_id&types=point_of_interest&key={}'.format (self.input_api, api_key) \n self.r = requests.get(url=self.google_api_url)\n self.data = self.r.json()\n self.name = self.data['candidates'][0]['name']\n self.place_id = self.data['candidates'][0]['place_id']\n self.lat = self.data['candidates'][0]['geometry']['location']['lat']\n self.lng = self.data['candidates'][0]['geometry']['location']['lng']\n print(self.lat, self.lng, self.place_id)\n return (self.lat, self.lng, self.place_id)", "def get_all_locations():\n rs = run_query('''select * from zlrz_office_location''')\n return [] if rs is None else list(map(lambda t: Location(t[1], t[2], t[3], t[4], t[5], t[0]), rs))", "def find_store(request):\n r = {'result':'-1'}\n \n import httplib, urllib\n\n h = httplib.HTTPConnection(\"api.remix.bestbuy.com\")\n lat = request.POST['lat']\n lon = request.POST['lon']\n distance = request.POST['distance']\n\n h.request('GET', '/v1/stores(area(%s,%s,%s))?format=json&apiKey=%s'%(lat, lon, distance, api_key))\n\n result = h.getresponse()\n logger.info( \"BestBuy Location HTTP output: %s, reason: %s\"%(result.status, result.reason) )\n response = json.loads(result.read())\n\n stores = response.get(\"stores\", [])\n if 
len(stores) > 0: \n r['result'] = stores[0]\n\n return JSONHttpResponse(r)", "def get_service_locations(self):\n url = URLS['servicelocation']\n headers = {\"Authorization\": \"Bearer {}\".format(self.access_token)}\n r = requests.get(url, headers=headers)\n r.raise_for_status()\n return r.json()", "def get_locations(self):\n try:\n output_json = {}\n total_locations = list(self.mongo_db_object.find_all(AppConfigurations.MONGO_DATABASE,\n AppConstants.LOCATION.MONGO_LOCATION_COLLECTION_NAME))\n output_json = total_locations\n return AppConstants.result_success_template(output_json)\n\n except Exception as e:\n print(\"Error while fetching the Location Data.\", str(e))", "def resources_search(request):\n\tif request.method == 'GET':\n\t\tparams = request.GET\n\telif request.method == 'POST':\n\t\tparams = request.POST\n\telse:\n\t\treturn HttpResponse(status=405)\n\n\ttype = params.get('type','layer')\n\tqset = Layer.objects.all().order_by('title') if type == 'layer' else Map.objects.all().order_by('title')\n\n\tresources_list= []\n\n\tfor item in qset:\n\t\t resources_list.append({\n\t\t\t'id' : item.id,\n\t\t\t'title' : item.title,\n\t\t})\n\n\tresult = {'rows': resources_list,'total': qset.count()}\n\treturn HttpResponse(json.dumps(result))", "def update():\n\n # Ensure parameters are present\n if not request.args.get(\"sw\"):\n raise RuntimeError(\"missing sw\")\n if not request.args.get(\"ne\"):\n raise RuntimeError(\"missing ne\")\n\n # Ensure parameters are in lat,lng format\n if not re.search(\"^-?\\d+(?:\\.\\d+)?,-?\\d+(?:\\.\\d+)?$\", request.args.get(\"sw\")):\n raise RuntimeError(\"invalid sw\")\n if not re.search(\"^-?\\d+(?:\\.\\d+)?,-?\\d+(?:\\.\\d+)?$\", request.args.get(\"ne\")):\n raise RuntimeError(\"invalid ne\")\n\n # Explode southwest corner into two variables\n sw_lat, sw_lng = map(float, request.args.get(\"sw\").split(\",\"))\n\n # Explode northeast corner into two variables\n ne_lat, ne_lng = map(float, request.args.get(\"ne\").split(\",\"))\n\n # Find 10 cities within view, pseudorandomly chosen if more within view\n if sw_lng <= ne_lng:\n\n # Doesn't cross the antimeridian\n rows = db.execute(\"\"\"SELECT * FROM places\n WHERE :sw_lat <= latitude AND latitude <= :ne_lat AND (:sw_lng <= longitude AND longitude <= :ne_lng)\n GROUP BY country_code, place_name, admin_code1\n ORDER BY RANDOM()\n LIMIT 10\"\"\",\n sw_lat=sw_lat, ne_lat=ne_lat, sw_lng=sw_lng, ne_lng=ne_lng)\n\n else:\n\n # Crosses the antimeridian\n rows = db.execute(\"\"\"SELECT * FROM places\n WHERE :sw_lat <= latitude AND latitude <= :ne_lat AND (:sw_lng <= longitude OR longitude <= :ne_lng)\n GROUP BY country_code, place_name, admin_code1\n ORDER BY RANDOM()\n LIMIT 10\"\"\",\n sw_lat=sw_lat, ne_lat=ne_lat, sw_lng=sw_lng, ne_lng=ne_lng)\n\n # Output places as JSON\n return jsonify(rows)", "def get_cities(self, city_name: str = \"\"):", "def extract_listing_location_from_result(soup, location):\r\n for div in soup.find_all(name='div', class_='pdate'):\r\n for city in div.find(name='span'):\r\n location.append(city)\r\n # print(locations)\r\n return location", "def searchNearbyHospitals(lat, lng, radius = 5000, limit_search_count = 10):\n # Initialising the GooglePlaces constructor \n google_places = GooglePlaces(GOOGLE_MAPS_API_KEY) \n \n query_result = google_places.nearby_search( \n lat_lng ={'lat': lat, 'lng': lng}, \n radius = radius, \n types =[types.TYPE_HOSPITAL]) \n \n nearby_hospitals = []\n\n for place in query_result.places[:limit_search_count]: \n hospital = dict()\n place.get_details()\n 
hospital['hospitalName'] = place.name\n hospital['address'] = place.formatted_address\n hospital['phoneNumber'] = place.local_phone_number \n nearby_hospitals.append(hospital)\n\n if(len(nearby_hospitals) == 0):\n nearby_hospitals = searchNearbyHospitals(lat = lat, lng = lng, radius = radius+5000)\n\n return nearby_hospitals", "def test_search_service_area_list(self):\n search_point = {\n 'lat': 14.57,\n 'lng': 78.6\n }\n provider = ProviderFactory.create()\n # Creating 18 providers for listing and testing pagination.\n for num in range(18):\n ServiceAreaFactory.create(provider=provider)\n\n url = reverse('search', kwargs=search_point)\n response = self.client.get(url)\n data = response.json()\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(data['count'], 18)\n # Page 1 should contain only 10 results, because of pagination\n self.assertEqual(len(data['results']), 10)\n self.assertIsNotNone(data['next'])\n\n response = self.client.get(data['next'])\n data = response.json()\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n # Page 2 should contain only 8 results\n self.assertEqual(len(data['results']), 8)\n self.assertIsNone(data['next'])", "def geocoded(self):\n return self.get_queryset().filter(latitude__isnull=False,\n longitude__isnull=False)", "def make_searches(vase):\n params = {\n 'ch': vase.trendall_ch,\n 'no': vase.trendall_no,\n 'city': vase.location.city_name,\n 'col': vase.location.collection_name,\n 'id': vase.location.collection_id,\n }\n return [\n 'trendall {ch}.{no}'.format(**params),\n 'trendall {ch}/{no}'.format(**params),\n '{city} {id}'.format(**params),\n '{col} {id}'.format(**params),\n ]", "def stores(request):\n search = request.GET.get('search')\n paginator = LimitOffsetPagination()\n queryset = Store.objects.all()\n if search:\n queryset = queryset.filter(\n Q(name__contains=search) or\n Q(postcode__contains=search)\n )\n queryset = queryset.order_by('postcode', 'name')\n\n if request.GET.get('limit'):\n page = paginator.paginate_queryset(queryset)\n if page is not None:\n serializer = StoreSerializer(page, many=True)\n return paginator.get_paginated_response(serializer.data)\n else:\n queryset = queryset.order_by('name')\n\n serializer = StoreSerializer(queryset, many=True)\n return Response(serializer.data)", "def __init__(self, place):\n self.place = place\n self.cleanplace = parser(self.place)\n self.key = googlemaps.Client(key=os.environ.get(\"GMAP_KEY\") or GMAP_KEY)\n self.response = self.search()\n self.latitude = self.response[0][\"geometry\"][\"location\"][\"lat\"]\n self.longitude = self.response[0][\"geometry\"][\"location\"][\"lng\"]\n self.address = self.response[0][\"formatted_address\"]\n self.wiki = self.response[0][\"address_components\"][1][\"long_name\"]", "def get_cities():\n _, cities = API.cities(limit=1000)\n result = []\n for city in cities['results']:\n result.append(city['city'])\n return result", "def search(self, cp, min_surf, max_price, ad_type, nb_room_min, raw=True):\n _cp = []\n if type(cp) is list:\n for c in cp:\n _cp.append(self.get_location(c))\n else:\n _cp.append(self.get_location(cp))\n \n SEARCH_PAYLOAD = {\n \"pageIndex\": 1,\n \"pageSize\": 50000,\n \"query\": {\n \"bedrooms\": [],\n \"includeNewConstructions\": True,\n \"inseeCodes\": _cp,\n \"maximumPrice\": max_price,\n \"minimumLivingArea\": min_surf,\n \"realtyTypes\": 3,\n \"rooms\": range(nb_room_min, 5),\n \"sortBy\": 0,\n \"transactionType\": self._map_type(ad_type)\n }\n }\n \n SEARCH_URL = 
\"https://api-seloger.svc.groupe-seloger.com/api/v1/listings/search\"\n \n r = requests.post(SEARCH_URL, data=json.dumps(SEARCH_PAYLOAD), headers=self.headers)\n data = r.json()\n ret = {\n 'id': [],\n 'source': self.website\n }\n if raw:\n ret['raw'] = data\n for i in data['items']:\n ret['id'].append(i['id'])\n return ret", "def search(api_key, term, location):\n\n\n\n url_params = {\n\n 'term': term.replace(' ', '+'),\n\n 'location': location.replace(' ', '+'),\n\n 'limit': SEARCH_LIMIT\n\n }\n\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)", "def search(location=DEFAULT_LOCATION, api_key=API_KEY):\n latitude, longtitude = location[0], location[1]\n url_params = {\"page\": \"1\", \"lon\": longtitude, \"lat\": latitude, \"distance\": \"5\"}\n\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)['result']['data']", "def search(self):\n\n if (self.latitude is None or self.longitude is None):\n raise Exception('Please specify both a latitude and longitude')\n\n if (self.access_token == '' or self.access_token is None):\n raise Exception('Please specify a valid access token')\n\n # Book-keeping\n id_limit = 50 # Only 50 per /?ids= call allowed by FB\n curr_time = int(round(time.time()))\n venues_count = 0\n events_count = 0\n\n # Initial places request info\n place_params = {\n 'type': 'place',\n 'q': self.query,\n 'center': str(self.latitude) + ',' + str(self.longitude),\n 'distance': self.distance,\n 'limit': 1000,\n 'fields': 'id',\n 'access_token': self.access_token\n }\n place_url = ('https://graph.facebook.com/' + self.version + '/search?' +\n urllib.urlencode(place_params))\n\n # Grab places and prepare to get events\n\n places_data = r.get(place_url).json()['data']\n venues_count = len(places_data)\n\n # Batch places based on FB id_limit\n ids = []\n temp_lst = []\n for place in places_data:\n temp_lst.append(place['id'])\n if len(temp_lst) >= id_limit:\n ids.append(temp_lst)\n temp_lst = []\n if len(ids) == 0:\n ids.append(temp_lst)\n\n # Inner function to convert a list of\n # ids to a request url for events\n def ids_to_url(id_lst):\n events_fields = [\n 'id',\n 'type',\n 'name',\n 'cover.fields(id,source)',\n 'picture.type(large)',\n 'description',\n 'start_time',\n 'end_time',\n 'category',\n 'attending_count',\n 'declined_count',\n 'maybe_count',\n 'noreply_count'\n ]\n\n fields = [\n 'id',\n 'name',\n 'about',\n 'emails',\n 'cover.fields(id,source)',\n 'picture.type(large)',\n 'location',\n 'events.fields(' + ','.join(events_fields) + ')'\n ]\n\n timing = ('.since(' + str(self.since) + ')' +\n ('' if self.until is None else '.until(' + str(self.until) + ')'))\n\n events_params = {\n 'ids': ','.join(id_lst),\n 'access_token': self.access_token,\n 'fields': ','.join(fields) + timing\n }\n\n events_url = ('https://graph.facebook.com/' + self.version + '/?' 
+\n urllib.urlencode(events_params))\n\n return r.get(events_url).json()\n\n # Event results\n results = [ids_to_url(id_lst) for id_lst in ids]\n\n # Inner function to convert a list of\n # of venue result events to a list of\n # well-formatted events\n def venue_to_events(venue):\n venue_events = []\n if 'events' in venue and len(venue['events']['data']) > 0:\n for event in venue['events']['data']:\n event_r = dict()\n event_r['id'] = event['id']\n event_r['name'] = event['name']\n event_r['type'] = event['type']\n event_r['cover_picture'] = event['cover']['source'] if 'cover' in event else None\n event_r['profile_picture'] = event['picture']['data']['url'] if 'picture' in event else None\n event_r['description'] = event['description'] if 'description' in event else None\n event_r['start_time'] = event['start_time'] if 'start_time' in event else None\n event_r['end_time'] = event['end_time'] if 'end_time' in event else None\n event_r['time_from_now'] = self.calculate_start_time_diff(curr_time, event['start_time'])\n event_r['category'] = event['category'] if 'category' in event else None\n event_r['distance'] = (self.haversine_distance([venue['location']['latitude'],\n venue['location']['longitude']],\n [self.latitude, self.longitude]) * 1000\n if 'location' in venue else None)\n\n event_r['stats'] = {\n 'attending': event['attending_count'],\n 'declined': event['declined_count'],\n 'maybe': event['maybe_count'],\n 'noreply': event['noreply_count']\n }\n\n event_r['venue'] = {\n 'id': venue['id'],\n 'name': venue['name'],\n 'about': venue['about'] if 'about' in venue else None,\n 'emails': venue['emails'] if 'emails' in venue else None,\n 'cover_picture': venue['cover']['source'] if 'cover' in venue else None,\n 'profile_picture': venue['picture']['data']['url'] if 'picture' in venue else None,\n 'location': venue['location'] if 'location' in venue else None\n }\n\n venue_events.append(event_r)\n return venue_events\n\n # Grab the events\n events = []\n for result in results:\n for venue_id in result.keys():\n events.extend(venue_to_events(result[venue_id]))\n events_count = len(events)\n\n # Sort if specified\n if self.sort is not None:\n events.sort(self.allowed_sorts[self.sort])\n\n # Return events w/metadata\n return {\n 'events': events,\n 'metadata': { 'venues': venues_count, 'events': events_count }\n }", "def prepare_actor_searchable_pob(self, object):\n if object.POB is not None:\n locations = []\n\n locations.append('/api/v1/location/{0}/'.format(object.POB.id))\n if object.POB.parent_location is not None:\n locations += self.get_locations_recursively(\n object.POB.parent_location.id\n )\n\n return locations\n else:\n return ''", "def woeid_search(query):\n query = 'q=select * from geo.places where text=\"%s\"&format=json' % query\n body = requests.get('http://query.yahooapis.com/v1/public/yql?' 
+ query)\n return body", "def search_definition(self, position: Position, uri: str) -> List[Location]:\n raise NotImplementedError", "def places_for_distrito_and_seccion(distrito_id, seccion_id):\n # Add school number search on top\n extract_integer = re.compile(r\"^.*?(\\d+)\").match\n match = extract_integer(request.args.get('nombre'))\n if match:\n n = int(match.group(1))\n else:\n n = -1\n\n search_type = request.args.get('search_type')\n nombre = request.args.get('nombre').replace(\"'\", \"''\")\n direccion = request.args.get('direccion').replace(\"'\", \"''\")\n localidad = request.args.get('localidad').replace(\"'\", \"''\")\n\n q_sch_num = \"\"\"\n SELECT esc.ogc_fid, esc.nombre, esc.direccion, esc.localidad,\n st_asgeojson(esc.wkb_geometry_4326) AS geojson,\n 1 as score\n FROM escuelasutf8 esc\n WHERE esc.id_distrito = '%(distrito)s'\n AND esc.id_seccion = '%(seccion)s'\n AND esc.num_escuela = '%(var)s'\n \"\"\"\n\n q_sim = \"\"\"\n SELECT esc.ogc_fid, esc.nombre, esc.direccion, esc.localidad,\n st_asgeojson(esc.wkb_geometry_4326) AS geojson,\n similarity(%(key)s, '%(val)s') as score\n FROM escuelasutf8 esc\n WHERE esc.id_distrito = '%(distrito)s'\n AND esc.id_seccion = '%(seccion)s'\n AND similarity(%(key)s, '%(val)s') IS NOT NULL\"\"\"\n\n q_end = \"\"\" ORDER BY score DESC LIMIT 40\"\"\"\n\n q = q_sch_num % {'distrito': distrito_id, 'seccion': seccion_id, 'var': n}\n if search_type is not None:\n if search_type == \"\":\n q += ' UNION ' + q_sim % {'distrito': distrito_id,\n 'seccion': seccion_id,\n 'key': 'nombre',\n 'val': nombre}\n\n q += ' UNION ' + q_sim % {'distrito': distrito_id,\n 'seccion': seccion_id,\n 'key': 'direccion',\n 'val': direccion}\n elif search_type == \"n\":\n q += ' UNION ' + q_sim % {'distrito': distrito_id,\n 'seccion': seccion_id,\n 'key': 'nombre',\n 'val': nombre}\n elif search_type == \"a\":\n q += ' UNION ' + q_sim % {'distrito': distrito_id,\n 'seccion': seccion_id,\n 'key': 'direccion',\n 'val': direccion}\n elif search_type == \"l\":\n q += ' UNION ' + q_sim % {'distrito': distrito_id,\n 'seccion': seccion_id,\n 'key': 'localidad',\n 'val': localidad}\n\n q = q + q_end\n\n r = [dict(e.items() + [('geojson', json.loads(e['geojson']))])\n for e in db.query(q)]\n\n return flask.Response(flask.json.dumps(r),\n mimetype='application/json')", "def solr_sources(self):\n # conn = pysolr.Solr(settings.SOLR['SERVER'])\n q = {\n \"fq\": ['type:source', f'archive_i:{self.pk}'],\n \"fl\": [\"pk\",\n \"public_images_b\",\n 'display_name_s',\n 'cover_image_i',\n 'source_type_s',\n 'date_statement_s',\n 'surface_type_s'],\n \"rows\": 10000,\n \"sort\": [\"shelfmark_ans asc\"]\n }\n\n res = SolrConnection.search(\"*:*\", **q)\n if res.hits > 0:\n return res.docs\n else:\n return []", "def search(api_key, term, location, offset, RADIUS_SIZE):\n #DEBUG\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'offset': offset,\n 'location': location.replace(' ', '+'),\n 'radius': RADIUS_SIZE,\n 'limit': 50\n }\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)", "def test_get_place(self):\n req_data = jsonify({\"lat\": 28.615551,\n \"long\": 77.224091})\n res = req.post(get_place_url, json=req_data)\n self.assertEqual(\"200\", json.loads(res.text)[\"Status\"])", "def _get_catalog_results(self, featured=False, **kw):\n if 'context' in kw.keys():\n kw['path'] = {'query': '/'.join(kw['context'].getPhysicalPath())}\n\n types = ('Article', 'Blog Entry', )\n states = ('published', )\n sort = 'Date'\n \n results = self.qrymethod(portal_type=types,\n 
review_state=states,\n is_featured=featured,\n sort_on=sort, \n sort_order='descending',\n **kw)\n\n return results" ]
[ "0.7075361", "0.6944823", "0.6674378", "0.6600828", "0.65889496", "0.65875274", "0.653786", "0.65263355", "0.64731616", "0.6373014", "0.6317828", "0.63092935", "0.63040537", "0.6248984", "0.62213624", "0.61126393", "0.61077696", "0.60606116", "0.6059774", "0.6050336", "0.6047025", "0.60104173", "0.5988838", "0.59258217", "0.5897937", "0.5845283", "0.5833624", "0.5833307", "0.5816919", "0.5815829", "0.580547", "0.5804477", "0.577906", "0.5778004", "0.5764377", "0.5754412", "0.5741478", "0.57208174", "0.5701565", "0.5689241", "0.56519765", "0.56397974", "0.5629528", "0.562848", "0.5619793", "0.5600478", "0.5583989", "0.55277985", "0.5518801", "0.55161124", "0.5513191", "0.54744136", "0.54675466", "0.5458458", "0.54549533", "0.5440202", "0.5440143", "0.543512", "0.54350746", "0.5428655", "0.54190993", "0.541565", "0.5412776", "0.54100084", "0.54053676", "0.5401793", "0.5400192", "0.5395679", "0.539264", "0.53882474", "0.53853285", "0.5383897", "0.53825754", "0.5375447", "0.53741455", "0.5372193", "0.53538066", "0.5353436", "0.53444886", "0.53386486", "0.53342086", "0.5321239", "0.5307542", "0.5300896", "0.529647", "0.52904135", "0.52874196", "0.5279329", "0.5274605", "0.526213", "0.5261351", "0.52585214", "0.5237688", "0.5215464", "0.52136004", "0.5201155", "0.5199661", "0.5189638", "0.51842207", "0.5175898", "0.517512" ]
0.0
-1
idAIV, EC, SC, PC, RV, OC, Total, FK_Organization, FK_Equipment
def loadAIVValues(self, values): #The 'values' variable has the following form if values[1] == values[2] == values[3] == values[4] == values[5] == 0.00: #wx.MessageBox("Es un total") self.aiv_totalok.SetValue(True) self.changeTextsInputs(True) self.aiv_total.SetValue(str(values[6])) else: self.changeTextsInputs(False) self.aiv_totalok.SetValue(False) #wx.MessageBox("Costos individuales") self.aiv_equipmentCost.SetValue(str(values[1])) self.aiv_serviceCost.SetValue(str(values[2])) self.aiv_personnelCost.SetValue(str(values[3])) self.aiv_resellCost.SetValue(str(values[4])) self.aiv_otherCost.SetValue(str(values[5])) self.aiv_total.SetValue(str(values[6]))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ItemDed(_posagi, e17500, e18400, e18500, e18800, e18900,\n e20500, e20400, e19200, e20550, e20600, e20950, e19500, e19570,\n e19400, e19550, e19800, e20100, e20200, e20900, e21000, e21010,\n MARS, _sep, c00100, ID_ps, ID_Medical_frt, ID_Casualty_frt,\n ID_Miscellaneous_frt, ID_Charity_crt_Cash, ID_Charity_crt_Asset,\n ID_prt, ID_crt, ID_StateLocalTax_HC, ID_Charity_frt, puf):\n # Medical\n c17750 = ID_Medical_frt * _posagi\n c17000 = max(0, e17500 - c17750)\n\n # State and Local Income Tax, or Sales Tax\n _statax = max(e18400, 0)\n\n # Other Taxes (including state and local)\n c18300 = _statax + e18500 + e18800 + e18900\n\n # Casulty\n if e20500 > 0:\n c37703 = e20500 + ID_Casualty_frt * _posagi\n c20500 = c37703 - ID_Casualty_frt * _posagi\n else:\n c37703 = 0.\n c20500 = 0.\n\n # Miscellaneous\n c20750 = ID_Miscellaneous_frt * _posagi\n if puf:\n c20400 = e20400\n else:\n c20400 = e20550 + e20600 + e20950\n c20800 = max(0, c20400 - c20750)\n\n # Interest paid deduction\n if puf:\n c19200 = e19200\n else:\n c19200 = e19500 + e19570 + e19400 + e19550\n\n # Charity (assumes carryover is non-cash)\n base_charity = e19800 + e20100 + e20200\n if base_charity <= 0.2 * _posagi:\n c19700 = base_charity\n else:\n lim50 = min(ID_Charity_crt_Cash * _posagi, e19800)\n lim30 = min(ID_Charity_crt_Asset * _posagi, e20100 + e20200)\n c19700 = min(0.5 * _posagi, lim30 + lim50)\n\n charity_floor = ID_Charity_frt * _posagi # frt is zero in present law\n c19700 = max(0, c19700 - charity_floor)\n\n # Gross Itemized Deductions\n\n c21060 = (e20900 + c17000 + (1 - ID_StateLocalTax_HC) * c18300 + c19200 +\n c19700 + c20500 + c20800 + e21000 + e21010)\n\n # Limitations on deductions excluding medical, charity etc\n _phase2_i = ID_ps[MARS - 1]\n\n _nonlimited = c17000 + c20500 + e19570 + e21010 + e20900\n _phase2_i = ID_ps[MARS - 1]\n _limitratio = _phase2_i / _sep\n\n if c21060 > _nonlimited and c00100 > _limitratio:\n dedmin = ID_crt * (c21060 - _nonlimited)\n dedpho = ID_prt * max(0, _posagi - _limitratio)\n c21040 = min(dedmin, dedpho)\n else:\n c21040 = 0.0\n\n # Itemized deductions amount after limitation if any\n c04470 = c21060 - c21040\n\n # the variables that are casted as floats below can be either floats or\n # ints depending n which if/else branches they follow in the above code.\n # they need to always be the same type\n\n c20400 = float(c20400)\n c20400 = float(c20400)\n c19200 = float(c19200)\n c37703 = float(c37703)\n c20500 = float(c20500)\n\n return (c17750, c17000, _statax, c18300, c37703, c20500,\n c20750, c20400, c19200, c20800, c19700, c21060, _phase2_i,\n _nonlimited, _limitratio, c04470, c21040)", "def _amount_all(self, cr, uid, ids,field_name, arg, context={}):\n res={}\n for record in self.browse(cr, uid, ids, context=context):\n res[record.id] = { 'amount_untaxed': 0.0, 'amount_tax': 0.0, 'amount_total': 0.0}\n amount_untaxed = 0.0\n amount_tax = 0.0\n amount_total = 0.0\n\t if not record.allowances_lines_after and record.allowances_lines_before:\n \tfor line in record.allowances_lines_before:\n \tamount_untaxed += line.amount_untaxed\n \tamount_tax += line.amount_tax\n \tamount_total += line.amount_total\n \tres[record.id]['amount_untaxed'] = amount_untaxed \n \tres[record.id]['amount_tax'] = amount_tax \n \tres[record.id]['amount_total'] = amount_total \n\n\t elif record.allowances_lines_after and record.allowances_lines_before :\n \tfor line in record.allowances_lines_after:\n \tamount_untaxed += line.amount_untaxed\n \tamount_tax += line.amount_tax\n \tamount_total += 
line.amount_total\n \tres[record.id]['amount_untaxed'] = amount_untaxed \n \tres[record.id]['amount_tax'] = amount_tax \n \tres[record.id]['amount_total'] = amount_total \n return res", "def info_equipment_get():\n equipment = _equipment_by_group()\n return equipment, 200", "def _total_price(self, cr, uid, ids, field_name, arg, context={}):\n res = {}\n for record in self.browse(cr, uid, ids, context=context):\n val = 0.0\n for line in record.item_ids:\n val += line.price\n res[record.id] = val \n return res", "def CalcularImpacto(request, id_item):\n item = Item.objects.get(id=id_item)\n\n item.complejidadtotal = impacto_complejidad(id_item)\n item.costototal = impacto_costo(id_item)\n item.save()\n messages.info(request, \"Se calculo corresctamente el impacto de modificacion del itrm %s .\" % item)\n return HttpResponseRedirect('/admin/todo/item')", "def GetIPCVoucherAttrControlSummary(ivac):\n out_str = \"\"\n fmt = \"{c: <#018x} {c.ivac_refs: <10d} {c.ivac_port: <#018x} {c.ivac_table: <#018x} {c.ivac_table_size: <8d} {c.ivac_key_index: <5d} {growing: <5s} {c.ivac_freelist: <5d}\"\n growing_str = \"\"\n \n if unsigned(ivac) == 0:\n return \"{: <#018x}\".format(ivac)\n\n if unsigned(ivac.ivac_is_growing):\n growing_str = \"Y\"\n out_str += fmt.format(c=ivac, growing = growing_str)\n return out_str", "def total(request) : # foreign key 가 아니라서 ORM 상에서 JOIN 이 안됨;;\n data = VisitOccurrence.objects.values('visit_concept_id').annotate(Count('visit_concept_id'))\n\n for tmp in data : # JOIN 안해서 생긴 불필요한 반복문(N+1 문제)\n concept_id = tmp['visit_concept_id']\n concept_name = Concept.objects.filter(concept_id=concept_id).values('concept_name')\n tmp['concept_name'] = concept_name[0]['concept_name']\n\n return Response(data)", "def add_eqns(df):\n\n def lett(col): return alpha[list(df.columns).index(col)]\n for i in df.index:\n row = str(i + 3)\n if df.loc[i, 'Deleted'] != 'Total':\n df.loc[i, 'M/M_Total'] = '=IF(' + lett('Deleted') + row + '<>\"\",0,' + lett('# Molds') + row + '*' + lett('Price/Mold') + row + '+' + lett('Model Price') + row + ')'\n df.loc[i, 'Unit_Total'] = '=IF(' + lett('Deleted') + row + '<>\"\",0,' + lett('# Units') + row + '*' + lett('Price/Unit') + row + ')'\n df.loc[i, 'Line_Total'] = '=IF(' + lett('Deleted') + row + '<>\"\",0,' + 'SUM(' + lett('M/M_Total') + row + ',' + lett('Unit_Total') + row + '))'\n return df", "def update(self, arm, reward, alpha=0.05, l=0.05):\n\n # Get context\n context = self.context.iloc[self.t, :]\n\n\n # Add price\n price_dict = {}\n productid_dict = {}\n \n for var in context.keys():\n price_dict[var + '_price'] = context[var] * self.df_arm_dummies.ix[arm, 'price']\n\n for i in range(10, 26):\n productid_dict[var + '_productid_' + str(i)] = context[var] * \\\n self.df_arm_dummies.ix[arm, 'productid_' + str(i)]\n\n print(\"Price dict is\")\n print(price_dict)\n print(productid_dict)\n \n\n#Age_price = context.Age * self.df_arm_dummies.ix[arm, 'price']\n#Agent_Linux_price = self.df_arm_dummies.ix[arm, 'price'] * context.Agent_Linux\n#Agent_OSX_price = self.df_arm_dummies.ix[arm, 'price'] * context.Agent_OSX\n#Agent_Windows_price = self.df_arm_dummies.ix[arm, 'price'] * context.Agent_Windows\n#Agent_mobile_price = self.df_arm_dummies.ix[arm, 'price'] * context.Agent_mobile\n#\n#\n#Language_EN_price = self.df_arm_dummies.ix[arm, 'price'] * context.Language_EN\n#Language_GE_price = self.df_arm_dummies.ix[arm, 'price'] * context.Language_GE\n#Language_NL_price = self.df_arm_dummies.ix[arm, 'price'] * context.Language_NL\n#Referer_Bing_price = 
self.df_arm_dummies.ix[arm, 'price'] * context.Referer_Bing\n#Referer_Google_price = self.df_arm_dummies.ix[arm, 'price'] * context.Referer_Google\n#\n\n combined = np.append(context, self.df_arm_dummies.iloc[arm, :])#.reshape(-1, 1)\n\n prices = prict_dict.items()\n\n # Combine with arm\n combined = np.append(combined,\n [Age_price,\n Agent_Linux_price,\n Agent_OSX_price,\n Agent_Windows_price,\n Agent_mobile_price,\n Language_EN_price,\n Language_GE_price,\n Language_NL_price,\n Referer_Bing_price,\n Referer_Google_price\n ]).reshape(-1, 1)\n \n if reward > 0:\n reward = 1\n else:\n reward = -1\n\n # Bayes\n self.B = self.B + np.dot(context, context)\n \n self.f = self.f + combined * reward\n\n self.mu_hat = np.dot(np.linalg.inv(self.B), self.f)\n\n self.mu = min(5, self.mu + 0.1 * (-0.5 + int(bool(reward))))\n\n # Update time step\n self.t += 1", "def __cacula_agio(table):\n from m2py.misc.vectorize import column\n\n PV = table[0][-1]\n total = sum(column(table, 1))\n premium = total/PV - 1\n return round(premium, 2)", "def populateTechnicalMaintenanceEquipmentDefaults(self):\n params = {}\n for i, e in enumerate(g_currentVehicle.item.eqsLayout):\n params['eId%s' % (i + 1)] = e.intCD if e else None\n\n self.populateTechnicalMaintenanceEquipment(**params)\n return", "def _get_bulk_cad_assessment_data(data):\n # pylint: disable=too-many-locals\n all_cads = db.session.query(\n CAD,\n all_models.Assessment.id,\n all_models.Assessment.title,\n all_models.Assessment.assessment_type,\n all_models.Assessment.slug,\n CAV.attribute_value,\n CAV.attribute_object_id,\n ).join(\n all_models.Assessment, CAD.definition_id == all_models.Assessment.id\n ).outerjoin(\n CAV, CAD.id == CAV.custom_attribute_id,\n ).filter(\n all_models.Assessment.id.in_(data[\"ids\"]),\n CAD.definition_type == 'assessment',\n )\n response_dict = OrderedDict()\n for (cad, asmt_id, asmt_title, asmt_type, asmt_slug,\n cav_value, cav_person_id) in all_cads:\n multi_choice_options = \",\".join(\n sorted(cad.multi_choice_options.split(','))\n ).lower() if cad.multi_choice_options else cad.multi_choice_options\n item_key = (cad.title, cad.attribute_type, cad.mandatory,\n multi_choice_options, cad.multi_choice_mandatory)\n item_response = response_dict.get(\n item_key,\n {\n \"attribute\": {\n \"attribute_type\": cad.attribute_type,\n \"title\": cad.title,\n \"default_value\": cad.default_value,\n \"multi_choice_options\": cad.multi_choice_options,\n \"multi_choice_mandatory\": cad.multi_choice_mandatory,\n \"mandatory\": cad.mandatory,\n \"placeholder\": None,\n },\n \"related_assessments\": {},\n \"assessments_with_values\": [],\n }\n )\n if cav_value:\n item_response[\"assessments_with_values\"].append({\n \"id\": asmt_id,\n \"title\": asmt_title,\n \"attribute_value\": cav_value,\n \"attribute_person_id\": cav_person_id,\n })\n if not item_response[\"related_assessments\"].get(asmt_type):\n item_response[\"related_assessments\"][asmt_type] = []\n item_response[\"related_assessments\"][asmt_type].append({\n \"id\": asmt_id,\n \"attribute_definition_id\": cad.id,\n \"slug\": asmt_slug,\n })\n response_dict[item_key] = item_response\n response = []\n\n for _, cad_item in response_dict.items():\n related_assessments = cad_item[\"related_assessments\"]\n cad_item[\"related_assessments\"] = {\"values\": []}\n asmt_count = 0\n for asmt_type, assessments in related_assessments.items():\n cad_item[\"related_assessments\"][\"values\"].append({\n \"assessments_type\": asmt_type,\n \"assessments\": assessments\n })\n asmt_count += 
len(assessments)\n cad_item[\"related_assessments\"][\"count\"] = asmt_count\n response.append(cad_item)\n return response", "def income(self):\r\n self.checkingConnection()\r\n model = QSqlQueryModel()\r\n model.setQuery('''\r\n SELECT Clients.id, Clients.date, Clients.hour, Clients.name, \r\n (Clients.carne + Clients.pollo) AS empanadas,\r\n Clients.total, Clients.value FROM Clients''', self.db)\r\n self.setModel(model)", "def sub_tax_sales_transfer_pricing(manager, df_fields, seq_recs, seq_reservas):\n # df_hotel = manager.get_dataframe(tables['dwc_bok_t_canco_hotel'])\n # df_circuit = manager.get_dataframe(tables['dwc_bok_t_canco_hotel_circuit'])\n # df_other = manager.get_dataframe(tables['dwc_bok_t_canco_other'])\n # df_transfer = manager.get_dataframe(tables['dwc_bok_t_canco_transfer'])\n # df_endow = manager.get_dataframe(tables['dwc_bok_t_canco_endowments'])\n # df_extra = manager.get_dataframe(tables['dwc_bok_t_canco_extra'])\n\n df_aux = df_fields.select(\"operative_incoming\", \"booking_id\")\n\n df_hotel = sub_tax_sales_transfer_pricing_aux(manager, df_hotelt, seq_recs, seq_reservas, df_aux)\n df_circuit = sub_tax_sales_transfer_pricing_aux(manager, df_circuitt, seq_recs, seq_reservas, df_aux)\n df_other = sub_tax_sales_transfer_pricing_aux(manager, df_othert, seq_recs, seq_reservas, df_aux)\n df_transfer = sub_tax_sales_transfer_pricing_aux(manager, df_transfert, seq_recs, seq_reservas, df_aux)\n df_endow = sub_tax_sales_transfer_pricing_aux(manager, df_endowt, seq_recs, seq_reservas, df_aux)\n df_extra = sub_tax_sales_transfer_pricing_aux_extra(manager, df_extrat, seq_recs, seq_reservas, df_aux)\n\n df_impuesto_canal = df_hotel.union(df_circuit).union(df_other).union(df_transfer).union(df_endow).union(\n df_extra)\n\n df_impuesto_canal = df_impuesto_canal.groupBy(\"seq_rec\", \"seq_reserva\") \\\n .agg({'impuesto_canal': 'sum'}).withColumnRenamed(\"SUM(impuesto_canal)\", \"Tax_Sales_Transfer_pricing\")\n\n df_fields = df_fields.join(df_impuesto_canal, [df_fields.operative_incoming == df_impuesto_canal.seq_rec,\n df_fields.booking_id == df_impuesto_canal.seq_reserva],\n 'left_outer').drop(df_impuesto_canal.seq_rec).drop(df_impuesto_canal.seq_reserva)\n\n df_fields = df_fields.na.fill({\"Tax_Sales_Transfer_pricing\": 0})\n\n df_fields = df_fields.withColumn(\"Tax_Sales_Transfer_pricing\",\n udf_round_ccy(df_fields.Tax_Sales_Transfer_pricing,\n df_fields.booking_currency))\n\n del df_hotel, df_circuit, df_other, df_transfer, df_endow, df_extra, df_impuesto_canal\n\n return df_fields", "def life_insurance_to_recive_total(self):\n pass", "def SumaryVentas(vj):\n\n vj.MontoVentas = vj.GanacVentas = 0.0 # Inicializa sumarios de ventas\n vj.MontoConsumo = vj.GanacConsumo = vj.MontoConsumoRecp = 0.0 # Inicializa sumarios de items de consumo\n vj.NumChgPrecio = vj.MontoChgPrecio = 0.0 # Inicializa sumarios de cambios de precio\n vj.NumDevoluc = vj.MontoDevoluc = 0.0 # Inicializa sumarios de devoluciones\n vj.NumSinPagar = vj.MontoSinPagar = 0.0 # Inicializa sumarios de Items sin pagar \n vj.NumSinVender = vj.MontoSinVender = 0.0 # Inicializa sumarios de Items sin vender \n\n GroupVentas = {} # Dicionario para contar las ventas por preductos\n\n for idVenta, row in vj.tbVentas.rows.items():\n Cant = row.count\n idProd = row.idProd\n\n if idProd in GroupVentas: GroupVentas[idProd] += Cant # Acumula la cantidad de ventas por producto\n else: GroupVentas[idProd] = Cant\n\n rowProd = vj.tbCompras.rows.get(idProd) # Busca datos de item asociado a la venta\n if not rowProd: continue\n\n 
montoProd = vj.Cnv( Cant*rowProd.precio, rowProd.moneda, MD.Cuc ) # Monto al precio del item en CUC\n\n if row.vendedor == vj.Vendedores[0]: # Item para consumo\n costo = Cant * rowProd.valCucItem\n costoRcp = costo * vj.RecupIdx\n\n vj.MontoConsumo += costo # Acumula costos de compra\n vj.MontoConsumoRecp += costoRcp # Acumula costos de recuperación\n vj.GanacConsumo += ( montoProd-costoRcp )\n continue # No hace más analisis para esa venta\n\n precioVenta = vj.Cnv( row.precio, row.moneda, MD.Cuc) # Lleva precio de la venta a CUC\n montoVenta = Cant * precioVenta # Calcula el monto de la venta en CUC\n\n vj.MontoVentas += montoVenta # Acumula todos los montos de las ventas\n\n if montoProd != montoVenta: # Cambio el precio del producto en la venta\n vj.NumChgPrecio += Cant # Acumula # de items que cambian de precio\n vj.MontoChgPrecio += (montoVenta-montoProd) * Cant # Acumula las diferencias de precio\n\n if len(row.comentario): # Si hay comentarios\n matches = reNDevuelto.findall( row.comentario ) # Busca la cantidad de items devueltos\n for match in matches: # Para cada devolución\n Num = int(match) # Convierte a entero la cantidad de devoluciones\n\n vj.NumDevoluc += Num # Acumula de cantidad de devoluciones\n vj.MontoDevoluc += ( Num*precioVenta ) # Acumula el precio de las devoluciones\n\n Pago = GetPagado( vj, idVenta, MD.Cuc ) # Determina la cantidad de la venta pagada\n SinPagar = montoVenta - Pago # Calcula lo que queda sin pagar\n\n if precioVenta!=0: # Si ya hay un precio establecido\n vj.NumSinPagar += SinPagar/precioVenta # Acumula el # de items sin pagar\n\n vj.MontoSinPagar += SinPagar # Acumula el monto sin pagar\n\n vj.GanacVentas = vj.MontoVentas - vj.MontoInvers # Calcula las ganancias totales por ventas\n\n for idProd, row in vj.tbCompras.rows.items(): # Recorre todos los productos\n Resto = row.count # Inicializa productos que quedan (todos)\n if idProd in GroupVentas: Resto -= GroupVentas[idProd] # Quita la cantidad de productos vendidos\n\n if Resto <= 0: continue # Si todos estan vendidos no hace mas nada\n\n Precio = vj.Cnv( row.precio, row.moneda, MD.Cuc ) # Lleva el precio del producto a cuc\n\n vj.NumSinVender += Resto # Acumula la cantidad de productos sin vender\n vj.MontoSinVender += ( Resto*Precio ) # Acumula el precio de los productos sin vender", "def sub_tax_transfer_pricing_eur(manager, df_fields, seq_recs, seq_reservas):\n # df_hotel = manager.get_dataframe(tables[\"dwc_bok_t_canco_hotel\"])\n # df_circuit = manager.get_dataframe(tables[\"dwc_bok_t_canco_hotel_circuit\"])\n # df_other = manager.get_dataframe(tables[\"dwc_bok_t_canco_other\"])\n # df_transfer = manager.get_dataframe(tables[\"dwc_bok_t_canco_transfer\"])\n # df_endow = manager.get_dataframe(tables[\"dwc_bok_t_canco_endowments\"])\n # df_extra = manager.get_dataframe(tables[\"dwc_bok_t_canco_extra\"])\n\n df_aux = df_fields.select(\"operative_incoming\", \"booking_id\", \"invoicing_company\", \"creation_date\",\n \"booking_currency\")\n\n df_hotel = sub_tax_transfer_pricing_eur_aux(manager, df_hotelt, seq_recs, seq_reservas, df_aux)\n df_circuit = sub_tax_transfer_pricing_eur_aux(manager, df_circuitt, seq_recs, seq_reservas, df_aux)\n df_other = sub_tax_transfer_pricing_eur_aux(manager, df_othert, seq_recs, seq_reservas, df_aux)\n df_transfer = sub_tax_transfer_pricing_eur_aux(manager, df_transfert, seq_recs, seq_reservas, df_aux)\n df_endow = sub_tax_transfer_pricing_eur_aux(manager, df_endowt, seq_recs, seq_reservas, df_aux)\n df_extra = sub_tax_transfer_pricing_eur_aux_extra(manager, 
df_extrat, seq_recs, seq_reservas, df_aux)\n\n df_impuesto_canco = df_hotel.union(df_circuit).union(df_other).union(df_transfer).union(df_endow).union(\n df_extra)\n\n df_impuesto_canco = df_impuesto_canco.groupBy(\"operative_incoming\", \"booking_id\") \\\n .agg({'impuesto_canco': 'sum'}).withColumnRenamed(\"SUM(impuesto_canco)\", \"impuesto_canco\")\n\n df_impuesto_canco = df_impuesto_canco.withColumnRenamed(\"operative_incoming\", \"seq_rec\") \\\n .withColumnRenamed(\"booking_id\", \"seq_res\")\n\n df_fields = df_fields.join(df_impuesto_canco, [df_fields.operative_incoming == df_impuesto_canco.seq_rec,\n df_fields.booking_id == df_impuesto_canco.seq_res],\n 'left_outer').drop(\"seq_rec\", \"seq_res\")\n\n df_addcanco = sub_transfer_pricing_aux_add_canco(manager, df_fields, seq_recs, seq_reservas, EUR)\n\n df_addcanco = df_addcanco.withColumnRenamed(\"operative_incoming\", \"seq_rec\") \\\n .withColumnRenamed(\"booking_id\", \"seq_res\")\n\n # add add_impuesto_canco\n df_fields = df_fields.join(df_addcanco, [df_fields.operative_incoming == df_addcanco.seq_rec,\n df_fields.booking_id == df_addcanco.seq_res],\n \"left_outer\").drop(df_addcanco.seq_rec).drop(df_addcanco.seq_res)\n\n df_fields = df_fields.na.fill({'impuesto_canco': 0, 'add_impuesto_canco': 0})\n\n df_fields = df_fields.withColumn(\"Tax_Transfer_pricing_EUR\",\n df_fields.impuesto_canco + df_fields.add_impuesto_canco) \\\n .drop(\"impuesto_canco\", \"add_impuesto_canco\")\n\n del df_hotel, df_circuit, df_other, df_transfer, df_endow, df_extra, df_impuesto_canco, df_addcanco\n\n return df_fields", "def sub_tax_cost_transfer_pricing_eur(manager, df_fields, seq_recs, seq_reservas):\n # df_hotel = manager.get_dataframe(tables[\"dwc_bok_t_canco_hotel\"])\n # df_circuit = manager.get_dataframe(tables[\"dwc_bok_t_canco_hotel_circuit\"])\n # df_other = manager.get_dataframe(tables[\"dwc_bok_t_canco_other\"])\n # df_transfer = manager.get_dataframe(tables[\"dwc_bok_t_canco_transfer\"])\n # df_endow = manager.get_dataframe(tables[\"dwc_bok_t_canco_endowments\"])\n # df_extra = manager.get_dataframe(tables[\"dwc_bok_t_canco_extra\"])\n\n df_aux = df_fields.select(\"operative_incoming\", \"booking_id\", \"invoicing_company\", \"creation_date\",\n \"booking_currency\")\n\n df_hotel = sub_tax_transfer_pricing_eur_aux(manager, df_hotelt, seq_recs, seq_reservas, df_aux)\n df_circuit = sub_tax_transfer_pricing_eur_aux(manager, df_circuitt, seq_recs, seq_reservas, df_aux)\n df_other = sub_tax_transfer_pricing_eur_aux(manager, df_othert, seq_recs, seq_reservas, df_aux)\n df_transfer = sub_tax_transfer_pricing_eur_aux(manager, df_transfert, seq_recs, seq_reservas, df_aux)\n df_endow = sub_tax_transfer_pricing_eur_aux(manager, df_endowt, seq_recs, seq_reservas, df_aux)\n df_extra = sub_tax_transfer_pricing_eur_aux_extra(manager, df_extrat, seq_recs, seq_reservas, df_aux)\n\n df_impuesto_canco = df_hotel.union(df_circuit).union(df_other).union(df_transfer).union(df_endow).union(\n df_extra)\n\n df_impuesto_canco = df_impuesto_canco.groupBy(\"operative_incoming\", \"booking_id\") \\\n .agg({'impuesto_canco': 'sum'}).withColumnRenamed(\"SUM(impuesto_canco)\", \"impuesto_canco\")\n\n df_impuesto_canco = df_impuesto_canco.withColumnRenamed(\"operative_incoming\", \"seq_rec\") \\\n .withColumnRenamed(\"booking_id\", \"seq_res\")\n\n df_fields = df_fields.join(df_impuesto_canco, [df_fields.operative_incoming == df_impuesto_canco.seq_rec,\n df_fields.booking_id == df_impuesto_canco.seq_res],\n 'left_outer').drop(\"seq_rec\", \"seq_res\")\n\n df_addcanco 
= sub_transfer_pricing_aux_add_canco(manager, df_fields, seq_recs, seq_reservas, EUR)\n\n df_addcanco = df_addcanco.withColumnRenamed(\"operative_incoming\", \"seq_rec\") \\\n .withColumnRenamed(\"booking_id\", \"seq_res\")\n\n # add add_impuesto_canco\n df_fields = df_fields.join(df_addcanco, [df_fields.operative_incoming == df_addcanco.seq_rec,\n df_fields.booking_id == df_addcanco.seq_res],\n \"left_outer\").drop(df_addcanco.seq_rec).drop(df_addcanco.seq_res)\n\n df_fields = df_fields.na.fill({'impuesto_canco': 0, 'add_impuesto_canco': 0})\n\n df_fields = df_fields.withColumn(\"Tax_Cost_Transfer_pricing_EUR\",\n df_fields.impuesto_canco + df_fields.add_impuesto_canco) \\\n .drop(\"impuesto_canco\", \"add_impuesto_canco\")\n\n del df_hotel, df_circuit, df_other, df_transfer, df_endow, df_extra, df_impuesto_canco, df_addcanco\n\n return df_fields", "def _get_ea_index():\n ea_index_temp = {'Address': 5, 'Agency': 10, 'City': 4, 'Country': 3,\n 'Datacenter': 7, 'Division': 8, 'Interface Name': 13,\n 'Region_List': 2, 'Requester Email': 9, 'Site': 6,\n 'VLAN Description': 11, 'IPR Designation': 16}\n return ea_index_temp", "def get_the_sum_of_prices_from_table(table, item_ids):\n\n # your code", "def get_total_shield(self,obs):", "def table_allocations(\n self, id_value, id_type, start_date=None, end_date=None, freq=None\n ):\n\n start_date, end_date, freq = self.get_time_parameters(\n start_date, end_date, freq\n )\n\n try:\n df = self.get_allocations(id_value, id_type, start_date, end_date, freq)\n\n if id_type == \"project\" and \"ALL\" not in str(id_value):\n # add the project's missing people allocation\n if freq == \"D\":\n df[\"UNALLOCATED\"] = self.wim.project_peoplereq[id_value]\n else:\n df[\"UNALLOCATED\"] = (\n self.wim.project_peoplereq[id_value].resample(freq).mean()\n )\n\n elif id_type == \"person\" and \"ALL\" not in str(id_value):\n # add the person's total project assignment to the data frame\n if freq == \"D\":\n df[\"TOTAL\"] = self.wim.people_totals[id_value]\n else:\n df[\"TOTAL\"] = self.wim.people_totals[id_value].resample(freq).mean()\n\n df = self.format_date_index(df, freq)\n\n return self.highlight_allocations(df)\n\n except ValueError as e:\n print(e)", "def total_amortization(self):\n return sum(self.table[\"amortization\"])", "def add_facility_id_unit_id_epa(df):\n if \"facility_id\" not in df.columns:\n df[\"facility_id\"] = np.NaN\n if \"unit_id_epa\" not in df.columns:\n df[\"unit_id_epa\"] = np.NaN\n return df", "def sub_tax_cost_transfer_pricing(manager, df_fields, seq_recs, seq_reservas):\n # df_hotel = manager.get_dataframe(tables[\"dwc_bok_t_canco_hotel\"])\n # df_circuit = manager.get_dataframe(tables[\"dwc_bok_t_canco_hotel_circuit\"])\n # df_other = manager.get_dataframe(tables[\"dwc_bok_t_canco_other\"])\n # df_transfer = manager.get_dataframe(tables[\"dwc_bok_t_canco_transfer\"])\n # df_endow = manager.get_dataframe(tables[\"dwc_bok_t_canco_endowments\"])\n # df_extra = manager.get_dataframe(tables[\"dwc_bok_t_canco_extra\"])\n\n df_aux = df_fields.select(\"operative_incoming\", \"booking_id\", \"invoicing_company\", \"creation_date\",\n \"booking_currency\")\n\n df_hotel = sub_tax_cost_transfer_pricing_aux(manager, df_hotelt, seq_recs, seq_reservas, df_aux)\n df_circuit = sub_tax_cost_transfer_pricing_aux(manager, df_circuitt, seq_recs, seq_reservas, df_aux)\n df_other = sub_tax_cost_transfer_pricing_aux(manager, df_othert, seq_recs, seq_reservas, df_aux)\n df_transfer = sub_tax_cost_transfer_pricing_aux(manager, df_transfert, seq_recs, 
seq_reservas, df_aux)\n df_endow = sub_tax_cost_transfer_pricing_aux(manager, df_endowt, seq_recs, seq_reservas, df_aux)\n df_extra = sub_tax_cost_transfer_pricing_aux_extra(manager, df_extrat, seq_recs, seq_reservas, df_aux)\n\n df_impuesto_canco = df_hotel.union(df_circuit).union(df_other).union(df_transfer).union(df_endow).union(\n df_extra)\n\n df_impuesto_canco = df_impuesto_canco.groupBy(\"operative_incoming\", \"booking_id\") \\\n .agg({'impuesto_canco': 'sum'}).withColumnRenamed(\"SUM(impuesto_canco)\", \"impuesto_canco\")\n\n df_impuesto_canco = df_impuesto_canco.withColumnRenamed(\"operative_incoming\", \"seq_rec\") \\\n .withColumnRenamed(\"booking_id\", \"seq_res\")\n\n # add impuesto_canco\n df_fields = df_fields.join(df_impuesto_canco, [df_fields.operative_incoming == df_impuesto_canco.seq_rec,\n df_fields.booking_id == df_impuesto_canco.seq_res],\n 'left_outer').drop(\"seq_rec\", \"seq_res\")\n\n df_addcanco = sub_transfer_pricing_aux_add_canco(manager, df_fields, seq_recs, seq_reservas)\n\n df_addcanco = df_addcanco.withColumnRenamed(\"operative_incoming\", \"seq_rec\") \\\n .withColumnRenamed(\"booking_id\", \"seq_res\")\n\n # add add_impuesto_canco\n df_fields = df_fields.join(df_addcanco, [df_fields.operative_incoming == df_addcanco.seq_rec,\n df_fields.booking_id == df_addcanco.seq_res],\n \"left_outer\").drop(df_addcanco.seq_rec).drop(df_addcanco.seq_res)\n\n df_fields = df_fields.na.fill({'impuesto_canco': 0, 'add_impuesto_canco': 0})\n\n df_fields = df_fields.withColumn(\"Tax_Cost_Transfer_pricing\",\n df_fields.impuesto_canco + df_fields.add_impuesto_canco) \\\n .drop(\"impuesto_canco\", \"add_impuesto_canco\")\n\n df_fields = df_fields.withColumn(\"Tax_Cost_Transfer_pricing\", udf_round_ccy(df_fields.Tax_Cost_Transfer_pricing,\n df_fields.booking_currency))\n\n del df_hotel, df_circuit, df_other, df_transfer, df_endow, df_extra, df_impuesto_canco, df_addcanco, df_aux\n\n return df_fields", "def __str__(self):\n return 'id: {0} designation {1} FK specie {2} Nb bacteria {3}'.format(self.id, self.designation, self.specie, len(self.bacteria))", "def _amount_all(self, cr, uid, ids,field_name, arg, context={}):\n res={}\n for record in self.browse(cr, uid, ids, context=context):\n val = 0.0\n for line in record.enrich_lines:\n if line.state == 'done' :\n val += line.cost\n res[record.id] = {\n 'paid_amount':val,\n 'residual_amount':record.amount - val,\n }\n return res", "def aus(self):\n return self[self.au_columns]", "def aus(self):\n return self[self.au_columns]", "def tpc_eci(df):\n return df.expanded_income - df[mdf.ECI_REMOVE_COLS].sum(axis=1)", "def summarise_equipment(equipment):\n totals = reduce(\n lambda total, gear: (\n total[0] + gear.cost,\n total[1] + gear.damage,\n total[2] + gear.armor,\n ),\n filter(lambda x: x is not None, itertools.chain(*equipment)),\n (0, 0, 0),\n )\n return EquipmentSummary(*totals)", "def __init__(self, table_name, table_columns, table_primary_keys, con):\n self.con = con\n self.table_name = table_name\n self.table_columns = table_columns\n self.table_primary_keys = table_primary_keys\n # url that sub classes will use to pull MMS tables from nemweb.\n self.url = 'http://nemweb.com.au/Data_Archive/Wholesale_Electricity/MMSDM/{year}/MMSDM_{year}_{month}/' + \\\n 'MMSDM_Historical_Data_SQLLoader/DATA/PUBLIC_DVD_{table}_{year}{month}010000.zip'\n self.columns_types = {\n 'INTERVAL_DATETIME': 'TEXT', 'DUID': 'TEXT', 'BIDTYPE': 'TEXT', 'BANDAVAIL1': 'REAL', 'BANDAVAIL2': 'REAL',\n 'BANDAVAIL3': 'REAL', 'BANDAVAIL4': 'REAL', 
'BANDAVAIL5': 'REAL', 'BANDAVAIL6': 'REAL',\n 'BANDAVAIL7': 'REAL', 'BANDAVAIL8': 'REAL', 'BANDAVAIL9': 'REAL', 'BANDAVAIL10': 'REAL', 'MAXAVAIL': 'REAL',\n 'ENABLEMENTMIN': 'REAL', 'ENABLEMENTMAX': 'REAL', 'LOWBREAKPOINT': 'REAL', 'HIGHBREAKPOINT': 'REAL',\n 'SETTLEMENTDATE': 'TEXT', 'PRICEBAND1': 'REAL', 'PRICEBAND2': 'REAL', 'PRICEBAND3': 'REAL',\n 'PRICEBAND4': 'REAL', 'PRICEBAND5': 'REAL', 'PRICEBAND6': 'REAL', 'PRICEBAND7': 'REAL',\n 'PRICEBAND8': 'REAL', 'PRICEBAND9': 'REAL', 'PRICEBAND10': 'REAL', 'T1': 'REAL', 'T2': 'REAL',\n 'T3': 'REAL', 'T4': 'REAL', 'REGIONID': 'TEXT', 'TOTALDEMAND': 'REAL', 'DEMANDFORECAST': 'REAL',\n 'INITIALSUPPLY': 'REAL', 'DISPATCHMODE': 'TEXT', 'AGCSTATUS': 'TEXT', 'INITIALMW': 'REAL',\n 'TOTALCLEARED': 'REAL', 'RAMPDOWNRATE': 'REAL', 'RAMPUPRATE': 'REAL', 'AVAILABILITY': 'REAL',\n 'RAISEREGENABLEMENTMAX': 'REAL', 'RAISEREGENABLEMENTMIN': 'REAL', 'LOWERREGENABLEMENTMAX': 'REAL',\n 'LOWERREGENABLEMENTMIN': 'REAL', 'START_DATE': 'TEXT', 'END_DATE': 'TEXT', 'DISPATCHTYPE': 'TEXT',\n 'CONNECTIONPOINTID': 'TEXT', 'TRANSMISSIONLOSSFACTOR': 'REAL', 'DISTRIBUTIONLOSSFACTOR': 'REAL',\n 'CONSTRAINTID': 'TEXT', 'RHS': 'REAL', 'GENCONID_EFFECTIVEDATE': 'TEXT', 'GENCONID_VERSIONNO': 'TEXT',\n 'GENCONID': 'TEXT', 'EFFECTIVEDATE': 'TEXT', 'VERSIONNO': 'TEXT', 'CONSTRAINTTYPE': 'TEXT',\n 'GENERICCONSTRAINTWEIGHT': 'REAL', 'FACTOR': 'REAL', 'FROMREGIONLOSSSHARE': 'REAL', 'LOSSCONSTANT': 'REAL',\n 'LOSSFLOWCOEFFICIENT': 'REAL', 'IMPORTLIMIT': 'REAL', 'EXPORTLIMIT': 'REAL', 'LOSSSEGMENT': 'TEXT',\n 'MWBREAKPOINT': 'REAL', 'DEMANDCOEFFICIENT': 'REAL', 'INTERCONNECTORID': 'TEXT', 'REGIONFROM': 'TEXT',\n 'REGIONTO': 'TEXT', 'MWFLOW': 'REAL', 'MWLOSSES': 'REAL', 'MINIMUMLOAD': 'REAL', 'MAXCAPACITY': 'REAL',\n 'SEMIDISPATCHCAP': 'REAL', 'RRP': 'REAL'\n }", "def INDEC(set_year=2016,save_output=True,print_output=True):\n\n # load mapping function for industries\n ind_mapper = pd.read_excel(os.path.join(data_path,'INDEC',\n 'sh_cou_06_16.xls'),\n sheet_name='ind_mapper',header=None)\n\n ind_mapper = dict(zip(ind_mapper[0],ind_mapper[1]))\n\n # load mapping function for products\n com_mapper = pd.read_excel(os.path.join(data_path,'INDEC',\n 'sh_cou_06_16.xls'),\n sheet_name='com_mapper',header=None)\n com_mapper = dict(zip(com_mapper[0],['P_'+x for x in com_mapper[1]]))\n\n #create list of sectors\n sectors = [chr(i) for i in range(ord('A'),ord('P')+1)]\n\n \"\"\"\n Load supply table and aggregate\n \"\"\"\n\n sup_table_in = pd.read_excel(os.path.join(data_path,'INDEC',\n 'sh_cou_06_16.xls'), sheet_name='Mat Oferta pb',\n skiprows=2,header=[0,1],index_col=[0,1],nrows=271)\n \n sup_table_in = sup_table_in.drop('Total',level=0,axis=1)\n \n sup_table = sup_table_in.copy()\n\n sup_table.columns = sup_table.columns.get_level_values(0)\n sup_table.columns = sup_table.columns.map(ind_mapper)\n sup_table = sup_table.T.groupby(level=0,axis=0).sum()\n sup_table.columns = sup_table.columns.get_level_values(0)\n sup_table.columns = sup_table.columns.map(com_mapper)\n sup_table = sup_table.T.groupby(level=0,axis=0).sum()\n\n \"\"\"\n Load use table and aggregate\n \"\"\"\n\n use_table = pd.read_excel(os.path.join(data_path,'INDEC',\n 'sh_cou_06_16.xls'),\n sheet_name='Mat Utilizacion pc',\n skiprows=2,header=[0,1],\n index_col=[0,1],nrows=271)\n\n basic_prod_prices = use_table[[#'PRODUCCION NACIONAL A PRECIOS BASICOS',\n 'IMPORTACIONES (CIF a nivel de producto y FOB a nivel total)',\n 'AJUSTE CIF/FOB DE LAS IMPORTACIONES','DERECHOS DE IMPORTACION',\n 'IMPUESTOS A LOS PRODUCTOS NETOS DE 
SUBSIDIOS','MARGENES DE COMERCIO',\n 'MARGENES DE TRANSPORTE','IMPUESTO AL VALOR AGREGADO NO DEDUCIBLE',\n #'OFERTA TOTAL A PRECIOS DE COMPRADOR'\n ]]*-1\n\n use_table = use_table.drop(['PRODUCCION NACIONAL A PRECIOS BASICOS',\n 'IMPORTACIONES (CIF a nivel de producto y FOB a nivel total)',\n 'AJUSTE CIF/FOB DE LAS IMPORTACIONES','DERECHOS DE IMPORTACION',\n 'IMPUESTOS A LOS PRODUCTOS NETOS DE SUBSIDIOS','MARGENES DE COMERCIO',\n 'MARGENES DE TRANSPORTE','IMPUESTO AL VALOR AGREGADO NO DEDUCIBLE',\n 'OFERTA TOTAL A PRECIOS DE COMPRADOR','UTILIZACION INTERMEDIA',\n 'UTILIZACION FINAL','DEMANDA TOTAL'],level=0,axis=1)\n\n # change to basic prices\n basic_prod_prices.columns = basic_prod_prices.columns.get_level_values(0)\n basic_prod_prices = basic_prod_prices.T.groupby(level=0,axis=0).sum()\n basic_prod_prices.columns = basic_prod_prices.columns.get_level_values(0)\n basic_prod_prices.columns = basic_prod_prices.columns.map(com_mapper)\n basic_prod_prices = basic_prod_prices.T.groupby(level=0,axis=0).sum()\n basic_prod_prices = basic_prod_prices.astype(int)\n\n use_table.columns = use_table.columns.get_level_values(0)\n use_table.columns = use_table.columns.map(ind_mapper)\n use_table = use_table.T.groupby(level=0,axis=0).sum()\n use_table.columns = use_table.columns.get_level_values(0)\n use_table.columns = use_table.columns.map(com_mapper)\n use_table = use_table.T.groupby(level=0,axis=0).sum()\n\n use_table= pd.concat([use_table,basic_prod_prices],axis=1) \n\n \"\"\"\n Create Industry-Industry IO table\n \"\"\" \n \n # GET VARIABLES\n x = np.array(sup_table.sum(axis=0)) # total production on industry level\n g = np.array(sup_table.sum(axis=1)) # total production on product level\n F = use_table.iloc[:16,16:].sum(axis=1)\n\n #Numpify\n Sup_array = np.asarray(sup_table.iloc[:16,:16]) # numpy array of supply matrix\n Use_array = np.asarray(use_table.iloc[:16,:16]) # numpy array of use matrix\n\n g_diag_inv = np.linalg.inv(np.diag(g)) # inverse of g (and diagolinized)\n x_diag_inv = np.linalg.inv(np.diag(x)) # inverse of x (and diagolinized)\n\n # Calculate the matrices\n B = np.dot(Use_array,x_diag_inv) # B matrix (U*x^-1)\n D = np.dot(Sup_array.T,g_diag_inv) # D matrix (V*g^-1)\n I_i = np.identity((len(x))) # Identity matrix for industry-to-industry\n\n # Inverse for industry-to-industry\n A_ii = np.dot(D,B)\n IDB_inv = np.linalg.inv((I_i-np.dot(D,B))) # (I-DB)^-1 \n\n # And canclulate sum of industries\n ind = np.dot(IDB_inv,np.dot(D,F)/1e6) # (I-DB)^-1 * DF\n\n # split FD in local, import and export\n LFD = np.dot(D,use_table.iloc[:16,[16,18,19,21,22,23,24]].sum(axis=1) )/1e6\n Exp = np.dot(D,use_table.iloc[:16,17])/1e6\n Imp = np.dot(D,use_table.iloc[:16,20])/1e6\n\n # create combined table for the year 2004\n IO_ARG = pd.concat([pd.DataFrame(np.dot(A_ii,np.diag(ind))),\n pd.DataFrame(LFD),pd.DataFrame(Exp)],axis=1)\n\n IO_ARG.columns = list(use_table.columns[:18])\n IO_ARG.index = list(use_table.columns[:16])\n VA = np.array(list(ind)+[0,0])-np.array(IO_ARG.sum(axis=0))\n IMP = np.array(list(Imp*-1)+[0,0])\n VA[-2:] = 0\n IO_ARG.loc['ValueA'] = VA\n IO_ARG.loc['Imports'] = IMP\n IO_ARG.rename({'UTILIZACION FINAL':'FD',\n 'U_EXPORTACIONES':'EXP' },axis=1,inplace=True)\n IO_ARG[IO_ARG < 1e-5] = 0\n\n if set_year == 2004:\n return IO_ARG\n\n \"\"\"\n Update table to preferred year\n \"\"\"\n \n # load value added and total production time-series\n ValueA_series = pd.read_excel(os.path.join(data_path,\n 'INDEC','sh_VBP_VAB_06_19.xls'),\n sheet_name='Cuadro 
4',skiprows=3,index_col=[0])/1e3\n\n Total_Prod = pd.read_excel(os.path.join(data_path,\n 'INDEC','sh_VBP_VAB_06_19.xls'),\n sheet_name='Cuadro 2',skiprows=3,index_col=[0])/1e3 \n\n # split table \n FD = np.array(IO_ARG['FD'][:16]*np.array((Total_Prod[2004]))/np.array(IO_ARG.sum(1)[:16]))\n Exports = np.array(IO_ARG['EXP'][:16][:16]*np.array((Total_Prod[2004]))/np.array(IO_ARG.sum(1)[:16]))\n Imports = np.array(IO_ARG.loc['Imports'][:16]*np.array((Total_Prod[2004]))/np.array(IO_ARG.sum(1)[:16]))\n ValueA = np.array(ValueA_series[2004])\n\n # convert to numpy matrix\n X0 = IO_ARG.values[:,:]\n\n # get sum of T\n u = np.array(list(Total_Prod[2004])+[sum(ValueA),sum(Imports)])\n v = np.array(list(Total_Prod[2004])+[x*((sum(ValueA)+sum(Imports))/(sum(FD)+sum(Exports))) for x in [sum(FD),sum(Exports)]])\n v[v < 0] = 0\n # and only keep T\n\n # apply RAS method to rebalance the table for 2004\n new_IO = ras_method(X0,u,v,1e-5,print_out=False)\n\n NEW_IO = pd.DataFrame(new_IO,index=sectors+['ValueA','Imports'],columns=sectors+['FD','EXP'])\n\n for year in [int(x) for x in np.linspace(2004, set_year, set_year-2004)]:\n\n FD = np.array(NEW_IO['FD'][:16]*np.array((Total_Prod[year]))/np.array(NEW_IO.sum(1)[:16]))\n Exports = np.array(NEW_IO['EXP'][:16][:16]*np.array((Total_Prod[year]))/np.array(NEW_IO.sum(1)[:16]))\n Imports = np.array(NEW_IO.loc['Imports'][:16]*np.array((Total_Prod[year]))/np.array(NEW_IO.sum(1)[:16]))\n ValueA = np.array(ValueA_series[year])\n\n # convert to numpy matrix\n X0 = NEW_IO.values[:,:]\n\n # get sum of T\n u = np.array(list(Total_Prod[year])+[sum(ValueA),sum(Imports)])\n v = np.array(list(Total_Prod[year])+[x*((sum(ValueA)+sum(Imports))/(sum(FD)+sum(Exports))) for x in [sum(FD),sum(Exports)]])\n v[v < 0] = 0\n # and only keep T\n\n # apply RAS method to rebalance the table\n new_IO = ras_method(X0,u,v,1e-5,print_out=False)\n \n INDEC = pd.DataFrame(new_IO,index=sectors+['ValueA','Imports'],columns=sectors+['FD','EXP'])*1e3\n\n if save_output:\n INDEC.to_csv(os.path.join(data_path,'national_tables','{}_INDEC.csv'.format(set_year)))\n\n if print_output:\n print('NOTE : Standardized national table for Argentina for the year {} finished using INDEC data'.format(year))\n \n return INDEC", "def _ebit(self):\n return self.net_income + self.tax_expense + self.interest_expense", "def visit_equipment(self, equipment):", "def _IC(df): # Balance\n # No adjustments for cash-flow and off-balance sheet yet\n return Invested_Book_Capital(df)", "def OrderforInventory(request):\n modes=['manage','add','order']\n departments={}\n clearfilter=False\n for league in models.Dept.objects.all(): \n departments[league.pk]=league\n visited=False\n message=\"\"\n nonemptyAuthors = [x for x in request.POST.getlist('oAuthorName') if x!='']\n nonemptybooknames = [x for x in request.POST.getlist('obookName') if x!='']\n nonemptybookDesc = [x for x in request.POST.getlist('obookdesc') if x!='']\n nonemptyQuantities = [x for x in request.POST.getlist('oQuantity') if x!='']\n nonemptyRows = [x for x in request.POST.getlist('oRowRack') if x!='']\n nonemptyselectedDeparts = [x for x in request.POST.getlist('odepart_select') if x!='NA']\n\n \n for j,k,h,fa,z,loc in itertools.zip_longest(nonemptyAuthors,nonemptybooknames,nonemptybookDesc,nonemptyselectedDeparts,nonemptyQuantities,nonemptyRows):\n visited=True\n shortname=k[1:5] \n values=k.split(\"-\")\n if len(values)==1:\n ye=dt.today().year\n values.extend(['I',ye,'0'])\n c=loc.split(\"-\")\n if len(c)==1:\n c.extend(['0','0'])\n if len(values) >0:\n 
try:\n departmentDetails=models.Dept.objects.get(dpt_id=fa)\n except Exception as e:\n print(e)\n pass\n try:\n i=0\n testa = models.Atr.objects.values('a_id')\n for test in testa:\n if i>int(test['a_id']):\n i=i\n else:\n i=int(test['a_id'])\n \n varas = models.Atr.objects.values('name')\n isin=False\n for f in list(varas):\n if str(j) in f['name']:\n isin=True\n break\n if isin:\n pass\n else:\n models.Atr.objects.create(a_id=str(i+1),name=str(j),title=\"Mr.\",email=\"[email protected]\")\n except Exception as e:\n if \"does not\" in str(e):\n models.Atr.objects.create(a_id=str(i+1),name=str(j),title=\"Mr.\",email=\"[email protected]\")\n print(e)\n pass\n varset=None\n try:\n bookop=None\n i=0;\n testab = models.Bks.objects.values('b_id')\n for test in testab:\n if i>int(str(test['b_id']).split('_')[2]):\n i=i\n else:\n i=int(str(test['b_id']).split('_')[2])\n if (models.Bks.objects.filter(title=str(values[0])).exists()):\n try: \n if not models.Bks.objects.filter(title=str(values[0]),edition=str(values[1]),p_year=str(values[2]),pub=str(values[3])).exists():\n models.Bks.objects.create(b_id=\"IN_\"+shortname+\"_\"+str(i+1),title=str(values[0]),desc=str(h),type=\"ref\",edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]),email=\"[email protected]\",a_id_id=str(i+1),dpt_id_id=str(fa))\n else:\n message=\"book with the same name already exists\"\n bookop=models.Bks.objects.filter(title=str(values[0]),edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]))\n except Exception as e:\n print(e)\n else:\n if isin:\n atrobj=models.Atr.objects.get(name=str(j))\n models.Bks.objects.create(b_id=\"IN_\"+shortname+\"_\"+str(i+1),title=str(values[0]),desc=str(h),type=\"ref\",edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]),email=\"[email protected]\",a_id_id=atrobj.a_id,dpt_id_id=str(fa))\n else:\n atrobj=models.Atr.objects.get(name=str(j))\n models.Bks.objects.create(b_id=\"IN_\"+shortname+\"_\"+str(i+1),title=str(values[0]),desc=str(h),type=\"ref\",edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]),email=\"[email protected]\",a_id_id=atrobj.a_id,dpt_id_id=str(fa))\n\n except Exception as e:\n if \"Bks matching query does not\" in str(e):\n models.Bks.objects.create(b_id=\"IN_\"+shortname+\"_\"+str(i+1),title=str(values[0]),desc=str(h),type=\"ref\",edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]),email=\"[email protected]\",a_id_id=str(i+1),dpt_id_id=str(fa))\n print(e)\n pass\n \n try:\n g=0\n bookobj =models.Bks.objects.filter(title=str(values[0]),edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]))\n testba = models.Invt.objects.values('id') \n for test in testba:\n if g>int(str(test['id'])):\n g=g\n else:\n g=int(str(test['id']))\n \n Invobj=models.Invt.objects.filter(i_id_id=\"IN_\"+shortname+\"_\"+str(g+1))\n librarians=get_librarians()\n librnobj=None\n for u in librarians:\n if request.user.username.lower() == u.lower():\n librnobj=models.Librn.objects.get(lb_id=u)\n\n if len(bookobj) >= 0:\n if(len(Invobj) == 0):\n for s in bookobj:\n models.Invt.objects.create(id=str(g+1),qty=int(z),i_id_id=s.b_id,shelf=str(c[0]),rack=str(0),row=int(0))\n models.Border.objects.create(id=int(g+1),qty=int(z),status=loc,i_id_id=s.invt.id,lb_id_id=librnobj.lb_id)\n message=\"Order placed successfully\"\n else:\n for s in bookobj:\n models.Invt.objects.create(id=str(g+1),qty=int(z),i_id_id=s.b_id,shelf=str(c[0]),rack=str(0),row=int(0))\n 
models.Border.objects.create(id=int(g+1),qty=int(z),status=loc,i_id_id=s.invt.id,lb_id_id=librnobj.lb_id)\n message=\"Order placed successfully\"\n\n else:\n models.Invt.objects.create(id=str(g+1),qty=int(z),i_id_id=\"IN_\"+shortname+\"_\"+str(g+1),shelf=str(c[0]),rack=str(0),row=int(0))\n models.Border.objects.create(id=int(g+1),qty=int(z),status=loc,i_id_id=int(g+1),lb_id_id=librnobj.lb_id)\n message=\"Order placed successfully\"\n except Exception as e:\n try:\n if \"does not\" in str(e): \n models.Invt.objects.create(id=str(g+1),qty=int(z),i_id_id=\"IN_\"+shortname+\"_\"+str(g+1),shelf=str(c[0]),rack=str(0),row=int(0))\n else:\n t=models.Invt.objects.get(i_id_id=\"IN_\"+shortname+\"_\"+str(g+1))\n t.qty= t.qty+int(z)\n t.save()\n except Exception as e:\n message=\"There is already an exisiting order for this book\"\n print(e)\n \n \n else:\n message=\"the book details are not given properly\"\n pass\n \n if not visited:\n message=\"Fill the form properly and then press the SAVE \"\n return render(\n request,\n 'app/orderInv.html',\n {\n 'title':'Order Inventory',\n 'invmodes':modes,\n 'dispmode':'order',\n 'message':message,\n 'librarian':get_librarians(),\n 'le':list(range(1,2)),\n 'DepartmentList':departments.keys(),\n 'books':get_Books().values(),\n 'clearfilter':clearfilter,\n 'year':datetime.now().year,\n }\n )", "def elixcomoscore(df,col_icd,col_id):\n output = icdtoelixcomo(df,col_icd)\n output = output.loc[output['ElixComo'].notnull(),:]\n output = output.loc[:,[col_id,'ElixComo','ElixComoScore']]\n output = output.drop_duplicates()\n output = pd.DataFrame(output.groupby(col_id)['ElixComoScore'].sum()).reset_index()\n output = output.merge(df.loc[:,[col_id]].drop_duplicates(),how='outer',left_on=col_id,right_on=col_id).fillna(0.)\n return output", "def impacto_complejidad(id_item):\n item = Item.objects.get(id=id_item)\n com = 0\n try:\n relaciones = RelacionItem.objects.filter(itemorigen=id_item)\n except RelacionItem.DoesNotExist:\n relaciones = False\n if relaciones:\n for hijo in relaciones:\n com = com + impacto_complejidad(hijo.itemdestino.id)\n com = com + item.complejidad\n return com\n else:\n return item.complejidad", "def createIndustryInfo(self):\n self.setCurrentValue(0)\n self.setMinMax()\n self.writeIndustryName()\n self.createIndustrySim()\n self.writeIndustryDescription()\n self.writeIndustryCost()", "def dr14comp(a,b,av,bv):\n load=apload.ApLoad(apred='r11')\n dr14=apload.ApLoad(dr='dr14')\n\n i1,i2=match.match(a['APOGEE_ID'],b['APOGEE_ID'])\n gd = np.where((a['NVISITS'][i1] == b['NVISITS'][i2]) & (a['SNR'][i1]>75) )[0]\n a=a[i1[gd]]\n b=b[i2[gd]]\n \n j=np.argsort(a['VHELIO_AVG']-b['VHELIO_AVG'])\n \n fig,ax=plots.multi(1,3,hspace=0.3) \n pfig,pax=plots.multi(1,3,hspace=0.3) \n wfig,wax=plots.multi(1,3,hspace=0.3) \n chips=['a','b','c']\n for jj in j :\n j1=np.where(av['APOGEE_ID'] == a['APOGEE_ID'][jj])[0]\n j2=np.where(bv['APOGEE_ID'] == a['APOGEE_ID'][jj])[0]\n print(a['APOGEE_ID'][jj],a['RV_TEFF'][jj],b['RV_TEFF'][jj],a['SNR'][jj],b['SNR'][jj])\n for jjj,kkk in zip(j1,j2) : \n print(av['MJD'][jjj],av['PLATE'][jjj],av['FIELD'][jjj],av['SNR'][jjj],av['FIBERID'][jjj],av['VHELIO'][jjj],av['ESTVHELIO'][jjj])\n print(bv['MJD'][kkk],bv['PLATE'][kkk],bv['FIELD'][kkk],bv['SNR'][kkk],bv['FIBERID'][kkk],bv['VHELIO'][kkk],bv['ESTVHELIO'][kkk])\n va=load.apPlate(int(av['PLATE'][jjj]),av['MJD'][jjj])\n vsum=load.apVisitSum(int(av['PLATE'][jjj]),av['MJD'][jjj])[1].data\n f=np.where(vsum['FIBERID'] == av['FIBERID'][jjj])[0]\n print(vsum['RV_TEFF'][f])\n 
applot.chip(va,ax=ax,row=300-av['FIBERID'][jjj],color='r')\n applot.chip(va,ax=pax,row=300-av['FIBERID'][jjj],color='r',pixel=True)\n vb={}\n for chip in chips :\n tmp=fits.open(os.environ['APOGEE_REDUX']+'/r8/apo25m/{:04d}/{:05d}/apPlate-{:s}-{:04d}-{:05d}.fits'.format(\n int(bv['PLATE'][kkk]),bv['MJD'][kkk],chip,int(bv['PLATE'][kkk]),bv['MJD'][kkk]))\n vb[chip] = tmp\n vsum=fits.open(os.environ['APOGEE_REDUX']+'/r8/fields/apo25m/{:04d}/apVisitSum-{:04d}-{:05d}.fits'.format(\n int(bv['LOCATION_ID'][kkk]),int(bv['PLATE'][kkk]),bv['MJD'][kkk]))[1].data\n f=np.where(vsum['FIBERID'] == bv['FIBERID'][kkk])[0]\n print(vsum['RV_TEFF'][f])\n applot.chip(vb,ax=ax,row=300-bv['FIBERID'][kkk],color='b')\n applot.chip(vb,ax=pax,row=300-bv['FIBERID'][kkk],color='b',pixel=True)\n for ichip,chip in enumerate(chips) :\n wax[ichip].plot(va[chip][4].data[300-av['FIBERID'][jjj],:]-vb[chip][4].data[300-bv['FIBERID'][kkk],:])\n plt.show()\n pdb.set_trace()\n\n for ichip in range(3) :\n ax[ichip].cla()\n pax[ichip].cla()\n wax[ichip].cla()", "def ADP (self):", "def sub_transfer_pricing(manager, df_fields, seq_recs, seq_reservas):\n # df_hotel = manager.get_dataframe(tables[\"dwc_bok_t_canco_hotel\"])\n # df_circuit = manager.get_dataframe(tables[\"dwc_bok_t_canco_hotel_circuit\"])\n # df_other = manager.get_dataframe(tables[\"dwc_bok_t_canco_other\"])\n # df_transfer = manager.get_dataframe(tables[\"dwc_bok_t_canco_transfer\"])\n # df_endow = manager.get_dataframe(tables[\"dwc_bok_t_canco_endowments\"])\n # df_extra = manager.get_dataframe(tables[\"dwc_bok_t_canco_extra\"])\n\n df_aux = df_fields.select(\"operative_incoming\", \"booking_id\", \"invoicing_company\",\n \"creation_date\", \"booking_currency\")\n\n df_hotel = sub_transfer_pricing_aux(manager, df_hotelt, seq_recs, seq_reservas, df_aux)\n df_circuit = sub_transfer_pricing_aux(manager, df_circuitt, seq_recs, seq_reservas, df_aux)\n df_other = sub_transfer_pricing_aux(manager, df_othert, seq_recs, seq_reservas, df_aux)\n df_transfer = sub_transfer_pricing_aux(manager, df_transfert, seq_recs, seq_reservas, df_aux)\n df_endow = sub_transfer_pricing_aux(manager, df_endowt, seq_recs, seq_reservas, df_aux)\n df_extra = sub_transfer_pricing_aux_extra(manager, df_extrat, seq_recs, seq_reservas, df_aux)\n\n df_impuesto_canco = df_hotel.union(df_circuit).union(df_other).union(df_transfer).union(df_endow).union(\n df_extra)\n\n df_impuesto_canco = df_impuesto_canco.groupBy(\"operative_incoming\", \"booking_id\") \\\n .agg({'impuesto_canco': 'sum'}).withColumnRenamed(\"SUM(impuesto_canco)\", \"impuesto_canco\")\n\n df_impuesto_canco = df_impuesto_canco.withColumnRenamed(\"operative_incoming\", \"seq_rec\") \\\n .withColumnRenamed(\"booking_id\", \"seq_res\")\n\n # add impuesto_canco\n df_fields = df_fields.join(df_impuesto_canco, [df_fields.operative_incoming == df_impuesto_canco.seq_rec,\n df_fields.booking_id == df_impuesto_canco.seq_res],\n 'left_outer').drop(\"seq_rec\", \"seq_res\")\n\n df_addcanco = sub_transfer_pricing_aux_add_canco(manager, df_fields, seq_recs, seq_reservas)\n\n df_addcanco = df_addcanco.withColumnRenamed(\"operative_incoming\", \"seq_rec\") \\\n .withColumnRenamed(\"booking_id\", \"seq_res\")\n\n # add add_impuesto_canco\n df_fields = df_fields.join(df_addcanco, [df_fields.operative_incoming == df_addcanco.seq_rec,\n df_fields.booking_id == df_addcanco.seq_res],\n \"left_outer\").drop(df_addcanco.seq_rec).drop(df_addcanco.seq_res)\n\n df_fields = df_fields.na.fill({'impuesto_canco': 0, 'add_impuesto_canco': 0})\n\n df_fields = 
df_fields.withColumn(\"Transfer_pricing\", df_fields.impuesto_canco + df_fields.add_impuesto_canco) \\\n .drop(\"impuesto_canco\", \"add_impuesto_canco\")\n\n df_fields = df_fields.withColumn(\"Transfer_pricing\", udf_round_ccy(df_fields.Transfer_pricing,\n df_fields.booking_currency))\n\n del df_hotel, df_circuit, df_other, df_transfer, df_endow, df_extra, df_impuesto_canco, df_addcanco, df_aux\n\n return df_fields", "def get_TECRDB_compounds_data(self):\n TECRDB_compounds_data_table = pd.read_csv('data/TECRDB_compounds_data.csv')\n #all possible information that the particular ion bound state can have\n data_entry_list = ['Cp', 'H_number', 'binding_constant', 'charge', 'dG_f', 'dH_f', 'dS_f', 'groups', 'metal_type','smiles_form','metal_number']\n for i, row in TECRDB_compounds_data_table.iterrows():\n cur_sid = row['species_id']\n cur_cid = row['compound_id']\n self.TECRDB_compounds_data_dict[cur_sid] = {'compound_id':cur_cid}\n if row['is_pH7_species'] == True:\n self.TECRDB_compounds_pH7_species_id_dict[cur_cid] = cur_sid\n if row['is_least_protonated_species'] == True:\n self.TECRDB_compounds_least_H_sid_dict[cur_cid] = cur_sid\n for data_entry in data_entry_list:\n if not pd.isnull(row[data_entry]):\n if data_entry == 'groups':\n #convert the text form of groups to python list\n cur_sid_groups = list(map(float,row['groups'].strip('[').strip(']').split(',')))\n self.TECRDB_compounds_data_dict[cur_sid]['groups'] = cur_sid_groups\n else:\n try:\n #convert value from string to float\n self.TECRDB_compounds_data_dict[cur_sid][data_entry] = float(row[data_entry])\n except ValueError:\n self.TECRDB_compounds_data_dict[cur_sid][data_entry] = row[data_entry]", "def atributo_complejidad():\n tipo_defecto = ItemTipos.objects.filter(es_supertipo=True)\n if tipo_defecto.count() > 0:\n attr1 = ItemAtributos.objects.filter(nombre='complejidad').\\\n filter(idtipoitem=tipo_defecto)\n return attr1\n return None", "def setEmpireStats(self):\n totalEmpires = len(self.empires.keys())\n stats = {'Research':[], 'Fleet Size':[], 'Army Size':[], 'CR Production':[],\n 'AL Production':[],'EC Production':[],'IA Production':[]}\n \n # Calculate Research Stats\n d = {}\n for empireID, myEmpire in self.empires.iteritems():\n if empireID <> '1':\n num = 0\n for techID, myTech in myEmpire.techTree.iteritems():\n if myTech.complete == 1:\n num += 1\n d[empireID] = num\n stats['Research'] = anwp.func.funcs.sortDictByValue(d, True)\n \n # Calculate Fleet Stats\n d = {}\n for shipID, myShip in self.ships.iteritems():\n if myShip.empireID <> '1':\n (BV,CR,AL,EC,IA) = myShip.getMyValue()\n if myShip.empireID in d.keys():\n d[myShip.empireID] += BV\n else:\n d[myShip.empireID] = BV\n stats['Fleet Size'] = anwp.func.funcs.sortDictByValue(d, True)\n \n # Calculate Army Stats\n d = {}\n for regimentID, myRegiment in self.regiments.iteritems():\n if myRegiment.empireID <> '1':\n (BV,CR,AL,EC,IA) = myRegiment.getMyValue()\n if myRegiment.empireID in d.keys():\n d[myRegiment.empireID] += BV\n else:\n d[myRegiment.empireID] = BV\n stats['Army Size'] = anwp.func.funcs.sortDictByValue(d, True)\n\n # Calculate Production Stats\n for res in ['CR','AL','EC','IA']:\n d = {}\n for systemID, mySystem in self.systems.iteritems():\n if mySystem.myEmpireID <> '1':\n myValue = getattr(mySystem, 'prod%s' % res)\n if mySystem.myEmpireID in d.keys():\n d[mySystem.myEmpireID] += myValue\n else:\n d[mySystem.myEmpireID] = myValue\n myEmpire = self.empires[mySystem.myEmpireID]\n myEmpireValue = getattr(myEmpire, 'totalProd%s' % res)\n 
setattr(myEmpire, 'totalProd%s' % res, myEmpireValue+myValue)\n \n stats['%s Production' % res] = anwp.func.funcs.sortDictByValue(d, True)\n \n # calculate top captains\n d = {}\n for captainID, myCaptain in self.captains.iteritems():\n if myCaptain.myEmpire.id <> '1':\n myCaptain.resetData()\n d[myCaptain.id] = myCaptain.experience\n topCaptains = anwp.func.funcs.sortDictByValue(d, True)\n topCaptains = topCaptains[:2*len(self.empires.keys())]\n \n # Send out Stats to each Empire\n for empireID, myEmpire in self.empires.iteritems():\n if empireID <> '1':\n title = 'Round:%d Statistics' % self.currentRound\n body = ['%s ROUND %d STATS:' % (myEmpire.name, self.currentRound)]\n body.append('====================================================')\n for item in ['Research','Fleet Size', 'Army Size', 'CR Production',\n 'AL Production', 'EC Production', 'IA Production']:\n if empireID in stats[item]:\n body.append('You are %s in %s' % (anwp.func.funcs.getNiceNumber(stats[item].index(empireID)+1), item))\n \n # total production\n body.append('')\n body.append('TOTAL EMPIRE PRODUCTION OVER %d ROUNDS:' % self.currentRound)\n body.append('====================================================')\n for res in ['CR','AL','EC','IA']:\n body.append('Total %s Production: %d' % (res, getattr(myEmpire, 'totalProd%s' % res)))\n\n # legendary captains\n body.append('')\n body.append('TOP %d STARSHIP CAPTAINS in ROUND %d:' % ((2*len(self.empires.keys()), self.currentRound)))\n body.append('====================================================')\n for captainID in topCaptains:\n myCaptain = self.captains[captainID]\n myCaptain.promoteMe()\n body.append('%s ---> RANK:%s -- EXP:%d -- %s' % (string.upper(myCaptain.name), myCaptain.rank, myCaptain.experience, string.upper(myCaptain.myEmpire.name)))\n \n myEmpire.genMail({'fromEmpire':empireID, 'round':self.currentRound,\n 'messageType':'general', 'subject':title, 'body':body})", "def ex_sire_model_data_table():\n data_dict = {'Calf': [1, 3, 1, 4, 3],\n 'Sire': ['Unknown', 'Unknown', 'Unknown', 1, 'Unknown'],\n 'Dam': ['Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown'],\n 'Sex': ['Male', 'Female', 'Female', 'Male', 'Male'],\n 'WWG': [4.5, 2.9, 3.9, 3.5, 5.0]}\n\n df = pd.DataFrame(data_dict)\n\n return(df)", "def get_avg_price(request):\n Domestic = list((\n Domesticinvoice.objects.values(\"part_number\")\n .order_by(\"part_number\")\n .annotate(\n sum_of_total_invoice_amts=Sum(\"total_invoice_amount\"),\n sum_of_invoice_qty=Sum(\"invoice_quantity\")\n )\n ))\n Overseas = (\n Overseasinvoice.objects.values(\"part_number\")\n .order_by(\"part_number\")\n .annotate(\n sum_of_total_invoice_amts=Sum(\"total_amount\"),\n sum_of_invoice_qty=Sum(\"invoice_quantity\")\n )\n )\n parts_price = {}\n part_all = Parts.objects.values_list('pk', flat=True)\n for part in part_all:\n for rd in Domestic:\n if part == rd['part_number']:\n # print(rd)\n if part not in parts_price:\n parts_price[part] = [\n rd['sum_of_total_invoice_amts'],\n rd['sum_of_invoice_qty']\n ]\n else:\n newrd_amount = (\n parts_price[part][0] + rd['sum_of_total_invoice_amts']\n )\n newrd_qty = parts_price[part][1] + rd['sum_of_invoice_qty']\n parts_price[part] = [newrd_amount, newrd_qty]\n\n for ro in Overseas:\n if part == ro['part_number']:\n if part not in parts_price:\n parts_price[part] = [\n ro['sum_of_total_invoice_amts'],\n ro['sum_of_invoice_qty']\n ]\n else:\n newro_amount = (\n parts_price[part][0] + ro['sum_of_total_invoice_amts']\n )\n newrd_num = parts_price[part][1] + ro['sum_of_invoice_qty']\n 
parts_price[part] = [newro_amount, newrd_num]\n\n for part in parts_price:\n obj = AveragePrice.objects.create(\n part_number=Parts.objects.get(pk=part),\n average_price=parts_price[part][0] / parts_price[part][1]\n )\n obj.save()\n\n return HttpResponse(status=200)", "def inventory_report(products):\r\n names = set()\r\n total_price = 0\r\n total_weight = 0\r\n total_flammability = 0\r\n for product in products:\r\n names.add(product.name)\r\n total_price += product.price\r\n total_weight += product.weight\r\n total_flammability += product.flammability\r\n\r\n print(\"ACME CORPORATION OFFICIAL INVENTORY REPORT\")\r\n print(\"Unique product names: {}\".format(len(names)))\r\n print(\"Average price: {}\".format(total_price / len(products)))\r\n print(\"Average weight: {}\".format(total_weight / len(products)))\r\n print(\"Average flammability:{}\".format(\r\n total_flammability / len(products)))\r\n\r\n print(\"Following is useful starting code for acme_report.py:\")", "def _prepare_invoice_grp(self, cr, uid, order, line_ids, context=None):\n if context is None:\n context = {}\n context = dict(context)\n\n inv_data = super(grp_orden_compra, self)._prepare_invoice_grp(cr, uid, order, line_ids, context=context)\n\n # adicionando campos numero compromiso y no obligacion desde la OC\n monto_oc = math.floor(order.total_llavep or 0)\n monto_oc = int(monto_oc)\n inv_data.update({'nro_compromiso': order.nro_compromiso or False, 'monto_comprometido': monto_oc or 0, 'currency_id':order.currency_oc.id})\n\n # adicionando campos no afectacion y monto autorizado desde la primera APG\n if order.pc_apg_id:\n first_apg = order.pc_apg_id\n monto_apg = math.floor(first_apg.total_llavep)\n monto_apg = int(monto_apg)\n # TODO R SPRING X ADICIONANDO CABEZALES SIIF A LA FACTURA A PARTIR DE LA APG\n inv_data.update({'nro_afectacion': first_apg.nro_afectacion_siif or False,\n 'monto_afectado': monto_apg or 0,\n 'siif_tipo_ejecucion':first_apg.siif_tipo_ejecucion.id,\n 'siif_concepto_gasto':first_apg.siif_concepto_gasto.id,\n 'siif_financiamiento':first_apg.siif_financiamiento.id,\n 'siif_codigo_sir':first_apg.siif_codigo_sir.id,\n 'siif_nro_fondo_rot':first_apg.siif_nro_fondo_rot.id,\n }) # cambiando nro_afectacion 23/10\n # inv.update({'nro_afectacion': first_apg.nro_afectacion_apg or False, 'monto_afectado': monto_apg or 0})\n\n # # TODO R SPRING X NO LLEVAR LAS LLAVES PRESUPUESTALES POR DEFECTO\n # if order.pc_apg_id.llpapg_ids:\n # llavep_ids = []\n # for llavep in order.pc_apg_id.llpapg_ids:\n # llavep_ids.append((0, 0, {\n # 'programa_id': llavep.programa_id.id,\n # 'odg_id': llavep.odg_id.id,\n # 'auxiliar_id': llavep.auxiliar_id.id,\n # 'disponible': llavep.disponible,\n # 'proyecto_id': llavep.proyecto_id.id,\n # 'fin_id': llavep.fin_id.id,\n # 'mon_id': llavep.mon_id.id,\n # 'tc_id': llavep.tc_id.id,\n # 'importe': llavep.importe\n # }))\n # inv_data.update({'llpapg_ids': llavep_ids})\n\n return inv_data", "def get_summary_of_records(self):\n ids = self.get_saleman_ids()\n table = [\n [\"Seller name\",\"Number of sales\",\"Total Value ($)\"]\n ]\n for id in ids:\n table_id = [self.get_seller_name(id),self.get_number_of_sales(id),\n self.get_total_of_saleman(id)]\n table.append(table_id)\n data_table = AsciiTable(table)\n print(data_table.table)", "def GetIPCVoucherGlobalTableElementSummary(ivgte):\n out_str = \"\"\n fmt = \"{g: <#018x} {g.ivgte_key: <10d} {ctrl_s:s} {mgr_s:s}\"\n out_str += fmt.format(g=ivgte, ctrl_s=GetIPCVoucherAttrControlSummary(ivgte.ivgte_control), 
mgr_s=GetIPCVoucherAttrManagerSummary(ivgte.ivgte_manager))\n return out_str", "def _ebitda(self):\n try:\n return self.net_income + self.tax_expense + self.interest_expense + self.depreciation_amortization\n except TypeError:\n logger.exception(\n 'net_income: {}, tax_expense: {}, interest_expense: {}, depreciation_amortization: {}'\n .format(self.net_income, self.tax_expense,\n self.interest_expense,\n self.depreciation_amortization))", "def test_visualize_equipment(self):\n pass", "def sector_metrics_fx():\r\n global df_sector_metrics\r\n \r\n df_sector_metrics = df_co_metrics\r\n drop_company = ['company_ref']\r\n df_sector_metrics.drop(labels = drop_company, axis = 1, inplace = True)\r\n df_sector_metrics = df_sector_metrics.groupby(['date_list', 'sector']).agg({'co_inv_overall': 'sum', 'co_inv_50k': 'sum', 'co_inv_100k': 'sum', 'co_inv_manager': 'sum', 'co_inv_sales': 'sum', 'co_inv_key_roles': 'sum', 'co_inv_it': 'sum', 'co_inv_hourly': 'sum','co_fut_cost_overall': 'sum', 'co_fut_cost_50k': 'sum', 'co_fut_cost_100k': 'sum', 'co_fut_cost_manager': 'sum', 'co_fut_cost_sales': 'sum', 'co_fut_cost_key_roles': 'sum', 'co_fut_cost_it': 'sum', 'co_fut_cost_hourly': 'sum','co_inv_overall_28d': 'sum', 'co_inv_50k_28d': 'sum', 'co_inv_100k_28d': 'sum', 'co_inv_manager_28d': 'sum', 'co_inv_sales_28d': 'sum', 'co_inv_key_roles_28d': 'sum', 'co_inv_it_28d': 'sum', 'co_inv_hourly_28d': 'sum','co_fut_cost_overall_28d': 'sum', 'co_fut_cost_50k_28d': 'sum', 'co_fut_cost_100k_28d': 'sum', 'co_fut_cost_manager_28d': 'sum', 'co_fut_cost_sales_28d': 'sum', 'co_fut_cost_key_roles_28d': 'sum', 'co_fut_cost_it_28d': 'sum', 'co_fut_cost_hourly_28d': 'sum'})", "def pre_approve(self, cr, uid, ids, context={}):\n \tfor voucher in self.browse(cr, uid, ids, context=context):\n \t if not voucher.department_id.analytic_account_id:\n \t raise osv.except_osv(_('Configration Check!'), _(\"Please add cost center for your department!\"))\n \t periods = self.pool.get('account.period').search(cr, uid, [('date_start','<=',voucher.date),('date_stop','>=',voucher.date),('company_id','=',voucher.company_id.id)], context=context)\n\n\n res=0.0\n if voucher.purpose:\n if not voucher.purpose.account_id: raise osv.except_osv(_('Warning!'), _('Please configure account for this purpose!')) \n voucher_line = {\n \t\t'voucher_id': voucher.id,\n \t\t'partner_id': voucher.partner_id.id,\n \t\t'untax_amount': voucher.amount,\n \t\t'amount': voucher.amount,\n 'name': voucher.narration,\n \t\t'type': 'dr',\n \t\t'account_analytic_id': voucher.department_id.analytic_account_id and voucher.department_id.analytic_account_id.id,\n 'account_id': voucher.purpose.account_id.id,\n \t }\n new_amount = res and res or voucher.amount \n voucher_line.update({'amount':new_amount,'untax_amount':new_amount})\n \t if voucher.line_ids :\n for line in voucher.line_ids:\n \t\t self.pool.get('account.voucher.line').write(cr, uid, line.id, {\n \t\t'voucher_id': voucher.id,\n \t\t'partner_id': voucher.partner_id.id,\n \t\t'untax_amount': res or line.amount,\n \t\t'amount': line.amount,\n 'name': voucher.narration,\n \t\t'type': 'dr',\n \t\t'account_analytic_id': line.account_analytic_id and line.account_analytic_id.id or voucher.department_id.analytic_account_id.id,\n 'account_id': voucher.purpose.account_id.id or line.account_id.id,\n \t }, context=context)\n \t else:\n\n \t\t new_voucher_line = self.pool.get('account.voucher.line').create(cr, uid, voucher_line, context=context)\n context.update({'purchase':True})\n self.create_budget_confirmation(cr, uid, 
[voucher.id], context)\n \tself.write(cr, uid, ids,{'state': 'preapprove','type':'purchase','ratification':True}, context=context)\n #cxt = context.copy()\n #cxt.update({'type':'ratification'})\n if not super(account_voucher, self).create_budget_confirmation(cr, uid, ids, context=context):\n self.write(cr, uid, ids, {'state': 'approved'}, context=context)\n\n \t'''self.write(cr, uid, ids, {'state': 'preapprove'})\n if not super(account_voucher, self).create_budget_confirmation(cr, uid, ids, context=context):\n self.write(cr, uid, ids, {'state': 'approve','type':'purchase','ratification':True}, context=context)'''\n return True", "def _get_toal_cp_(obj):\n \n fTotal = 0.0\n for item in obj.order_line:\n fTotal += item.purchase_price * item.product_uom_qty\n \n return fTotal", "def Addtoinventory(request): \n modes=['manage','add','order']\n departments={}\n booklist=[]\n for league in models.Dept.objects.all(): \n departments[league.pk]=league\n \n message=\"\"\n nonemptyAuthors = [x for x in request.POST.getlist('AuthorName') if x!='']\n nonemptybooknames = [x for x in request.POST.getlist('bookName') if x!='']\n nonemptybookDesc = [x for x in request.POST.getlist('bookdesc') if x!='']\n nonemptyQuantities = [x for x in request.POST.getlist('Quantity') if x!='']\n nonemptyRows = [x for x in request.POST.getlist('RowRack') if x!='']\n nonemptyselectedDeparts = [x for x in request.POST.getlist('depart_select') if x!='NA']\n \n for j,k,h,fa,z,loc in itertools.zip_longest(nonemptyAuthors,nonemptybooknames,nonemptybookDesc,nonemptyselectedDeparts,nonemptyQuantities,nonemptyRows):\n shortname=k[1:5] \n values=k.split(\"-\")\n if len(values)==1:\n ye=dt.today().year\n values.extend(['I',ye,'0'])\n \n if loc is not None:\n c=loc.split(\"-\")\n if len(c)==1:\n c.extend(['0','0'])\n else:\n #setting default value\n c=[\"20\",\"10\",\"1\"]\n if len(values) >0:\n try:\n departmentDetails=models.Dept.objects.get(dpt_id=fa)\n except Exception as e:\n print(e)\n pass\n try:\n i=0\n testa = models.Atr.objects.values('a_id')\n for test in testa:\n if i>int(test['a_id']):\n i=i\n else:\n i=int(test['a_id'])\n \n varas = models.Atr.objects.values('name')\n isin=False\n for f in list(varas):\n if str(j).lower() == f['name'].lower():\n isin=True\n break\n if isin:\n pass\n else:\n models.Atr.objects.create(a_id=str(i+1),name=str(j),title=\"Mr.\",email=\"[email protected]\")\n except Exception as e:\n if \"does not\" in str(e):\n models.Atr.objects.create(a_id=str(i+1),name=str(j),title=\"Mr.\",email=\"[email protected]\")\n print(e)\n pass\n varset=None\n try:\n i=0;\n testab = models.Bks.objects.values('b_id')\n for test in testab:\n if i>int(str(test['b_id']).split('_')[2]):\n i=i\n else:\n i=int(str(test['b_id']).split('_')[2])\n if (models.Bks.objects.filter(title=str(values[0])).exists()):\n try: \n if not models.Bks.objects.filter(title=str(values[0]),edition=str(values[1]),p_year=str(values[2]),pub=str(values[3])).exists():\n models.Bks.objects.create(b_id=\"IN_\"+shortname+\"_\"+str(i+1),title=str(values[0]),desc=str(h),type=\"ref\",edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]),email=\"[email protected]\",a_id_id=str(i+1),dpt_id_id=str(fa))\n else:\n message=\"book with the same name already exists\"\n except Exception as e:\n print(e)\n else:\n if isin:\n atrobj=models.Atr.objects.get(name=str(j))\n 
models.Bks.objects.create(b_id=\"IN_\"+shortname+\"_\"+str(i+1),title=str(values[0]),desc=str(h),type=\"ref\",edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]),email=\"[email protected]\",a_id_id=atrobj.a_id,dpt_id_id=str(fa))\n else:\n atrobj=models.Atr.objects.get(name=str(j))\n models.Bks.objects.create(b_id=\"IN_\"+shortname+\"_\"+str(i+1),title=str(values[0]),desc=str(h),type=\"ref\",edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]),email=\"[email protected]\",a_id_id=atrobj.a_id,dpt_id_id=str(fa))\n\n except Exception as e:\n if \"does not\" in str(e):\n models.Bks.objects.create(b_id=\"IN_\"+shortname+\"_\"+str(i+1),title=str(values[0]),desc=str(h),type=\"ref\",edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]),email=\"[email protected]\",a_id_id=str(i+1),dpt_id_id=str(fa))\n print(e)\n pass\n \n try:\n g=0\n bookobj =models.Bks.objects.filter(title=str(values[0]),edition=str(values[1]),p_year=str(values[2]),pub=str(values[3]))\n testba = models.Invt.objects.values('id') \n for test in testba:\n if g>int(str(test['id'])):\n g=g\n else:\n g=int(str(test['id']))\n \n Invobj=models.Invt.objects.filter(i_id_id=\"IN_\"+shortname+\"_\"+str(g+1))\n\n if len(bookobj) >= 0:\n if(len(Invobj) == 0):\n for s in bookobj:\n models.Invt.objects.create(id=str(g+1),qty=int(z),i_id_id=s.b_id,shelf=str(c[0]),rack=str(c[1]),row=int(c[2]))\n else:\n for s in bookobj:\n models.Invt.objects.create(id=str(g+1),qty=int(z),i_id_id=s.b_id,shelf=str(c[0]),rack=str(c[1]),row=int(c[2]))\n\n else:\n models.Invt.objects.create(id=str(g+1),qty=int(z),i_id_id=\"IN_\"+shortname+\"_\"+str(g+1),shelf=str(c[0]),rack=str(c[1]),row=int(c[2]))\n except Exception as e:\n try:\n if \"does not\" in str(e): \n models.Invt.objects.create(id=str(g+1),qty=int(z),i_id_id=\"IN_\"+shortname+\"_\"+str(g+1),shelf=str(c[0]),rack=str(c[1]),row=int(c[2]))\n else:\n t=models.Invt.objects.get(i_id_id=\"IN_\"+shortname+\"_\"+str(g+1))\n t.qty= t.qty+int(z)\n t.save()\n except Exception as e:\n print(e)\n \n else:\n message=\"the book details are not given properly\"\n pass\n\n return render(\n request,\n 'app/manageInv.html',\n {\n 'title':'Manage Inventory',\n 'invmodes':modes,\n 'dispmode':'manage',\n 'message':message,\n 'librarian':get_librarians(),\n 'le':list(range(1,2)),\n 'DepartmentList':departments.keys(),\n 'books':get_valid_Books().values(),\n 'year':datetime.now().year,\n }\n )", "def _vROHR(self,vKNOT=None):\r\n\r\n logStr = \"{0:s}.{1:s}: \".format(self.__class__.__name__, sys._getframe().f_code.co_name)\r\n logger.debug(\"{0:s}{1:s}\".format(logStr,'Start.')) \r\n \r\n try: \r\n vROHR=None \r\n \r\n vROHR=pd.merge(self.dataFrames['ROHR'],self.dataFrames['ROHR_BZ'],left_on='pk',right_on='fk')\r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n ,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'fk2LROHR','KVR'\r\n #Ref.\r\n ,'fkCONT'\r\n ,'fkDTRO_ROWD'\r\n ,'fkLTGR','fkSTRASSE'\r\n ,'fkKI','fkKK'\r\n #IDs \r\n ,'pk_x','tk'\r\n ,'GEOM','GRAF'\r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n ]]\r\n\r\n vROHR.rename(columns={'pk_x':'pk'},inplace=True)\r\n vROHR=pd.merge(vROHR,self.dataFrames['CONT'],left_on='fkCONT',right_on='pk')\r\n\r\n if 'IDREFERENZ_x' in vROHR.columns.tolist(): #90-12\r\n 
vROHR.rename(columns={'IDREFERENZ_x':'IDREFERENZ'},inplace=True)\r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n ,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'fk2LROHR','KVR'\r\n #Ref.\r\n ,'fkDTRO_ROWD'\r\n ,'fkLTGR','fkSTRASSE'\r\n ,'fkKI','fkKK'\r\n #IDs \r\n ,'pk_x','tk_x'\r\n ,'GEOM_x','GRAF_x'\r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n #CONT\r\n ,'NAME' \r\n ,'ID'\r\n ,'LFDNR'\r\n ]]\r\n vROHR.rename(columns={'pk_x':'pk','tk_x':'tk','NAME':'CONT','ID':'CONT_ID','LFDNR':'CONT_LFDNR'},inplace=True) \r\n vROHR=pd.merge(vROHR,self.dataFrames['DTRO_ROWD'],left_on='fkDTRO_ROWD',right_on='pk') \r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n ,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'fk2LROHR','KVR'\r\n #DTRO_ROWD\r\n ,'AUSFALLZEIT', 'DA', 'DI', 'DN', 'KT', 'PN', 'REHABILITATION','REPARATUR', 'S', 'WSTEIG', 'WTIEFE'\r\n #Ref.\r\n ,'fkLTGR','fkSTRASSE'\r\n ,'fkKI','fkKK'\r\n #IDs \r\n ,'pk_x','tk_x'\r\n ,'GEOM_x','GRAF_x'\r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n #CONT\r\n ,'CONT' \r\n ,'CONT_ID'\r\n ,'CONT_LFDNR'\r\n ]]\r\n vROHR.rename(columns={'pk_x':'pk','tk_x':'tk'},inplace=True)\r\n vROHR=pd.merge(vROHR,self.dataFrames['LTGR'],left_on='fkLTGR',right_on='pk')\r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG_x'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n ,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'fk2LROHR','KVR'\r\n #DTRO_ROWD\r\n ,'AUSFALLZEIT', 'DA', 'DI', 'DN', 'KT', 'PN', 'REHABILITATION','REPARATUR', 'S', 'WSTEIG', 'WTIEFE'\r\n #LTGR\r\n ,'NAME','BESCHREIBUNG_y','SICHTBARKEIT','VERLEGEART','fkDTRO','fkSRAT'\r\n #Ref.\r\n ,'fkSTRASSE'\r\n ,'fkKI','fkKK'\r\n #IDs \r\n ,'pk_x','tk_x'\r\n ,'GEOM_x','GRAF_x'\r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n #CONT\r\n ,'CONT' \r\n ,'CONT_ID'\r\n ,'CONT_LFDNR'\r\n ]]\r\n vROHR.rename(columns={'pk_x':'pk','tk_x':'tk','NAME':'LTGR_NAME','BESCHREIBUNG_y':'LTGR_BESCHREIBUNG','BESCHREIBUNG_x':'BESCHREIBUNG'},inplace=True)\r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n ,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'fk2LROHR','KVR'\r\n #DTRO_ROWD\r\n ,'AUSFALLZEIT', 'DA', 'DI', 'DN', 'KT', 'PN', 'REHABILITATION','REPARATUR', 'S', 'WSTEIG', 'WTIEFE'\r\n #LTGR\r\n ,'LTGR_NAME','LTGR_BESCHREIBUNG','SICHTBARKEIT','VERLEGEART','fkDTRO','fkSRAT'\r\n #Ref.\r\n ,'fkSTRASSE'\r\n ,'fkKI','fkKK'\r\n #IDs \r\n ,'pk','tk'\r\n ,'GEOM_x','GRAF_x'\r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n #CONT\r\n ,'CONT' \r\n ,'CONT_ID'\r\n ,'CONT_LFDNR'\r\n ]]\r\n \r\n vROHR=pd.merge(vROHR,self.dataFrames['DTRO'],left_on='fkDTRO',right_on='pk')\r\n\r\n if 'IDREFERENZ_x' in vROHR.columns.tolist(): #90-12\r\n 
vROHR.rename(columns={'IDREFERENZ_x':'IDREFERENZ'},inplace=True)\r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG_x'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n ,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'fk2LROHR','KVR'\r\n #DTRO_ROWD\r\n ,'AUSFALLZEIT', 'DA', 'DI', 'DN', 'KT', 'PN', 'REHABILITATION','REPARATUR', 'S', 'WSTEIG', 'WTIEFE'\r\n #LTGR\r\n ,'LTGR_NAME','LTGR_BESCHREIBUNG','SICHTBARKEIT','VERLEGEART'\r\n #DTRO\r\n ,'NAME'\r\n ,'BESCHREIBUNG_y'\r\n ,'E'\r\n #Ref.\r\n ,'fkSTRASSE','fkSRAT'\r\n ,'fkKI','fkKK'\r\n #IDs \r\n ,'pk_x','tk_x'\r\n ,'GEOM_x','GRAF_x'\r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n #CONT\r\n ,'CONT' \r\n ,'CONT_ID'\r\n ,'CONT_LFDNR'\r\n ]]\r\n vROHR.rename(columns={'pk_x':'pk','tk_x':'tk','NAME':'DTRO_NAME','BESCHREIBUNG_y':'DTRO_BESCHREIBUNG','BESCHREIBUNG_x':'BESCHREIBUNG'},inplace=True)\r\n \r\n #logger.debug(\"{:s} vor fkKI: {!s:s}\".format(logStr,(vROHR))) \r\n vROHR=pd.merge(vROHR,vKNOT,left_on='fkKI',right_on='pk') \r\n #logger.debug(\"{:s} nach fkKI: {!s:s}\".format(logStr,(vROHR))) \r\n vROHR.rename(columns={'BESCHREIBUNG_x':'BESCHREIBUNG','IDREFERENZ_x':'IDREFERENZ'\r\n ,'pk_x':'pk','tk_x':'tk'\r\n ,'CONT_ID_x':'CONT_ID','CONT_LFDNR_x':'CONT_LFDNR'\r\n },inplace=True) \r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n ,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'fk2LROHR','KVR_x'\r\n #DTRO_ROWD\r\n ,'AUSFALLZEIT', 'DA', 'DI', 'DN', 'KT', 'PN', 'REHABILITATION','REPARATUR', 'S', 'WSTEIG', 'WTIEFE'\r\n #LTGR\r\n ,'LTGR_NAME','LTGR_BESCHREIBUNG','SICHTBARKEIT','VERLEGEART'\r\n #DTRO\r\n ,'DTRO_NAME'\r\n ,'DTRO_BESCHREIBUNG'\r\n ,'E'\r\n #Ref.\r\n ,'fkSTRASSE','fkSRAT'\r\n ,'fkKK'\r\n #IDs \r\n ,'pk','tk'\r\n ,'GEOM_x','GRAF_x'\r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n #CONT\r\n ,'CONT_x' \r\n ,'CONT_ID'\r\n ,'CONT_LFDNR'\r\n #Ki\r\n ,'NAME'\r\n ,'KVR_y','TM'\r\n ,'XKOR','YKOR','ZKOR'\r\n ,'pXCor','pYCor'\r\n ]]\r\n\r\n vROHR.rename(columns={'NAME':'NAME_i','KVR_x':'KVR','KVR_y':'KVR_i','TM':'TM_i','CONT_x':'CONT'},inplace=True) \r\n vROHR.rename(columns={'XKOR':'XKOR_i','YKOR':'YKOR_i','ZKOR':'ZKOR_i'\r\n ,'pXCor':'pXCor_i'\r\n ,'pYCor':'pYCor_i'\r\n },inplace=True) \r\n \r\n vROHR=pd.merge(vROHR,vKNOT,left_on='fkKK',right_on='pk') \r\n vROHR.rename(columns={'BESCHREIBUNG_x':'BESCHREIBUNG','IDREFERENZ_x':'IDREFERENZ'\r\n ,'pk_x':'pk','tk_x':'tk'\r\n ,'CONT_ID_x':'CONT_ID','CONT_LFDNR_x':'CONT_LFDNR'\r\n },inplace=True) \r\n\r\n vROHR.rename(columns={'NAME':'NAME_k','KVR_x':'KVR','KVR_y':'KVR_k','TM':'TM_k','CONT_x':'CONT'},inplace=True) \r\n vROHR.rename(columns={'XKOR':'XKOR_k','YKOR':'YKOR_k','ZKOR':'ZKOR_k'\r\n ,'pXCor':'pXCor_k'\r\n ,'pYCor':'pYCor_k'\r\n },inplace=True) \r\n\r\n vROHR['pXCors']=[[xi,xk] for xi,xk in zip(vROHR['pXCor_i'],vROHR['pXCor_k'])]\r\n vROHR['pYCors']=[[xi,xk] for xi,xk in zip(vROHR['pYCor_i'],vROHR['pYCor_k'])]\r\n\r\n vROHR.rename(columns={'GEOM_x':'GEOM'},inplace=True) \r\n\r\n vROHR=pd.merge(vROHR,vROHR,left_on='fk2LROHR',right_on='pk',how='left',suffixes=('','_2L')) \r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n 
,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'NAME_i_2L'\r\n ,'NAME_k_2L'\r\n ,'KVR' \r\n #DTRO_ROWD\r\n ,'AUSFALLZEIT', 'DA', 'DI', 'DN', 'KT', 'PN', 'REHABILITATION','REPARATUR', 'S', 'WSTEIG', 'WTIEFE'\r\n #LTGR\r\n ,'LTGR_NAME','LTGR_BESCHREIBUNG','SICHTBARKEIT','VERLEGEART'\r\n #DTRO\r\n ,'DTRO_NAME'\r\n ,'DTRO_BESCHREIBUNG'\r\n ,'E'\r\n #Ref.\r\n ,'fkSTRASSE','fkSRAT'\r\n #IDs \r\n ,'pk','tk' \r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n #CONT\r\n ,'CONT' \r\n ,'CONT_ID'\r\n ,'CONT_LFDNR'\r\n #Ki\r\n ,'NAME_i'\r\n ,'KVR_i','TM_i'\r\n ,'XKOR_i','YKOR_i','ZKOR_i' \r\n #Kk\r\n ,'NAME_k'\r\n ,'KVR_k','TM_k'\r\n ,'XKOR_k','YKOR_k','ZKOR_k'\r\n #plotCors\r\n ,'pXCor_i','pYCor_i'\r\n ,'pXCor_k','pYCor_k'\r\n # matplotlibs's .plot(pXCors,pYCors,...)\r\n ,'pXCors','pYCors' # nur die Endpunkte \r\n # ...........\r\n ,'GEOM'\r\n ]]\r\n \r\n # WAYP ###\r\n vROHR['WAYP']=[list() for dummy in vROHR['pk']] # leere Liste von Wegpunkten\r\n for index,row in vROHR.iterrows():\r\n if pd.isnull(row.GEOM): \r\n continue\r\n geomBytes=base64.b64decode(row.GEOM)\r\n # 1. Byte: Endianess: 0=little\r\n # 1. Byte auslassen\r\n \r\n # 2 ints lesen ...\r\n headerData = struct.unpack('2i',geomBytes[1:9]) \r\n graphType,NOfWaypoints=headerData # graphType: Werte von 1 bis 6 bedeuten: Point, LineString, Polygon, MultiPoint, ...\r\n \r\n # xy-Koordinatenpaare lesen \r\n # 2 double: xi, yi\r\n for idx in range(NOfWaypoints):\r\n offset=9+idx*16 \r\n end=offset+16 \r\n waypXY=struct.unpack('2d',geomBytes[offset:end]) \r\n row.WAYP.append(waypXY)\r\n \r\n vROHR['pWAYPXCors']=[list() for dummy in vROHR['pk']] # leere Liste von pWegpunkten X\r\n vROHR['pWAYPYCors']=[list() for dummy in vROHR['pk']] # leere Liste von pWegpunkten Y\r\n for index,row in vROHR.iterrows():\r\n for waypXY in row.WAYP:\r\n X,Y=waypXY\r\n if int(row.CONT_ID)==1001:\r\n row.pWAYPXCors.append(X-self.pXCorZero)\r\n row.pWAYPYCors.append(Y-self.pYCorZero)\r\n else:\r\n row.pWAYPXCors.append(X)\r\n row.pWAYPYCors.append(Y)\r\n\r\n vROHR=vROHR[[\r\n 'BESCHREIBUNG'\r\n ,'IDREFERENZ'\r\n #Asset\r\n ,'BAUJAHR','HAL'\r\n ,'IPLANUNG','KENNUNG'\r\n #Reibung\r\n ,'L','LZU','RAU','ZAUS','ZEIN','ZUML'\r\n ,'JLAMBS','LAMBDA0'\r\n #inst.\r\n ,'ASOLL','INDSCHALL'\r\n #FW\r\n ,'NAME_i_2L'\r\n ,'NAME_k_2L'\r\n ,'KVR'\r\n #DTRO_ROWD\r\n ,'AUSFALLZEIT', 'DA', 'DI', 'DN', 'KT', 'PN', 'REHABILITATION','REPARATUR', 'S', 'WSTEIG', 'WTIEFE'\r\n #LTGR\r\n ,'LTGR_NAME','LTGR_BESCHREIBUNG','SICHTBARKEIT','VERLEGEART'\r\n #DTRO\r\n ,'DTRO_NAME'\r\n ,'DTRO_BESCHREIBUNG'\r\n ,'E'\r\n #Ref.\r\n ,'fkSTRASSE','fkSRAT'\r\n #IDs \r\n ,'pk','tk' \r\n #BZ\r\n ,'IRTRENN'\r\n ,'LECKSTART','LECKEND','LECKMENGE','LECKORT','LECKSTATUS'\r\n #Rest\r\n ,'QSVB'\r\n ,'ZVLIMPTNZ'\r\n ,'KANTENZV'\r\n #CONT\r\n ,'CONT' \r\n ,'CONT_ID'\r\n ,'CONT_LFDNR'\r\n #Ki\r\n ,'NAME_i'\r\n ,'KVR_i','TM_i'\r\n ,'XKOR_i','YKOR_i','ZKOR_i' \r\n #Kk\r\n ,'NAME_k'\r\n ,'KVR_k','TM_k'\r\n ,'XKOR_k','YKOR_k','ZKOR_k'\r\n #plotCors\r\n ,'pXCor_i','pYCor_i'\r\n ,'pXCor_k','pYCor_k'\r\n # matplotlibs's .plot(pXCors,pYCors,...)\r\n ,'pXCors','pYCors' # nur die Endpunkte\r\n ,'pWAYPXCors','pWAYPYCors' # alle Wegpunkte\r\n #WAYP\r\n ,'WAYP' #List of Tuples: [(x1,y1),...,(xN,yN)] \r\n ]]\r\n\r\n except Exception as e:\r\n logStrFinal=\"{:s}Exception: Line: {:d}: {!s:s}: 
{:s}\".format(logStr,sys.exc_info()[-1].tb_lineno,type(e),str(e))\r\n logger.error(logStrFinal) \r\n raise XmError(logStrFinal) \r\n finally:\r\n logger.debug(\"{0:s}{1:s}\".format(logStr,'_Done.')) \r\n return vROHR", "def _build_itemized_description_table(products: typing.List[Product] = []):\n numrows = len(products)\n table_001 = FlexibleColumnWidthTable(number_of_rows=numrows, number_of_columns=3) \n table_001.add(\n TableCell(\n Paragraph(\"Ensemble Name\", font_color=X11Color(\"White\")),\n background_color=HexColor(\"0b3954\"),\n preferred_width=Decimal(256),\n )\n )\n table_001.add(\n TableCell(\n Paragraph(\"Classification\", font_color=X11Color(\"White\")),\n background_color=HexColor(\"0b3954\"),\n preferred_width=Decimal(128),\n )\n )\n table_001.add(\n TableCell(\n Paragraph(\"Fee\", font_color=X11Color(\"White\")),\n background_color=HexColor(\"0b3954\"),\n preferred_width=Decimal(64),\n )\n )\n \n return table_001", "def done(self, cr, uid, ids, context=None):\n \n voucher_obj = self.pool.get('account.voucher')\n voucher_line_obj = self.pool.get('account.voucher.line')\n admin_affairs_model_obj = self.pool.get('admin.affairs.model')\n affairs_account_obj = self.pool.get('admin_affairs.account') \n model_id = admin_affairs_model_obj.search(cr, uid, [('model','=','environment.and.safety')], context=context)[0] \n affairs_account = affairs_account_obj.search(cr, uid, [('model_id','=',model_id)], context=context)\n if not affairs_account:\n raise osv.except_osv(_('Warning !'), _('Please insert account configuration For Environment and safety'))\n affairs_account_id = affairs_account[0]\n \n affairs_account_record = affairs_account_obj.browse(cr, uid, affairs_account_id,context=context) \n for record in self.browse(cr, uid, ids, context=context):\n if not record.allowances_lines_after :\n raise osv.except_osv(_('Partner Amount !'), _('Sorry no partner Amount After Rate To Transfer!'))\n notes = _(\"Enviroment and Safety allowances Contract: %s\")%(record.name)\n \n journal_id = affairs_account_record.journal_id\n analytic_id = affairs_account_record.analytic_id\n account_id = affairs_account_record.account_id\n\n\t\t# Creating Voucher / Ratitication\n voucher_id = voucher_obj.create(cr, uid, {\n 'amount': record.amount_total,\n 'type': 'ratification',\n 'date': time.strftime('%Y-%m-%d'),\n 'partner_id': record.partner_id.id,\n 'journal_id': journal_id and journal_id.id , \n 'state': 'draft',\n\t\t\t\t\t 'notes':record.notes,\n\t\t\t\t\t 'narration':notes ,\n \t 'company_id':record.company_id.id,\n })\n \t# Creating Voucher / Ratitication Lines\n for line in record.allowances_lines_after:\n '''account_id =line.category_id.account_id\n if not account_id:\n account_id = line.category_id.parent_id.account_id\n \n if not account_id:\n account_id = affairs_account_record.account_id \n\n if not account_id:\n raise osv.except_osv(_('Invalid action !'), _('Please insert Account configuration For Environment and safety Service')) ''' \n \n account_analytic_id =line.category_id.analytic_id\n if not account_analytic_id:\n account_analytic_id = line.category_id.parent_id.analytic_id \n \n if not account_analytic_id:\n account_analytic_id = affairs_account_record.analytic_id\n \n vocher_line_id = voucher_line_obj.create(cr, uid, {\n 'amount': record.amount_total,\n 'voucher_id': voucher_id,\n\t\t\t\t\t 'account_id':account_id and account_id.id,\n\t\t\t\t\t 'account_analytic_id':account_analytic_id and account_analytic_id.id ,\n 'type': 'dr',\n 'name':'environment and Safety allowances :' + 
record.name,\n })\n\t\t\n\t\t# Selecting Voucher Number / Refernece \n\n voucher_number = self.pool.get('account.voucher').browse(cr,uid,voucher_id)\n\n copy_attachments(self,cr,uid,[record.id],'services.contracts.archive',voucher_id,'account.voucher', context)\n self.write(cr, uid, ids, {'state':'done','transfer':True,'voucher_no':voucher_number.number}) \n return True", "def total_organic_compound(self):\n return self.indoor_air_quality[1]", "def baseline_total_organic_compound(self):\n return self.indoor_air_quality_baseline[1]", "def lcia_calculation(self) -> None:\n self.characterized_inventory = self.characterization_matrix * self.inventory", "def GOAL_TOTAL() -> int:\n return 21", "def fetchTAC(self):\n\n last_hour = datetime.datetime.now().date() - datetime.timedelta(hours = 1)\n last_hour = \"{}{}{}\".format(\"'\", last_hour, \"'\")\n last_hour = datetime.date(2011, 4, 5)\n\n self.hlr_cur.execute(\"SELECT id FROM Subscriber WHERE updated >= {date};\".format(date = last_hour))\n subscribers = self.hlr_cur.fetchall()\n\n parsed_data = {}\n unique_imei = {}\n #uid_count = 0\n\n for subscriber in subscribers:\n self.hlr_cur.execute(\"SELECT IMEI FROM Equipment WHERE id = (SELECT equipment_id FROM EquipmentWatch WHERE subscriber_id = {s_id});\".format(s_id = subscriber[0]))\n parsed_imei = self.hlr_cur.fetchall()\n\n if len(parsed_imei) > 0:\n for imei in parsed_imei:\n imei_number = imei[0] \n\n if imei_number not in unique_imei:\n unique_imei[imei_number] = subscriber[0]\n\n uid = unique_imei[imei_number]\n parsed_data.setdefault((uid), str(imei_number)[:8])\n\n self.saveRecords(parsed_data)", "def _build_itemized_description_table0(products: typing.List[Product] = []):\n table_001 = FixedColumnWidthTable(number_of_rows=15, number_of_columns=4)\n for h in [\"Ensemble Name\", \"Classification\", \"Info\", \"Fee\"]:\n table_001.add(\n TableCell(\n Paragraph(h, font_color=X11Color(\"White\")),\n background_color=HexColor(\"0b3954\"),\n )\n )\n\n odd_color = HexColor(\"f4f3f3\")\n even_color = HexColor(\"FFFFFF\")\n \n for row_number, item in enumerate(products):\n c = even_color if row_number % 2 == 0 else odd_color\n table_001.add(TableCell(Paragraph(item.name), background_color=c))\n table_001.add(TableCell(Paragraph(str(item.quantity)), background_color=c))\n table_001.add(\n TableCell(Paragraph(\"$ \" + str(item.price_per_sku)), background_color=c)\n )\n table_001.add(\n TableCell(\n Paragraph(\"$ \" + str(item.quantity * item.price_per_sku)),\n background_color=c,\n )\n )\n\n # Optionally add some empty rows to have a fixed number of rows for styling purposes\n for row_number in range(len(products), 10):\n c = even_color if row_number % 2 == 0 else odd_color\n for _ in range(0, 4):\n table_001.add(TableCell(Paragraph(\" \"), background_color=c))\n\n # subtotal\n subtotal: float = sum([x.price_per_sku * x.quantity for x in products])\n table_001.add(\n TableCell(\n Paragraph(\n \"Subtotal\",\n font=\"Helvetica-Bold\",\n horizontal_alignment=Alignment.RIGHT,\n ),\n col_span=3,\n )\n )\n table_001.add(\n TableCell(Paragraph(\"$ 1,180.00\", horizontal_alignment=Alignment.RIGHT))\n )\n\n # discounts\n table_001.add(\n TableCell(\n Paragraph(\n \"Discounts\",\n font=\"Helvetica-Bold\",\n horizontal_alignment=Alignment.RIGHT,\n ),\n col_span=3,\n )\n )\n table_001.add(TableCell(Paragraph(\"$ 0.00\", horizontal_alignment=Alignment.RIGHT)))\n\n # taxes\n taxes: float = subtotal * 0.06\n table_001.add(\n TableCell(\n Paragraph(\n \"Taxes\", font=\"Helvetica-Bold\", 
horizontal_alignment=Alignment.RIGHT\n ),\n col_span=3,\n )\n )\n table_001.add(\n TableCell(Paragraph(\"$ \" + str(taxes), horizontal_alignment=Alignment.RIGHT))\n )\n\n # total\n total: float = subtotal + taxes\n table_001.add(\n TableCell(\n Paragraph(\n \"Total\", font=\"Helvetica-Bold\", horizontal_alignment=Alignment.RIGHT\n ),\n col_span=3,\n )\n )\n table_001.add(\n TableCell(Paragraph(\"$ \" + str(total), horizontal_alignment=Alignment.RIGHT))\n )\n table_001.set_padding_on_all_cells(Decimal(2), Decimal(2), Decimal(2), Decimal(2))\n table_001.no_borders()\n return table_001", "def test_visualize_recipe_equipment_by_id(self):\n pass", "def _compute_results(self):\n self.ensure_one()\n Result = self.env['sla.employee.view']\n dom = []\n if self.supplier_category_name:\n if self.supplier_category_name == 'employee':\n dom += [('pay_to', '=', 'employee')]\n elif self.supplier_category_name == 'supplier':\n dom += [('pay_to', '!=', 'employee'),('invoice_id.partner_id.category_id.name', '!=', 'ต่างประเทศ')]\n elif self.supplier_category_name == 'foreign':\n dom += [('pay_to', '!=', 'employee'),('invoice_id.partner_id.category_id.name', '=', 'ต่างประเทศ')]\n if self.user_ids:\n dom += [('voucher_id.validate_user_id', 'in', self.user_ids.ids)]\n if self.source_document_type:\n dom += [('invoice_id.source_document_type', '=',\n self.source_document_type)]\n if self.fiscalyear_start_id:\n dom += [('voucher_id.date', '>=',\n self.fiscalyear_start_id.date_start)]\n if self.fiscalyear_end_id:\n dom += [('voucher_id.date', '<=',\n self.fiscalyear_end_id.date_stop)]\n if self.period_start_id:\n dom += [('voucher_id.date', '>=',\n self.period_start_id.date_start)]\n if self.period_end_id:\n dom += [('voucher_id.date', '<=',\n self.period_end_id.date_stop)]\n if self.date_start:\n dom += [('voucher_id.date', '>=', self.date_start)]\n if self.date_end:\n dom += [('voucher_id.date', '<=', self.date_end)]\n self.results = Result.search(\n dom, order=\"fiscalyear,voucher_number,invoice_number\")", "def equipments(self):\n selection = Equipment.objects.filter(responsible__location_id=self.object.id)\n return {\n 'selection': selection,\n 'count': selection.count()\n }", "def _compute_ingreso_subtotal(self):\n for sub in self:\n sub.recurring_total = sum(\n line.ingreso for line in sub.animales_ids)", "def _compute_cuantia_subtotal(self):\n for line in self:\n line.gasto = line.unidades * line.pvp", "def __unicode__(self):\r\n return \"%d (%d , %d, %d)\" % (self.pay_id, self.intent, self.state, self.client_id)", "def resultadosAnuales(self):\r\n self.checkingConnection()\r\n self.model = QSqlQueryModel()\r\n self.model.setQuery('''SELECT years, ingresos, compras, gastos, \r\n (ingresos - compras - gastos) AS Total FROM (\r\n\t\t\tSELECT years, \r\n ingresos, compras, gastos FROM ((SELECT Clients.year AS years, \r\n SUM(Clients.value) AS ingresos FROM Clients GROUP BY Clients.year) \r\n JOIN (SELECT Compras.year AS year2, SUM(Compras.value) AS compras \r\n FROM Compras GROUP BY Compras.year) JOIN (SELECT Gastos.year AS year3, \r\n SUM(Gastos.value) AS gastos FROM Gastos GROUP BY Gastos.year) \r\n ON years = year2 AND year2 = year3)\r\n\t\t\t) ''', self.db)\r\n # Getting the table values\r\n self.years = []\r\n self.ingresos = []\r\n self.compras = []\r\n self.gastos = []\r\n self.total = []\r\n # Save the Query values in each list\r\n for i in range(self.model.rowCount()):\r\n # record is the row and value the column\r\n self.years.append(self.model.record(i).value(\"years\"))\r\n 
self.ingresos.append(self.model.record(i).value(\"ingresos\"))\r\n self.compras.append(self.model.record(i).value(\"compras\"))\r\n self.gastos.append(self.model.record(i).value(\"gastos\"))\r\n self.total.append(self.model.record(i).value(\"Total\"))\r\n self.setModel(self.model)\r\n # Creating the Bar Graph\r\n self.grafica(self.years)", "def emissions_baseline(self):\n baseline = DataFrame(columns=[\"CO2\", \"NOx\", \"PM10\", \"PM2.5\", \"SO2\"])\n baseline = baseline.append(year_1(self.plant.emissions()))\n baseline = baseline.append(year_1(self.plant.fuel_reseller().emissions()))\n baseline = baseline.append(year_1(self.farmer.emissions_exante))\n baseline.loc[\"Total\"] = baseline.sum()\n baseline.loc[\"Total_plant\"] = baseline.iloc[0]\n baseline.loc[\"Total_transport\"] = baseline.iloc[1]\n baseline.loc[\"Total_field\"] = baseline.iloc[2]\n return baseline", "def get_intertie_values (self):\n #~ print self.new_intertie_data.get_item('community','model as intertie')\n if self.new_intertie_data is None:\n raise ValueError, \"No community to intertie to\"\n self.connect_to_intertie = \\\n self.new_intertie_data.get_item('community','model as intertie')\n\n\n self.intertie_generation_efficiency = \\\n self.new_intertie_data.get_item(\n 'community',\n 'diesel generation efficiency'\n )\n\n it_diesel_prices = self.new_intertie_data.get_item(\n 'community',\n 'diesel prices'\n )\n it_diesel_prices.index = it_diesel_prices.index.astype(int)\n #~ print it_diesel_prices.ix[self.start_year:self.end_year]\n self.intertie_diesel_prices = \\\n it_diesel_prices.ix[self.start_year:self.end_year].values.T[0]", "def generateAggregatedCsvData(self, context, obj, entities):\n return sum([long(e.prop1.replace('-', ''), 16) for e in entities])", "def task101(self):\n self.ex(\"\"\"\nSELECT\n a.account_id AS account_id,\n pt.name AS product_type,\n p.name AS product_name\nFROM account AS a\n RIGHT JOIN product AS p ON a.product_cd = p.product_cd\n INNER JOIN product_type AS pt ON p.product_type_cd = pt.product_type_cd\nORDER BY account_id\"\"\")", "def concept_id_fields(item):\n return scom.concept_id_fields(item)", "def print_vscsi_attributes(self,objects):\n print(\"\\n\")\n print((\"LocalPartitionID\".ljust(35),\":\",objects.LocalPartitionID.value()))\n print((\"VirtualSlotNumber\".ljust(35),\":\",objects.VirtualSlotNumber.value()))\n print((\"RequiredAdapter\".ljust(35),\":\",objects.RequiredAdapter.value()))\n print((\"RemoteLogicalPartitionID\".ljust(35),\":\",objects.RemoteLogicalPartitionID.value()))\n print((\"RemoteSlotNumber\".ljust(35),\":\",objects.RemoteSlotNumber.value()))", "def test_equipment_by_id_image(self):\n pass", "def table_summary():\n \n t = dict()\n t['name'] = get_names()\n t['Name'] = [get_properties(name)['label'] for name in t['name']]\n N = len(t['name'])\n \n # host\n t['host'] = ['Sagittarius', 'Sagittarius', 'none', 'Gaia-Sausage-Enceladus', 'Sagittarius', 'Sequoia / Arjuna / I\\'itoi', 'Sequoia / Arjuna', np.nan, np.nan, 'Sequoia / Arjuna', 'Gaia-Sausage-Enceladus', 'Sequoia / Arjuna', 'Helmi / Wukong', 'Helmi / Wukong', 'Sagittarius', 'in situ / Helmi / Wukong', 'Helmi / Wukong', 'Cetus', 'Cetus', 'Sagittarius', 'Sequoia / Arjuna / I\\'itoi', 'Cetus', 'Sequoia / Arjuna / I\\'itoi']\n \n # progenitor\n t['progenitor'] = [np.nan, np.nan, 'itself', 'NGC 5139', 'NGC 4590', np.nan, 'NGC 3201', '(Wukong / Helmi)', '(Wukong / Helmi)', np.nan, np.nan, np.nan, np.nan, 'NGC 5024', np.nan, 'NGC 5272', 'NGC 5024', 'NGC 5824', 'NGC 5824', np.nan, np.nan, np.nan, np.nan]\n \n # 
progenitor type\n t['type'] = ['DG' if name in ['elqui', 'indus', 'jhelum'] else 'GC' for name in t['name']]\n \n # metallicity\n t['feh'] = [-2.4, -2.4, -2.2, -1.5, -2.16, -2.3, -1.5, -2.1, -2.1, -1.6, -1.95, -1.6, -2.7, np.nan, -1.7, -1.1, -2.7, -1.9, np.nan, np.nan, -2.2, np.nan, -1.9]\n \n # associations\n t['friends'] = ['ATLAS', 'Aliqa Uma', np.nan, np.nan, np.nan, np.nan, np.nan, 'Jhelum', 'Indus', np.nan, np.nan, np.nan, np.nan, 'Sylgr', np.nan, np.nan, 'Ravi', 'Turbio', 'Triangulum', np.nan, np.nan, np.nan, np.nan]\n \n tout = Table(t)\n tout.pprint()\n tout.write('../data/stream_origin.fits', overwrite=True)", "def report_update():\r\n resources[\"water\"] = resources[\"water\"] - MENU[order][\"ingredients\"][\"water\"]\r\n resources[\"milk\"] = resources[\"milk\"] - MENU[order][\"ingredients\"][\"milk\"]\r\n resources[\"coffee\"] = resources[\"coffee\"] - MENU[order][\"ingredients\"][\"coffee\"]\r\n resources[\"money\"] = resources[\"money\"] + total", "def tablecost(self):\n subtotal_getter = operator.attrgetter(\"subtotal\")\n\n cost = 0.0\n\n cost += sum(map(subtotal_getter, self.materials))\n cost += sum(map(subtotal_getter, self.processes))\n cost += sum(map(subtotal_getter, self.fasteners))\n cost += sum(map(subtotal_getter, self.toolings))\n\n return cost", "def prepare_display(self):\n # copy attributes and occurrences...\n attributes = self.attrs.copy()\n occ_ids = ', '.join([occ_info.occurrence_id\n for occ_info in self.occurrences])\n attributes['occurrences'] = occ_ids\n old_attributes = self.old_attrs.copy()\n if self.old_occurrences is not None:\n old_occ_ids = ', '.join([occ_info.occurrence_id\n for occ_info in self.old_occurrences])\n old_attributes['occurrences'] = old_occ_ids\n # for new positions: add \"empty values\" for all database attributes\n if self.status == BOMEntry.New:\n for n in AssemblyComponent.GetFieldNames():\n if n not in attributes:\n attributes[n] = '-'\n # replace NULLs with \"-\"\n for attrDict in [attributes, old_attributes]:\n for k, v in attrDict.iteritems():\n if v is None:\n attrDict[k] = '-'\n # ... 
and delegate to TableRow\n self.item = Item.ByKeys(self.attrs['teilenummer'],\n self.attrs['t_index'])\n self.display_row = TableRow(self.display_expressions,\n attributes,\n old_attributes,\n self.item)", "def calc_Cinv_CCGT(CC_size_W, CCGT_cost_data):\n\n # if the Q_design is below the lowest capacity available for the technology, then it is replaced by the least\n # capacity for the corresponding technology from the database\n if CC_size_W < CCGT_cost_data['cap_min'][0]:\n CC_size_W = CCGT_cost_data['cap_min'][0]\n CCGT_cost_data = CCGT_cost_data[\n (CCGT_cost_data['cap_min'] <= CC_size_W) & (CCGT_cost_data['cap_max'] > CC_size_W)]\n\n\n #costs of connection\n connection_costs = ngas.calc_Cinv_gas(CC_size_W)\n\n Inv_a = CCGT_cost_data.iloc[0]['a']\n Inv_b = CCGT_cost_data.iloc[0]['b']\n Inv_c = CCGT_cost_data.iloc[0]['c']\n Inv_d = CCGT_cost_data.iloc[0]['d']\n Inv_e = CCGT_cost_data.iloc[0]['e']\n Inv_IR = CCGT_cost_data.iloc[0]['IR_%']\n Inv_LT = CCGT_cost_data.iloc[0]['LT_yr']\n Inv_OM = CCGT_cost_data.iloc[0]['O&M_%'] / 100\n\n InvC = Inv_a + Inv_b * (CC_size_W) ** Inv_c + (Inv_d + Inv_e * CC_size_W) * log(CC_size_W)\n\n Capex_a_CCGT_USD = calc_capex_annualized((InvC+connection_costs), Inv_IR, Inv_LT)\n Opex_fixed_CCGT_USD = InvC * Inv_OM\n Capex_CCGT_USD = InvC\n\n return Capex_a_CCGT_USD, Opex_fixed_CCGT_USD, Capex_CCGT_USD", "def onemin_ivls():\n dids = mw.col.decks.allIds()\n ivls = onemin_ivls_list(dids)\n display_result(ivls)", "def onchange_invoice(self):\n self.product_id = False\n self.date = self.invoice.date_invoice\n self.name = (self.invoice and self.invoice.reference) or ''\n self.analytic_account_id = False\n self.unit_amount = self.invoice.residual\n self.quantity = 1\n self.total_amount = self.unit_amount", "def inventory_report(self):\n mean_price = sum(Product.price for Product in sample) / len(sample)\n mean_weight = sum(Product.weight for Product in sample) / len(sample)\n mean_flam = sum(Product.flammability for Product in sample) / len(sample)\n return 'Unique Product Names: ', sample.unique, '/n Average Price: ', mean_price, \n '/n Average Weight: ', mean_weight, '/n Average Flammability: ', mean_flam", "def calc_annual_electric_savings (self):\n costs = self.comp_specs['diesel generator o&m']\n\n for kW in costs.keys():\n try:\n if self.average_load < int(kW):\n maintenance = self.comp_specs['diesel generator o&m'][kW]\n break\n except ValueError:\n maintenance = self.comp_specs['diesel generator o&m'][kW]\n\n self.baseline_generation_cost = maintenance + \\\n (self.pre_intertie_generation_fuel_used * self.diesel_prices)\n\n maintenance = self.capital_costs * \\\n (self.comp_specs['percent o&m'] / 100.0)\n self.proposed_generation_cost = maintenance + \\\n self.intertie_offset_generation_fuel_used * \\\n self.intertie_diesel_prices\n self.annual_electric_savings = self.baseline_generation_cost -\\\n self.proposed_generation_cost\n #~ print len(self.annual_electric_savings)\n #~ print 'self.annual_electric_savings',self.annual_electric_savings", "def getDetail(self):\n\t\t\n\t\treturn (super().setParameters(0,self.getDefense(),0))\n\t\t\n\t\t#return \"\\n#########################################################\\n\"+\"\\nItem of Defense, Name of item:\"+self.getName()+\"\\nCapacity of defense:\"+str(self.getDefense())+\"\\nCapacity of attack:0 \\n Capacity of heal:0 \\n\"+\"#########################################################\\n\"", "def add(table):\n\n # your code\n inventory_data = [\"Product: \", \"Manufacturer: \", \"Purchase Year: \", \"Durability: 
\"]\n inputs = ui.get_inputs(inventory_data, \"Add item\")\n ID = common.generate_random(table)\n table.append([ID, *inputs])\n return table", "def info_criteria(indep,\n dep,\n models,\n add_aicc=False):\n num_data = len(indep)\n\n bic_calc = BicCalculator(bic_type=BIC_TYPE.STANDARD)\n bic_calc_bkpt = BicCalculator(bic_type=BIC_TYPE.HYBRID)\n\n #bic_calc_bkpt = bic_calc\n #####bic_calc_bkpt = BicCalculator(bic_type = BIC_TYPE.HOS)\n\n bics = []\n aics = []\n aiccs = []\n for model in models:\n\n if model in [Model.ONE_BKPT, Model.TWO_BKPT]:\n bic_calc_to_use = bic_calc_bkpt\n else:\n bic_calc_to_use = bic_calc\n\n estimator = model.estimator(num_end_to_skip=NUM_END_TO_SKIP,\n num_between_to_skip=NUM_BETWEEN_TO_SKIP)\n estimator.fit(indep, dep)\n\n loglikelihood = estimator.loglikelihood\n num_params = estimator.num_params\n\n bic = bic_calc_to_use.bic(num_params=num_params,\n loglikelihood=loglikelihood,\n num_data=num_data)\n aic = stats_util.aic(num_params=num_params,\n loglikelihood=loglikelihood)\n aicc = stats_util.aicc(num_params=num_params,\n loglikelihood=loglikelihood,\n num_data=num_data)\n\n bics.append(bic)\n aics.append(aic)\n aiccs.append(aicc)\n ic_df = pd.DataFrame({\"BIC\": bics, \"AIC\": aics}, index=models)\n if add_aicc:\n ic_df[\"AICC\"] = aiccs\n\n wts_df = ic_df.apply(stats_util.bma_weights, axis=0)\n wts_cols = [x + \" Model Wt\" for x in wts_df.columns]\n wts_df.columns = wts_cols\n both = pd.concat([ic_df, wts_df], join=\"outer\", axis=1)\n return both", "def electricity_balance(dh: DataHandler):\n # creates DataFrame of all electricity flows in TWh with index = ['alltec','r']\n supply = dh.get(\"o_supply\").groupby([\"tec_supply\", \"r\"]).sum()\n supply.index.names = [\"alltec\", \"r\"]\n supply = pd.concat([supply], keys=[\"supply\"], names=[\"type\"])\n demand = dh.get(\"o_demand\").groupby([\"tec_demand\", \"r\"]).sum() * -1\n demand.index.names = [\"alltec\", \"r\"]\n demand = pd.concat([demand], keys=[\"demand\"], names=[\"type\"])\n imp = pd.concat(\n [\n pd.concat(\n [dh.get(\"o_import\").groupby(\"r\").sum()],\n keys=[\"import\"],\n names=[\"alltec\"],\n )\n ],\n keys=[\"demand\"],\n names=[\"type\"],\n )\n cur = pd.concat(\n [\n pd.concat(\n [dh.get(\"o_cur\").groupby(\"r\").sum().mul(-1)],\n keys=[\"cur\"],\n names=[\"alltec\"],\n )\n ],\n keys=[\"demand\"],\n names=[\"type\"],\n )\n\n elec_flow = pd.concat([supply, demand, imp, cur])\n elec_flow = elec_flow.div(1000)\n\n mI = pd.MultiIndex.from_product(\n [\n dh.merge_stored_sets(\"alltec\"),\n dh.merge_stored_sets(\"r\"),\n [\"supply\", \"demand\"],\n ],\n names=[\"alltec\", \"r\", \"type\"],\n )\n elec_flow = add_zeros(elec_flow, mI)\n\n return elec_flow", "def __init__(self, *args, **kwargs):\n super(ProcedureMeasure, self).__init__(*args, **kwargs)\n self.fields_to_group_by = ['bene_sk', 'clm_from_dt']", "def generate_amortization_table(self):\n payment = self.payment\n self.table = {\"index\":[index for index in self.index],\\\n \"payment\": [payment for n in self.index],\n \"interest\":[0],\n \"amortization\":[0],\n \"balance\":[self.principal]}\n for i in self.table[\"index\"][1:]:\n interest = self.table[\"balance\"][i-1] * self.interest\n self.table[\"interest\"].append(round(interest,0))\n amortization = payment - interest\n self.table[\"amortization\"].append(round(amortization,0))\n prior_balance = self.table[\"balance\"][i-1] \n ending_balance = prior_balance - amortization\n self.table[\"balance\"].append(round(ending_balance,0))\n\n return self.table", "def product_db() -> 
List[Text]:\n\n return [\n \"credit\",\n \"forex\",\n \"debit\",\n \"atm\"\n ]", "def get_existing_schedule_info():\n _,items,schedule = load_data()\n schedule['consumption'] = schedule[itemnames()].apply(lambda x: x * items.set_index('item') \\\n .T[itemnames()].as_matrix().T.flatten(), axis=1).sum(1)\n schedule['cost'] = schedule.consumption * schedule.price\n schedule['cumulative_cost'] = schedule.cost.cumsum()\n schedule.replace(0,np.nan,inplace=True)\n schedule.set_index('blockid',inplace=True)\n return(schedule)", "def GetIPCVoucherAttrManagerSummary(ivam):\n out_str = \"\"\n fmt = \"{: <#018x} {: <30s} {: <30s} {: <30s} {: <30s} {: <30s}\"\n \n if unsigned(ivam) == 0 :\n return \"{: <#018x}\".format(ivam)\n\n get_value_fn = kern.Symbolicate(unsigned(ivam.ivam_get_value))\n extract_fn = kern.Symbolicate(unsigned(ivam.ivam_extract_content))\n release_value_fn = kern.Symbolicate(unsigned(ivam.ivam_release_value))\n command_fn = kern.Symbolicate(unsigned(ivam.ivam_command))\n release_fn = kern.Symbolicate(unsigned(ivam.ivam_release))\n out_str += fmt.format(ivam, get_value_fn, extract_fn, release_value_fn, command_fn, release_fn)\n return out_str", "def _odl_inventory(self):\n return {\n \"id\": self._id,\n \"hard-timeout\": self._hard_timeout,\n \"idle-timeout\": self._idle_timeout,\n \"table_id\": self._table_id,\n \"priority\": self._priority,\n \"instructions\": {\n \"instruction\": [self._instructions[i].odl_inventory(i) for i in range(len(self._instructions))]\n },\n \"match\": self._match.odl_inventory()\n }" ]
[ "0.5435056", "0.4894137", "0.48406047", "0.47943744", "0.47545114", "0.47482443", "0.4708286", "0.46779168", "0.4671095", "0.4662206", "0.4640695", "0.46324465", "0.45957166", "0.45863223", "0.45708844", "0.45649207", "0.45606184", "0.4560529", "0.4551793", "0.45511213", "0.45378718", "0.45113486", "0.4499626", "0.4497712", "0.44976717", "0.44805804", "0.44793588", "0.44707382", "0.44707382", "0.44668087", "0.4463886", "0.4461916", "0.4448145", "0.44392133", "0.44373837", "0.4435482", "0.44345087", "0.44337812", "0.44234148", "0.44203255", "0.44155446", "0.44129044", "0.4396209", "0.4391999", "0.43796045", "0.43794304", "0.43763217", "0.43699566", "0.4368443", "0.43645644", "0.43633404", "0.4353403", "0.4352458", "0.43387067", "0.4319986", "0.43185896", "0.43159518", "0.43107313", "0.43104827", "0.4306463", "0.43027556", "0.42983258", "0.42925853", "0.42876107", "0.42825884", "0.4277776", "0.42774346", "0.4276806", "0.4274275", "0.42659172", "0.42589", "0.425682", "0.42566058", "0.42555794", "0.4252811", "0.42472056", "0.4247152", "0.42445955", "0.42363068", "0.42362958", "0.42359322", "0.42355803", "0.42353863", "0.42293552", "0.42288694", "0.42233616", "0.4222037", "0.42149383", "0.42144203", "0.4213606", "0.42098993", "0.42055964", "0.4203984", "0.4194942", "0.41923282", "0.41907057", "0.4189261", "0.41883957", "0.41871592", "0.4185399" ]
0.4452974
32
returns True if employee has rejoined otherwise False
def is_rejoinee(self): return len(self._start_date) > 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_employee():\n return _is_member('uw_employee')", "def is_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return (self.date_joined + expiration_date <= datetime.datetime.now())", "def is_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n\n return (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def _compute_can_reset(self):\n\t\tuser = self.env.user\n\t\tgroup_hr_manager = self.env.ref ('hr_holidays.group_hr_holidays_manager')\n\t\tfor holiday in self:\n\t\t\tif group_hr_manager in user.groups_id or holiday.employee_id and holiday.employee_id.user_id == user:\n\t\t\t\tholiday.can_reset = True", "def already_booked(slots, attendees, user_name):\n already_joined = False\n for i in attendees:\n if i[\"email\"] == user_name+'@student.wethinkcode.co.za':\n already_joined = True\n\n if already_joined == True:\n return False\n else:\n return True", "def is_emperor(user_id: int, table_id: int) -> bool:\n table = Table.query.get(table_id)\n return table.emperor == user_id", "def is_manager(self) -> bool:\n return self.role in EmployeeRole.manager_roles()", "def activation_expired(self):\n return self.date_joined + timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS) < timezone.now()", "def activation_key_expired(self):\r\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\r\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\r\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == RegistrationProfile.ACTIVATED or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def isOn(self):\r\n return len(self.__agenda)>2", "def is_student_employee():\n return _is_member('uw_affiliation_student-employee')", "def is_joined_days_passed(self, days):\n return timezone.now() >= self.user.date_joined + timedelta(days=days)", "def has_happened(self):\n\n return self.end < timezone.now()", "def all_leave(self):\n return self.num_leaves == self.num_workers", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.user.date_joined + expiration_date <= datetime.datetime.now()", "def is_retired(self):\n if str.__str__(self) in UID_dictionary:\n return bool(UID_dictionary[self][3])\n\n return False", "def activation_key_expired(self):\n exp_date = timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.user.date_joined + exp_date <= datetime.now()", "def can_reschedule(self) -> bool:\n return pulumi.get(self, \"can_reschedule\")", "def same_user_or_shiftleader(self, user):\n try:\n return (\n self.get_object().userid == user\n or user.is_superuser\n or user.userprofile.has_shift_leader_rights\n )\n except UserProfile.DoesNotExist:\n return False", "def has_orcid(self):\n try:\n if self.orcid:\n return True\n except Orcid.DoesNotExist:\n pass\n return False", "def is_examiner(self, user_obj):\n return self.examiners.filter(pk=user_obj.pk).count() > 0", "def test_func(self):\n member_to_finish = self.get_object()\n return self.request.user.rfid == member_to_finish.rfid", 
"def get_employee(self):\n employee_ids = self.env['hr.employee'].search([('user_id', '=', self.env.uid)])\n return employee_ids[0] if employee_ids else False", "def replied(self):\n return bool(self.replied_at is not None)", "def _check_employee(self):\n\n for record in self:\n\n if record.nik_number:\n # find duplicate nik\n employee_ids = self.search([('id', 'not in', self.ids), ('nik_number', '=', record.nik_number)])\n if employee_ids:\n error_msg = _(\"There is duplicate of Employee Identity Number.\")\n raise ValidationError(error_msg)\n\n # check nik format. it required base_indonesia\n if not record._check_nik(record):\n error_msg = _(\"NIK did not match with Company Code.\")\n raise ValidationError(error_msg)\n\n if record.identification_id:\n employee_ids = self.search([('id', 'not in', self.ids), ('identification_id', '=', record.identification_id)])\n if employee_ids:\n error_msg = _(\"There is duplicate of Identification Number.\")\n raise ValidationError(error_msg)\n\n return True", "def reports_editable(self):\n end_plus_time = self.datetime_end + datetime.timedelta(days=CCR_DELTA)\n return timezone.now() < end_plus_time", "def user_is_attendee(user):\n exists = check_attendee_exists(user, user)\n if exists[0]:\n return True\n return False", "def user_has_selected_nickname(self):\n if self.fresh is None:\n delta = self.created - self.modified\n # Simulate delta = abs(delta)\n if delta.days < 0:\n delta = -delta\n self.fresh = (delta.days == 0 and delta.seconds < 2)\n return not self.fresh", "def resent(self):\n return 'Resent-Date' in self", "def shouldRollover(self, record):\n t = int(mod_time.time())\n if t >= self.next_rollover_time:\n return 1\n return 0", "def is_expired(self) -> bool:\n if self.purpose == Purpose.password_reset:\n now = datetime.utcnow()\n expires_after = timedelta(hours=24)\n return now >= (self.created_at + expires_after)\n else:\n return False", "def check_rpt_status(self) -> bool:\n return self.allele == self.fasta_alt", "def has_enacted(self, billing_cycle):\n return RecurredCost.objects.filter(\n recurring_cost=self,\n billing_cycle=billing_cycle,\n ).exists()", "def is_assigned(self):\n if self.status == \"ASSIGNED\":\n return True\n else:\n return False", "def reservation_mark_entrance(user: User, reservation: Reservation):\n owns_restaurant = reservation.restaurant.operator == user\n if owns_restaurant and reservation.status is ReservationState.ACCEPTED and reservation.reservation_time <= datetime.datetime.now():\n #Might want to add user notification\n reservation.entrance_time = datetime.datetime.now()\n reservation.status = ReservationState.SEATED\n db.session.commit()\n return True\n\n return False", "def is_teacher_leads(self, teacher_email):\n if not self.is_course_exists():\n print(\"Course with name - {} does not exist\".format(self._course_name))\n return\n course = self.get_course()\n if teacher_email == course._teacher:\n return True\n return False", "def isLeader(self):\n return self.datacenter_id == self.leader_id", "def do_hire(self):\n return f\"{self} is hiring employees\"", "def is_indeed(self) -> bool:\n return self.mukluk > 5", "def is_expired(self):\n\n return time.time() * 1000 - self._refreshed_on > self._expire", "def reservation_mark_exit(user: User, reservation: Reservation):\n owns_restaurant = reservation.restaurant.operator == user\n if owns_restaurant and reservation.status is ReservationState.SEATED:\n #Might want to add user notification\n reservation.exit_time = datetime.datetime.now()\n reservation.status = 
ReservationState.DONE\n db.session.commit()\n return True\n\n return False", "def all_enter(self):\n return self.num_enters == self.num_workers", "def is_happening(self):\n now = timezone.now()\n start = self.start\n end = self.end\n happening = False\n # check that the event has started and 'now' is btwn start & end:\n if (now >= start) and (start.time() <= now.time() <= end.time()):\n happening = True\n return happening", "def show_all_employees(self):\n try:\n employees = self.admin_repository.show_all_employees()\n if employees:\n for employee in employees:\n print(\"Employee Id : {}\".format(employee[0]))\n print(\"Name : {}\".format(employee[1]))\n print(\"Email : {}\".format(employee[2]))\n print(\"----------------------------\")\n return True\n else:\n print(\"No records found.\")\n return False\n except Exception as e:\n print(\"Some Error occurred.Please try again\")\n return False", "def check_have_attend_by_uid(self,uid,eid):\n uid = str(uid)\n eid = str(eid)\n count_info = self.db.get(\"SELECT COUNT(*) AS num FROM fs_user_event WHERE uid=%s and eid=%s and status=0 and checkstatus = 2\",uid,eid)\n return True if count_info['num'] else False", "def test_ReportingPeriodDetailView_current_employee_toggle(self):\n self.former_employee.user_data.current_employee = True\n self.former_employee.user_data.save()\n response = self.app.get(\n reverse(\n 'reports:ReportingPeriodDetailView',\n kwargs={'reporting_period': '2015-01-01'},\n )\n )\n self.assertEqual(\n len(response.html.find_all('tr', {'class': 'user'})), 3\n )\n self.former_employee", "def is_repetition(self):\n return self.id == 1", "def is_debtor(self):\n return bool(self.expired_invoices_count())", "def offrable(self) -> bool:\n if date.today() > self.cagnotte.fin_achat:\n return False\n return (\n self.beneficiaires == 0\n or self.offre_set.filter(valide=True).count() < self.beneficiaires\n )", "def can_renew(self):\n\n if not hasattr(self.user, 'memberships'):\n return False\n\n # look at active memberships\n\n active_memberships = self.user.memberships.filter(\n status=True, status_detail='active'\n )\n\n for membership in active_memberships:\n if membership.can_renew():\n return True\n\n return False", "def ref_user_flag(self):\n try:\n ref = User.objects.get(\n associated_emails__email__iexact=self.reference_email,\n associated_emails__is_verified=True)\n return True\n except ObjectDoesNotExist:\n return False", "def process_employee_exit(self):\n if self.is_employee_serving():\n self._end_date.append(datetime.now().isoformat())\n\n print(f\"Successfully processed exit for employee {self.name} on\" \\\n f\"{self._end_date[-1]}\\nWe wish {self.name} for future endeavours\")\n return\n raise RejoiningException(\"Employee not in service. 
Cannot process exit.\")", "def is_revised_by_staff(self):\n\n form_entries = self.form_entries.all().distinct('user')\n for f in form_entries:\n if f.user.is_staff or f.user.is_superuser:\n return True\n\n return False", "def is_password_reset_too_soon(cls, user):\r\n if not cls.is_password_reset_frequency_restricted():\r\n return False\r\n\r\n history = PasswordHistory.objects.filter(user=user).order_by('-time_set')\r\n\r\n if not history:\r\n return False\r\n\r\n now = timezone.now()\r\n\r\n delta = now - history[0].time_set\r\n\r\n return delta.days < settings.ADVANCED_SECURITY_CONFIG['MIN_TIME_IN_DAYS_BETWEEN_ALLOWED_RESETS']", "def is_active(self):\n return self.start_date <= timezone.now() <= self.end_date", "def supportsRelogin(self):\n return 0", "def membership_valid(self):\n\n today = date.today()\n\n if self.dues_paid is None:\n return False\n\n months = 12 if self.dues_paid_year else 6\n dues_due = datetime.combine(self.dues_paid, datetime.min.time()) + relativedelta(months=+months)\n dues_due = dues_due.date()\n\n return dues_due > today", "def should_user_reset_password_now(cls, user):\r\n if not settings.FEATURES['ADVANCED_SECURITY']:\r\n return False\r\n\r\n days_before_password_reset = None\r\n if user.is_staff:\r\n if cls.is_staff_forced_password_reset_enabled():\r\n days_before_password_reset = \\\r\n settings.ADVANCED_SECURITY_CONFIG['MIN_DAYS_FOR_STAFF_ACCOUNTS_PASSWORD_RESETS']\r\n elif cls.is_student_forced_password_reset_enabled():\r\n days_before_password_reset = \\\r\n settings.ADVANCED_SECURITY_CONFIG['MIN_DAYS_FOR_STUDENT_ACCOUNTS_PASSWORD_RESETS']\r\n\r\n if days_before_password_reset:\r\n history = PasswordHistory.objects.filter(user=user).order_by('-time_set')\r\n time_last_reset = None\r\n\r\n if history:\r\n # first element should be the last time we reset password\r\n time_last_reset = history[0].time_set\r\n else:\r\n # no history, then let's take the date the user joined\r\n time_last_reset = user.date_joined\r\n\r\n now = timezone.now()\r\n\r\n delta = now - time_last_reset\r\n\r\n return delta.days >= days_before_password_reset\r\n\r\n return False", "def is_mentor(self):\n return self.user_profile_status == self.MENTOR", "def gameOver(self):\n\t\treturn self.lives == 0", "def oneupped(self):\n\n oneup = self.oneups.filter_by(author_id=session[\"active_persona\"]).first()\n\n if oneup is None or oneup.state < 0:\n return False\n else:\n return True", "def test_user_retirement(self):\n new_status = 'canceled'\n\n self.enrollment.status = new_status\n self.enrollment.save()\n\n # Ensure that all the records had values for external_user_key\n assert self.enrollment.external_user_key == 'abc'\n\n assert self.enrollment.historical_records.all()\n for record in self.enrollment.historical_records.all():\n assert record.external_user_key == 'abc'\n\n ProgramEnrollment.retire_user(self.user.id)\n self.enrollment.refresh_from_db()\n\n # Ensure those values are retired\n assert self.enrollment.external_user_key.startswith('retired_external_key')\n\n assert self.enrollment.historical_records.all()\n for record in self.enrollment.historical_records.all():\n assert record.external_user_key.startswith('retired_external_key')", "def calculate_alive(self):\n return self.lives > 0", "def can_enroll():\r\n # if using registration method to restrict (say shibboleth)\r\n if settings.FEATURES.get('RESTRICT_ENROLL_BY_REG_METHOD') and course.enrollment_domain:\r\n if user is not None and user.is_authenticated() and \\\r\n ExternalAuthMap.objects.filter(user=user, 
external_domain=course.enrollment_domain):\r\n debug(\"Allow: external_auth of \" + course.enrollment_domain)\r\n reg_method_ok = True\r\n else:\r\n reg_method_ok = False\r\n else:\r\n reg_method_ok = True #if not using this access check, it's always OK.\r\n\r\n now = datetime.now(UTC())\r\n start = course.enrollment_start\r\n end = course.enrollment_end\r\n\r\n if reg_method_ok and (start is None or now > start) and (end is None or now < end):\r\n # in enrollment period, so any user is allowed to enroll.\r\n debug(\"Allow: in enrollment period\")\r\n return True\r\n\r\n # if user is in CourseEnrollmentAllowed with right course key then can also enroll\r\n # (note that course.id actually points to a CourseKey)\r\n # (the filter call uses course_id= since that's the legacy database schema)\r\n # (sorry that it's confusing :( )\r\n if user is not None and user.is_authenticated() and CourseEnrollmentAllowed:\r\n if CourseEnrollmentAllowed.objects.filter(email=user.email, course_id=course.id):\r\n return True\r\n\r\n # otherwise, need staff access\r\n return _has_staff_access_to_descriptor(user, course, course.id)", "def user_is_examiner(userobj):\n from .assignment_group import AssignmentGroup\n return AssignmentGroup.published_where_is_examiner(userobj).exists()", "def is_monster_lord(self):\n return True", "def valid(self):\r\n return self.resumable and self.sessionID", "def user_exist(cls,user_name):\n for user in cls.user_list:\n if user.user_name == user_name:\n return True\n return False", "def test_ReportingPeriodDetailView_current_employee_set_false(self):\n response = self.app.get(\n reverse(\n 'reports:ReportingPeriodDetailView',\n kwargs={'reporting_period': '2015-01-01'},\n )\n )\n self.assertEqual(\n len(response.html.find_all('tr', {'class': 'user'})), 2\n )", "def last_active_admin(self):\n number = User.objects.filter(role=User.ROLE_ADMIN,\n is_active=True).count()\n if number > 1:\n return False\n else:\n return True", "def expired(self):\n return rospy.get_rostime() - self.start_time > self.duration", "async def on_member_update(before, after):\r\n if Counter(before.roles) == Counter(after.roles):\r\n return\r\n await check_member_rules(after)", "def complete(self):\n return (self.memberDevices <= len(self.members)) or not self.exists", "def is_profile_complete(self):\n return bool(self.fullname and self.username and self.email)", "def has_history(self, user):\n\n header = connect(self.__path)\n curs = header.cursor()\n encrypted_id = md5((str(user.id) + \"typicaluser\").encode()).hexdigest()\n curs.execute(\"SELECT * FROM users WHERE id = (?)\", (encrypted_id,))\n data = curs.fetchall()\n return len(data) >= 1", "def is_elected_leader(resource):\n if is_clustered():\n if not is_crm_leader(resource):\n log('Deferring action to CRM leader.', level=INFO)\n return False\n else:\n peers = peer_units()\n if peers and not oldest_peer(peers):\n log('Deferring action to oldest service unit.', level=INFO)\n return False\n return True", "def should_keep_running(self):\n return len(self.party.active_users())", "def orbOfRebirth(self):\n\t\turl = \"https://habitica.com/api/v3/user/rebirth\"\n\t\treturn(postUrl(url, self.credentials))", "def check_overtime(self, cr, uid, att, context=None):\n if att:\n overtime_obj = self.pool.get('hr.overtime')\n orertime_ids = overtime_obj.search(cr, uid, [('employee_id', '=', att.employee_id.id),\n ('mode', '=', 'by_employee'),\n ('name', '=', att.day_tz),\n ('datetime_start', '<=', att.name),\n ('datetime_stop', '>=', att.name),\n ('state', 'not in', 
['cancel', 'confirmed', 'done'])\n ])\n if orertime_ids:\n return True\n return False", "def unorphaned(self):\n return self.new_owner == self.user", "def check_for_end_of_game(self):\n return self.player_1.score + self.player_2.score >= self.number_of_cells", "def all_players_finish(self):\n return len(self.game_winners) == len(self.players)", "def has_expired(self) -> bool:\n raise NotImplementedError() # pragma: nocover", "def is_transfer_due_to_death_staff(reg_type: str) -> bool:\n return reg_type and reg_type in (MhrRegistrationTypes.TRANS_ADMIN,\n MhrRegistrationTypes.TRANS_AFFIDAVIT,\n MhrRegistrationTypes.TRANS_WILL)", "def isExpired(self):\n return True/False", "def is_email_address_already_assigned(email_address: str) -> bool:\n return _do_users_matching_filter_exist(DbUser.email_address, email_address)", "def is_expired(self) -> bool:\n return now() > self.expires", "def will_occur(self, now):\n return self.end_repeat is None or self.end_repeat >= now.date() or \\\n self.l_start_date >= now or self.l_end_date >= now", "def is_access_expired(self) -> bool:\n entitlement_contract = self.cfg.entitlements.get(self.name, {})\n # TODO(No expiry per resource in MVP yet)\n expire_str = entitlement_contract.get('expires')\n if not expire_str:\n return False\n expiry = datetime.strptime(expire_str, '%Y-%m-%dT%H:%M:%S.%fZ')\n if expiry >= datetime.utcnow():\n return False\n return True", "def isendofheated(self,lag):\n kmax = self.n\n v1 = self.v1\n v2 = self.v2\n for k in range(kmax-1):\n if lag[k+1]>=(v2+v1)/(v2-v1) * lag[k]:\n return False\n return True", "def can_leave_team(uid):\n current_user = get_user(uid=uid)\n current_team = api.team.get_team(current_user[\"tid\"])\n if current_team[\"team_name\"] == current_user[\"username\"]:\n return False\n if current_team[\"creator\"] == uid and current_team[\"size\"] != 1:\n return False\n if len(api.submissions.get_submissions(uid=uid)) > 0:\n return False\n return True", "def verified_connection_ft2(self, id1, id2):\n if id1 in self.users.keys():\n return id2 in self.users.keys() and len(self.users[id1] & self.users[id2]) != 0\n return False", "def is_examiner(self, user_obj):\n warnings.warn(\"deprecated\", DeprecationWarning)\n return self.examiners.filter(user__id=user_obj.pk).count() > 0", "def is_outdated(self):\n today = datetime.datetime.today()\n day = datetime.datetime.combine(self.date, self.start_time)\n return day <= today", "def can_renew2(self):\n\n if not hasattr(self.user, 'memberships'):\n return False\n\n # look at active memberships\n\n active_memberships = self.user.membershipdefault_set.filter(\n status=True, status_detail__iexact='active'\n )\n\n for membership in active_memberships:\n if membership.can_renew():\n return True\n\n return False", "def checkright(self, r):\n return r in self.server.getrights(self.idstring(), self)", "def _check_approval_update(self, state):\n\t\tcurrent_employee = self.env['hr.employee'].search([('user_id', '=', self.env.uid)], limit=1)\n\t\t# is_officer = self.env.user.has_group('hr_holidays.group_hr_holidays_user')\n\t\tis_manager = self.env.user.has_group('hr_holidays.group_hr_holidays_manager')\n\t\tfor holiday in self:\n\t\t\tval_type = holiday.holiday_status_id.validation_type\n\t\t\tif state == 'confirm':\n\t\t\t\tcontinue\n\n\t\t\tif state == 'draft':\n\t\t\t\tif holiday.employee_id != current_employee and not is_manager:\n\t\t\t\t\traise UserError(_('Only a Leave Manager can reset other people leaves.'))\n\t\t\t\tcontinue\n\n\t\t\t# if not is_officer:\n\t\t\t# \traise 
UserError(_('Only a Leave Officer or Manager can approve or refuse leave requests.'))\n\n\t\t\t# if is_officer:\n\t\t\t# \t# use ir.rule based first access check: department, members, ... (see security.xml)\n\t\t\tholiday.check_access_rule('write')\n\n\t\t\tif holiday.employee_id == current_employee and not is_manager:\n\t\t\t\traise UserError(_('Only a Leave Manager can approve its own requests.'))\n\n\t\t\tif (state == 'validate1' and val_type == 'both') or (state == 'validate' and val_type == 'manager'):\n\t\t\t\tmanager = holiday.employee_id.parent_id or holiday.employee_id.department_id.manager_id\n\t\t\t\tif (manager and manager != current_employee) and not self.env.user.has_group('hr_holidays.group_hr_holidays_manager'):\n\t\t\t\t\traise UserError(_('You must be either %s\\'s manager or Leave manager to approve this leave') % (holiday.employee_id.name))\n\n\t\t\tif state == 'validate' and val_type == 'both':\n\t\t\t\tif not self.env.user.has_group('hr_holidays.group_hr_holidays_manager'):\n\t\t\t\t\traise UserError(_('Only an Leave Manager can apply the second approval on leave requests.'))", "def isLeaveLeft(self,leave_type,days):\n if leave_type == 1 :\n return days<=self.earned_balance\n elif leave_type == 2 :\n return days<=self.hp_balance\n elif leave_type == 3 :\n return days*2<=self.hp_balance \n else :\n return False" ]
[ "0.6680365", "0.587085", "0.58500487", "0.57760894", "0.5741819", "0.57230127", "0.56929135", "0.56781346", "0.56605685", "0.5577466", "0.55369276", "0.5530934", "0.55146056", "0.55014586", "0.54542553", "0.5392969", "0.5388207", "0.53847474", "0.5377452", "0.53713554", "0.5348058", "0.5341284", "0.532348", "0.5295116", "0.5291401", "0.52853847", "0.5240574", "0.52337444", "0.52328223", "0.5220771", "0.52185595", "0.52002925", "0.5188502", "0.5157842", "0.5144262", "0.5142985", "0.5138413", "0.5132679", "0.5124886", "0.5124275", "0.5119517", "0.51158905", "0.5113644", "0.5106383", "0.5104425", "0.5102227", "0.50854033", "0.50820917", "0.5081668", "0.5064753", "0.5053601", "0.504589", "0.50099266", "0.50070846", "0.5006261", "0.50044364", "0.5000277", "0.49996814", "0.49945298", "0.49901643", "0.4989759", "0.49866468", "0.49855566", "0.4983485", "0.49811468", "0.4969684", "0.4968706", "0.49581918", "0.49499744", "0.49483746", "0.49461204", "0.49424943", "0.4936774", "0.4934839", "0.49207765", "0.4914759", "0.49126253", "0.4912319", "0.49111694", "0.49098825", "0.49098596", "0.49076363", "0.4904677", "0.4904334", "0.49016652", "0.48937428", "0.4889106", "0.48846138", "0.48835862", "0.48814386", "0.48685765", "0.48651534", "0.48612028", "0.48513135", "0.48489732", "0.48438546", "0.48379326", "0.483746", "0.48362428", "0.48342177" ]
0.61320335
1
Process the Exit of employee
def process_employee_exit(self): if self.is_employee_serving(): self._end_date.append(datetime.now().isoformat()) print(f"Successfully processed exit for employee {self.name} on" \ f"{self._end_date[-1]}\nWe wish {self.name} for future endeavours") return raise RejoiningException("Employee not in service. Cannot process exit.")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_exit(self, args):\n return -1", "def identify_result_exit(self, record):\n return [\"exit\"]", "def exit(self):\n pass", "def _PExit(self, m):\n pass", "def user_exit(cls):\n cls.exit_program(ErrorCodes.E_USER_EXIT)", "def do_exit(self, line): \n sys.exit(0)", "def do_exit(self,*args):\r\n return True", "def set_exit(self, exit_name):\r\n pass", "def do_exit(self, arg):\n return self._leave(arg)", "def on_exit(self):\n pass", "def exit(self):\n logger.debug(\"EXIT\")", "def __my_exit__(self, arg=0):\n self.services.error('Called sys.exit() from component code')\n raise Exception('Called sys.exit() from component code')", "def doExit(n, info):\n\tprint(info)\n\tsys.exit(n)", "def do_exit(self, args):\n sys.exit(1)", "def exit(self):\n self.exit_flag = True", "def __exit(self, *args):\n sys.exit(0)", "def exit(context):\n return _nfc.exit(context)", "def exit(self):\n return self.__exit", "def _exit(self, save_vars):\n raise NotImplementedError()", "def do_exit(self, args):\n return sys.exit(1)", "def exit(self): \n self.teo_exchange_intent = self.teo_wallet\n self.withdraw_intent = self.euro_wallet\n\n self.register_teo_exchange(self.teo_exchange_intent)\n self.register_withdraw(self.withdraw_intent)\n\n if self.teo_wallet + self.euro_wallet == 0:\n print('Agent exited: ', self.__class__.__name__)\n self.model.schedule.remove(self)", "def on_exit(self, userdata):\n pass", "def exit(self) -> None:\n self.on_exit(None)", "def ConsoleExit(self, errorcode=200):\n pass", "def exit(self):\n print(\"\\n***************************** Exit Metafor *****************************\")", "def __exit__(self, *ex_info):\n if self.device:\n self._device_ctx.__exit__(*ex_info)\n\n stdout('')\n stdout('Finished {0} in {1:0.1f}s'.format(self.name, self.timer_elapsed('script')))", "def do_exit(self, arg):\n self.db.close_db()\n print(\" \\\\o_ Bye-bye...\")\n print(\" / \")\n print(\"<\\\\\")\n sys.exit()", "def program_exit(self, button_object):\n\t\tsys.exit(0)", "def __exit_handler(signum, frame):\n #print \"EH START\"\n with this.lock:\n exit_barrier = this.exit_barrier\n\n if exit_barrier is not None:\n # Meet up with the worker\n this.exit_barrier.wait()\n #print \"EH FIRST BARRIER\"\n # Wait for the worker to be done\n this.finish_barrier.wait()\n #print \"EH HANDLER FINISHED\"\n #print \"EH DONE\"\n sys.exit(0)", "def do_exit(self, _):\n return True", "def exit(self, detail, exit_now=None):\n self.make_DBLog('system', 'exit', 'danger', detail=detail)\n logger.critical('Exit caused by {}'.format(detail))\n msg = 'exit'\n if exit_now: sys.exit() # exit here\n else: self.run = False # exit after end of current loop", "def exit():\n sys.exit(1)", "def do_exit(self, args):\n return True", "def exit_handler():\n logger.debug(\"Application exit caught.\")\n save_state()", "def _cmd_exit(self):\n raise EOFError()", "def exit(self, *args):\n self.stop('all')\n sys.exit(1)", "def do_exit(self, line):\n Rsp.close()\n sys.exit(-1)", "def __exit__(self, exc_type, exc_val, exc_tb):\n\n self.quit()", "def call_exit_alias(event):\n b = event.cli.current_buffer\n b.validate_and_handle()\n xonsh_exit([])", "def exitprogram():\n sys.exit()", "def exit_program(cls, return_code):\n print(\"Exiting due to: %s\" % (ErrorMessages.verbose(return_code)))\n sys.exit(return_code)", "def do_exit(_arg=None):\n return True", "def call_exit(self, _) -> None:\n self.save_class()\n for _ in range(self.PATH.count(\"/\") + 1):\n self.queue.insert(0, \"quit\")", "def _Exit(__s, __f):\n print(\"_Exit\")\n Exit()", "def 
exit_program():\n quit()", "def do_exit(self, arg):\n arg = arg\n return True", "def exit_handler(handle):\n handle.analyze()", "def exit(self):\n if self._isSubProcessRunning() and self._exitCommand is not None:\n self.__process.stdin.write(self._exitCommand)\n self.__process.stdin.write(os.linesep)\n self.__process.stdin.flush()\n time.sleep(0.5)\n \n if self._isSubProcessRunning() :\n self.__process.kill()\n time.sleep(0.1)\n print 'Done!'", "def exit(self) -> None:\n\n self.result = self.handle_success('finished-task')", "def _exit_exam(self):\n self.finger.back()\n self._goto(\"exit_exam\")\n self.finger.back()", "def exit(status=None): # real signature unknown; restored from __doc__\n pass", "def exitexec(arguments=0):\n sys.exit(arguments)", "def exexit(ex: BaseException, exit_code: int = 1) -> NoReturn:\n print(\"\\n\",Fore.YELLOW,\"[AB] \",\n Fore.RED, ex.__class__.__name__,\n Fore.YELLOW, \": \", ex,\n file=sys.stderr, sep='')\n sys.exit(exit_code)", "def exit_program():\n\n print(\"Thank you. Bye\")\n return \"exit\"", "def __exit__(self, exception, value, trace):\n self.manual_exit()", "def is_exit(self):\n # TODO: 存在意義があやしい\n return self._data_handler.is_exit()", "def __exit__(self, error_class, error_inst, error_tb):\n print ('Returning from exit ...')\n if error_class in (KeyError, IndexError):\n print (\"Got them thar errors\")\n return True # True means that the exception is not to be raised", "def run(self):\n sys.exit(-1)", "def on_exit(self, function):\n\t\tself.exit_functions += [function]", "def click_Exit(self, event):\n exit()", "def magic_Exit(self, parameter_s=''):\n\n raise SystemExit,'IPythonExit'", "def OnExit(self, event=None, force=False):\r\n\r\n self.Exit()", "def exit_program():\n today = date.today()\n current_date = today.strftime(\"%d/%m/%Y\")\n now = datetime.now()\n current_time = now.strftime(\"%H:%M:%S\")\n print('Ending program : Speech_Name_Organization.py - at : ' + current_time + ' on : ' + current_date)\n sys.exit()", "def exit_program():\n print(\"Good bye\")\n sys.exit()", "def exit_handler(self):\n self.logger.debug(\n \"Starting script shutdown in the class \" +\n self.__class__.__name__\n )\n\n # Clean up dead processes before exiting\n self.cleanup_dead_processes()\n \"\"\"\n print(\"FAKELOG: [\" + time.strftime(\"%c\") + \"] [UnisonCTRL] Exiting\\n\")\n \"\"\"\n self.logger.debug(\n \"Script shutdown complete in class \" +\n self.__class__.__name__\n )\n\n self.logger.info(\"Exiting UnisonCTRL\")", "def state_processing_exit(cfg, app, win):", "def __procFinished(self, exitCode, exitStatus):\n self.__finish()", "def ev_QUIT(self, event):\n raise SystemExit()", "def do_exit(self, args):\n logger.debug(\"do_exit() was called.\")\n \n raise Exception(\"Shutting server down.\")", "def on_exit(self) -> OnExitHandler:\n return self._on_exit", "def state_finish_exit(cfg, app, win):", "def analyze_on_exit(self):\n atexit.register(exit_handler, self)", "def __exit__(self, *args, **kwargs):\n\n pass", "def do_exit(self, line):\n\n return True", "def do_exit(self, line):\n return True", "def do_exit(self, line):\n return True", "def do_exit(self, line):\n return True", "def exit_client(self):\r\n\r\n sys.exit()", "def exited(self, mover):\n pass", "def exit_program():\n print(\"Good Bye! 
Happy Searching...\")", "def quitme(self, evt=None):\n if evt:\n self.dbgprint(\"bye!\")\n sys.exit()", "def present_exit_massage(self):\n print(\"Thank you for using the calculator....\")", "def server_exit():\n return", "def terminate(self):\n\t\tself.raise_exc(SystemExit)", "def _exit(msg):\n __exit(msg)", "def screen_exit(self, error=0):\n sys.exit(error)", "def _exit() -> None:\n\n print(\n \"Thanks for using TbSET. \"\n \"See you next time!\\n\"\n )", "def exit(self, profit=0, loss=0, trail_offset=0):\n self.exit_order = {'profit': profit, 'loss': loss, 'trail_offset': trail_offset}", "def on_menu_exit(self, event):\n if self.close_warning:\n TEXT = \"Data is not saved to a file yet!\\nTo properly save your data:\\n1) Analysis --> Save current interpretations to a redo file.\\nor\\n1) File --> Save MagIC tables.\\n\\n Press OK to exit without saving.\"\n dlg1 = wx.MessageDialog(\n None, caption=\"Warning:\", message=TEXT, style=wx.OK | wx.CANCEL | wx.ICON_EXCLAMATION)\n if self.show_dlg(dlg1) == wx.ID_OK:\n dlg1.Destroy()\n self.GUI_log.close()\n self.Destroy()\n # if a custom quit event is specified, fire it\n if self.evt_quit:\n event = self.evt_quit(self.GetId())\n self.GetEventHandler().ProcessEvent(event)\n if self.standalone:\n sys.exit()\n else:\n self.GUI_log.close()\n self.Destroy()\n # if a custom quit event is specified, fire it\n if self.evt_quit:\n event = self.evt_quit(self.GetId())\n self.GetEventHandler().ProcessEvent(event)\n if self.standalone:\n sys.exit()\n\n # self.Destroy() # works if matplotlib isn't using 'WXAgg', otherwise doesn't quit fully\n # wx.Exit() # works by itself, but if called in conjunction with self.Destroy you get a seg error\n # wx.Exit() # forces the program to exit, with no clean up. works, but not an ideal solution\n # sys.exit() # program closes, but with segmentation error\n # self.Close() # creates infinite recursion error, because we have\n # a binding to wx.EVT_CLOSE", "def close_program():\n print(\"End with Calculations\\nSee u later :).\")\n exit(0)", "def quit():\n raise EmbeddedConsoleExit", "def terminate(exitmsg: str):\n print(exitmsg)\n sys.exit(1)", "def menuExit(self, event):\n \n self.onClose(event)\n return", "def __exit__(self, exc_type, exc_value, traceback):\n if self.returncode is None and self.proc.poll() is None:\n self.proc.terminate()", "def exit(self, _):\n try:\n self.execution_manager.close()\n except QMapError as e:\n print(e)\n raise urwid.ExitMainLoop()", "def GET_kill(self):\n sys.exit(0)", "def normalExit(self):\n return self.normal", "def operation_quit(interpreter, scope, *args):\n print('Thank you! Come again!')\n if args:\n if isinstance(args[0], types.Int):\n sys.exit(int(to_python(args[0])))\n else:\n print(args[0])\n sys.exit(1)\n sys.exit()", "def dummy_exit():\r\n def dummy_exit(_exitcode=0):\r\n raise DummyExitException(exitcode=_exitcode)\r\n return dummy_exit", "def exit(self):\n if self.debug:\n print(\"%s exit\" % self.name)\n self.stop()" ]
[ "0.66456366", "0.66187525", "0.6506358", "0.64646137", "0.6454185", "0.6431671", "0.63921857", "0.63420844", "0.63289636", "0.6281149", "0.6274805", "0.6250398", "0.6227689", "0.6203884", "0.6194826", "0.6160062", "0.6157645", "0.61544615", "0.61414033", "0.6138231", "0.6130484", "0.61210597", "0.61201847", "0.6104874", "0.6077881", "0.6075954", "0.6054154", "0.6039468", "0.60306275", "0.6020049", "0.60181355", "0.6011836", "0.60030997", "0.6000915", "0.5984287", "0.59632844", "0.596029", "0.5959963", "0.59501463", "0.5940504", "0.5929368", "0.59249824", "0.5897377", "0.58950466", "0.5886812", "0.58787245", "0.5875067", "0.5874437", "0.5867766", "0.58580613", "0.5852476", "0.58511245", "0.5846672", "0.58464366", "0.5835558", "0.58208233", "0.58179516", "0.5816944", "0.58164954", "0.58023626", "0.57989657", "0.5792455", "0.5786765", "0.57727337", "0.5755047", "0.5753185", "0.57508624", "0.5736346", "0.5734233", "0.5728106", "0.5727589", "0.5725902", "0.57242846", "0.5720244", "0.57101786", "0.57101786", "0.57101786", "0.5709069", "0.5703047", "0.5697945", "0.569636", "0.5694621", "0.5694468", "0.5688369", "0.56817794", "0.56794375", "0.5672842", "0.5667544", "0.56629014", "0.5646064", "0.5637477", "0.56365687", "0.56350636", "0.5633826", "0.5633006", "0.5632502", "0.56322676", "0.5625852", "0.56235063", "0.5608492" ]
0.8065909
0
Check if growth results have expected no samples.
def has_nsamples(results, n): n_rates = results.growth_rates.sample_id.nunique() n_exchanges = results.exchanges.sample_id.nunique() return n_rates == n and n_exchanges == n
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_more_samples(self):\n return True", "def has_more_samples(self):\n return True", "def has_more_samples(self):\n return True", "def test_sufficient_statistics(self):\n assert (\n len(self.data),\n self.data.var(),\n self.data.mean(),\n ) == sufficient_statistics(self.data)", "def test_getSampleCount(self):\r\n self.assertEqual(self.estimator1.getSampleCount(), 1)", "def zero_failures(self) -> bool:\n return abs(self.failurerate) < 1e-7", "def sanity_check(self):\n res = True\n res = res and self.detected\n res = res and np.sum(self.diffs) < 30000 # experimental value\n return res", "def test_all_empty_grades(self):\r\n all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students)\r\n self.assertEqual(len(all_errors), 0)\r\n for gradeset in all_gradesets.values():\r\n self.assertIsNone(gradeset['grade'])\r\n self.assertEqual(gradeset['percent'], 0.0)", "def missing_samples(self):\n missing = [s for s in self.subjects if len(s.samples) == 0]\n if len(missing) == 0:\n return None\n return missing", "def check_training_samples(self):\n\n yidx = np.sum(self.datas[self.train_idx].gen_labels(), axis=0) < self.kfold_cv\n if np.any(yidx):\n xlist = ','.join(np.array(self.datas[self.train_idx].labels)[yidx])\n print('\\n *** WARNING ***\\n There are labels with very few samples: %s' % xlist)\n print(' If encounter chaotic errors, consider excluding these labels using --excludeloc %s\\n' % xlist)\n\n return", "def test_no_source_measurements(self):\n measurement = self.measurement(self.metric())\n self.assertEqual(None, measurement[\"count\"][\"value\"])", "def is_empty(self) -> bool:\n return not bool(self.samplers)", "def can_sample(self, n_samples):\n return len(self) >= n_samples", "def can_sample(self, n_samples):\n return len(self) >= n_samples", "def can_sample(self, n_samples):\n return len(self) >= n_samples", "def can_sample(self, n_samples):\n return len(self) >= n_samples", "def has_more_trials(self) -> bool:\r\n raise NotImplementedError", "def missing_values():\n print('Missings in the train data:', train_data.isnull().sum())", "def has_result(self):\n return len(self.__analysis_items) > 0", "def include_empty_measures(self) -> bool:\n return self._include_empty_measures", "def is_exhausted(self):\n return random.random() < 0.5", "def test_no_counterfactuals_found(self):\n threshold = 4.0\n self._config['Regression threshold'] = str(threshold)\n self._example = {'x_1': 1.0, 'x_2': 1.0}\n output = self._gen.generate(\n example=self._example,\n model=self._model,\n dataset=self._dataset,\n config=self._config)\n self.assertEmpty(output)", "def test_ensure_data_no_totaltracks(self):\n album = Album(artist='Artist', album='Album', totalseconds=120)\n with self.assertRaises(Exception):\n album.ensure_data()", "def missing_tests(session):\n print('The following samples do not have tests:')\n for sample in set(ALL_SAMPLE_DIRECTORIES) - set(ALL_TESTED_SAMPLES):\n print('* {}'.format(sample))", "def valid(self):\n return len(self._totals_) <= 1", "def test_time_supp_length_matches_no_timesteps(self):\n for no_timesteps in [5, 578, 993, 300072]:\n for dt in [0.1, 0.5, 3.0]:\n test_rec = rt.Recording(np.empty([6, no_timesteps, 1]), dt=dt)\n self.assertEqual(\n len(test_rec.time_supp),\n no_timesteps,\n 'Expected length of time_supp {} to match no_timesteps of '\n 'input {}.'.format(len(test_rec.time_supp), no_timesteps),\n )", "def test_no_values(self):\r\n values = []\r\n result = multipoint_mean_sd(values)\r\n\r\n self.assertEqual('', 
result['mean_result'])\r\n self.assertEqual('', result['sd_result'])", "def testSanity(self):\n\t\tga = GA.GA(2,3)\n\t\tgenomes = ga.seedGenomes()\n\t\tself.assertEqual(len(genomes), 2, \n\t\t \"Wrong number of genomes\")\n\t\tself.assertEqual(len(genomes[0]), 3, \n\t\t \"Wrong size in genomes\")\n\t\t#print genomes\n\t\t#live and learn\n\t\tfitnesses = [23, 45]\n\t\tga.fitnessUpdate(fitnesses)\n\t\tgenomes2 = ga.createNextGeneration()\n\t\tself.assertEqual(len(genomes2), 2, \n \"Wrong number of genomes\")\n\t\tself.assertEqual(len(genomes2[0]), 3, \n \"Wrong size in genomes\")", "def test_ground_truths(self):\n GT_subset = [CONCORDANT, SEEG_ES, POST_OP]\n\n # first test thresh 0 dropna which is equivalent to not dropping anything.\n # this should pass by definition\n df_thresh_zero = self.df.dropna(\n subset=GT_subset, thresh=0, axis=0, inplace=False)\n assert (df_thresh_zero.shape == self.df.shape)\n\n # if actual test fails, help to check the Semio2Brain Database entries:\n df_ground_truths_at_least_one_notNaN = self.df.dropna(\n subset=GT_subset, thresh=1, axis=0, inplace=False)\n\n if (df_ground_truths_at_least_one_notNaN.shape != self.df.shape):\n indices = [\n i for i in self.df.index if i not in df_ground_truths_at_least_one_notNaN.index]\n print('\\n\\nThese are the GT discrepancies: ',\n self.df.loc[indices, ['Reference', 'Reported Semiology']])\n\n # now for the actual test assertion\n assert (df_ground_truths_at_least_one_notNaN.shape == self.df.shape)", "def test_everything_none(self):\n with self.subTest(input='list'):\n self.assertEqual(get_n_chunks(self.test_data, iterable_len=None, chunk_size=None, n_splits=None,\n n_jobs=None), min(13, cpu_count() * 4))\n with self.subTest(input='numpy'):\n self.assertEqual(get_n_chunks(self.test_data_numpy, iterable_len=None, chunk_size=None, n_splits=None,\n n_jobs=None), min(100, cpu_count() * 4))", "def check_expectations(self):\n self.load_results()\n\n for (benchmark, producer), result in self.results.items():\n if not result.reports:\n print('No results found for ' + benchmark + ' ' + producer)\n result.test_passed = False\n else:\n for report in result.reports:\n if check_benchmark_result(report, result.expectation):\n print('Test passed: ' + result.directory)\n result.test_passed = True\n else:\n print('Test failed: ' + result.directory)\n result.test_passed = False", "async def test_nr_of_missing_metrics_without_correct_report(self):\n self.reports[\"reports\"] = []\n response = await self.collect(get_request_json_side_effect=[self.data_model, self.reports])\n self.assert_measurement(response, parse_error=\"No reports found with title or id\")", "def test_GA_sanity():\n\tga = GA.GA(2,3)\n\tgenomes = ga.seedGenomes()\n\tif len(genomes) != 2:\n\t\tprint \"Wrong number of genomes\"\n\tif len(genomes[0]) != 3:\n\t\tprint \"Wrong size in genomes\"\n\t#print genomes\n\t#live and learn\n\tfitnesses = [23, 45]\n\tga.fitnessUpdate(fitnesses)\n\tgenomes2 = ga.createNextGeneration()\n\tif len(genomes2) != 2:\n\t\tprint \"Wrong number of genomes\"\n\tif len(genomes2[0]) != 3:\n\t\tprint \"Wrong size in genomes\"\n\t#print genomes2", "def test_result_zero(self, init_wealth, n_bars):\n series_wealth = init_wealth + np.zeros(n_bars, dtype=float)\n result = self.MetricClass()._result_from_wealth(series_wealth)\n expected = init_wealth\n assert result == expected", "def test_compare_genomes_1(self):\n import_genome.compare_genomes(self.genome_pair, self.eval_flags)\n count = count_status(self.genome_pair, \"error\", \"warning\")\n with 
self.subTest():\n self.assertEqual(len(self.genome_pair.evaluations), 12)\n with self.subTest():\n self.assertEqual(count, 0)", "def verify_queue_empty(self):\n self.assert_sample_queue_size(DataParticleType.VELOCITY_PARTICLE, 0)\n self.assert_sample_queue_size(DataParticleType.TIME_PARTICLE, 0)", "def test_checks_population_size(self):\n with pm.Model() as model:\n n = pm.Normal(\"n\", mu=0, sigma=1)\n for stepper in TestPopulationSamplers.steppers:\n step = stepper()\n with pytest.raises(ValueError, match=\"requires at least 3 chains\"):\n pm.sample(draws=10, tune=10, chains=1, cores=1, step=step)\n # don't parallelize to make test faster\n pm.sample(\n draws=10,\n tune=10,\n chains=4,\n cores=1,\n step=step,\n compute_convergence_checks=False,\n )", "def is_empty(self):\n return len(self.steps) == 0", "def test_valid_calculation_of_quantile(alpha: Any) -> None:\n n = 30\n check_alpha_and_n_samples(alpha, n)", "def test_correct_p_values_empty(self):\r\n exp = []\r\n obs = self.mc._correct_p_values([])\r\n assert_almost_equal(obs, exp)", "def test_invalid_calculation_of_quantile(alpha: Any) -> None:\n n = 10\n with pytest.raises(\n ValueError, match=r\".*Number of samples of the score is too low*\"\n ):\n check_alpha_and_n_samples(alpha, n)", "def test_check_full_house_false(self):\n not_full_house_fixtures = [[1, 1, 1, 1, 2],\n [2, 1, 1, 1, 1],\n [1, 1, 2, 2, 3],\n [1, 2, 3, 4, 5],\n ]\n\n for fixture in not_full_house_fixtures:\n score = self.roll.check_full_house(fixture)\n\n self.assertNotEqual(score, 25)\n self.assertEqual(score, 0)\n self.assertEqual(len(fixture), 5)", "async def test_nr_of_missing_metrics_without_reports(self):\n self.set_source_parameter(\"reports\", [])\n response = await self.collect(get_request_json_side_effect=[self.data_model, self.reports])\n self.assert_measurement(\n response,\n value=str(len(self.entities)),\n total=self.expected_software_metrics,\n entities=self.entities,\n )", "def test_error(self):\n metric = self.metric()\n measurement = self.measurement(metric, sources=[self.source(metric, parse_error=\"error\")])\n self.assertEqual(None, measurement[\"count\"][\"value\"])", "def halt(population, generation_count):\n return generation_count > DEFAULT_MAX_GENERATION or population[0].fitness == 0", "def test_none_grade(self):\r\n self.basic_setup()\r\n self.check_grade_percent(0)\r\n self.assertEqual(self.get_grade_summary()['grade'], None)", "def test_ensure_data_no_totalseconds(self):\n album = Album(artist='Artist', album='Album', totaltracks=2)\n with self.assertRaises(Exception):\n album.ensure_data()", "def isgood(self):\n\t\tanswer = True\n\t\t\n\t\tif self.mes_flux <= 0.0:\n\t\t\tanswer = False\n\n\t\treturn answer", "def test_none_meet(self, initial_placement_fixture):\n assert len(ctx.cluster.influx_db.aggregate_performance()) == 0, \\\n \"Test should run on the basic model\"\n self.generic_function(above_objective=0)", "def test_no_errors(self):\n raised = False\n try:\n _ = RandomForest(n_estimators=1, max_depth=1, criterion=\"entropy\")\n except Exception as error:\n print(error)\n raised = True\n self.assertFalse(raised)", "def verify_psample_stats(dut, params):\n output = psample_stats(dut, params.keys())\n if not output:\n st.log(\"Observed empty output\")\n return False\n entries = filter_and_select(output, None, params)\n if not entries:\n st.log(\"PSAMPLE STATS VERIFICATION FAILED\")\n return False\n return True", "def testGenerateSamplesMeasureNotCalled(self):\n timer = timing_util.IntervalTimer()\n self.assertEqual(timer.intervals, 
[])\n samples = timer.GenerateSamples()\n self.assertEqual(timer.intervals, [])\n self.assertEqual(samples, [])", "def verify_aggPercSecTreatment(self):\n self.c.execute('''SELECT aggCode, (aggC2*aggGenerated/100)\n FROM Agglomerations\n WHERE (aggC2*aggGenerated/100 >= 2000)\n AND aggPercSecTreatment IS NULL OR aggPercSecTreatment = \"\" \n ''')\n res = self.c.fetchall()\n if (len(res) > 0):\n return [False,\n \"In the agglomeration '%s' aggPercSecTreatment must be reported since the generated load is '%s'\",\n res]\n else:\n return [True]", "def test_all_good(self):\n self.driver.start_sampling()\n\n self.create_sample_data_set_dir(\n \"node59p1_all_good1.dat\",\n TELEM_DIR,\n \"node59p1.dat\"\n )\n self.assert_data(\n (DostadParserTelemeteredDataParticle, DostadParserTelemeteredMetadataDataParticle),\n 'test_data_1-2.txt.result.yml',\n count=3\n )\n\n self.create_sample_data_set_dir(\n \"node59p1_all_good.dat\",\n TELEM_DIR,\n \"node59p1.dat\"\n )\n self.assert_data(\n DostadParserTelemeteredDataParticle,\n 'test_data_all_good.txt.result.yml',\n count=1\n )", "def verify_aggCalculation(self):\n self.c.execute('''SELECT aggCode\n FROM Agglomerations\n WHERE aggCalculation IS NULL OR aggCalculation = \"\"\n ''')\n res = self.c.fetchall()\n if (len(res) > 0):\n return [False,\n \"The agglomeration '%s' has no aggCalcultation reported\",\n res]\n else:\n return [True]", "def _evaluate(self):\n logging.warning('-> evaluate EMPTY experiment...')", "async def test_failed_samples(self):\n self.set_source_parameter(\"test_result\", [\"failed\"])\n response = await self.collect(get_request_json_return_value=self.JMETER_JSON)\n self.assert_measurement(response, value=\"6\", entities=[])", "def check(self):\r\n self.check_probabilities()\r\n self.check_sum()", "def test_effective_metrics_not_in_PM():\n assert \"effective_sample_size\" not in PM_METRICS\n assert \"pearson_r_eff_p_value\" not in PM_METRICS\n assert \"spearman_r_eff_p_value\" not in PM_METRICS", "def test_alerts_when_no_breath(app, events, data):\n time_intervals = 1 / DriverFactory.MOCK_SAMPLE_RATE_HZ\n num_of_samples = int(NO_BREATH_TIME / time_intervals)\n app.run_iterations(num_of_samples)\n assert alerts.AlertCodes.NO_BREATH in events.alerts_queue.active_alerts, \\\n f\"NO_BREATH missing from: {events.alerts_queue.active_alerts}\"", "def test_1sample(self):\r\n c = AlphaDiversityCalc(observed_otus)\r\n self.assertEqual(c(data_path=self.single_sample_otu_table_fp), [2])", "def check_test_results(self):\n\n mean_test_results = self.testresults['mean']['h']\n var_test_results = self.testresults['var']['h']\n\n if mean_test_results == 1 and var_test_results == 0:\n return True, 'mean'\n elif mean_test_results == 0 and var_test_results == 1:\n return True, 'var'\n elif mean_test_results == 1 and var_test_results == 1:\n return True, 'both'\n elif mean_test_results == 0 and var_test_results == 0:\n return False, None\n elif np.isnan(mean_test_results) and np.isnan(var_test_results):\n return None, None\n else:\n raise Exception('Unexpected test result')", "def _no_improve(self):\n improve = [p-f for (f,p),_ in self.population]\n return np.mean(improve) < 1.0", "def test_empty(self):\n p = Project()\n assert isinstance(p, \"Project\")\n assert len(p.samples) == 0", "def is_zero(self):\n for action, prob in self._regrets.items():\n if prob != 0.0:\n return False\n return True", "def testPluginNoError(self):\n schema = self.dataset.makeMinimalSchema()\n task = lsst.meas.base.SingleFrameMeasurementTask(schema=schema, config=self.config)\n 
exposure, cat = self.dataset.realize(noise=100.0, schema=schema, randomSeed=0)\n task.run(cat, exposure)\n source = cat[0]\n self.assertFalse(source.get(self.algName + \"_flag\"))\n self.assertFalse(source.get(self.algName + \"_flag_containsNan\"))\n self.assertFalse(source.get(self.algName + \"_flag_edge\"))", "def does_usage_charges_grid_have_no_records(self):\n return self.is_element_present(self.usage_charges_grid_no_record_found_message_locator)", "def check_response_data_quality(self, api_response):\n # Check to see if the data is golden (if it is not, this means it could change over time)\n try:\n is_data_golden = api_response['reports'][0]['data']['isDataGolden']\n except:\n log_msg = \"\"\"\n method='DataAccess.GoogleApi.GoogleAnalytics.check_response_data_quality'\n message='The isDataGolden key does not exist.'\n \"\"\"\n self.logging_obj.log(self.logging_obj.DEBUG, log_msg)\n else:\n if not is_data_golden:\n log_msg = \"\"\"\n method='DataAccess.GoogleApi.GoogleAnalytics.check_response_data_quality'\n message='This data set is not golden (data is golden when the exact same request will not \n produce any new results if asked at a later point in time).'\n \"\"\"\n self.logging_obj.log(self.logging_obj.WARN, log_msg)\n # Check to see if the data set is sampled\n try:\n samples_read_counts = api_response['reports'][0]['data']['samplesReadCounts']\n sampling_space_sizes = api_response['reports'][0]['data']['samplingSpaceSizes']\n except:\n log_msg = \"\"\"\n method='DataAccess.GoogleApi.GoogleAnalytics.check_response_data_quality'\n message='This data set is not sampled! Yay!!! :)'\n \"\"\"\n self.logging_obj.log(self.logging_obj.DEBUG, log_msg)\n else:\n log_msg = \"\"\"\n method='DataAccess.GoogleApi.GoogleAnalytics.check_response_data_quality'\n message='This data set IS sampled!!! 
Do not trust for analysis!'\n samples_read_counts='{samples_read_counts}'\n sampling_space_sizes='{sampling_space_sizes}'\n \"\"\".format(samples_read_counts=samples_read_counts,\n sampling_space_sizes=sampling_space_sizes)\n self.logging_obj.log(self.logging_obj.WARN, log_msg)", "def test_sanity_check (self):\n X, Y = self.dm.get_data(std=True, lag_indicator=True)\n\n # Ensure number of rows between what we expect.\n row_bound = (800, 1000)\n actual_rows = X.shape[0]\n msg = 'Number of rows not within expected bounds.'\n self.assertTrue(row_bound[0] < actual_rows < row_bound[1], msg)\n\n msg = 'X and Y have different number of rows.'\n self.assertEqual(X.shape[0], Y.shape[0], msg)\n\n # Ensure X columns match.\n expected_x_cols = ['SP500', 'ltc_px_std', 'xrp_px_std', 'xlm_px_std',\n 'eth_px_std', 'btc_px_std', 'ltc_volume_std',\n 'xrp_volume_std', 'xlm_volume_std', 'eth_volume_std',\n 'btc_volume_std', 'lagged_others']\n actual_x_cols = X.columns.tolist()\n msg = 'Number of X columns different than expected.'\n self.assertEqual(len(actual_x_cols), len(expected_x_cols), msg)\n\n for col in expected_x_cols:\n msg = 'Expected column not found: {}'.format(col)\n self.assertTrue(col in actual_x_cols, msg)", "def check_gaus_fit(hist):\n s = ROOT.TSpectrum(1)\n s.Search(hist, 1, \"new\")\n peaks_buff = s.GetPositionX()\n x_peak = peaks_buff[0]\n\n return (abs(hist.GetFunction('gaus').GetParameter(1) - x_peak) / abs(x_peak)) < 0.1", "def test_no_data(self):\n for demo in State.GROUP_NAMES:\n response = self.client.get(reverse('education:demographic_detail',args=(demo,)))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.context.get(\"json_rate_data\"), None)\n self.assertNotEqual(response.context.get(\"message\"), None)\n self.assertContains(response, \"Home\")\n self.assertContains(response, \"No Data Available\")\n self.assertNotContains(response, '<svg id=\"popsvg\"')", "def test_mixed_incomplete_data_2():\n profiles = [[2, 0]]\n payoffs = [[1.0, 0.0]]\n game = paygame.game(2, 2, profiles, payoffs)\n devgains = regret.mixture_deviation_gains(game, [1, 0])\n assert np.allclose(\n devgains, [0, np.nan], equal_nan=True\n ), \"nonzero regret or deviation without payoff didn't return nan\"", "def testQDA(self):\n qdaObj = qda.QDA(self.data, self.classes)\n qdaObj.fit_model()\n correctAns = np.array([5.01, 3.42, 1.46, .24])\n npTest = np.testing.assert_array_almost_equal(qdaObj.fitted_model.means_[0], correctAns, decimal=2)\n self.assertEqual(npTest, None)", "def test_count_when_data_is_not_present(self):\n\n temp_data = []\n\n tt = TemperatureTracker()\n result = tt.count_from(temp_data)\n self.assertEqual(result, 0)", "def test_no_ground_truths(self):\n expected_accuracy = dict(num_recall=0, uniq_recall=0, num_precision=0, uniq_precision=0)\n self._run_and_validate(self.single_box, [], expected_accuracy)", "def testEmpty(self):\n\n\t\twg = waveform.Generator(frequency=Quantity(1, 'Hz'))\n\n\t\teq_(list(wg.waveform.data), [])\n\t\teq_(wg.waveform.markers, {})", "def test_score_garbage(test_input, expected):\n score = sp.score_garbage(test_input)\n assert score == expected", "def test_check_data_specifying_no_of_percentiles(self):\n expected_data = np.array(\n [self.percentile_25, self.percentile_50, self.percentile_75]\n )\n result = Plugin().process(self.cube, no_of_percentiles=3)\n self.assertArrayAlmostEqual(result.data, expected_data, decimal=5)", "def should_evaluate(self, epoch: int) -> bool:\n return False", "def 
test_increasing_trend_is_false_if_price_decreases(self):\n self.given_a_series_of_prices([8, 12, 10])\n self.assertFalse(self.goog.is_increasing_trend())", "def test_does_not_have_value(self) -> None:\n self.assertFalse(LogLevels.has_value(1))", "def n_remaining_samples(self):\n return -1", "def n_remaining_samples(self):\n return -1", "def n_remaining_samples(self):\n return -1", "def test_no_inputs(self):\n expected_accuracy = dict(num_recall=0, uniq_recall=0, num_precision=0, uniq_precision=0)\n self._run_and_validate([], [], expected_accuracy)", "def test_get_empty_rare(self):\r\n self.assertRaises(TableException, get_rare_data, self.otu_table,\r\n 50, include_small_samples=False)", "def valid_samples(x, params, all=False):\n if all is True:\n return x\n return (x - params['num_testcases']\n - params['lstm_timesteps']\n - params['lstm_predictions'])", "def validate(self, epn, num_samples_to_test = 1000):\n batch_size = epn.batch_size\n dataloader = torch.utils.data.DataLoader(dataset = self, batch_size = batch_size, shuffle=True)\n num_samples_evaluated = 0\n num_correct = 0\n for batch_idx, (x_data, y_target) in enumerate(dataloader):\n epn.randomize_initial_state(batch_size = batch_size)\n epn.set_x_state(x_data)\n s = epn.evolve_to_equilbrium(y_target = None, beta = 0)\n compared = s[:,epn.iy].argmax(dim = 1) == y_target[:].argmax(dim = 1)\n num_samples_evaluated += batch_size\n num_correct += torch.sum(compared)\n if num_samples_evaluated > num_samples_to_test:\n break\n error = (1-num_correct.item()/num_samples_evaluated)\n return error", "def test_half_life_unstable_isotopes():\n for isotope in data_about_isotopes:\n if (\n \"half_life\" not in data_about_isotopes[isotope]\n and not data_about_isotopes[isotope]\n ):\n with pytest.raises(MissingParticleDataError):\n half_life(isotope)", "def test_result_zero(self, rate, init_wealth, n_bars):\n series_wealth = init_wealth + np.zeros(n_bars)\n result = Return(rate=rate)._result_from_wealth(series_wealth)\n expected = np.zeros(n_bars)\n assert np.allclose(result, expected)", "def test_mixed_incomplete_data():\n profiles = [[2, 0], [1, 1]]\n payoffs = [[4.3, 0], [6.2, 6.7]]\n game = paygame.game(2, 2, profiles, payoffs)\n dev_gain = regret.mixture_deviation_gains(game, [1, 0])\n expected_gains = [0.0, 2.4]\n assert np.allclose(\n dev_gain, expected_gains\n ), \"mixture gains wrong {} instead of {}\".format(dev_gain, expected_gains)\n dev_gain = regret.mixture_deviation_gains(game, game.uniform_mixture())\n assert np.isnan(dev_gain).all(), \"had data for mixture without data\"", "def test(self):\r\n error_count = 0\r\n N_TESTING = len(self.TESTING_DATA)\r\n for i in range(N_TESTING):\r\n x_vec = self.TESTING_DATA[i][:-1]\r\n y = self.TESTING_DATA[i][-1]\r\n\r\n result = self.bp.classify(x_vec)\r\n if result != y: error_count += 1\r\n print(error_count, \" errors on the test data, out of \", N_TESTING, \"items.\")", "def test_self_consistency_no_noise(self):\n popt, pcov = sine_fit(self.data, self.periods)\n print(popt)\n assert_allclose(*fixed_signs(self.p_gt, popt), 1e-4)", "def test_next_window_time_no_sample_passed(self):\n test_window_scheme = WindowingScheme(self.window_test_filter, 3)\n time.sleep(4)\n collected_value = test_window_scheme.filter(self.more_than_upper_bound)\n self.assertEquals(collected_value, self.more_than_upper_bound)", "def __call__(self, read, info: ModificationInfo):\n return expected_errors(read.qualities) > self.max_errors", "def test_no_removed_datasets(self):\n removed_dataset_1 = 
factories.SourceDatasetFactory.create(source_study_version=self.study_version_1)\n removed_dataset_2 = factories.SourceDatasetFactory.create(\n source_study_version=self.study_version_2, i_accession=removed_dataset_1.i_accession)\n response = self.client.get(self.get_url(self.study.pk))\n context = response.context\n table = context['source_dataset_table']\n self.assertNotIn(removed_dataset_1, table.data)\n self.assertNotIn(removed_dataset_2, table.data)\n self.assertEqual(len(table.data), 0)", "def test_empty_inputs():\n g = deglint.GlintCorr(odc_meta_file, sub_product)\n\n with pytest.raises(Exception) as excinfo:\n g.glint_subtraction(\n vis_bands=[\"3\"],\n corr_band=\"7\", # this dummy band only contains nodata\n water_val=5,\n )\n assert \"only contains a single value\" in str(excinfo)\n\n with pytest.raises(Exception) as excinfo:\n g.glint_subtraction(\n vis_bands=[\"7\"], # this dummy band only contains nodata\n corr_band=\"6\",\n water_val=5,\n )\n assert \"only contains a single value\" in str(excinfo)", "def test_stats_init(self):\n stats_store = StatsStore()\n\n gen_stats = stats_store.general_stats_get()\n for cntr in [stats_store.General.NUM_APPS_MOVES, stats_store.General.NUM_ERR]:\n assert cntr in gen_stats\n assert gen_stats[cntr] == 0", "def test_empty_dataframe_during_daily_data_generation(self):\n # if we have an empty data frame, we should get one back\n result = self.post_processor._generate_daily_data(DataFrame())\n self.assertTrue(result.empty)", "def is_empty(self) -> bool:\n return self.num_grna() == 0" ]
[ "0.7002155", "0.7002155", "0.7002155", "0.6810179", "0.6439376", "0.6349528", "0.6328535", "0.6269995", "0.62555325", "0.6226843", "0.6206803", "0.6174545", "0.61745423", "0.61745423", "0.61745423", "0.61745423", "0.61406535", "0.61071175", "0.6073733", "0.6044353", "0.6042063", "0.6031384", "0.6018271", "0.60108024", "0.6009741", "0.59782386", "0.59773594", "0.5957542", "0.59504706", "0.59494036", "0.5944184", "0.5943425", "0.59407866", "0.5940376", "0.5938681", "0.5932132", "0.5925854", "0.5917582", "0.59122264", "0.59045863", "0.5903939", "0.5897974", "0.58893484", "0.5886415", "0.58855504", "0.5865362", "0.5847923", "0.5847722", "0.5842563", "0.58424187", "0.5837451", "0.5829741", "0.58292305", "0.5819912", "0.5817393", "0.58137727", "0.5811279", "0.58077776", "0.58066344", "0.5801252", "0.57891095", "0.5782119", "0.5781613", "0.57625467", "0.5758638", "0.575739", "0.573462", "0.57330996", "0.5730134", "0.5717115", "0.57169336", "0.5714098", "0.5713184", "0.57122445", "0.57121736", "0.5709915", "0.57085395", "0.569953", "0.5696246", "0.56929266", "0.56890744", "0.56795925", "0.56795925", "0.56795925", "0.56762964", "0.5672762", "0.5670511", "0.5664661", "0.56576604", "0.56536275", "0.5653053", "0.56511873", "0.5650915", "0.56452173", "0.5643275", "0.56414723", "0.56392413", "0.5637134", "0.56356", "0.56342584" ]
0.63572603
5
Index page where I test few functions and make sure routes are connected
def test(): print "SERVER IS RUNNING" return render_template("statemap.html") # return render_template("testworld.html")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_main(self):\n path = reverse(\"main\")\n request = RequestFactory().get(path)\n response = index(request)\n assert response.status_code == 200", "def test_index_view(self):\n response = self.client.get(url_for('main.index'))\n self.assertEqual(response.status_code, 200)", "def test_main_page_load(self):\n response = self.client.get(reverse(\"index\"))\n self.assertEqual(response.status_code, 200)", "def test_homepage(self):\n rv = self.app.get('/')\n assert 'Enter your url here' in rv.data", "def test_index_view_basic(self):\n request = self.factory.get(\"/\")\n with self.assertTemplateUsed('solos/index.html'):\n response = index(request)\n self.assertEqual(response.status_code, 200)", "def test_homepage(self):\n rc = self.app.get('/')\n assert b'Welcome to Code TA' in rc.data\n assert b'Logout' not in rc.data", "def test_index_view(self):\n response = self.client.get('/')\n eq_(response.status_code, 200)", "def test_gourde_views(self):\n rv = self.app.get(\"/-/\")\n self.assertEqual(rv.status_code, 200)\n\n rv = self.app.get(\"/-/threads\")\n self.assertEqual(rv.status_code, 200)\n\n rv = self.app.get(\"/-/ready\")\n self.assertEqual(rv.status_code, 200)", "def test_index(self):\n\n # make request to server\n result = self.client.get(\"/\")\n\n # check that / route renders login page\n self.assertIn(b'<h1>Login:</h1>',result.data)", "def test_home(self):\n response = self.app.get(\"/\")\n self.assertTrue(response.status_code, 200)", "def test_homepage_view(self):\n response = self.client.get(url_for('home'))\n self.assertEqual(response.status_code, 200)", "def test_homepage_view(self):\n response = self.client.get(url_for('home'))\n self.assertEqual(response.status_code, 200)", "def test_home(self):\n result = self.app.get('/')\n self.assertEqual(result.status_code, 200)", "def test_index_view(self):\n response = self.client.get(reverse('index'))\n self.assertEquals(response.status_code, 200)", "def test_index(self):\n response = self.client.get('')\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'home/index.html')", "def test_index(self):\n tester = app.test_client(self)\n response = tester.get(\"/\")\n self.assertEqual(response.status_code,200)\n assert b\"Moscow Ring Road Distance Finder\" in response.data\n assert b\"search address\" in response.data", "def testindex(self):\n rv = self.app.get('/')\n self.assertEqual(rv.status_code, 302, \"homepage didnot load\")", "def test_01_index(self):\r\n res = self.app.get(\"/\", follow_redirects=True)\r\n assert self.html_title() in res.data, res\r\n assert \"Create an App\" in res.data, res", "def test_home(self):\n response = self.client.get('/')\n self.assert_200(response)\n self.assert_template_used('index.html')", "def test_home(self):\n\n response = self.client.get(reverse('home'))\n\n assert response.status_code == 200", "def test_main_app(self):\n resp = self.app.get('/')\n # ensure relevant pieces of UI are returned\n assert 'Foggy Fork' in resp.data\n assert 'A San Francisco Food Truck Map' in resp.data\n assert 'Where in the fog are you looking for food?' 
in resp.data\n assert '<div id=\"map-canvas\"></div>' in resp.data", "def test_index(self):\n tester = app.test_client(self) # You can use self.app in place of tester\n response = tester.get('/', content_type='html/text')\n self.assertEqual(response.status_code, 200)", "def test_index_page(self):\n response = self.client.get(\"\")\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, \"search/base.html\")\n self.assertTemplateUsed(response, \"search/search_form.html\")\n self.assertTemplateUsed(response, \"search/index.html\")", "def test_index_url(self):\n location = self.client.get(\"/index/\")\n name = self.client.get(reverse('weather:weather-index'))\n self.assertEqual(location.status_code, 200)\n self.assertEqual(name.status_code, 200)", "def test_index(self):\n res = self.client.get('/')\n data = res.data.decode('utf-8')\n assert res.status == '200 OK'\n assert 'Motorbike Event Finder' in data", "def test_homepage(self):\n\n with self.client as client:\n response = client.get('/')\n ...\n # test that you're getting a template", "def test_index_route():\n response = client.get(\"/\")\n assert response.status_code == status.HTTP_200_OK\n assert response.raw != None", "def test_frontpage(self):\n response = self.client.get('/')\n self.assertEqual(response.status_code, 200)", "def test_public_pages_load(self):\r\n pages = (\r\n reverse('login'),\r\n reverse('signup'),\r\n )\r\n for page in pages:\r\n print(\"Checking '{0}'\".format(page))\r\n self.check_page_get(page, 200)", "def test_index(self):\n resp = self.app.get('/')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = json.loads(resp.data)\n self.assertEqual(data['status'], 'success')", "def test_get_main_route():\n response = client.get(url)\n assert response.status_code == 200", "def test_homepage(self):\n\n with self.client as client:\n response = client.get('/')\n html = response.get_data(as_text=True)\n self.assertEqual(response.status_code, 200)\n self.assertIn('<table class=\"board\">', html)\n self.assertIn('<table', html)\n self.assertIn('boggle homepage. 
used in testing', html)\n # test that you're getting a template", "def test_index(self):\n resp = self.app.get('/')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n self.assertIn('Inventory Demo REST API Service', resp.data)\n # resp = self.app.get('/')\n # self.assertEqual(resp.status_code, status.HTTP_200_OK)\n # data = json.loads(resp.data)\n # self.assertEqual(data['name'], 'Inventory Demo REST API Service')", "def index():\n pass", "def test_home_page(self):\r\n url = reverse('home')\r\n response = self.client.get(url)\r\n\r\n self.assertEqual(response.status_code, 200)", "def test_urls(self):\n self.base_check_request(\"get\", \"/\")\n self.base_check_request(\"get\", \"apartments/\")\n self.base_check_request(\"get\", \"complexes/\")\n self.base_check_request(\"get\", \"locations/\")\n self.base_check_request(\"get\", \"companies/\")\n self.base_check_request(\"get\", \"companies-types/\")\n\n self.base_check_request(\"get\", \"count/apartments/\")\n self.base_check_request(\"get\", \"count/complexes/\")\n\n self.base_check_request(\"get\", \"search-forms/apartments/\")\n self.base_check_request(\"get\", \"search-forms/complexes/\")\n self.base_check_request(\"get\", \"search-forms/main/\")\n\n self.base_check_request(\"get\", \"autocomplete/companies/\")\n self.base_check_request(\"get\", \"autocomplete/complexes/\")\n self.base_check_request(\"get\", \"autocomplete/locations/\")\n\n self.base_check_request(\"get\", \"apartments_for_maps/?count=1&fields=lat,lon\")\n # self.base_check_request(\"get\", \"reserve/\")\n # self.base_check_request(\"get\", \"complain/\")\n # self.base_check_request(\"post\", \"apartment-complain/\")\n # self.base_check_request(\"post\", \"order-apartment/\")", "def test_smoke_test(self):\n urls = [ ]\n urls.append('/')\n urls.append(reverse('api_doc'))\n urls.append(reverse('laws'))\n urls.append(reverse('issue_list_user', args=['test0']))\n\n for url in urls:\n response = self.client.get(url)\n self.assertEqual(response.status_code , 200)", "def test_homepage(self):\r\n\r\n result = self.client.get(\"/\")\r\n self.assertIn(b\"Welcome!\", result.data)", "def test_urls(self):\n assert reverse('main-index') == '/'", "def test_home(self):\n\n with self.client:\n result = self.client.get('/users')\n self.assertEqual(result.status_code, 200)\n self.assertIn(b'<h1 class=\"col-2\">Users</h1>', result.data)", "def test_0010_simple(self):\n self.setup_defaults()\n app = self.get_app()\n\n with app.test_request_context('/'):\n self.assertEqual(url_for('nereid.website.home'), '/')", "def test_root(self):\n response = self.app.test_client().get('/test/')\n self.assertEqual(response.status_code, 200)\n self.assert_template_used('test/index.html')", "def test_main_page(self):\n response = self.client.get(reverse('home'))\n self.assertEqual(response.status_code, 200)\n\n content = response.content.decode('utf-8')\n self.assertTrue('Improving the FOIA request experience' in content)", "def test_index(self):\n tester = app.test_client(self)\n response = tester.get('/', content_type = 'html_text')\n self.assertEqual(response.status_code, 200)", "def test_no_routes(self):\n response = self.client.get(reverse('routes_app:index'))\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, \"No routes are available.\")", "def loadTest(app, store):\n\n @app.get(['/', '/test', '/test/route'])\n def testGet():\n \"\"\"\n Test endpoint for bottle application routes\n Shows location of this file\n Shows all routes in current bottle app\n \"\"\"\n 
bottle.response.set_header('content-type', 'text/plain')\n content = \"Web app file is located at %s\" % os.path.dirname(os.path.abspath(__file__))\n siteMap = \"\"\n\n for route in app.routes:\n siteMap = \"%s%s%s %s\" % (siteMap, '\\n' if siteMap else '', route.rule, route.method)\n target = route.config.get('mountpoint', {}).get('target')\n if target:\n for way in target.routes:\n siteMap = \"%s\\n %s %s\" % (siteMap, way.rule, way.method)\n\n content = \"%s\\n%s\" % (content, siteMap)\n return content\n\n @app.get(['/test/echo', '/test/echo/<action>'])\n @app.post(['/test/echo', '/test/echo/<action>'])\n def echoTest(action=None):\n \"\"\"\n Ajax test endpoint for web application service\n Echos back args as content\n \"\"\"\n # convert to json serializible dict\n result = odict(verb=bottle.request.method,\n url=bottle.request.url,\n action=action,\n query=odict(bottle.request.query.items()),\n headers=odict(bottle.request.headers.items()),\n data=bottle.request.json,\n form=odict(bottle.request.forms),\n body=bottle.request.body.read())\n\n return result\n\n @app.get(['/test/auth', '/test/auth/<token>'])\n @app.post(['/test/auth', '/test/auth/<token>'])\n def authTest(token=None):\n \"\"\"\n Auth credentials in body data as json\n or query parameters\n or token from end of url path\n or token from X-Auth-Token header\n \"\"\"\n if not token:\n token = bottle.request.get_header('X-Auth-Token')\n\n data = bottle.request.json\n if not token:\n user = data.get('user')\n password = data.get('password')\n\n query = odict(bottle.request.query.items())\n if not user or not password:\n user = query.get('user')\n password = query.get('password')\n\n if not token and (not user or not password):\n bottle.abort(400, \"Authentication credentials missing.\")\n\n result = odict(token=token,\n user=user,\n password=password,\n headers=odict(bottle.request.headers.items()),\n query=query,\n data=data,\n )\n return result\n\n @app.get('/test/stream')\n def streamTest():\n \"\"\"\n Create test server sent event stream that sends count events\n \"\"\"\n timer = StoreTimer(store, duration=2.0)\n bottle.response.set_header('Content-Type', 'text/event-stream') #text\n bottle.response.set_header('Cache-Control', 'no-cache')\n # Set client-side auto-reconnect timeout, ms.\n yield 'retry: 1000\\n\\n'\n i = 0\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: START\\n\\n'\n n = 1\n while not timer.expired:\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: {0}\\n\\n'.format(n)\n n += 1\n yield \"data: END\\n\\n\"\n\n return app", "def test_view_displays_all(self):\n set_up_one_user(self, 1, 0)\n login = self.client.login(username='test', password='2HJ1vRV0Z&3iD')\n response = self.client.get(reverse('index'))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(str(response.context['user']), 'test')\n self.assertEqual(len(response.context['data']), 1)", "def index(request):\r\n badRequest(\"Url not found\")", "def test_index_loads_properly(self):\n response = self.client.get('localhost:8000')\n self.assertEqual(response.status_code, 200)", "def test_ingredients_index(self):\n app = self.create_app()\n\n c = app.test_client()\n\n c.get('/ingredients/', follow_redirects=True)\n self.assert_template_used(\"ingredients/index.html\")", "def test_homepage(self):\n\n response = self.client.get(\"/\")\n self.assertIn(\"Books</title>\", response.data)\n self.assertIn(\"Goodreads ID\", response.data)", "def test_about_view(self):\n print 'Running %s ...' 
% getName()\n# test that URL resolves to correct views function \n found = resolve('/sequencelistings/about/')\n self.assertEqual(found.func, views.about)\n \n self.sequenceListingFixture.create_sequence_instance(self.sequenceListing)\n \n response = self.client.get(reverse('sequencelistings:about'))\n self.assertEqual(response.status_code, 200)\n \n# test that the page returns expected html contents\n self.assertContains(response, 'About')\n self.assertContains(response, 'only for information purposes')", "def test_home(self):\n response = self.client.get('/')\n self.assertContains(response, 'Home Page', 1, 200)", "def test_all_http_stats(self):\n client = Client()\n response = client.get(reverse('home'))\n self.assertEqual(200, response.status_code)\n response = client.get(reverse('browse_produce'))\n self.assertEqual(200, response.status_code)\n response = client.get(reverse('browse_locations'))\n self.assertEqual(200, response.status_code)\n response = client.get(reverse('search'))\n self.assertEqual(200, response.status_code)\n response = client.get(reverse('faq'))\n self.assertEqual(200, response.status_code)", "def test_view_index(self):\r\n\r\n resp = self.client.get_html(self.url)\r\n self.assertEqual(resp.status_code, 200)\r\n self.assertIn('course-nav-list', resp.content)", "def test_application_running(self):\n response = self.client.get('/login', content_type='html/text')\n self.assertEqual(response.status_code, 200)\n response = self.client.get('/signup', content_type='html/text')\n self.assertEqual(response.status_code, 200)", "def test_show_on_homepage(self) -> None:\n self.assert_show_on_homepage(apps.wakeup.main.Controller)", "def test_01_front_page(self):\r\n url = '/'\r\n # As Anonymou user\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should not be shown to anonymous users\"\r\n assert dom.find(id='top_users') is None, err_msg\r\n # As Authenticated user but NOT ADMIN\r\n self.signin()\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should not be shown to authenticated users\"\r\n assert dom.find(id='top_users') is None, err_msg\r\n self.signout\r\n # As Authenticated user but ADMIN\r\n res = self.signin(email=self.root_addr, password=self.root_password)\r\n print res.data\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should be shown to admin\"\r\n assert dom.find(id='top_users') is not None, err_msg\r\n self.signout()", "def test():\n test_app()\n test_pagebrowser()", "def test_about_view(self):\n response = self.client.get(url_for('main.about'))\n self.assertEqual(response.status_code, 200)", "def test_get_main_page_without_logged_in_user(self):\n response = self.testapp.get('/')\n self.assertEqual(response.status_int, 200)", "def test_help_route():\n response = client.get(\"/\")\n assert response.status_code == 200\n assert response.json() == {\n \"repositories\": f\"{DOMAIN_NAME}/repositories\",\n \"developers\": f\"{DOMAIN_NAME}/developers\",\n \"docs\": f\"{DOMAIN_NAME}/docs\",\n \"redoc\": f\"{DOMAIN_NAME}/redoc\",\n }", "def index():\n return 'OK'", "def test_home_exists(self):\n response = self.app.get('/')\n self.assertEqual(response.status_code, 200)", "def test_aboutpage_view(self):\n response = self.client.get(url_for('about'))\n self.assertEqual(response.status_code, 200)", "def test_01_front_page(self):\r\n url = '/'\r\n # As Anonymou user\r\n res = 
self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should be shown to anonymous users\"\r\n assert dom.find(id='top_users') is not None, err_msg\r\n # As Authenticated user but NOT ADMIN\r\n self.signin()\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should be shown to authenticated users\"\r\n assert dom.find(id='top_users') is not None, err_msg\r\n self.signout\r\n # As Authenticated user but ADMIN\r\n self.signin(email=self.root_addr, password=self.root_password)\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should be shown to admin\"\r\n assert dom.find(id='top_users') is not None, err_msg\r\n self.signout()", "def test_index(self):\n resp = self.app.get(\"/\")\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n self.assertIn(b\"Product Demo REST API Service\", resp.data)", "def test_homepage(self):\n \n result = self.client.get(\"/\")\n self.assertEqual(result.status_code, 200)\n self.assertIn(b\"What type of user are you?\", result.data)", "def test_landing_page(self):\n response = self.app.get(\"/\", follow_redirects=True)\n self.assertEqual(response.status_code, 200)\n\n res_txt = response.get_data(as_text=True)\n\n self.assertIn(\"input\", res_txt)\n self.assertIn(\"button\", res_txt)\n self.assertIn(\"Welcome to\", res_txt)", "def test_index(self):\n r = self.client.get('/')\n self.assertEqual(r.status_code, 302)", "def test_home(self):\n\t\tresponse = self.client.get('/')\n\t\tself.assertContains(response, 'Home Page', 1, 200)", "def test_page():\n app = create_ctfd()\n with app.app_context():\n\n gen_page(app.db, title=\"Title\", route=\"this-is-a-route\", html=\"This is some HTML\")\n\n with app.test_client() as client:\n r = client.get('/this-is-a-route')\n assert r.status_code == 200\n destroy_ctfd(app)", "def test_index(self):\n result = self.client.get('/')\n self.assertEqual(result.status, '200 OK')\n self.assertIn(b'Game', result.data)", "def test_shoppinglist_page(self):\n # register and login a user\n self.app.post('/register', data=self.user_reg_details)\n self.app.post('/login', data=self.user_login_details)\n # send a GET request\n res = self.app.get('/shoppinglist')\n self.assertEqual(res.status_code, 200)\n # check if page was loaded by looking for text in the page\n self.assertIn(\"Shopping List\", str(res.data))", "def test_01_admin_index(self):\r\n self.register()\r\n res = self.app.get(\"/admin\", follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"There should be an index page for admin users and apps\"\r\n assert \"Settings\" in res.data, err_msg\r\n divs = ['featured-apps', 'users', 'categories', 'users-list']\r\n for div in divs:\r\n err_msg = \"There should be a button for managing %s\" % div\r\n assert dom.find(id=div) is not None, err_msg", "def test_home(client):\n rv = client.get('/')\n assert 200 == rv.status_code", "def test_homepage(client, app):\n # test that viewing the page renders without template errors\n # assert client.get(\"/auth/register\").status_code == 200\n\n with app.app_context():\n r = client.get(\"/\")\n\n assert r.status_code == 200\n\n # Test Nav Item\n assert \"Reports\" in r.get_data(as_text=True)", "def test_home_route_is_status_ok(self):\n response = self.client.get(\"/\")\n self.assertTrue(response.status_code == 200)", "def test_index_view(server_app):\n body = server_app.index()\n\n dom = html_pyquery(body)\n\n # Check 
name is correct\n name = dom.find(\".page-header__title\")[0].text\n assert name == \"Video Registry\"\n name = dom.find(\".page-content__title\")[0].text\n assert name == \"Video Registry\"", "def test_index():\n\n with flask_app.test_client() as client:\n response = client.get(\"/\")\n assert response.status_code == 200\n assert b\"milhouse\" in response.data\n\n response = client.get(\"/?url=https://imgs.xkcd.com/comics/bad_code.png\")\n assert response.status_code == 200\n assert b\"milhouse\" not in response.data", "async def test_index(app: Quart) -> None:\n test_client = app.test_client()\n response = await test_client.get('/')\n assert response.status_code == 200", "def test_connexion_view(self):\n c = Client()\n response = c.get('/connexion/')\n self.assertEqual(response.status_code, 200)", "def test_view_url_accessible_by_name(self):\n response = self.client.get(reverse('home'))\n self.assertEqual(response.status_code, 200)", "def test_recipes_index(self):\n app = self.create_app()\n\n c = app.test_client()\n\n c.get('/recipes/', follow_redirects=True)\n self.assert_template_used(\"recipes/index.html\")", "def initialize_routes(app):\n\n\[email protected]('/api/analyse', methods= [ 'GET' ])\n\tdef api_route():\n\n\t\tinputStr = request.args.get('text')\n\n\t\tlabels = senti.test_probability([ inputStr ])\n\t\tlabel = senti.test([ inputStr ], False)\n\n\t\tjson_response = json.dumps({\n\t\t\t'input': inputStr,\n\t\t\t'label': label[0],# 'neg' if labels[0] < .5 else 'pos',\n\t\t\t'probabilities': {\n\t\t\t\t'pos': labels[0],\n\t\t\t\t'neg': 1 - labels[0],\n\t\t\t}\n\t\t})\n\n\t\tresponse= make_response(json_response, 200)\n\t\tresponse.headers['Content-Type'] = 'application/json'\n\n\t\treturn response\n\n\n\[email protected]('/')\n\tdef hello():\n\t\treturn render_template('index.html')", "def test_index(self):\n\n result = self.client.get(\"/\")\n self.assertEqual(result.status_code, 200)\n self.assertIn(\"<h2>Please Write your Text</h2>\", result.data)", "def test_home_route_context_foo(self):\n response = self.client.get(\"/\")\n self.assertContains(response, 'Imager Site')", "def test_success(self):\n response = self.client.get('/')\n self.assertEqual(response.status_code, 200)", "def test_if_home_is_successful(client):\n\n url = reverse(\"home\")\n response = client.get(url)\n assert response.status_code == 200", "def test_index(self):\n response = app.get('/')\n self.assertIn('kubernetes-e2e-gce', response)", "def test_home(self):\n res = self.client.get(\"/\")\n data = res.data.decode(\"utf-8\")\n assert res.status == \"200 OK\"\n assert \"Gandalf\" in data", "def test_index(client):\n ind = client.get('/')\n ind2 = client.get('/index/')\n assert ind.status_code == 200\n assert ind2.status_code == 200", "def test_home_route_has_entrys(testapp, fill_the_db):\n response = testapp.get('/', status=200)\n html = response.html\n assert html.find_all('li')[2].a.getText() == \"It's Monday Dude\"\n assert html.find_all('li')[3].a.getText() == \"It's Tuesday Dude\"", "def test_given_home_page_behavior(self):\n res = self.client().get('/')\n self.assertEqual(res.status_code, 200)\n json_res = json.loads(res.get_data(as_text=True))\n self.assertEqual('Home page', json_res['message'])", "def test_03_account_index(self):\r\n # Without users\r\n with self.flask_app.app_context():\r\n res = self.app.get('/account/page/15', follow_redirects=True)\r\n assert res.status_code == 404, res.status_code\r\n\r\n self.create()\r\n res = self.app.get('/account', follow_redirects=True)\r\n assert 
res.status_code == 200, res.status_code\r\n err_msg = \"There should be a Community page\"\r\n assert \"Community\" in res.data, err_msg", "def test_admin_index(self):\n response = self.client.get('/admin/')\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, \"Djrill\")", "async def test_routes_exist(self, app: FastAPI, client: AsyncClient) -> None:\n res = await client.get(app.url_path_for(\"feed:get-todo-feed-for-user\"))\n assert res.status_code != status.HTTP_404_NOT_FOUND", "def test_showing_dietitian_homepage(self):\n\n result = self.client.get(\"/dietitian/1\")\n self.assertEqual(result.status_code, 200)\n self.assertIn(b\"Dietitian Dashboard\", result.data)\n\n result = self.client.get(\"/dietitian/2\", follow_redirects=True)\n self.assertEqual(result.status_code, 200)\n self.assertIn(b\"not authorized\", result.data)", "def test_faq_view(self):\n response = self.client.get(url_for('main.faq'))\n self.assertEqual(response.status_code, 200)", "def test_redirect_register_home(self):\n with self.client as c:\n\n res = c.get(\"/\")\n self.assertEqual(res.status_code, 302)\n\n res = c.get(\"/users/tester1\")\n self.assertEqual(res.status_code, 302)\n\n res = c.get(\"/lists/111111\")\n self.assertEqual(res.status_code, 200)", "def test_index_loads_properly(self):\n response = self.client.get('localhost:8000')\n self.assertEqual(response.status_code, 404)" ]
[ "0.77554923", "0.7587984", "0.7496175", "0.7468367", "0.7461118", "0.73280954", "0.72975385", "0.72921324", "0.72654486", "0.72471523", "0.724658", "0.724658", "0.72161573", "0.72141546", "0.71513706", "0.7143187", "0.7128356", "0.711231", "0.7095507", "0.7054877", "0.70058566", "0.6999593", "0.69334364", "0.68729746", "0.6872582", "0.68697625", "0.6864946", "0.68575865", "0.68350285", "0.68314105", "0.68222547", "0.67923737", "0.6791649", "0.6785814", "0.67695785", "0.6762346", "0.67358583", "0.6727771", "0.67157143", "0.67050326", "0.66991425", "0.668253", "0.6678165", "0.66772306", "0.6659276", "0.6649271", "0.6640988", "0.6622761", "0.66126025", "0.6609114", "0.6603878", "0.6593168", "0.6591343", "0.65901875", "0.6584934", "0.6582659", "0.65792805", "0.6574468", "0.65697896", "0.65636843", "0.6560523", "0.6560151", "0.6555463", "0.6555356", "0.654847", "0.6544917", "0.6542747", "0.6541614", "0.6532842", "0.6531378", "0.6531342", "0.6528086", "0.6525758", "0.652467", "0.6516378", "0.65142864", "0.6512792", "0.6479055", "0.64664555", "0.64663863", "0.64580816", "0.64539677", "0.6444624", "0.6441292", "0.643957", "0.6423333", "0.64227784", "0.64200747", "0.6417263", "0.64118546", "0.6411387", "0.63912547", "0.63886774", "0.638691", "0.63851917", "0.6383797", "0.6379896", "0.6375571", "0.63721025", "0.6360607", "0.63568217" ]
0.0
-1
Log user into site, find user in the DB and store their user id in the session, then if they are logged in redirect them to the map page
def process_login(): # Get form variables email = request.form["email"] password = request.form["password"] # printing form data to the console print "form password" print password # check the user exists and then assign them to variable user user = User.query.filter_by(email=email).first() print "\n \n \n ", user # Conditions if not user: flash("No such user") return redirect("/") elif user.password != password: flash("Incorrect password") return redirect("/") else: session["user_id"] = user.user_id flash("Logged in") return redirect('/passport')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def logininfo():\n\n email = request.form.get(\"email\")\n password = request.form.get(\"password\")\n\n user = User.query.filter_by(email=email).first()\n # import pdb; pdb.set_trace()\n if not user:\n return redirect(\"/register\")\n\n if user.password != password:\n flash('Invalid password, please try again!')\n return redirect(\"/\")\n\n session[\"user_id\"] = user.user_id\n\n return redirect(\"/feedpage\")", "def do_login(request):\n distinct_id = request.session.pop('distinct_id')\n user = User.objects.get(id=distinct_id)\n login(request, user)\n return redirect_to_user_settings()", "def do_login(user):\n session[CURRENT_USER_KEY] = user.id", "def log_user_in():\n\n print request.form.to_dict()\n user_id = data_manager.get_user_by_email(request.form.to_dict())\n\n if not user_id:\n flash(\"We do not have an account registered with that email. Please make an account.\")\n return redirect(\"/register\")\n\n if user_id == \"Wrong password\":\n flash(\"Wrong password. Please try again.\")\n return redirect(\"/\")\n\n session['user_id'] = user_id\n session['email'] = request.form.get('email')\n\n return redirect(\"/user/%s\" % user_id)", "def login_user():\n email = request.form.get(\"email\")\n password = request.form.get(\"password\")\n # Find user instance from user table where email matches email from form\n user = User.query.filter_by(email=email).first()\n # Check if password matches to log them in\n if user.password == password:\n # Login stored in session\n session[\"user_id\"] = user.user_id\n flash(\"Logged in\")\n\n return redirect(f\"/users/{user.user_id}\")\n else:\n flash(\"Incorrect email/password\")\n\n return redirect(\"/login-page\")", "def login():\n if request.method == 'POST':\n db = database.getdb()\n user = db.execute(\"SELECT * FROM flaskuser WHERE username=?\", (request.form['username'],)).fetchone()\n if check(user[\"pword\"], request.form[\"password\"]):\n session.clear()\n session[\"user_id\"] = 'admin'\n return redirect(url_for('index'))\n return render_template('login.html', title='Log In')", "def login():\n if request.method == 'POST' and not session.get('logged_in'):\n db = get_db()\n cur = db.execute('select id, username, password from users where \\\n username = ? 
and password = ?',\n [request.form['username'], request.form['password']])\n rows = cur.fetchall()\n if len(rows) == 1:\n session['logged_in'] = True\n else:\n flash('Invalid username or password', 'error')\n return redirect(url_for('show_entries'))", "def login():\n if g.user:\n #return redirect(url_for('timeline'))\n return redirect(url_for('index'))\n error = None\n if request.method == 'POST':\n #user = query_db('''select * from user where username = ?''', [request.form['username']], one=True)\n try:\n pid = int(request.form['pid'])\n except:\n abort(404)\n user = User.query.filter_by(pid=pid).first()\n if user is None:\n error = 'Invalid username'\n elif not check_password_hash(user.pw_hash, request.form['password']):\n error = 'Invalid password'\n else:\n #flash(_('You were logged in'))\n session['user_id'] = user.user_id\n return redirect(url_for('index'))\n if error:\n flash(_('Wrong with phone number or password'))\n return render_template('login.html', error=error)", "def login_form():\n\n # retrieving email and password from user\n email = request.form.get(\"email\")\n form_password = request.form.get(\"password\")\n\n if not email or not form_password:\n flash(\"Please resubmit your information correctly.\")\n return redirect(\"/\")\n\n # hash the password provided\n hash_object = hashlib.md5(form_password.encode())\n\n # querying by the password provided by the user\n user = User.query.filter_by(email=email).first()\n password = user.password\n\n # if the hash password matches the password in the database then\n # log user in and create a session\n if hash_object.hexdigest() == password:\n\n # created a session to store the user id\n session[\"user_id\"] = user.user_id\n return redirect(f\"/user/{user.user_id}\")\n\n else:\n flash(\"Sorry, wrong password!\")\n return redirect(\"/\")", "def login():\n if request.method == \"POST\":\n username = request.form[\"username\"]\n password = request.form[\"password\"]\n db = get_db()\n error = None\n user = db.execute(\n \"SELECT * FROM user WHERE username = ?\", (username,)\n ).fetchone()\n\n if user is None:\n error = \"Incorrect username.\"\n elif not check_password_hash(user[\"password\"], password):\n error = \"Incorrect password.\"\n\n if error is None:\n # store the user id in a new session and return to the index\n session.clear()\n session[\"user_id\"] = user[\"id\"]\n return redirect(url_for(\"index\"))\n\n flash(error)\n\n return render_template(\"auth/login.html\")", "def login():\n if request.method == \"POST\":\n username = request.form[\"username\"]\n password = request.form[\"password\"]\n db = get_db()\n error = None\n user = db.execute(\n \"SELECT * FROM user WHERE username = ?\", (username,)\n ).fetchone()\n\n if user is None:\n error = \"Incorrect username.\"\n elif not check_password_hash(user[\"password\"], password):\n error = \"Incorrect password.\"\n\n if error is None:\n # store the user id in a new session and return to the index\n session.clear()\n session[\"user_id\"] = user[\"id\"]\n return redirect(url_for(\"index\"))\n\n flash(error)\n\n return render_template(\"auth/login.html\")", "def login():\n if session['state'] != request.args['state']:\n response = make_response(json.dumps('Invalid state parameter.'), 401)\n response.headers['Content-Type'] = 'application/json'\n return response\n authorization_response = request.url\n FLOW.fetch_token(authorization_response=authorization_response)\n auth_session = FLOW.authorized_session()\n userinfo = auth_session.get(USERINFO_URL).json()\n session['userinfo'] 
= {\n 'name': userinfo['name'],\n 'email': userinfo['email']}\n sqlsession = SQLSESSION()\n user = User(name=userinfo['name'], email=userinfo['email'])\n try:\n sqlsession.add(user)\n sqlsession.commit()\n except IntegrityError:\n # user already exists in DB\n pass\n if 'target' not in session.keys():\n return redirect(\"/\")\n return redirect(session['target'])", "def login_page():\n form = loginUser()\n\n if \"user\" in session:\n logged_user = session[\"user\"]\n return redirect(f\"users/{logged_user}\")\n\n if form.validate_on_submit():\n username=form.username.data\n password=form.password.data\n\n user = User.authenticate(username=username, password=password)\n\n if user:\n session[\"user\"] = user.username\n\n return redirect(f'/users/{username}')\n else:\n form.password.errors = ['Unable to log in']\n\n return render_template(\"login_form.html\", form=form)", "def log_in():\n\n user = request.form.get(\"username\")\n #password = request.form.get(\"password\")\n\n try:\n\n current_user = User.query.filter_by(email=user).one()\n flash('You were successfully logged in')\n session[\"user_id\"] = current_user.user_id\n\n return render_template('homepage.html')\n\n except sqlalchemy.orm.exc.NoResultFound:\n\n return render_template('/registration_form.html')", "def login():\n \n # Get values from login form\n login_email = request.form.get(\"login_email\")\n login_password = request.form.get(\"login_password\")\n\n # If user's email and password matches, log them in, redirecting them to homepage\n # Otherwise, ask them to log in with the correct password\n if db.session.query(User).filter(User.email == login_email, \n User.password == login_password).first():\n \n flash(\"Login SUCCESS.\", \"success\") \n\n # Query to get user's user id, in order to redirect user to their user profile\n user = User.query.filter(User.email == login_email).one()\n\n session[\"logged_in_user_email\"] = login_email\n session[\"logged_in_user\"] = user.user_id\n\n # Pass a variable through a string via string formatting\n # so we can pass user_id into the redirected route, which is a string!!\n return redirect(\"/users/%s\" % user.user_id)\n # return redirect(\"/\")\n\n else:\n flash(\"Incorrect password. Please try again!\", \"danger\")\n return redirect(\"/signup-login\")", "def process_login():\n\n # Get form variables\n email = request.form[\"email\"]\n password = request.form[\"password\"]\n\n # printing data from form to BASH\n print \"form password\"\n\n print password\n\n # check user exisit and then asign them variable user\n user = User.query.filter_by(email=email).first()\n\n print \"\\n \\n \\n \", user\n\n # Conditions\n if not user:\n\n flash(\"No such user\")\n\n return redirect(\"/\")\n\n elif user.password != password:\n\n flash(\"Incorrect password\")\n\n return redirect(\"/\")\n else:\n session[\"user_id\"] = user.user_id\n\n flash(\"Logged in\")\n\n return redirect('/decisions')", "def login_user():\n\n email = request.form.get('email_login')\n password = request.form.get('password_login')\n\n user = crud.get_user_by_password(email, password)\n if user:\n session['user_id'] = user.user_id\n flash('Logged in!')\n return redirect(f\"/users/{session['user_id']}\")\n else: \n flash('Password does not match. 
Try again.')\n return redirect('/login')", "def do_login():\n\n isTeacher = False\n\n # check if this_user is admin or normal user\n this_user = User.query.filter_by(username=request.form['username']).first()\n \n # is this_user is not student or admin then check teacher table\n if this_user is None:\n this_user = Teacher.query.filter_by(username=request.form['username']).first()\n isTeacher = True\n\n # if this_user is still none -> invalid user\n if this_user is not None:\n if this_user.password == request.form[\"password\"]:\n session['authenticated'] = True\n session['username'] = this_user.username\n session['name'] = this_user.name\n session['isTeacher'] = isTeacher\n if session['username'] == \"admin\":\n session['wasAt'] = \"manageusers\"\n try:\n session['cpi'] = this_user.cpi\n session['grp_size'] = this_user.group_size\n except:\n pass\n else:\n flash(\"Incorrect Password, Please Try Again\") \n else:\n flash(\"Invalid Username, Please Try Again\")\n return home()", "def post_login(self, came_from=lurl('/')):\n if not request.identity:\n login_counter = request.environ.get('repoze.who.logins', 0) + 1\n redirect('/login', params=dict(came_from=came_from, __logins=login_counter))\n return\n userid = request.identity['repoze.who.userid']\n flash(_('Welcome back, %s!') % userid)\n redirect(came_from)", "def login_user():\n\n email = request.form.get('email')\n password = request.form.get('password')\n\n user = crud.check_user_login_info(email, password)\n\n if user:\n session[\"user_id\"] = user.user_id\n session['logged_in'] = True\n fname = user.fname\n flash(f'Welcome {fname}')\n return redirect('/directory')\n\n else:\n flash('Login info is incorrect, try again.')\n return redirect('/signin')", "def login():\n\temail = request.form.get(\"email\")\n\tpassword = request.form.get(\"password\")\n\tuser = User.query.filter_by(email=email, password=password).first()\n\n\tif user:\n\t\tsession[\"user_id\"] = user.user_id\n\t\t# print user.user_id\n\t\tflash(\"Welcome, %s\" % user.name)\n\t\t\n\n\t\treturn render_template(\"/nowsearch.html\", user=user) \n\telif ValueError:\n\t\tflash(\"Invalid Login, please try again or register using the registration button below\")\n\t\treturn render_template(\"/index.html\")", "def login_perform():\n try:\n user_name = request.values['user_name']\n user_password = request.values['user_password']\n except KeyError:\n pass\n else:\n session = Session()\n user = session.query(User).filter_by(user_name=user_name).first()\n if not user or not user.password == hashlib.sha1(user_password).hexdigest():\n flash(\"Invalid credentials\", \"alert\")\n return redirect(url_for(\".login\"))\n try:\n remember = request.values['remember'].lower() == \"on\"\n except KeyError:\n remember = False\n login_user(user, remember=remember)\n return redirect(request.args.get(\"next\") or url_for(\"home.index\"))", "def user_login():\n error = None\n data = Myprofile.query.all()\n if request.method == 'POST':\n for each in data:\n if request.form['email'] == each.email and request.form['password'] == each.password:\n session['logged_in'] = True\n session['id_num'] = each.id_num\n # payload = {\"email\": request.form['email'], \"password\": request.form['password']}\n \n #add_header('Bearer')\n return redirect(url_for('profile_view', id_num=each.id_num))\n else:\n error = \"Invalid login data\"\n return render_template('profile_login.html', error=error)", "def post_login(self, came_from=lurl('/')):\n if not request.identity:\n login_counter = 
request.environ.get('repoze.who.logins', 0) + 1\n redirect('/login',\n params=dict(came_from=came_from, __logins=login_counter))\n userid = request.identity['repoze.who.userid']\n flash(_('Welcome back, %s!') % userid)\n\n # Do not use tg.redirect with tg.url as it will add the mountpoint\n # of the application twice.\n return HTTPFound(location=came_from)", "def login():\n if g.user:\n return redirect(url_for('leaderboard'))\n error = None\n if request.method == 'POST':\n user = query_db('''select * from user where\n username = ?''', [request.form['username']], one=True)\n if user is None:\n error = 'Invalid username'\n elif not check_password_hash(user['pw_hash'],\n request.form['password']):\n error = 'Invalid password'\n else:\n flash('You were logged in')\n session['user_id'] = user['user_id']\n return redirect(url_for('leaderboard'))\n return render_template('login.html', error=error)", "def login():\n if g.user:\n return redirect(url_for('home'))\n if request.method == 'POST':\n user = database.query_db('''select * from users where \n email = ?''', [request.form['email'].lower()], one=True)\n if user is None:\n flash('Invalid email')\n elif not check_password_hash(user['pw_hash'],\n request.form['password']):\n flash('Invalid password')\n else:\n session['uid'] = user['uid']\n return redirect(url_for('home'))\n return render_template('login.html')", "def load_logged_in_user():\n user_id = session.get(\"user_id\")\n\n if user_id is None:\n g.user = None\n else:\n g.user = (\n get_db().execute(\"SELECT * FROM user WHERE id = ?\", (user_id,)).fetchone()\n )", "def load_logged_in_user():\n user_id = session.get(\"user_id\")\n\n if user_id is None:\n g.user = None\n else:\n g.user = (\n get_db().execute(\"SELECT * FROM user WHERE id = ?\", (user_id,)).fetchone()\n )", "def login():\n\n if request.method == 'POST':\n username = request.form['username']\n password = request.form['password']\n db = get_db()\n user = db.execute(\n 'SELECT id, password FROM user WHERE username = ?', [username]\n ).fetchone()\n error = None\n\n if not username:\n error = 'Username is required'\n elif not password:\n error = 'Password is required'\n elif user is None:\n error = 'User {} is not registered'.format(username)\n elif not check_password_hash(user['password'], password):\n error = 'Password doesn\\'t match'\n\n if error:\n flash(error)\n else:\n session.clear()\n session['user_id'] = user['id']\n return redirect(url_for('index'))\n\n return render_template('auth/login.html')", "def post_login(self, came_from='/'):\n if not request.identity:\n login_counter = request.environ['repoze.who.logins'] + 1\n redirect('/login', came_from=came_from, __logins=login_counter)\n userid = request.identity['repoze.who.userid']\n flash(_('Welcome back, %s!') % userid)\n redirect(came_from)", "def login_check():\n\n user_email = request.form['email']\n user_password = request.form['password']\n\n # Check user info against database\n email_query = User.query.filter_by(email=user_email).first()\n if email_query == None:\n flash('Invalid email or password')\n return redirect('/')\n\n # Get user's id using email\n user_id = email_query.user_id\n\n # Valid user password\n if user_password == email_query.password:\n #create user session\n session['user'] = email_query.user_id\n return redirect('/user-%s' % user_id)\n else:\n flash('Invalid email or password')\n return redirect('/')", "def login():\n if request.method == \"POST\":\n existing_user = mongo.db.users.find_one(\n {\"username\": 
request.form.get(\"username\").lower()})\n\n if existing_user:\n if check_password_hash(\n existing_user[\"password\"], request.form.get(\n \"password\")):\n session[\"user\"] = request.form.get(\n \"username\").lower()\n return redirect(url_for(\n \"profile\", username=session[\"user\"]))\n\n else:\n flash(\"Incorrect username and/or password\")\n return redirect(url_for(\"login\"))\n\n else:\n flash(\"Incorrect username and/or password\")\n return redirect(url_for(\"login\"))\n\n return render_template(\"login.html\")", "def login():\n # try:\n # if g.user != None:\n # user_ID = \"Logged \" + str(g.user['_id'])\n # print \"insideNone\",user_ID\n # except ValueError:\n # print \"try again\"\n # if g.user:\n # print \"insideLog\"\n # return redirect(url_for('timeline'))\n user_ID = \"\"\n if g.user != None:\n user_ID = \"Logged \" + str(g.user['_id'])\n print \"insideNone\", user_ID\n\n ############### REDIS SESSION CODE #####################\n if redis_obj.get(user_ID):\n pKey = pickle.loads(redis_obj.get(user_ID))\n print \"insideLog\", user_ID\n return redirect(url_for('timeline'), pKey)\n error = None\n if request.method == 'POST':\n user = userdetails_query()\n if user is None:\n error = 'Invalid username'\n # abort(status.HTTP_401_UNAUTHORIZED)\n elif not check_password_hash(user['pw_hash'],\n request.form['password']):\n error = 'Invalid password'\n # abort(status.HTTP_401_UNAUTHORIZED)\n else:\n flash('You were logged in')\n session['user_id'] = user['_id']\n logUser = pickle.dumps(list(user))\n # print \"logUser\", logUser\n redis_obj.setex(\"Logged \" + str(session['user_id']), logUser, 60)\n return redirect(url_for('timeline'))\n return render_template('login.html', error=error)", "def loginAttempt(request):\n\n userid = request.unauthenticated_userid\n\n if userid:\n\n # TODO: Convert USERS to database object instead of local dictionary. 
If necessary.\n\n if userid in USERS:\n user = USERS[userid]\n\n else:\n user = _create_user(userid)\n\n if user.check_token(userid):\n headers = remember(request, userid)\n url = request.route_url('home', _app_url=get_app_url(request))\n return HTTPFound(location=url, headers=headers)\n\n url = request.route_url('failed', _app_url=get_app_url(request))\n return HTTPFound(location=url)", "def process_login():\n\n email = request.form.get('email')\n password = request.form.get('password')\n\n match_user = User.query.filter_by(email=email).first()\n\n\n if not match_user:\n flash(\"No such email address.\")\n return redirect('/login')\n\n\n real_password = User.query.filter_by(email=email).first().password\n\n if password != real_password:\n flash(\"Incorrect password.\")\n return redirect(\"/login\")\n\n session[\"logged_in_customer_email\"] = email\n flash(\"Logged in.\")\n return redirect(\"/\")", "def login_success(user_id):\n session['user'] = {}\n u_obj = User.query.filter(User.user_id == user_id).first()\n saved_events = UserEvent.query.filter_by(user_id = user_id).all()\n user = session['user']\n user['user_id'] = user_id\n user['name'] = u_obj.fname\n user['saved'] = [event.eventbrite_id for event in saved_events]", "def post_login(self, came_from='/'):\n if not request.identity:\n login_counter = request.environ['repoze.who.logins'] + 1\n redirect('/login', came_from=came_from, __logins=login_counter)\n\n userid = request.identity['repoze.who.userid']\n flash(_('Welcome back, %s!') % userid)\n redirect(came_from)", "def log_in():\n\n email = request.form.get('email')\n password = request.form.get('password')\n user = crud.get_user_by_email(email)\n\n if email == user.email and password == user.password:\n session['user'] = user.user_id\n flash('Logged in!')\n else:\n flash('Email and password do not match.')\n \n return redirect('/')", "def process_form():\n\n login = request.form.get('login')\n password = request.form.get('password')\n\n user = User.query.filter((User.email == login) | (User.username == login)).first()\n\n # if not user or if user is None:\n if not user:\n flash('Username or email not recognized, try again.')\n return redirect('/search')\n\n elif user.password != password:\n flash('Password is wrong, please log in again')\n return redirect('/search')\n\n else:\n session['logged_in'] = user.id\n flash('Log in successful!')\n return redirect('/users/' + str(user.id))", "def login(request):\n\n print('login')\n if request.method == 'POST':\n md5_pwd = ''\n admin = False\n\n username = request.POST.get('username', False)\n mode1 = 'Human'\n mode = NameSpace.objects.get(ns_id=mode1)\n password = request.POST.get('password', False)\n if username:\n username = username.replace(\"\\\"\", \"\").replace(\"'\", \"\")\n if password:\n password = password.replace(\"\\\"\", \"\").replace(\"'\", \"\")\n md5_pwd = hashlib.md5(password.encode()).hexdigest()\n if (username != False and password != False):\n user = User.objects.filter(username = username,ns_id=mode, password = md5_pwd)\n\n\n if user.exists():\n # print('LOGGATO')\n mode1 = 'Human'\n mode = NameSpace.objects.get(ns_id=mode1)\n user = User.objects.get(username=username, password=md5_pwd,ns_id=mode)\n print(\"username: \" + username)\n request.session['username'] = username\n request.session['mode'] = mode1\n admin = User.objects.filter(profile='Admin')\n if admin.exists():\n admin = admin.first()\n admin_name = admin.username\n request.session['team_member'] = admin_name\n else:\n request.session['team_member'] = 
'Test'\n\n request.session['profile'] = user.profile\n return redirect('MedTAG_sket_dock_App:index')\n #return redirect('MedTAG_sket_dock_App:select_options')\n\n profile_list = User.objects.values_list('profile', flat=True).distinct()\n if 'Admin' in profile_list:\n admin = True\n context = {'errorMessage': \"Your username and password didn't match.\",\"admin\":admin}\n return render(request, 'MedTAG_sket_dock_App/login.html', context)\n\n else:\n username = request.session.get('username', False)\n admin = False\n profile_list = User.objects.values_list('profile', flat=True).distinct()\n if 'Admin' in profile_list:\n admin = True\n context = {'admin': admin}\n\n if username:\n return redirect('MedTAG_sket_dock_App:index')\n\n return render(request, 'MedTAG_sket_dock_App/login.html',context)", "def load_logged_in_user():\n user_id = session.get(\"user_id\")\n if user_id is None:\n g.user = None\n else:\n g.user = {\n \"user_id\": user_id,\n \"user_name\": session.get(\"user_name\")\n }", "def login():\n \n username = request.form['username']\n login_user = mongo.db.users.find_one({'username' : username})\n \n if login_user:\n if bcrypt.checkpw(request.form['password'].encode('utf-8'), \n login_user['password']):\n session['username'] = request.form.to_dict()['username']\n user_id = login_user['username']\n return redirect(url_for('user', user_id = user_id ))\n else:\n flash('Invalid username/password combination!')\n return render_template('register.html', genres=mongo.db.genres.find())\n else:\n flash('Invalid username/password combination!')\n \n return render_template('register.html', genres=mongo.db.genres.find())", "def login_page():\n try:\n if request.method == \"POST\":\n with Database() as database:\n db_password = database.checkPass(request.form['username'])\n if len(db_password) > 0:\n db_password = db_password[0][0]\n if pbkdf2_sha256.verify(request.form['password'], db_password):\n session['logged_in'] = True\n session['id'] = database.getID(request.form['username'])\n session['username'] = request.form['username']\n app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER + \\\n session['username']\n return redirect(url_for('index'))\n else:\n flash(\"Invalid credentials, try again!\")\n return render_template(\"login.html\")\n else:\n flash(\"Invalid credentials, try again!\")\n return render_template(\"login.html\")\n return render_template(\"login.html\")\n\n except Exception as e:\n flash(\"Something went wrong, please try again\")\n return render_template(\"login.html\")", "def login():\n form = LoginForm(request.form)\n # if request.method == 'POST' and form.validate():\n # existing_user = mongo.db.users.find_one(\n # {\"username\": request.form.get(\"username\").lower()})\n\n # if existing_user:\n # if check_password_hash(\n # existing_user[\"password\"], request.form.get(\"password\")):\n # session[\"user\"] = request.form.get(\"username\").lower()\n\n # flash(\"Welcome back {}!\".format(\n # request.form.get(\"username\")))\n # return redirect(url_for(\n # \"profile\", username=session[\"user\"]))\n # else:\n # flash(\"Incorrect Username/password, Please try again\")\n # return redirect(url_for(\"login\"))\n\n # else:\n # flash(\"Incorrect Username/password, Please try again\")\n # return redirect(url_for(\"login\"))\n\n return render_template(\"login.html\", title='Login', form=form)", "def login():\n if request.method == 'POST':\n user = mongo.db.users.find_one(\n {\"username\": request.form.get(\"username\").lower()})\n\n if user:\n password = 
bcrypt.hashpw(request.form['password'].encode('utf-8'),\n user['password'])\n if password == user['password']:\n session['username'] = request.form.get(\"username\").lower()\n flash(\", great to have you back!\")\n return render_template('pages/allrecipe.html', username=session[\"username\"], isFooter=True)\n\n flash(\"Incorrect Username or Password!\")\n return redirect(url_for(\"login\", isFooter=True))\n\n return render_template('pages/login.html', isFooter=True, isNav=True)", "def login():\n login_form = LoginForm() \n user = User.query.filter_by(username=login_form.username.data).first()\n \n if login_form.validate_on_submit():\n if user:\n if user.password == login_form.password.data:\n login_user(user)\n return redirect('/searchpage')\n else:\n flash(\"Sorry, the information you entered is incorrect\")\n return redirect('/')", "def login(self):\n identity = request.environ.get('repoze.who.identity')\n came_from = str(request.GET.get('came_from', '')) or \\\n url('/')\n if identity:\n redirect(url(came_from))\n else:\n c.came_from = came_from\n c.login_counter = request.environ['repoze.who.logins'] + 1\n return render('/forms/login.mako')", "def login():\n if request.method == \"POST\":\n username_exist = mongo.db.users.find_one(\n {\"username\": request.form.get(\"username\").lower()})\n\n if username_exist:\n\n if check_password_hash(\n username_exist[\"password\"], request.form.get(\"password\")):\n session[\"user\"] = request.form.get(\"username\").lower()\n return redirect(url_for(\"profile\", username=session[\"user\"]))\n\n else:\n flash(\n \"Username or password was incorrect, please try again.\")\n return redirect(url_for(\"login\"))\n\n else:\n flash(\"The username or password was incorrect, please try again.\")\n return redirect(url_for(\"login\"))\n\n return render_template(\"login.html\")", "def login():\n if request.method=='GET':\n # get info and render\n return render_template('login.html')\n else:\n # auth\n username = request.form.get('username')\n password = request.form.get('password')\n\n users = DATABASE[\"users\"]\n\n if username in users:\n if password == users[username][\"Password\"]:\n # success, set session\n session['Name'] = username\n session['Type'] = users[username]['Type']\n\n # get info and redirect\n return redirect(url_for('manage_resources', user=username), 302)\n return Response(\"Incorrect Login Details\", 401)\n\n return \"Incorrect login credentials\"", "def load_logged_in_user():\n\n user_id = session.get('user_id')\n\n if user_id is not None:\n g.user = get_db().execute(\n 'SELECT * FROM user WHERE id = ?', [user_id]\n ).fetchone()\n else:\n g.user = None", "def login_confirmation():\n\n email = request.form['email']\n password = request.form['password']\n\n user = User.query.filter_by(email=email, password=password).first()\n\n if user:\n session['user_id']=user.user_id\n # return redirect(\"/users/<int:user.user_id>\")\n return redirect(\"/users/%s\" % user.user_id)\n else:\n flash(\"Incorrect username or password\")\n return redirect(\"/login\")", "def login():\n \n # forget any logged in user\n session.clear()\n \n # if user reached via POST\n if request.method == \"POST\":\n \n # ensure credentials entered\n if not request.form.get(\"username\"):\n flash(\"Please enter a username.\", \"error\")\n return redirect(url_for(\"login\"))\n elif not request.form.get(\"password\"):\n flash(\"Please enter a password.\", \"error\")\n return redirect(url_for(\"login\"))\n \n # query database to check for user\n rows = db.execute(\"SELECT * FROM 
'users' WHERE username = :username\", username=request.form.get(\"username\"))\n \n if len(rows) != 1 or not pwd_context.verify(request.form.get(\"password\"), rows[0][\"hash\"]):\n flash(\"Username or password is incorrect.\", \"error\")\n return redirect(url_for(\"login\"))\n \n # remember user if login valid\n session[\"user_id\"] = rows[0][\"id\"]\n \n # redirect to home page\n flash(\"You have successfully been logged in.\", \"success\")\n return redirect(url_for(\"index\"))\n \n # if reached via GET\n else:\n return render_template(\"login.html\")", "def login():\n if request.method == 'POST':\n username = request.form['username']\n password = request.form['password']\n\n user = get_user(username, password)\n\n if not user:\n flash('No such username and/or password', 'alert-danger')\n return redirect(url_for('login'))\n\n session['username'] = user.username\n session['user_id'] = user.id\n session['logged_in'] = True\n session['is_admin'] = user.is_admin\n\n return redirect(url_for('index'))\n\n return render_template('login.html')", "def user_for_login(request):\n log = get_log(\"user_for_login\")\n\n identifier = None\n\n # standard repoze related identity:\n if 'repoze.who.identity' in request.environ:\n identity = request.environ['repoze.who.identity']\n\n if 'username' in identity:\n identifier = identity['username']\n\n elif 'repoze.who.userid' in identity:\n identifier = identity['repoze.who.userid']\n\n # token based identity:\n elif 'pp.api_access.identity' in request.environ:\n identifier = request.environ['pp.api_access.identity']\n\n else:\n log.debug(\"No identifier recovered from environment!\")\n\n if not identifier:\n raise HTTPForbidden()\n\n if _USERS.get(identifier):\n found = _USERS.get(identifier)\n\n if _NAME_TO_ID.get(identifier):\n found = _USERS.get(_NAME_TO_ID.get(identifier))\n\n return found", "def login():\n # Output message if something goes wrong...\n msg = ''\n # Check if \"username\" and \"password\" POST requests exist (user submitted form)\n if request.method == 'POST' and 'username' in request.form and 'password' in request.form:\n # Create variables for easy access\n username = request.form['username']\n password = request.form['password']\n # user = User.query.get(password=password).first()\n # user = User.query.filter_by(username == username).first()\n\n # api call to save the user.\n response = requests.post(\n \"http://localhost:8080/api/login\", {\"email\": username, \"password\": password})\n print(response)\n data = json.loads(response.text)\n\n # If account exists in users table in out database\n if data:\n # Create session data, we can access this data in other routes\n session['loggedin'] = True\n session['userid'] = data['userid']\n session['username'] = data['email']\n # Redirect to home page\n return redirect(url_for('site.home'))\n else:\n # Account doesnt exist or username/password incorrect\n msg = 'Incorrect username/password!'\n # Show the login form with message (if any)\n return render_template('index.html', msg=msg)", "def do_login_login():\n print(inspect.stack()[1][3])\n print(request.form)\n query = select([User]).where(and_(User.columns.email == request.form['email'],User.columns.password==request.form['password'] ))\n ResultProxy = connection.execute(query)\n ResultSet = ResultProxy.fetchone()\n if ResultSet:\n session['logged_in'] = True\n else:\n flash('wrong password!')\n # return str(get_flashed_messages())\n return home(result)", "def login():\n\n if request.method == \"GET\":\n # Check if the user is auth'd\n user = 
auth_user_session()\n if user:\n # Send to homepage if they are auth'd\n return redirect(\"/\")\n else:\n # Otherwise send back to login\n return render_template(\"login.html\")\n\n if request.method == \"POST\":\n # Get values submitted through POST\n username = request.form[\"username\"]\n password = request.form[\"password\"]\n\n # Find the user in the database\n user = User.query.filter(User.username == username).first()\n if user:\n if user.check_password(password):\n # Update their cookie and commit\n cookie = update_session(user)\n db.session.add(user)\n db.session.commit()\n\n # Send cookie back in response\n response = make_response(redirect(\"/\"))\n response.set_cookie(\"session_cookie\", cookie)\n response.set_cookie(\"user\", f\"{user.id}\")\n\n # Return\n return response\n return render_template(\"loginfailure.html\")", "def login():\n form = LoginForm()\n\n if form.validate_on_submit():\n user = User.query.filter_by(username=form.username.data).first()\n next_page = request.args.get('next')\n if not next_page or url_parse(next_page).netloc != '':\n next_page = url_for('index.index')\n\n if user is None or not user.check_password(form.password.data):\n flash(message='Invalid username or password', category='danger')\n return redirect(url_for('auth.login', next=next_page))\n\n session.clear()\n session['user_id'] = user.id\n return redirect(next_page)\n\n return render_template('auth/login.html', title='Login Page', form=form)", "def login_user():\n pass", "def auth_user_session():\n if \"user\" in request.cookies:\n userid = request.cookies[\"user\"]\n if userid:\n user = User.query.filter(User.id == userid).first()\n if user:\n if \"session_cookie\" in request.cookies and user.cookie == request.cookies[\"session_cookie\"]:\n if user.cookie_expiration > datetime.now():\n return user\n\n # Return none if failure\n return None", "def login(request, session):\n\n if 'tid' in session: # we assume that if there is a tid in the session dict then the user is authenticated\n return {\"success\": 1, \"message\": \"你已经登录了.\"}\n teamname = request.form.get('teamname', None) # get the teamname and password from the POSTed form\n password = request.form.get('password', None)\n if teamname is None or teamname == '':\n return {'success': 0, 'message': \"用户名不能为空.\"}\n if password is None or password == '': # No password submitted\n return {\"success\": 0, \"message\": \"密码不能为空.\"}\n if len(teamname) > 250:\n return {\"success\": 0, \"message\": \"STAHP!\"}\n teamname = teamname.encode('utf8').strip()\n password = password.encode('utf8')\n teamCurr = db.teams.find({'teamname': teamname})\n if teamCurr.count() == 0: # No results returned from mongo when searching for the user\n return {\"success\": 0, \"message\": \"未找到用户名'%s'.\" % teamname}\n if teamCurr.count() > 1:\n return {\"success\": 0, \"message\": \"查找用户信息失败. 请联系管理员.\"}\n checkTeam = teamCurr[0]\n if checkTeam['email_verified'] == False:\n return {\"success\": 0, \"message\": \"请先访问邮箱查收验证邮件.\"}\n pwhash = checkTeam['pwhash'] # The pw hash from the db\n if bcrypt.hashpw(password, pwhash) == pwhash:\n if checkTeam.get('debugaccount', None):\n session['debugaccount'] = True\n if debug_disable_general_login:\n if 'debugaccount' not in checkTeam or not checkTeam['debugaccount']:\n return {'success': 2, \"message\": \"Correct credentials! 
But the game has not started yet...\"}\n if checkTeam['tid'] is not None:\n session['tid'] = checkTeam['tid']\n session['teamname'] = checkTeam['teamname']\n session['is_zju_user'] = utilities.is_zju_email(checkTeam['email'])\n else: # SET THE 'tid' TO str('_id') FOR MIGRATION PURPOSES AND ADD THE 'tid' TO THE DOCUMENT\n session['tid'] = checkTeam['_id']\n session['teamname'] = checkTeam['teamname']\n session['is_zju_user'] = utilities.is_zju_email(checkTeam['email'])\n db.teams.update({'_id': checkTeam['_id']}, {'tid': checkTeam['_id']})\n return {\"success\": 1, \"message\": \"用户'%s'登录成功.\" % teamname,\n 'teamname': session['teamname'], 'is_zju_user': session['is_zju_user']}\n return {\"success\": 0, \"message\": \"密码错误.\"}", "def user():\n if \"username\" in session:\n username = session[\"username\"]\n return f\"<h1>{username}</h1>\"\n return redirect(url_for(\"login\"))", "def test_logged_in(self):\n self.c.force_login(self.u)\n response = self.c.get(reverse(map_page), {'lat': 34.0, 'lng': 45.3})\n self.assertEqual(response.status_code, 200)", "def userLoggedOn(self, session, params):\n\n user = session.get('user')\n\n #get the details of this user\n user_detail = WebUserDetail.objects.get(user_id=user.uid)\n\n data = {}\n data['full_name'] = user_detail.full_name\n\n return {'user': data}", "def login():\n\n login_form = LoginForm()\n if login_form.validate_on_submit():\n # validates if user is in database\n user = User.query.filter_by(username=login_form.username.data).first()\n \n # if user exists, verify password\n if user is not None and user.verify_password(login_form.password.data):\n # log user in\n login_user(user)\n return redirect(url_for('home.search'))\n \n # if user does not exist, give message that says its wrong\n else:\n flash('Invalid username or password.')\n\n return render_template('auth/login.html', form=login_form, title='Login')", "def login():", "def login():", "def log_in():\n \n login_email = request.form.get(\"email\")\n login_password = request.form.get(\"password\")\n\n u = User.query\n\n # THIS DOESN\"T WORK = FIX IF STATEMENT **************************\n if u.filter(User.email == login_email):\n if u.filter(User.password == login_password):\n login_user = u.filter(User.email == login_email).first()\n session[\"current_user\"] = login_user.user_id\n flash(\"You are logged in.\")\n return redirect('/')\n else:\n flash(\"Incorrect password.\")\n return render_template(\"login.html\")\n else:\n flash(\"This email is not registered.\")\n return render_template(\"login.html\")\n\n # Verify email exists and password is correct.\n if request.form.get(\"age\"):\n age = request.form.get(\"age\")\n else:\n age = None", "def login():\n if request.method == \"POST\":\n email = request.form.get(\"email\", None)\n password = request.form.get(\"password\", None)\n\n user = mongo.db.users.find_one({\"email\": email})\n\n if bcrypt.checkpw(password.encode(\"utf-8\"), user[\"password\"]):\n session[\"user\"] = user[\"username\"]\n return redirect(url_for(\"recipes\"))\n return render_template(\"accounts/login.html\")", "def user_login(request):\n\n user = request.user\n if user.is_authenticated():\n status = user.get_profile().application.submitted #Getting the submission status\n if status: #If already submitted, takes to Completion Page\n return redirect('/allotter/complete/')\n else: #Otherwise to Details Submission form \n return redirect('/allotter/details/')\n\n if request.method == \"POST\":\n form = UserLoginForm(request.POST)\n if form.is_valid():\n user = 
form.cleaned_data\n login(request, user)\n status = user.get_profile().application.submitted #Getting the submission status \n if status:\n return redirect('/allotter/complete/') #Redirect to Completion Page\n else: \n return redirect('/allotter/details/') #Redirect to user details submission \n else:\n context = {\"form\": form}\n return render(request, 'allotter/login.html', context)\n else:\n form = UserLoginForm()\n context = {\"form\": form}\n return render(request, 'allotter/login.html', context)", "def login():\n\n if \"username\" in session:\n return redirect(f\"/users/{session['username']}\")\n\n form = LoginForm()\n\n if form.validate_on_submit():\n user = User.authenticate(form.data[\"username\"], form.data[\"password\"])\n if user is None:\n if User.query.filter_by(username=form.data[\"username\"]).count() == 0:\n form.username.errors.append(\"Invalid username\")\n else:\n form.password.errors.append(\"Invalid credentials\")\n return render_template(\"login.html\", form=form)\n\n session[\"username\"] = user.username\n return redirect(f\"/users/{user.username}\")\n \n return render_template(\"login.html\", form=form)", "def login_redirect():\n next_redirect = request.args.get('next')\n access_token = request.args.get('access_token')\n d = amazon_authorization(access_token)\n print(\"Amazon data:\", d)\n # # State token to prevent CSRF\n # state = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in xrange(32))\n # login_session['state'] = state\n # Find user in database by email or create new record\n user = session.query(User).filter(User.email == d['email']).first()\n if user is None:\n print(\"Creating new user in database\")\n m = hashlib.md5()\n m.update(d['email'])\n gravatar = 'https://secure.gravatar.com/avatar/' + m.hexdigest() + '?size=35'\n user = User(name=d['name'], email=d['email'], picture=gravatar)\n session.add(user)\n session.commit()\n\n # Update the Amazon ID for the user if not already set\n if user.client_id != d['user_id']:\n user.client_id = d['user_id']\n session.commit()\n\n login_session['userid'] = user.id\n login_session['picture'] = user.picture\n login_session['name'] = user.name\n login_session['email'] = user.email\n login_session['client_id'] = user.client_id\n\n flash('You were successfully logged in')\n\n return redirect_dest(next_redirect)", "def login_user(session, username):\n session['user'] = username", "def login(self, user):\n\n res = self.conn.cursor().execute(\n \"SELECT id, password FROM users WHERE username=?\", (user['username'],)\n )\n self.conn.commit()\n result = res.fetchone()\n if not result:\n return None\n if len(result) == 0:\n return None\n hashed_password = result['password']\n if check_password(hashed_password, user['password']):\n return result['id']\n return None", "def do_user_login(user):\n conn.register([model.Rule])\n admindb = conn[current_app.config[\"CONFIGDB\"]]\n user.update_last_login()\n user.save()\n\n session['user'] = {\n 'id': str(user[\"_id\"]),\n 'label': user[\"label\"],\n 'email' : user[\"name\"],\n }\n session['site_admin'] = False\n session['last_activity'] = user[\"last_login\"]\n # Also add the rules information to the session\n if not (ObjectId(current_app.config[\"DEMO_RULE\"]) in user[\"rules\"]):\n user[\"rules\"].append(ObjectId(current_app.config[\"DEMO_RULE\"]))\n\n user.save()\n\n # Loop over the rules\n accesses = { }\n\n if user[\"type\"] == \"passwd\" and (user[\"name\"].endswith(\"brown.edu\") or user[\"name\"].endswith(\"kitware.com\")):\n # Here grant the user 
with the demo_brown rule\n # flash(\"You used password login, and you get donut ! \")\n # ObjectId(\"529d244959a3aee20f8a00ae\")\n brownrule = ObjectId(\"529d244959a3aee20f8a00ae\")\n if brownrule in user[\"rules\"]:\n # flash(\"You already have the rule\")\n pass\n else:\n # flash(\"Rule must be added\")\n user[\"rules\"].append(ObjectId(\"529d244959a3aee20f8a00ae\"))\n user.save()\n\n for arule in user[\"rules\"]:\n ruleobj = admindb[\"rules\"].Rule.find_one({\"_id\" : arule})\n if ruleobj == None:\n flash(\"Rule not found !! \" + str(arule), \"error\")\n continue\n\n # Create empty DBAccess for that db\n if not str(ruleobj[\"db\"]) in accesses.keys():\n accesses[str(ruleobj[\"db\"])] = DBAccess()\n\n if 'db_admin' in ruleobj and ruleobj[\"db_admin\"] == True:\n accesses[str(ruleobj[\"db\"])].db_admin = True\n if 'can_see_all' in ruleobj and ruleobj[\"can_see_all\"] == True:\n accesses[str(ruleobj[\"db\"])].can_see_all = True\n if 'can_see' in ruleobj:\n if len(ruleobj[\"can_see\"]) > 0:\n accesses[str(ruleobj[\"db\"])].can_see.append(ruleobj[\"can_see\"])\n if 'site_admin' in ruleobj and ruleobj[\"site_admin\"] == True:\n session[\"site_admin\"] = True\n\n # Insert that information in the session\n # In future, session may contain only session it,\n # and this could get into database\n\n# For debugging\n# for adb in accesses.keys():\n# flash(adb + \": \" + str(accesses[adb]), \"info\")\n#\n# flash(\"Site admin : \" + str(session[\"site_admin\"]), \"info\")\n\n\n flash('You are successfully logged in.', 'success')", "def login():\n\n if current_user.is_authenticated == True:\n return redirect(url_for('controller'))\n\n form = LoginForm(request.form)\n if request.method == 'POST'and form.validate():\n check_user = User.query.filter_by(login=form.login.data).first()\n if check_user:\n if check_password_hash(check_user.password, form.password.data):\n login_user(check_user)\n return redirect(url_for('controller'))\n\n return render_template('login.html', form=form)", "def login():\n # Initialise login form\n form = UserLoginForm()\n # Validate and process form data\n if form.validate_on_submit():\n # Get form data\n username = form.username.data\n password = form.password.data\n # Check if username and password is valid\n valid, userID = gdb.verifyuser(username, password)\n if(valid):\n user = gdb.getuserbyid(userID)\n login_user(user)\n return redirect(url_for('main.dashboard'))\n else:\n flash(\"Invalid username or password.\", category=\"error\")\n return redirect(url_for('main.login'))\n # Render template\n return render_template('login.html', form=form)", "def log_in():\n if request.method == 'POST':\n username = request.form['username']\n password = request.form['password']\n if PLAN.login_user(username, password):\n session['name'] = username\n flash(\"Login success ...\")\n return redirect(url_for('index'))\n flash(\"Login failed ...\")\n return render_template('login.html')\n return render_template('login.html')", "def submit(self):\n password = self.form_result['password']\n username = self.form_result['username']\n\n if not loginhelper.validateUsernamePassword(username, password):\n return render('login.html')\n\n # Mark user as logged\n session['user'] = username\n session.save()\n\n # Send user back to where they originally wanted\n if session.get('path_before_login'):\n redirect(session['path_before_login'])\n else:\n return render('loggedin.html')", "def user_logged_in():\n if not session.get('user_id'):\n return \"nope\", 401\n else:\n return \"yep\", 200", "def 
login_form_valid(self, form):\n self.request.session.update({\n 'user_is_none': None,\n 'user_is_active': None\n })\n\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n user = authenticate(email=email, password=password)\n\n if user is None:\n self.request.session['user_is_none'] = True\n return HttpResponseRedirect('/user_account/')\n elif user.active is False:\n self.request.session['user_is_active'] = False\n return HttpResponseRedirect('/user_account/')\n else:\n self.request.session.update({\n 'user_is_none': False,\n 'user_is_active': True\n })\n login(self.request, user)\n return HttpResponseRedirect('/schedule/')", "def login():\n\n form = LoginForm()\n # Checking form has been filled in correctly\n if form.validate_on_submit():\n users = mongo.db.users\n get_user = users.find_one({'username': request.form['username']})\n # If the username exists, check passwords and sign in if match\n if get_user:\n password = form.password.data\n if check_password_hash(get_user['password'], password):\n flash(f'You are logged in as \\'{form.username.data}\\'',\n 'success')\n session['username'] = request.form['username']\n session['logged'] = True\n return redirect(url_for('index'))\n else:\n # If the passwords don't matach inform the user\n flash('Incorrect password please try again!', 'danger')\n return redirect(url_for('login'))\n else:\n # If the username doesn't exist inform the user\n flash(f'Username \\'{form.username.data}\\' does not exist',\n 'danger')\n return redirect(url_for('login'))\n\n return render_template('login.html', title=\"Login\", form=form)", "def login_success(user):\n return redirect('/')", "def loginView(request):\n username = request.POST.get('username')\n password = request.POST.get('password')\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n dologin(request, user)\n if isOperator(user): # login as an operator\n return redirect('/operator/map')\n elif isAdmin(user): # login as an admin\n return redirect('/admin/map')\n return HttpResponse('ok')\n else:\n # Return a 'disabled account' error message\n return HttpResponse(\"Disabled account\")\n else:\n # Return an 'invalid login' error message.\n return HttpResponse(\"Invalid login\")", "def login(request):\r\n login_url = route_url('login', request)\r\n referrer = request.url\r\n if referrer == login_url:\r\n referrer = u'/' # never use the login form itself as came_from\r\n\r\n came_from = request.params.get('came_from', referrer)\r\n\r\n message = u''\r\n login = u''\r\n password = u''\r\n\r\n if 'form.submitted' in request.params:\r\n login = request.params['login'].lower()\r\n password = request.params['password']\r\n\r\n LOG.debug(login)\r\n auth = UserMgr.get(username=login)\r\n LOG.debug(auth)\r\n LOG.debug(UserMgr.get_list())\r\n\r\n if auth and auth.validate_password(password) and auth.activated:\r\n # We use the Primary Key as our identifier once someone has\r\n # authenticated rather than the username. 
You can change what is\r\n # returned as the userid by altering what is passed to remember.\r\n headers = remember(request, auth.id, max_age=60 * 60 * 24 * 30)\r\n auth.last_login = datetime.utcnow()\r\n\r\n # log the successful login\r\n AuthLog.login(login, True)\r\n\r\n # we're always going to return a user to their own /recent after a\r\n # login\r\n return HTTPFound(\r\n location=request.route_url(\r\n 'user_bmark_recent',\r\n username=auth.username),\r\n headers=headers)\r\n\r\n # log the right level of problem\r\n if auth and not auth.validate_password(password):\r\n message = \"Your login attempt has failed.\"\r\n AuthLog.login(login, False, password=password)\r\n\r\n elif auth and not auth.activated:\r\n message = \"User account deactivated. Please check your email.\"\r\n AuthLog.login(login, False, password=password)\r\n AuthLog.disabled(login)\r\n\r\n elif auth is None:\r\n message = \"Failed login\"\r\n AuthLog.login(login, False, password=password)\r\n\r\n return {\r\n 'message': message,\r\n 'came_from': came_from,\r\n 'login': login,\r\n 'password': password,\r\n }", "def login():\n form = LoginForm()\n if not 'username' in session:\n if request.method == 'POST':\n if form.validate_on_submit():\n user = mongo.db.user.find_one({'username':form.username.data})\n if user and bcrypt.checkpw(request.form['password'].encode('utf-8'), user['hashed_password']):\n session['username'] = form.username.data\n current_user = session['username']\n flash(f'Welcome back, {current_user}!', 'success')\n return redirect(url_for('dashboard'))\n \n flash('Please check login details.', 'danger')\n return render_template('pages/login.html', title='Login', form=form)\n flash('You are already logged in. Did you mean to go to your dashboard instead?', 'info')\n return redirect(url_for('dashboard'))", "def login():\n form = LoginForm(request.form)\n if request.method == 'POST' and form.validate():\n next_var = request.args.get('next')\n user = Users.query.get(form.email.data)\n if user:\n # sets the authenticated parameter which is needed for sessions to recognize the user\n user.authenticated = True\n db.session.add(user)\n db.session.commit()\n login_user(user, remember=True)\n return redirect(next_var or url_for('home'))\n return render_template('login.html', form=form, email=request.cookies.get('email'))", "def before_request():\n g.user = None\n if 'user_id' in session:\n g.user = User.query.get(session['user_id'])", "def post(self):\n username = request.form['username']\n password = request.form['password']\n error = None\n user = users_db.get_user_by_username(username)\n\n if not user:\n error = 'Incorrect username.'\n elif not BCRYPT.check_password_hash(user[0]['password'], password):\n error = 'Incorrect password.'\n\n flash(error)\n\n if error is None:\n session.clear()\n session['user_id'] = str(user[0]['_id'])\n\n return redirect(url_for(REDIRECT_URL))", "async def _login_user(self, user):\n self.request.session[\"user\"] = str(user.username)\n self.request.session[\"time\"] = time()", "def login():\n if request.method == \"POST\":\n # 客户端在login页面发起的POST请求\n username = request.form[\"username\"]\n password = request.form[\"password\"]\n ipaddr = request.form[\"ipaddr\"]\n database = request.form[\"database\"]\n\n db = db_login(username, password, ipaddr, database)\n\n if db == None:\n return render_template(\"login.html\",loginres=0)\n else:\n session['username'] = username\n session['password'] = password\n session['ipaddr'] = ipaddr\n session['database'] = database\n\n return 
redirect(url_for(\"mainpage\"))\n else :\n # 客户端GET 请求login页面时\n return render_template(\"login.html\")", "def login(uname, password, db, session):\n\tquery = db((db.User.username == uname) & (db.User.password == password))\n\tif query.count() == 1:\n\t\tsession.auth = query.select().first().id\n\t\treturn True\n\telse:\n\t\treturn False", "def login_view(request):\n if request.method == \"POST\":\n username = request.POST.get('username')\n password = request.POST.get('password')\n username_1 = User.objects.filter(username=username, password=password)\n username_length=len(username_1)\n if username_length == 1:\n user_obtained=User.objects.filter(username=username, password=password)[0]\n login(request, user_obtained)\n return render(request, \"county/index.html\")\n else:\n return render(request, \"county/login.html\", {\n \"message\": \"Invalid username and/or password.\"\n })\n else:\n if request.user.is_anonymous:\n return render(request, \"county/login.html\")\n else:\n return render(request, \"county/index.html\")", "def do_login(client, user_id):\n print('USER ID')\n print(user_id)\n with client.session_transaction() as sess:\n sess[CURR_USER_KEY] = user_id", "def login():\n if request.method == 'POST':\n user = request.form['username']\n user = get_user(Users, user)\n # Ensure user exists in the database\n if user is not None:\n password = request.form['password']\n # Check the password against the hash stored in the database\n if user.check_password_hash(password):\n # Log the login and redirect\n log(f'User <code>{user.name}</code> logged in via webpage!')\n login_user(user)\n next = request.args.get('next')\n if not is_safe_url(next):\n return abort(400)\n return redirect(next or url_for('events'))\n return f'Wrong password for {user.username}!'\n return f\"User <code>{request.form['username']}</code> doesn't exist!\"\n return render_template('login.html')", "def user_login():\n user = query_db('''select * from user where username = ?''', [request.authorization.username], one=True)\n if user is None:\n error = 'Invalid username'\n elif not check_password_hash(user['pw_hash'],request.authorization.password):\n error = 'Invalid password'\n else:\n flash('You were logged in')\n return jsonify({'user_id':user['user_id']}),200", "def login_user():\n\n form = AddUserForm()\n\n if form.validate_on_submit():\n username = form.username.data\n password = form.password.data\n \n user = User.authenticate(username, password)\n \n \n if user:\n flash(f\"Welcome back {username}!\", \"success\")\n session[\"username\"] = username\n return redirect(f\"/users/{username}\")\n \n else:\n form.username.errors = [\"Invalid Username/Password\"]\n\n return render_template(\"login.html\", form=form)", "def _shared_login(request):\n csession = request.session\n player = request.user\n sesslogin = csession.get(\"logged_in\", None)\n\n # check if user has authenticated to website\n if csession.session_key is None:\n # this is necessary to build the sessid key\n csession.save()\n elif player.is_authenticated():\n if not sesslogin:\n # User has already authenticated to website\n csession[\"logged_in\"] = player.id\n elif sesslogin:\n # The webclient has previously registered a login to this browser_session\n player = PlayerDB.objects.get(id=sesslogin)\n try:\n # calls our custom authenticate in web/utils/backends.py\n player = authenticate(autologin=player)\n login(request, player)\n except AttributeError:\n logger.log_trace()", "def me():\n if g.USER:\n return redirect(url_for(\"profile\", 
username=g.USER.username))\n return redirect(url_for(\"home\"))", "def log_in(codecool):\n\n login = school_view.get_login()\n password = school_view.get_password()\n\n password = utilities.hash_password(password)\n\n users = codecool.managers_list + codecool.administrators_list + codecool.mentors_list + codecool.students_list\n for user in users:\n if user.login == login and user.password == password:\n return user" ]
[ "0.70838326", "0.69987136", "0.67759037", "0.67270076", "0.657737", "0.6569989", "0.6562039", "0.65591156", "0.6542138", "0.6500258", "0.6500258", "0.64939904", "0.6488533", "0.6474254", "0.647039", "0.6464649", "0.64607286", "0.6430233", "0.6424035", "0.64013207", "0.638578", "0.6384612", "0.63490844", "0.6348807", "0.633649", "0.6318686", "0.6303545", "0.6303545", "0.62993205", "0.62937474", "0.6291232", "0.62850356", "0.6272464", "0.62616813", "0.6261517", "0.6247166", "0.623692", "0.62179196", "0.62148786", "0.6210983", "0.6202434", "0.6201025", "0.6200372", "0.6192699", "0.6187358", "0.6171459", "0.61615074", "0.61599547", "0.6159833", "0.6158772", "0.6144915", "0.6141529", "0.61395425", "0.6138392", "0.61181796", "0.61158586", "0.61123145", "0.60886824", "0.6080471", "0.60547036", "0.60267043", "0.60223913", "0.60162073", "0.6014819", "0.6004763", "0.59948367", "0.59948367", "0.5992771", "0.5990412", "0.5980707", "0.5969829", "0.5965635", "0.59559757", "0.59537476", "0.59536994", "0.5945801", "0.59445447", "0.59369695", "0.59361446", "0.5930522", "0.59304714", "0.5924306", "0.59175134", "0.59102714", "0.59086055", "0.5891094", "0.5882254", "0.5879961", "0.58793837", "0.5878509", "0.5877643", "0.5874828", "0.58723676", "0.587107", "0.58559287", "0.5852344", "0.58515286", "0.5836387", "0.5817905", "0.58145505" ]
0.6567707
6
removing user_id from session to logout user
def process_logout():
    print " LOGGED OUT OUT USER " if False else " LOGGED OUT USER "
    del session["user_id"]
    flash("You have Successfully Logged Out!")
    return redirect("/")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def logout_user(session):\n del session['user']", "def logout():\n\n if session.get('user_id'):\n del session['user_id']\n flash('You are now logged out.')\n return redirect('/login')", "def logout(self):\n if 'user' in session:\n del session['user']\n session.save()\n return render('logout.html')", "def logout_user():\n\n # Delete session data to log out\n del session[\"user_id\"]\n flash(\"Successfully logged out!\")\n\n return redirect(\"/\")", "def logout():\n session.pop('user_id', None)\n flash('Your were logged out')\n return redirect(url_for('login'))", "def do_logout():\n del session[CURRENT_USER_KEY]", "def logout_user():\n pass", "def logout():\n # Remove session data, this will log the user out\n session.pop('loggedin', None)\n session.pop('userid', None)\n session.pop('username', None)\n # Redirect to login page\n return redirect(url_for('site.login'))", "def logout():\n session['user_id'] = None\n session['user_email'] = None\n return redirect(url_for('main'))", "def logout_user():\n\n session.clear()\n\n return redirect(\"/\")", "def logout():\n session.pop(\"user\")\n return redirect(url_for(\"home\"))", "def log_out_user(self):\n flask_login.logout_user()", "def logout():\n session.pop('username', None)\n session.pop('user_id', None)\n flash (\"You are logged out\")\n return redirect(url_for('index'))", "def logout_user():\n session.pop('username')\n return redirect('/login')", "def logout(user_id):\n if CURRENT_USER_KEY not in session or session[CURRENT_USER_KEY] != user_id:\n raise Unauthorized()\n do_logout()\n return redirect('/')", "def logout():\n if session.get('authed', False):\n for i in ['phone', 'authed', 'confirmation_code']:\n if session.has_key(i):\n del session[i]\n return redirect(my_url('index'))", "def logout():\n body = request.json\n user_id = body.get('user_id')\n user = User.get(User.id == user_id).username\n clear_token(user)\n return HTTPResponse(status=200, body={\"message\":\"Log out succesful.\"})", "def logout():\n u = current_user\n u.authenticated = False\n db.session.add(u)\n db.session.commit()\n logout_user()", "def logout():\n # remove user from session cookies\n flash(\"You have been logged out\")\n session.pop(\"user\")\n return redirect(url_for(\"login\"))", "def logout():\n session.pop('user', None)\n return redirect(url_for('index'))", "def logout():\n session.pop('user', None)\n return redirect(url_for('index'))", "def logout():\n response.cookies['curr_user_id'] = -1\n response.cookies['curr_user_id']['expires'] = -10\n response.cookies['curr_user_id']['path'] = '/'\n redirect(URL('default', 'index'))", "def logout():\n if session:\n session.pop('user')\n flash('You were successfully logged out')\n return redirect('/')\n else:\n return redirect('/')", "def logout():\n user = current_user\n user.authenticated = False\n db.session.add(user)\n db.session.commit()\n logout_user()\n return redirect(url_for('index'))", "def logout():\n user = current_user\n user.authenticated = False\n db.session.add(user)\n db.session.commit()\n logout_user()\n return redirect(url_for('index'))", "def log_out():\n\n del session[\"user_id\"]\n # print session[\"user_id\"]\n flash('You were successfully logged out')\n return render_template('homepage.html')\n\n #Additional reference for log in/log out can be found in project tracker project", "def user_logout():\n\n session.pop('logged_in', None)\n flash('You are now logged out')\n\n return redirect('/')", "def logout():\n flash(\"You have been logged out\")\n session.pop(\"user\")\n return 
redirect(url_for(\"login\"))", "def logout(self):\r\n session.clear()\r\n return redirect(\"/user/login\")", "def logout():\n flash('You were logged out')\n session.pop('user_id', None)\n return redirect(url_for('public_timeline'))", "def logout(self):\n\n self.response.headers.add_header('Set-Cookie', 'user_id=; Path=/')", "def logout():\n flash(_('You were logged out'))\n session.pop('user_id', None)\n return redirect(url_for('index'))\n #return redirect(url_for('public_timeline'))", "def logout():\n flash('You were logged out')\n session.pop('user_id', None)\n return redirect(url_for('leaderboard'))", "def logout():\n session.pop('user', None)\n# g.user = None\n# g.graph = None\n return redirect(url_for('index'))", "def logout():\n flash(u'Zostałeś wylogowany')\n session.pop('user_id', None)\n return redirect(url_for('index'))", "def logout(self):\n self.response.headers.add_header('Set-Cookie', 'user_id=; Path=/')", "def logout(self):\n self.response.headers.add_header('Set-Cookie', 'user_id=; Path=/')", "def signout(request):\n logging.debug('')\n try:\n logging.debug('deleting openid session var')\n del request.session['openid']\n except KeyError:\n logging.debug('failed')\n pass\n logout(request)\n logging.debug('user logged out')\n return HttpResponseRedirect(get_next_url(request))", "def logout_user():\n\n print \"Logging out.\"\n session.clear()\n flash(\"You are now logged out.\")\n\n return redirect('/')", "def logout():\n # Remove credentials key and user id from session\n session_helper = SessionHelper(session)\n session_helper.delete_credentials_from_session()\n session_helper.delete_user_from_session()\n return redirect(url_for('homepage.home_page_route'))", "def logout():\n session.pop('username', None)\n session.pop('user_id', None)\n session.pop('logged_in', None)\n session.pop('is_admin', None)\n\n flash('Successfully logged out', 'alert-info')\n\n return redirect(url_for('index'))", "def logout():\n user = current_user\n user.authenticated = False\n db.session.add(user)\n db.session.commit()\n logout_user()\n return render_template(\"login.html\")", "def logout():\r\n form = LoginForm()\r\n user = current_user\r\n user.authenticated = False\r\n db.session.add(user)\r\n db.session.commit()\r\n logout_user()\r\n return redirect(url_for('hello'))", "def logout():\n\n # remove the username from the session if it is there\n out_user = current_user.get_id()\n logout_user()\n logger.info(out_user + ' has been logged out.')\n return redirect(url_for('home'))", "def logout(self):\n user = self.get_user()\n if user:\n with atomic(self.conf['auth.dbfile']) as cursor:\n logout_user(cursor, user.username)\n request.user = self.tpls['user'] = None\n response.set_cookie(self.conf['auth.cookie_key'], '',\n secret=self.conf['auth.cookie_secret'], path='/')", "def logout():\n session.pop(\"username\")\n\n return redirect(\"/\")", "def logout(request):\n if request.session.get('username') is not None:\n call([\"rm\", \"-r\", request.session.get('username')])\n request.session.flush()\n return HttpResponseRedirect(reverse('index'))", "def logout(self):\n if self.user != self.unauthorized:\n self.user = self.unauthorized\n os.remove(self.full_session_path)", "def logout(self):\n try:\n if self._session_id is not None:\n url = (yield self.get_sitemap())['authorization'] + '/user/logout'\n response = yield self.authorized_fetch(\n url=url, auth_token=self._session_id, method='POST', body='{}')\n self._logger.info(\"Logout result: %s\", response.body)\n finally:\n # Clear the local 
session_id, no matter what katportal says\n self._session_id = None\n self._current_user_id = None", "def logout():\n\n session.pop(\"username\")\n return redirect(\"/login\")", "def dropsession():\n session.pop('user', None)\n return redirect(url_for('login'))", "def logout():\n return logout_user()", "def logout():\n flash(\"You have been logged out\")\n session.pop(\"user\")\n return redirect(url_for(\"tips\"))", "def logout(self):\n self.change_user(self.username, None, None)", "def logout():\n session.pop('logged_in', None)\n session.pop('fname', None)\n session.pop('patron', None)\n flash('You were logged out')\n return redirect('/')", "def logout():\n # clear user data from session and flag as logged out\n for x in ['provider', 'state', 'user']:\n if x in flask.session:\n del flask.session[x]\n flask.session['logged_in'] = False\n\n flash('logout successful', 'info')\n return redirect(request.referrer or url_for('catalog.index'))", "def logout(request):\n logout_user = request.authenticated_userid\n headers = forget(request)\n request.session.pop('user_groups', None)\n log.info('USER \"%s\" LOGGED OUT!', logout_user)\n loc = request.route_url('home')\n return HTTPFound(location=loc, headers=headers)", "def logout():", "def logout():\n session.pop('username', None)\n return redirect('/')", "def signout():\n session.pop('oauth2_state', None)\n session.pop('oauth2_token', None)\n session.pop('discord_user', None)\n return redirect('/')", "def auth_logout(request):\n\n \"\"\"\n user = getattr(request, 'user', None)\n if hasattr(user, 'is_authenticated') and not user.is_authenticated():\n user = None\n user_logged_out.send(sender=user.__class__, request=request, user=user)\n \"\"\"\n request.session.flush()\n \"\"\"\n if hasattr(request, 'user'):\n from django.contrib.auth.models import AnonymousUser\n request.user = AnonymousUser()\n \"\"\"\n ri = rest_interface(opensso_url=OPEN_AM_SERVER_URL)\n\n if OPENAM_COOKIE_NAME_FOR_TOKEN in request.COOKIES:\n unsigned_token = request.COOKIES[OPENAM_COOKIE_NAME_FOR_TOKEN]\n print('logout: token ='+request.COOKIES[OPENAM_COOKIE_NAME_FOR_TOKEN])\n print('logout: unsigned_token ='+unsigned_token)\n ri.do_logout(subject_id=unsigned_token)\n #del request.COOKIES[OPENAM_COOKIE_NAME_FOR_TOKEN]\n #request.COOKIES[OPENAM_COOKIE_NAME_FOR_TOKEN] = 'logged_out'\n ##ssouser = SSOUser(False)\n ##request.ssouser = ssouser", "def logout_user(request):\r\n # We do not log here, because we have a handler registered\r\n # to perform logging on successful logouts.\r\n logout(request)\r\n if settings.FEATURES.get('AUTH_USE_CAS'):\r\n target = reverse('cas-logout')\r\n else:\r\n target = '/'\r\n response = redirect(target)\r\n response.delete_cookie(\r\n settings.EDXMKTG_COOKIE_NAME,\r\n path='/', domain=settings.SESSION_COOKIE_DOMAIN,\r\n )\r\n return response", "def logout():\n session.pop('userinfo', None)\n # no more steps necessary, because we don't keep the token around\n if 'target' not in session.keys():\n return redirect(\"/\")\n return redirect(session['target'])", "def user_logged_out(self, sender, request, user, **kwargs):", "def sign_out():\n session.clear()\n return redirect(url_for('index'))", "def logout():\n session.pop('user', None)\n return jsonify(csrf_token = generate_csrf_token())", "def logoutuser(request):\n logout(request)\n return redirect('login')", "def logout(self):", "def logoutUser(request):\n logout(request)\n return redirect('login')", "def logout():\n if \"username\" in session:\n session.pop(\"username\", None)\n flash(\"You have 
been logged out.\")\n return redirect(url_for(\"index\"))", "def logout(request):\n request.user.auth_token.delete()\n return Response({}, status=status.HTTP_200_OK)", "def logout() -> Any:\n if \"user_id\" in session:\n del session[\"user_id\"]\n response = make_response({\"success\": True})\n response.set_cookie(\"is_authenticated\", max_age=0, expires=0)\n return response", "def logout():\n\n session.pop(\"leader_logged_in\", False)\n session.pop(\"leader_id\", None)\n session.pop(\"leader_email\", None)\n\n return redirect(f\"{BASEPATH}/login\")", "def logout():\n \n del session[\"logged_in\"]\n flash(\"See you later! ;)\")\n return redirect('/')", "def logout():\n session.clear()\n return redirect(\"/showlog\")", "def sign_out():\n next_url = request.args.get('next')\n session.pop(\"user\")\n flash(\"Sign Out Successful\", \"success\")\n return redirect(next_url or url_for('index'))", "def kill_session(user):\n\n # Destroy cookie\n user.cookie = None\n user.cookie_expiration = datetime.now()\n\n # Commit\n db.session.add(user)\n db.session.commit()", "def logout(self, request):\n pass", "def logout():\n session['logged_in'] = False\n return '', 204", "def logout(self):\n with self.client.post(\"/logout\", catch_response=True) as response:\n for r_hist in response.history:\n if r_hist.status_code > 200 and r_hist.status_code < 400:\n response.success()\n self.user.username = None\n # go to UnauthenticatedTasks\n self.interrupt()", "def logout():\n DBSessionMaker = sessionmaker(bind=engine)\n db_session = DBSessionMaker()\n\n # Find and delete user's session entry in the session table\n try:\n cookie_sess_id = request.cookies.get('session')\n db_session.query(Sessions).filter(Sessions.id==cookie_sess_id).delete()\n db_session.commit()\n logout_resp = Response(status=200)\n logout_resp.delete_cookie('session')\n return logout_resp\n except Exception:\n db_session.rollback()\n\n # Delete user's cookie if something went wrong\n err_resp = Response(status=500)\n err_resp.delete_cookie('session')\n return err_resp", "def signout(self):\n username = cherrypy.session.get('username')\n if username is None:\n pass\n else:\n cherrypy.lib.sessions.expire()\n raise cherrypy.HTTPRedirect('/')", "def logout():\n session.pop('logged_in', None)\n return redirect(url_for('home'))", "def sign_out():\n\n session.clear()\n response = make_response(redirect('/'))\n response.delete_cookie(\"logged-in\")\n return response", "def log_out():\n if 'name' in session:\n PLAN.logout_user(session['name'])\n session.pop('name', None)\n return redirect(url_for('log_in'))\n return redirect(url_for('log_in'))", "def logout():\n if \"username\" in session.keys():\n del session[\"username\"]\n if not app.config[\"DISABLE_AUTH\"]:\n return redirect(url_for(\"login\") + \"?slo\")\n else:\n return redirect(url_for(\"index\"))", "def user_logout(request):\r\n logout(request)\r\n return redirect('accounts:login')", "def logout():\n session.pop('logged_in', None)\n flash('You were logged out')\n return redirect(url_for('get_devices'))", "def s_logout(request):\n logout(request) # use django.contrib.auth.logout , clear all session , redirect to logout\n return redirect('/')", "def session_end(self, user):\n self._transport.delete(\"/service/v3/sessions\", self._subject, username=user)", "def logoutUser(request):\n\n logout(request)\n return redirect('loginpage')", "def logout():\n session.clear()\n return redirect(\"/\")", "def logout_user(request):\n\tlogout(request)\n\treturn HttpResponseRedirect('/')", "def logout():\n 
logout_user()\n return {'message': 'User logged out'}", "def logout(self):\n pass", "def log_out(request):\n logout(request)\n return redirect('user_login')", "def logout():\n session.pop('logged_in', None)\n flash('You were logged out', 'success')\n return redirect(url_for('show_entries'))", "def logout():\n if 'access_token' in login_session:\n del login_session['access_token']\n del login_session['email']\n flash(\"you are now logout\")\n return redirect(url_for('catelog'))", "def logout(request):\n args = load_user_objects(request)\n auth.logout(request)\n args['user'] = request.user\n return render_to_response('datata_profile/login/logout.html', args)" ]
[ "0.86715126", "0.81939507", "0.8084283", "0.8076383", "0.8072221", "0.80592704", "0.7940466", "0.792623", "0.7907761", "0.78814626", "0.78327316", "0.7786549", "0.7768795", "0.77528083", "0.7744378", "0.7737063", "0.7730634", "0.77184623", "0.7702954", "0.7678678", "0.7678678", "0.767486", "0.76727283", "0.7663524", "0.7663524", "0.76425797", "0.76170444", "0.7615265", "0.76122683", "0.75944805", "0.7587108", "0.7584403", "0.75784", "0.7569305", "0.75600153", "0.7552035", "0.7552035", "0.75312036", "0.75164586", "0.74777895", "0.7470771", "0.74398667", "0.7435532", "0.7434931", "0.7429751", "0.7405455", "0.739975", "0.739502", "0.7373991", "0.7352234", "0.73509574", "0.73332506", "0.73158824", "0.73149246", "0.7312903", "0.72832", "0.7272832", "0.72684807", "0.7266051", "0.72557664", "0.72437304", "0.72299373", "0.7227073", "0.7223176", "0.719185", "0.71907353", "0.7185483", "0.7182329", "0.71717244", "0.71710306", "0.7167616", "0.7166052", "0.7164683", "0.71613586", "0.71139884", "0.7112632", "0.71055114", "0.70947903", "0.7088619", "0.7086211", "0.70853156", "0.7083131", "0.70714587", "0.7066173", "0.70635015", "0.7048638", "0.70349413", "0.70330745", "0.7031964", "0.7028548", "0.7024858", "0.70080155", "0.7004284", "0.6988371", "0.6984159", "0.69710916", "0.6969818", "0.6960009", "0.6957874" ]
0.7882814
9
New user signup form
def register_processed():
    print "REGISTER ROUTE IS WORKING"

    # Get variables from HTML form
    email = request.form["email"]
    password = request.form["password"]

    # query the DB for user
    new_user = User(email=email, password=password)

    # check DB for user searching by email
    same_email_user = User.query.filter(User.email == email).first()

    # users who registered / login will be redircted --> passport/profile pg.
    if same_email_user:
        flash("Email is already registered. Please signin to your account")
        return redirect("/")

    # check user by username --> condition to authentiate user
    same_username = User.query.filter(User.email == email).first()
    if same_username:
        flash("please pick another username")
        return redirect("/")

    # add user to db if they are new
    db.session.add(new_user)
    # commit transaction
    db.session.commit()

    # query db user by email add them to current session and redirect
    # user to passport page
    user = User.query.filter_by(email=email).first()
    flash("User %s added.You have successfully created an account! Welcome to Wanderlust" % email)
    session["user_id"] = user.user_id
    return redirect("/passport")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def signup_form(request):\n return {'signup_form': UserForm()}", "def signup():\n return render_template(\"new_user.html\")", "def signup():", "def signupview(request):\n if request.method != 'POST':\n # Display blank registration form\n form = UserCreationForm()\n else:\n # PRocess completed form\n form = UserCreationForm(data=request.POST)\n if form.is_valid():\n form.save()\n # authenticated_user = authenticate(username=new_user.username,\n # password=request.POST['password'])\n return HttpResponseRedirect(reverse('dashboard:user_profile'))\n\n context = {'form': form}\n template_name = 'registration/signup.html'\n return render(request, template_name, context)", "def signup(self, request, user):\n pass", "def sign_up():\n form = RegisterForm()\n if request.method == \"GET\":\n return render_template('adduser.html', title='Add New User', form=form)\n if request.method == 'POST' and form.validate_on_submit():\n username = form.username.data\n password = form.password1.data\n email = form.email.data\n account = db.check_item(\"username\", username)\n if account is not None:\n flash('This User name or Email is existing')\n return redirect(url_for('sign_up'))\n else:\n db.add_user(username, password, email)\n flash(\"You have add a new user successfully\")\n return redirect(url_for('sign_up'))\n return render_template('adduser.html', title='Add New User', form=form)", "def signup(request):\n if request.method == \"POST\":\n username = request.POST.get('username')\n password = request.POST.get('password')\n if User.objects.filter(username__exact=username).count():\n return HttpResponse('duplicate id', 400)\n else:\n user = User.objects.create_user(username, password=password)\n user.first_name = request.POST.get('name', '')\n user.save()\n signuser = SignUser()\n signuser.user = user\n signuser.save()\n macform = MacForm()\n return render(request, \"registration/signup_next.html\")\n\n elif request.method ==\"GET\":\n userform = RegisterForm()\n\n return render(request, \"registration/signup.html\", {\"userform\": userform})", "def view_signup(self):\n logged_in = authenticated_userid(self.request)\n message = u''\n username = u''\n password = u''\n\n # Create form by using schemas with validations\n form = Form(self.request, schema=SignUpSchema,\n state=State(request=self.request))\n\n if form.validate():\n username = self.request.params['username']\n password = self.request.params['password']\n email = self.request.params['email']\n self.context['users'].add(username, password, email)\n self.context['groups'].add(username, group_names['member'])\n self.context['groups'].add(username, u'u:%s' % username)\n\n message = msg['succeed_add_user'] + username\n\n return {\n 'message': message,\n 'url': self.request.application_url + '/signup',\n 'username': username,\n 'logged_in': logged_in,\n 'password': password,\n 'form': FormRenderer(form)\n }", "def signup(request):\n\tform = UserCreationEmailForm(request.POST or None)\n\tif form.is_valid():\n\t\tcomp = form.save()\n\t\tcomp.groups.add(Group.objects.get(name='comprador'))\n\t\tusername = request.POST['username']\n\t\tpassword = request.POST['password1']\n\t\tuser = authenticate(username=username, password=password)\n\t\tif user is not None:\n\t\t\tif user.is_active: login(request, user)\n\n\t\treturn HttpResponseRedirect(\"/comprador/registro/paso1\")\n\n\t# crear el user profile\n\t# redireccionar al home\n\ttemplate = 'customerbuy/signup.html'\n\treturn render(request, template,{'form':form})\n\t#return 
render_to_response(\"customer/signup.html\", {'form': form,}, context_instance=RequestContext(request))", "def create_user():\n if request.method == 'POST':\n PLAN.create_user(request.form['fname'],\n request.form['lname'],\n request.form['username'],\n request.form['password'],\n request.form['email'])\n return redirect(url_for('index'))\n return render_template('newuser.html')", "def register(request):\n\n form = CreateUserForm()\n if request.method == 'POST':\n form = CreateUserForm(request.POST)\n if form.is_valid():\n form.save()\n user = form.cleaned_data.get('username')\n messages.success(request,f\"The Account has been created : {user}\")\n return redirect('login')\n context = {'form':form}\n return HttpResponse(render(request,'noxusProject/sign_up.html',context))", "def signup():\n if request.method == 'GET':\n form = SignUpForm(obj=current_user)\n\n else:\n form = SignUpForm(request.form)\n if request.method == 'POST' and form.validate():\n email = form.email.data\n password = form.password.data\n\n # Check if they they exist already\n user = Users.get_one(email = email)\n if not user:\n email = form.email.data\n first_name = form.first_name.data\n last_name = form.last_name.data\n user = User(**{'email':email, 'first_name':first_name, 'last_name':last_name})\n user.set_password(password)\n user.provider_type = form.provider_type.data\n user.practice_name = form.practice_name.data\n user.practice_type = form.practice_type.data\n try:\n user.save()\n except Exception as e:\n log.exception(f\"Exception trying to save user {email}\")\n else:\n return redirect('/')\n else:\n form.errors = \"User already exists\"\n \n context = {'form':form}\n content = render_template( 'signup.html', **context )\n return content", "def sign_up(request):\n form = UserCreationForm()\n if request.method == 'POST':\n form = UserCreationForm(data=request.POST)\n if form.is_valid():\n form.save()\n user = authenticate(\n username=form.cleaned_data['username'],\n password=form.cleaned_data['password1']\n )\n login(request, user)\n messages.success(\n request,\n \"You're now a user! You've been signed in, too.\"\n )\n return HttpResponseRedirect(reverse('home'))\n return render(request, 'accounts/sign_up.html', {'form': form})", "def make_new_user():\n return render_template('users/new_user_form.html')", "def signup(request):\n \n user_form = UserCreationForm()\n \n if request.method == 'POST':\n user_form = UserCreationForm(request.POST)\n if user_form.is_valid():\n \"\"\" new user account is created here\"\"\"\n \"\"\" @fixme: this is a buggy peice of code; cannot do commit=False; because a M-M relation cannot be attached to a non-existing object. 
\"\"\"\n new_user = user_form.save()\n \n \"\"\" @fixme: group is added after the account is created/commited to the DB; this is kinda bad; required two DB calls.\"\"\"\n# new_user.groups.add(Group.objects.get(name='student'))\n return HttpResponseRedirect(reverse(\"home.views.index\"))\n \n return render_to_response(\"auth/signup.html\", {\n 'form' : user_form\n }, context_instance=RequestContext(request))", "def signup(request):\r\n return {}", "def signup(request):\n try:\n registered = False\n if request.method == 'POST':\n user_form = UserForm(data=request.POST)\n user_form.username = request.POST['email']\n profile_form = UserProfileInfoForm(data=request.POST)\n if user_form.is_valid() and profile_form.is_valid():\n user = User()\n user.first_name = request.POST.get('first_name')\n user.last_name = request.POST.get('last_name')\n user.email = request.POST.get('email')\n user.username = request.POST.get('email')\n user.set_password(request.POST.get('password'))\n user.save()\n profile = profile_form.save(commit=False)\n profile.user = user\n profile.save()\n registered = True\n else:\n print(user_form.errors,profile_form.errors)\n else:\n user_form = UserForm()\n profile_form = UserProfileInfoForm()\n return render(request,'accounts/registration.html',\n {'user_form':user_form,\n 'profile_form':profile_form,\n 'registered':registered})\n except Exception as e:\n return HttpResponse(e, status=500)", "def register_new_user():\n register_form = UserAddForm()\n login_form = LoginForm()\n\n if register_form.validate_on_submit():\n try:\n user = User.signup(\n email=register_form.new_email.data,\n password=register_form.new_password.data,\n username=register_form.new_username.data,\n first_name=register_form.first_name.data.capitalize(),\n last_name=register_form.last_name.data.capitalize(),\n image_url=register_form.image_url.data or User.image_url.default.arg,\n cover_url=register_form.cover_url.data or User.cover_url.default.arg\n )\n db.session.commit()\n\n do_login(user)\n return redirect('/')\n except IntegrityError:\n flash(\n \"Email or username already registered! 
Please log in or try again\", 'danger')\n return render_template('home_anon.html', register_form=register_form, login_form=login_form)\n\n else:\n return render_template('home_anon.html', register_form=register_form, login_form=login_form)", "def register(request):\r\n if request.method == 'POST':\r\n form = bforms.UserCreationForm(request.POST)\r\n if form.is_valid():\r\n form.save()\r\n return HttpResponseRedirect('/accounts/login/')\r\n if request.method == 'GET':\r\n form = bforms.UserCreationForm()\r\n payload = {'form':form}\r\n return render(request, 'registration/create_user.html', payload)", "def signup():\n signup_form = SignupForm(request.form) # Rempli les champs créer dans le SignupForm avec les valeurs du forumlaire corerspondantes au nom donné au champs\n # Les champs créer dans le SignupForm peuvent être parcouru grâce à la methode __setitem__ et __getitem__.\n if request.method == 'POST':\n if signup_form.validate(): # Utilise les validators renseignés dans SignupForm pour vérifier les valeurs des champs\n email = signup_form.email.data\n last_name = signup_form.last_name.data\n first_name = signup_form.first_name.data\n phone = signup_form.phone.data\n password = signup_form.password.data\n\n if not UserController().exists(email):\n hashed_password = hashlib.sha256(password.encode('utf8')).hexdigest()\n user = UserController().insert(email, hashed_password, last_name, first_name, phone)\n login_user(user)\n return redirect(url_for('main_bp.home'))\n flash('Un utlisateur utilise déjà cette adresse mail')\n return redirect(url_for('auth_bp.signup'))\n\n return render_template('signup.html',\n current_user=current_user,\n form=signup_form)", "def signup(request):\n if request.method == \"POST\":\n # Double check the form was actually submitted, instead of the page being\n # loaded\n form = BeeUserCreationForm(request.POST)\n\n if form.is_valid():\n form.save()\n username = form.cleaned_data.get(\"username\")\n raw_password = form.cleaned_data.get(\"password\")\n user = authenticate(username=username, password=raw_password)\n login(request, user)\n return redirect(\"home\")\n\n else:\n # If the page was just loaded, set the form to the proper one\n form = BeeUserCreationForm()\n\n return render(request, \"signup.html\", {\"form\": form})", "def register(request):\n if request.method != \"POST\":\n form = UserCreationForm()\n else:\n form = UserCreationForm(data=request.POST)\n if form.is_valid():\n new_user = form.save()\n login(request, new_user)\n return redirect(\"pybasic:index\")\n context = {\"form\": form}\n return render(request, \"users/register.html\", context)", "def signup(**kwargs):\n\n pass", "def process_signup():\n\n\temail = request.form.get('email');\n\tpassword = request.form.get('password');\n\n\tif email:\n\t\tnew_user = model.User(email=email, password=password)\n\t\tmodel.session.add(new_user)\n\t\tmodel.session.commit()\n\t\tsession['email'] = email\t\n\n\treturn render_template(\"signup.html\")", "def signup():\n return render_template('auth/signup.html')", "def signUp(user=None):\n\n\tif user == None:\n\t\tuser = defaultUser\n\n\treturn render_template('signUp.html', user=user.name)", "def sign_up():\n return render_template('sign_up.html')", "def register_view(request):\n\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n\n if form.is_valid():\n user = User.objects.create_user(username=form.cleaned_data['username'],\n password=form.cleaned_data['password1'])\n user.save()\n return redirect('wiki:login')\n else:\n form = 
UserCreationForm\n\n return render(request, 'registration/register.html', {'form': form})", "def sign_up():\n #POST - the info coming from the sign-up-form\n\n #get username and password that was filled in sign-up form\n #if username exits - flash \"username taken\" and redirct to /sign-up-form\n\n #else save the new user to the database - user table, flash success message\n #and redirect back to /more-details/cat_id", "def add_user():\n\n return render_template('register-form.html')", "def registration():\n form = forms.RegistrationForm()\n if form.validate_on_submit():\n models.User.create_user(username=form.username.data,\n email=form.email.data,\n password=form.password.data)\n return redirect(url_for('login'))\n return render_template(\"register.html\", form=form)", "def signup(request):\n\n if request.method == 'POST':\n form = SignUpForm(request.POST)\n if form.is_valid():\n # Save the form\n form.save()\n # Create the user\n username = form.cleaned_data.get('username')\n raw_password = form.cleaned_data.get('password1')\n # also make sure that the user gets into the STAFF,\n # otherwise he/she may not see the admin pages\n user = authenticate(username=username, \n password=raw_password,\n is_staff=True)\n user.is_staff = True\n user.save()\n # Add user to the \"RegistryUser\" group\n gQs = Group.objects.filter(name=\"seeker_user\")\n if gQs.count() > 0:\n g = gQs[0]\n g.user_set.add(user)\n # Log in as the user\n login(request, user)\n return redirect('home')\n else:\n form = SignUpForm()\n return render(request, 'signup.html', {'form': form})", "def register(request): \n\tif request.method != 'POST':\n\t\tform = UserCreationForm() \n\telse: \n\t\tform = UserCreationForm(data = request.POST)\n\n\tif form.is_valid():\n\t\tnew_user = form.save()\n\t\tauthenticated_user = authenticate(username=new_user.username, password=request.POST['password1']) \n\t\tlogin(request, authenticated_user) \n\t\treturn HttpResponseRedirect(reverse('Toeic:index'))\n\n\tcontext = {'form': form}\n\treturn render(request, 'users/register.html', context)", "def signup(self):\n # sign up\n new_username = generate_username()\n success = signup_helper(self, new_username)\n if success:\n # go to AuthenticatedTasks\n self.user.username = new_username\n self.interrupt()", "def signup():\n\n form = UserAddForm()\n \n\n if form.validate_on_submit():\n try:\n user = User.signup(\n username=form.username.data,\n password=form.password.data,\n email=form.email.data,\n image_url=form.image_url.data or User.image_url.default.arg,\n )\n db.session.commit()\n\n except IntegrityError:\n flash(\"Username already taken\", 'danger')\n return render_template('users/signup.html', form=form)\n\n do_login(user)\n\n return redirect(\"/\")\n\n else:\n return render_template('users/signup.html', form=form)", "def signup():\n\n form = UserAddForm()\n if form.validate_on_submit():\n try:\n user = User.signup(\n username=form.username.data,\n password=form.password.data,\n email=form.email.data,\n image_url=form.image_url.data or User.image_url.default.arg,\n )\n db.session.commit()\n\n except IntegrityError:\n flash(\"Username already taken\", 'danger')\n return render_template('users/signup.html', form=form)\n\n do_login(user)\n\n return redirect(\"/\")\n\n else:\n return render_template('users/signup.html', form=form)", "def signup():\n req = request.get_json()\n user = req['user']\n is_created = views.UserManagement().create(user)\n if not is_created:\n return jsonify(msg.ALREADY_USE), 400\n\n return jsonify(msg.SUCCESS), 200", "def 
register(request):\n\n data = request.POST.copy() or None\n\n user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n\n user = authenticate(username=user.username, password=user_creation_form.cleaned_data['password1'])\n login(request, user)\n\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n\n context = {\n 'user_creation_form': user_creation_form,\n }\n req_ctx = RequestContext(request, context)\n\n return render_to_response('register.html', req_ctx)", "def signup(self, request, user):\n user.first_name = self.cleaned_data['first_name']\n user.last_name = self.cleaned_data['last_name']\n user.save()\n\n return user", "def signup():\n\n form = UserAddForm()\n\n if form.validate_on_submit():\n try:\n user = User.signup(\n username=form.username.data,\n password=form.password.data,\n )\n db.session.commit()\n\n except IntegrityError:\n flash(\"Username already taken\", 'danger')\n return render_template('users/signup.html', form=form)\n\n do_login(user)\n\n return redirect(\"/\")\n\n else:\n return render_template('signup.html', form=form)", "def register(request):\n register_form = UserCreationForm()\n return render(request, 'metro_app/register.html', {'form': register_form})", "def show_new_user_form():\r\n return render_template('user-form.html')", "def signup():\n\n form = UserAddForm()\n\n if form.validate_on_submit():\n try:\n user = User.signup(\n username=form.username.data,\n password=form.password.data,\n email=form.email.data,\n image_url=form.image_url.data or User.image_url.default.arg,\n )\n db.session.add(user)\n db.session.commit()\n\n except IntegrityError:\n flash(\"Username / Email already taken\", 'danger')\n return render_template('users/signup.html', form=form)\n\n do_login(user)\n\n return redirect(\"/\")\n\n else:\n return render_template('users/signup.html', form=form)", "def signup(request):\n if request.method == 'POST':\n form = SignUpForm(request.POST)\n if form.is_valid():\n user = form.save(commit=False)\n user.is_active = False\n user.save()\n current_site = get_current_site(request)\n subject = 'Activate Your neighwatch Account'\n message = render_to_string('registration/activation_email.html', {\n 'user': user,\n 'domain': current_site.domain,\n 'uid': urlsafe_base64_encode(force_bytes(user.pk)),\n 'token': account_activation_token.make_token(user),\n })\n user.email_user(subject, message)\n return redirect('account_activation_sent')\n else:\n form = SignUpForm()\n return render(request, 'registration/registration_form.html', {'form': form})", "def user_signup():\n\n if request.method == \"GET\":\n return render_template(\"signup_form.html\")\n\n # post request logic starts here\n email = request.form.get(\"email\")\n password = request.form.get(\"password\")\n\n if email_is_valid(email):\n\n flash(\"It looks like you are already signed up for Readerboard! 
Try signing in instead.\")\n return redirect(\"/signin\")\n\n else:\n\n new_user = User()\n db.session.add(new_user)\n db.session.commit()\n new_acct = Account(user_id=new_user.user_id, email=email, password=password)\n db.session.add(new_acct)\n\n db.session.commit()\n session['acct'] = new_acct.acct_id\n\n return redirect(\"/auth/goodreads\")", "def register_users(request):\n form = UserProfileForm(request.POST or None)\n if form.is_valid():\n cd = form.cleaned_data\n User.objects.create_user(\n # default parameters\n username=cd.get('username'),\n email=cd.get('email'),\n password=cd.get('password1'),\n # parameters **kargs\n first_name=cd.get('first_name'),\n last_name=cd.get('last_name'),\n )\n return HttpResponseRedirect('/')\n return render_to_response('users/registrar_usuario.html', {'formulario': form},\n context_instance=RequestContext(request))", "def registerPage(request):\n\n form = CreateUserForm()\n\n if request.method == \"POST\":\n form = CreateUserForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('loginpage') \n\n context = {'form': form}\n return render(request, 'registerpage.html', context)", "def new_user():\n pass", "def create_account(request):\n if request.method == 'POST':\n\n post = request.POST\n form = forms.RegisterForm(post)\n\n if form.is_valid():\n # create a new user\n user = models.HAWCUser.objects.create_user(post['email'],\n post['password1'])\n user.first_name = post['first_name']\n user.last_name = post['last_name']\n user.full_clean()\n user.save()\n\n # create a new user profile\n profile = models.UserProfile(user=user)\n profile.save()\n\n # after save, log user in\n user = authenticate(username=post['email'],\n password=post['password1'])\n login(request, user)\n return redirect('portal')\n else:\n form = forms.RegisterForm()\n\n return render(request, 'registration/create_account.html', {'form': form})", "def signup(request):\n if request.method == 'POST':\n form = SignUpForm(data=request.POST)\n if form.is_valid():\n email = form.data.get('email')\n username = form.data.get('username')\n if User.objects.filter(username=username).exists():\n messages.error(request, \"Your username is already taken!\")\n form = SignUpForm()\n else: \n raw_password = form.data.get('raw_password')\n user = authenticate(email=email,username=username, password=raw_password)\n form.save()\n return redirect(reverse('login'))\n else:\n form = SignUpForm()\n return render(request,'registration/createaccount.html', {'form': form})", "def register_page():\n form = addUser()\n\n if form.validate_on_submit():\n username=form.username.data\n password=form.password.data\n email=form.email.data\n first_name=form.first_name.data\n last_name=form.last_name.data\n \n new_user = User.register(username=username, password=password, email=email, first_name=first_name, last_name=last_name)\n\n db.session.add(new_user)\n db.session.commit()\n\n session[\"user\"] = new_user.username\n return redirect(f'/users/{username}')\n else:\n return render_template(\"reg_form.html\", form=form)", "def register_view(request):\n data = {\"title\": \"Sign Up\"}\n form = UserRegistraionForm(request.POST or None)\n if form.is_valid():\n new_user = form.save(commit=False)\n password = form.cleaned_data.get(\"password\")\n new_user.set_password(password)\n new_user.save()\n return redirect('login')\n\n data[\"form\"] = form\n return render(request, \"pages/form.html\", data)", "def create_user_form():\n template_name = \"create_user.html\"\n users = []\n print request.form\n\n 
flash(request.form['username'])\n flash(request.form['email'])\n\n return render_template(template_name, users=users)", "def signup(self, request):\n # TODO: Add user authentication. Currently, we will create an acct \n new_user = Account.add_new_user(request)\n if new_user is None:\n return AccountResponse(errmsg=\"Username already exists!\")\n return AccountResponse(id=new_user.key.id())", "def register():\r\n form = RegisterForm(request.form)\r\n\r\n if request.method == 'POST' and form.validate():\r\n new_user = User(form.email.data, form.password.data)\r\n g.session.add(new_user)\r\n g.session.commit()\r\n\r\n new_profile = Profile(form.first_name.data, form.last_name.data, new_user.id)\r\n g.session.add(new_profile)\r\n g.session.commit()\r\n # TODO: make it async\r\n if current_app.config[\"REQUIRE_EMAIL_CONFIRMATION\"]:\r\n send_confirmation(new_user)\r\n new_user.init_folders()\r\n logout_user()\r\n return redirect(url_for(\".login\"))\r\n return render_template(\"account/register_user.pug\", form=form)", "def sign_up():\n\n form = SignupForm()\n # Checking form has been filled in correctly\n if form.validate_on_submit():\n users = mongo.db.users\n existing_user = users.find_one({'username': request.form['username']})\n\n # If username isn't already in database\n if existing_user is None:\n hash_password = generate_password_hash(request.form['password'])\n # Create an account\n users.insert_one({'username': request.form['username'],\n 'password': hash_password})\n # Notify them\n flash(f'Account created for \\'{form.username.data}\\'!', 'success')\n session['username'] = request.form['username']\n session['logged'] = True\n return redirect(url_for('index'))\n else:\n # If username already exists then tell user to try another username\n flash(f'Username \\'{form.username.data}\\' already exists!' 
+\n 'Please choose a different username', 'danger')\n return redirect(url_for('sign_up'))\n\n return render_template('sign_up.html', title=\"Sign Up\", form=form)", "def sign_up():\n form = SignUpForm()\n form['csrf_token'].data = request.cookies['csrf_token']\n if form.validate_on_submit():\n\n # Create user, default program, and default membership records\n user = User(\n username=form.data['username'],\n email=form.data['email'],\n password=form.data['password'],\n first_name=form.data['first_name'],\n last_name=form.data['last_name'],\n birthday=form.data['birthday']\n )\n program = Program(program=f\"{form.data['username']}'s Habits\",\n creator=user,)\n membership = Member(program=program,\n member=user,\n stamper=user,)\n db.session.add(user)\n db.session.add(program)\n db.session.add(membership)\n db.session.commit()\n\n login_user(user)\n\n # Set cookie\n # res = make_response(jsonify(user_schema.dump(user)))\n # res.set_cookie(\"uid_cookie\", str(user.id))\n\n return queryUserFullData(user.id)\n return {'errors': validation_errors_to_error_messages(form.errors)}", "def sign_up(request):\n context = {\n 'signup_form': SignUpForm(),\n 'profile_form': ProfileForm(),\n 'login_form': LoginForm(),\n }\n\n if request.method == 'GET':\n return render(request, 'registration/sign_up.html', context)\n\n # Execute the below if the signup form is posted.\n if request.method == 'POST':\n context['signup_form'] = SignUpForm(request.POST)\n context['profile_form'] = ProfileForm(request.POST)\n if context['signup_form'].is_valid() and context['profile_form'].is_valid():\n username = context['signup_form'].cleaned_data.get('username')\n email = context['signup_form'].cleaned_data.get('email')\n password = context['signup_form'].cleaned_data.get('password')\n gender = context['profile_form'].cleaned_data.get('gender')\n birth_year = context['profile_form'].cleaned_data.get('birth_year')\n birth_month = context['profile_form'].cleaned_data.get('birth_month')\n birth_day = context['profile_form'].cleaned_data.get('birth_day')\n\n user = User.objects.create_user(username, email, password)\n user.profile.gender = gender\n if birth_day and birth_month and birth_year:\n birth_date = date(int(birth_year), int(birth_month), int(birth_day)).isoformat()\n user.profile.birth_date = birth_date\n user.save()\n\n user = authenticate(request, username=username, password=password)\n auth_login(request, user, backend='django.contrib.auth.backends.ModelBackend')\n messages.add_message(request, messages.SUCCESS, 'ユーザー登録が完了しました!')\n return redirect('home')", "def create(request):\n if request.method == 'POST':\n form = SignUpForm(request.POST, request.FILES)\n if form.is_valid():\n user = form.save(commit=False)\n user.lat, user.lng = get_coords(request)\n user.save()\n login(request, user, backend=django.contrib.auth.backends.ModelBackend)\n else:\n form = SignUpForm()\n return render(request, 'users/create.html', {'form': form})", "def register(request):#, success_url=reverse('registrationsuccess')):\n\tif request.method == 'POST':\n\t\tform = RegistrationForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tnew_user = RegistrationProfile.objects.create_inactive_user(username=form.cleaned_data['username'],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpassword=form.cleaned_data['password1'],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\temail=form.cleaned_data['email'],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tname=form.cleaned_data['first_name'],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsurname=form.cleaned_data['last_name'])\n\t\t\treturn 
HttpResponseRedirect(reverse('registrationsuccess'))\n\telse:\n\t\tform = RegistrationForm()\n\treturn render_to_response(request, 'registration/registration_form.html', {'form': form })", "def post(self,request,*args,**kwargs):\n form = UserCreationForm(request.POST)\n context = {}\n template = ''\n\n if form.is_valid():\n user = form.save(commit=False)\n user.is_active = False\n user.save()\n return HttpResponseRedirect(reverse('index:home'))\n\n else:\n context['form'] = form\n template = 'authentication/register.html'\n\n return render(request,template, context)", "def signup_post():\n email = request.form.get('email')\n name = request.form.get('name')\n password = request.form.get('password')\n hname = '{0}|{1}'.format(name, email)\n hid = sql_queries.new_household(hname)\n user = User.query.filter_by(email=email).first()\n\n if user:\n flash('Email address already exists.')\n return redirect(url_for('auth.login'))\n\n new_user = User(email=email, name=name,\n password=generate_password_hash(password, method='sha256'), household_id=hid)\n\n db.session.add(new_user)\n db.session.commit()\n\n return redirect(url_for('auth.login'))", "def signup():\n print(\"In signup.....\")\n auth_service = AuthService()\n form = SignUpForm()\n if request.method == 'GET':\n return render_template('auth/signup.html', title='Sign Up', form=form)\n\n elif request.method == 'POST':\n if form.validate_on_submit():\n user_dto = UserDto(form.email.data, form.password.data, form.name.data, form.contact.data)\n try:\n auth_service.create_user(user_dto)\n flash('SignUp successfull name = \"%s\" , email = \"%s\"' % (form.name.data, form.email.data))\n return redirect(url_for('auth.signin'))\n except UserExistsException:\n flash(\"User already exists\")\n return redirect(url_for('auth.signup'))\n flash('SignUp Failed')\n return render_template('auth/signup.html', title='Sign Up', form=form)", "def register(self, form):\n new_user = form.save(commit=False)\n username_field = getattr(new_user, 'USERNAME_FIELD', 'username')\n # Save lowercased email as username.\n setattr(new_user, username_field, form.cleaned_data['email'].lower())\n new_user.first_name = form.cleaned_data['first_name']\n new_user.last_name = form.cleaned_data['last_name']\n new_user.save()\n new_user = authenticate(username=getattr(new_user, username_field), password=form.cleaned_data['password1'])\n login(self.request, new_user)\n user_registered.send(sender=self.__class__, user=new_user, request=self.request)\n profile, _ = Profile.objects.get_or_create(user=new_user)\n self.request.session['signed_up'] = True\n profile.payment_plan = int(form.cleaned_data['payment_plan'])\n profile.company_name = form.cleaned_data['company']\n profile.phone = form.cleaned_data['phone']\n profile.save(update_fields=['payment_plan', 'company_name', 'phone'])\n if profile.payment_plan != Profile.PAYMENT_PLAN_FREE:\n messages.add_message(self.request, messages.INFO,\n 'Congratulations! 
We won\\'t charge you for this plan for now.')\n return new_user", "def registeruser(request):\n form = UserRegisterForm()\n\n if request.method == 'POST':\n form = UserRegisterForm(request.POST)\n if form.is_valid():\n form.save()\n user = form.cleaned_data.get('username')\n messages.success(request, 'Account was created for '+user)\n return redirect('login')\n\n contex = {'form': form}\n return render(request, 'users/register.html', contex)", "def create_user():\n form = UserForm(prefix='register')\n\n if not form.validate_on_submit():\n flash('Invalid input.', 'warning')\n return view_index(form)\n else:\n user, exists = db_insert_or_get(User, name=form.name.data, defaults={'password': form.password.data})\n if exists:\n flash('Username taken.', 'warning')\n else:\n db.session.commit()\n\n session['user_name'] = user.name\n app.logger.info('User %s created successfully.', user.name)\n flash('User created successfully.', 'success')\n\n return redirect(url_for('view_index'))", "def sign_up():\n\n if current_user.is_authenticated:\n return redirect(url_for('main.index'))\n\n form = SignUpForm()\n if form.validate_on_submit():\n existing_user = User.query.filter(\n (User.email == form.email.data) | (User.username == form.username.data)\n ).first()\n if existing_user is None:\n user = User(\n username=form.username.data,\n email=form.email.data,\n role=Role.user.name\n )\n user.set_password(form.password.data)\n db.session.add(user)\n db.session.commit()\n login_user(user)\n return redirect(url_for('main.index'))\n flask.flash('A user already exists with that username or email address.')\n return render_template(\n 'auth/signup.html',\n title='Sign up',\n form=form\n )", "def signup():\n return render_template(\n 'signup.html',\n title='Signup',\n time=datetime.now(),\n message='not aviliable'\n )", "def post(self, request, *args, **kwargs):\n return super(SignUpView, self).post(request, query=app.SIGN_USER, *args, **kwargs)", "def sign_up():\n if request.method == 'POST':\n result = register(request.form['name'], request.form['username'],\n request.form['password'], request.form['rpt_password'])\n if result == \"Registration successful\":\n flash(result, 'info')\n return redirect(url_for('sign_in'))\n flash(result, 'warning')\n return render_template('register.html')", "def register(request):\n if request.method == 'GET':\n form = CustomUserCreationForm()\n elif request.method == 'POST':\n form = CustomUserCreationForm( data=request.POST )\n\n if form.is_valid():\n user = form.save( commit=False )\n # we can make any last second changes to the user\n user.save()\n return redirect( '/' )\n\n context = {'form': form}\n return render( request, 'register.html', context )", "def signup():\n\n return render_template('signup.HTML', allergen=allergen_response.json())", "def sign_up(request):\n\n sign_form = forms.SignUp()\n\n if request.method == 'POST':\n sign_form = forms.SignUp(request.POST)\n\n # Checking validation.\n if sign_form.is_valid():\n sign_form.save(commit=True)\n\n # return homepage on submit.\n return home(request)\n else:\n print(\"invalid Form\")\n\n return render(request, r\"app_two/signup.html\", context={'form': sign_form})", "def users_create():", "def register():\n\n if request.method == 'POST':\n new_account = Account(fullname = request.form['fullname'],\n email = request.form['email'],\n username = request.form['username'],\n password = request.form['password'])\n \n new_account.save()\n return \"Welcome\"\n else:\n return render_template('register.html')", "def 
registerView(request):\n form = UserForm()\n\n if request.method == \"POST\":\n form = UserForm(request.POST)\n if form.is_valid():\n form.save()\n username = form.cleaned_data.get(\"username\")\n messages.success(request, \"Account created for \" + username)\n return redirect('login')\n\n context = {\n 'form': form,\n }\n # template stored in website/templates/registration directory\n return render(request, 'registration/register.html', context)", "def register(request):\n registered = False\n if request.method == 'POST':\n user_form = UserForm(data=request.POST)\n profile_form = UserProfileInfoForm(data=request.POST)\n if user_form.is_valid() and profile_form.is_valid():\n user = user_form.save()\n user.set_password(user.password)\n user.save()\n profile = profile_form.save(commit=False)\n profile.user = user\n profile.save()\n registered = True\n else:\n print(user_form.errors,profile_form.errors)\n else:\n user_form = UserForm()\n profile_form = UserProfileInfoForm()\n return render(request,'footBallApp/registration.html',\n {'user_form':user_form,\n 'profile_form':profile_form,\n 'registered':registered})", "def registerPage(request):\n if request.user.is_authenticated:\n return redirect('indexPage')\n form = PersonalUserCreationForm()\n if request.method == 'POST':\n form = PersonalUserCreationForm(request.POST)\n if form.is_valid():\n form.save()\n user = form.cleaned_data.get('username')\n messages.success(\n request, 'Un nouveau compte vient d\\'être créé pour ' + user\n )\n return redirect('loginPage')\n context.update({'form': form})\n return render(request, 'register.html', context)", "def user_register():\n \n data = user_obj.user_register(request.forms) \n return data", "def register():\n\n from .forms import RegisterForm\n\n form = RegisterForm(request.form)\n\n if form.validate_on_submit():\n username = request.form['username']\n password = request.form['password1']\n app.add_user_and_password(username, password)\n logger.info('Created account for ' + username + '.')\n\n if \"rememberMe\" in request.form:\n user = User()\n user.id = username\n session['username'] = username\n session['registrations'] = []\n login_user(user, remember=True)\n logger.info('Logged ' + username + ' in after account creation.')\n\n return redirect(url_for('home'))\n\n return render_template('signup.html', form=form)", "def register_view():\n form = RegisterForm()\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n\n # Check if user has already existed in the database\n if user:\n flash(\"Email already registered.\")\n \n # New user creation\n user = User()\n user.email = form.email.data\n user.hash_password(form.password.data)\n \n db.session.add(user)\n db.session.commit()\n\n login_user(user)\n return redirect(url_for(\"profiles.create_view\"))\n return render_template(\"register.html\", form=form)", "def create_user(request):\n message = None\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n if form.is_valid():\n # This creates a new User in the database\n new_user = form.save()\n # now we create a new blank profile, link it to the new user and save it\n new_profile = Profile()\n new_profile.user = new_user\n new_profile.save()\n # Get the user object we just created\n new_user = authenticate(username=form.cleaned_data['username'],\n password=form.cleaned_data['password1'])\n login(request, new_user)\n return HttpResponseRedirect('/index')\n else:\n form = UserCreationForm()\n\n return render(request, 'woofer/show_form.html', 
{\n 'form' : form,\n 'message' : message,\n 'form_action' : reverse('create-user'),\n 'title' : \"Create Account\"\n })", "def add_user_form():\n\n return render_template(\"add_user.html\", headline=\"Add New Blogly User\")", "def signup():\n\n # Get values from signup form\n signup_email = request.form.get(\"signup_email\")\n signup_password = request.form.get(\"signup_password\")\n\n # If user exists, ask them to log in\n # Otherwise, add user into database and log them in, redirecting to homepage\n if db.session.query(User).filter(User.email == signup_email).first():\n flash(\"You already have an account please use login!\", \"danger\")\n return redirect(\"/signup-login\")\n\n else:\n new_user = User(email=signup_email, password=signup_password, age=None, zipcode=None)\n db.session.add(new_user)\n db.session.commit()\n \n session[\"logged_in_user_email\"] = signup_email\n session[\"logged_in_user\"] = new_user.user_id\n \n flash(\"Your account has been created! You now are logged in!\", \"success\")\n \n return redirect(\"/\")", "def register_account_view(request):\n if request.user.is_authenticated:\n return redirect('home')\n\n if request.method == 'POST':\n register_form = RegisterNewUserForm(request.POST)\n if register_form.is_valid():\n register_form.save()\n username = register_form.cleaned_data.get('username')\n messages.success(\n request, \"You have successfully created an account, log-in now!\")\n return redirect('login')\n else:\n register_form = RegisterNewUserForm()\n return render(request, 'register.html', context={\"register_form\": register_form})", "def create_user():\n\n return render_template(\"users/create_user.html\")", "def signup():\n\tif request.method=='POST':\n\t\tform = request.form\n\t\tusername=request.form['username']\n\t\temail=request.form['email']\n\t\tpassword=request.form['password']\n\t\tquery=\"SELECT * from users WHERE username=%s\"\n\t\tparam=username\n\t\tc.execute(query,param)\n\t\tif c.fetchone() is not None:\n\t\t\tsession['message'] = \"That username is already taken...\"\n\t\t\treturn render_template('register.html',form=form)\n\t\telse:\n\t\t\tquery=\"SELECT * from users WHERE email=%s\"\n\t\t\tparam=email\n\t\t\tc.execute(query,param)\n\t\t\tif c.fetchone() is not None:\n\t\t\t\tsession['message'] = \"That Email is already taken...\"\n\t\t\t\treturn render_template('register.html',form=form)\n\t\t\telse:\n\t\t\t\tc.execute(\"\"\"INSERT into users(username,email,password) VALUES (%s,%s,%s)\"\"\",(username,email,password))\n\t\t\t\tconn.commit()\n\t\t\t\treturn redirect('/login')", "def signup(request):\r\n\tif request.user.is_authenticated:\r\n\t\t# Redirect user to home if already logged in\r\n\t\tgames = Game.objects.all()\r\n\t\treturn redirect('/', {'games': games, 'MEDIA_URL': settings.MEDIA_URL})\r\n\tif request.method == 'POST':\r\n\t\tform = SignUpForm(request.POST)\r\n\t\tif form.is_valid():\r\n\t\t\tuser = form.save()\r\n\t\t\tuser.refresh_from_db() # Retreive the newly saved object\r\n\t\t\tuser.is_active = False\r\n\t\t\tuser.profile.is_developer = form.cleaned_data.get('is_developer')\r\n\t\t\tuser.save()\r\n\t\t\t# Get current domain name and generate the user token\r\n\t\t\tcurrent_site = get_current_site(request)\r\n\t\t\tencodeded_uid = urlsafe_base64_encode(force_bytes(user.pk))\r\n\r\n\t\t\t# Create email subject and body\r\n\t\t\tsubject = 'Activate Your PlayMe Account'\r\n\t\t\tmessage = render_to_string('account_activation_email.html', {\r\n\t\t\t\t'user': user,\r\n\t\t\t\t'domain': 
current_site.domain,\r\n\t\t\t\t'uid': encodeded_uid.decode('utf-8'),\r\n\t\t\t\t'token': account_activation_token.make_token(user),\r\n\t\t\t})\r\n\t\t\tuser.email_user(subject, message)\r\n\t\t\treturn redirect('account_activation_sent')\r\n\telse:\r\n\t\tform = SignUpForm()\r\n\treturn render(request, 'registration/signup.html', {'form': form})", "def register():\n\n form= RegistrationForm()\n\n if form.validate_on_submit():\n\n user = User(email = form.email.data,\n username = form.username.data,\n password = form.password.data)\n\n db.session.add(user)\n db.session.commit()\n flash('You have sucesfully registered your account')\n\n return redirect(url_for('users.login'))\n\n return render_template('register.html', form=form)", "def create():\n\t# create the form\n\tif len(all_schools()) > 1:\n\t\tform = UserAddForm(request.form, schools=[g.school,], next=request.args.get('next'))\n\telse:\n\t\tform = UserAddFormOneSchool(request.form, next=request.args.get('next'))\n\n\t# submit\n\tif form.validate_on_submit():\n\t\tu = User(password=form.new_password.data)\n\t\tform.populate_obj(u)\n\t\tu.save()\n\t\tlogin_user(u)\n\t\tflash(_(\"Welcome %(user)s!\", user=u.display_name), 'success')\n\t\t#return redirect(form.next.data or url_for_school('schools.home', user_school=True))\n\t\treturn redirect(form.next.data or url_for('schools.home'))\n\t# Our simple custom captcha implementation\n\t#gotcha = 'which letter in this sentence is uppercase?'\n\t#gotcha_cap = '-'\n\t#while gotcha_cap not in string.letters:\n\t#\tidx = randint(0, len(gotcha)-1)\n\t#\tgotcha_cap = gotcha[idx]\n\t#form.gotcha.label = gotcha[:idx].lower() + gotcha[idx:].capitalize()\n\t#session['gotcha'] = gotcha_cap\n\treturn render_template('user/create.html',\n\t\ttitle=_('Create an account'),\n\t\tform=form)", "def register_new_user():\n\n register_new_user_form = RegisterForm()\n user = User.query.filter_by(username=register_new_user_form.username.data).first()\n email = User.query.filter_by(email = register_new_user_form.email.data).first()\n if user or email:\n flash(\"Sorry, the username or email already exists in the database\")\n return redirect('/')\n elif register_new_user_form.validate_on_submit():\n new_user = User(username=register_new_user_form.username.data,\n email=register_new_user_form.email.data,\n password=register_new_user_form.password.data)\n db.session.add(new_user)\n db.session.commit()\n login_user(new_user)\n return redirect('/searchpage')", "def get(self,request,*args,**kwargs):\n form = UserCreationForm()\n context = {'form':form}\n template = 'authentication/register.html'\n return render(request,template,context)", "def signup(request):\n user_info = JSONParser().parse(request)\n if 'username' not in user_info or 'password' not in user_info or 'email' not in user_info:\n return Response({\"detail\": \"username, password, or email not provided\"}, status=status.HTTP_400_BAD_REQUEST)\n\n username, password, email = user_info['username'], user_info['password'], user_info['email']\n try:\n get_user_model().objects.get(username=username)\n return Response({\"detail\": \"username already exists\"}, status=status.HTTP_400_BAD_REQUEST)\n except get_user_model().DoesNotExist:\n pass\n\n user = get_user_model().objects.create_user(username=username, email=email, password=password)\n return Response(model_to_dict(user), status=status.HTTP_201_CREATED)", "def signup(request):\n if request.method == 'POST':\n if request.POST['password1'] == request.POST['password2']:\n try:\n user = 
User.objects.get(username=request.POST['username'])\n return render(request, 'accounts/signup.html', {'error': 'Username already exists'})\n except User.DoesNotExist:\n user = User.objects.create_user(request.POST['username'], password=request.POST['password1'])\n auth.login(request, user)\n return redirect('home')\n else:\n return render(request, 'accounts/signup.html', {'error': 'Passwords do not match'})\n else:\n # User wants to enter info\n return render(request, 'accounts/signup.html')", "def register(request):\n if request.method != 'POST':\n return render(request, 'register.html')\n\n user_form = forms.UserForm(request.POST)\n profile_form = forms.UserProfileForm(request.POST)\n\n if user_form.is_valid() and profile_form.is_valid():\n user = user_form.save()\n user.set_password(user.password)\n user.save()\n\n profile = profile_form.save(commit=False)\n profile.user = user\n profile.save()\n\n # Log the new user in\n return login(request)", "def register():\r\n # TODO: re-enable csrf\r\n form = RegisterForm(request.form)\r\n if request.method == 'POST' and form.validate():\r\n account = model.user.User(fullname=form.fullname.data,\r\n name=form.name.data,\r\n email_addr=form.email_addr.data)\r\n account.set_password(form.password.data)\r\n # account.locale = get_locale()\r\n db.session.add(account)\r\n db.session.commit()\r\n login_user(account, remember=True)\r\n flash(gettext('Thanks for signing-up'), 'success')\r\n return redirect(url_for('home.home'))\r\n if request.method == 'POST' and not form.validate():\r\n flash(gettext('Please correct the errors'), 'error')\r\n return render_template('account/register.html',\r\n title=gettext(\"Register\"), form=form)", "def register_account():\n\n form = SignupForm()\n\n if form.validate_on_submit():\n username = form.username.data\n first_name = form.first_name.data\n last_name = form.last_name.data\n description = form.description.data\n email = form.email.data\n password = form.password.data\n image_url = form.image_url.data\n\n new_user = User.register(\n username,\n first_name,\n last_name,\n description,\n email,\n password,\n image_url or None\n )\n\n db.session.add(new_user)\n\n # put this in a try/ except IntegrityError\n # render the form again with an error message of \"username already taken\"\n try:\n \n db.session.commit()\n do_login(new_user)\n\n flash(\"You are signed up and logged in.\")\n return redirect(f'/cafes')\n\n except IntegrityError:\n flash(\"Username already taken.\")\n render_template('auth/signup-form.html', form=form)\n\n\n return render_template(\n 'auth/signup-form.html', form=form\n )", "def register():\n form = RegistrationForm()\n if form.validate_on_submit():\n user = User(email=form.email.data,\n first_name=form.first_name.data,\n last_name=form.last_name.data,\n password=form.password.data)\n\n # add user to the database\n db.session.add(user)\n db.session.commit()\n flash('You have successfully Create a user! 
You may now login.')\n # redirect to the login page\n return redirect(url_for('view.dashboard'))\n # load registration template\n return render_template('registration.html', form=form, title='Register')", "def register_user():\n\n form = UserForm()\n\n if form.validate_on_submit():\n username = form.username.data\n password = form.password.data\n email = form.email.data\n first_name = form.first_name.data\n last_name = form.last_name.data\n new_user = User.register(username, password, email, first_name, last_name)\n\n db.session.add(new_user)\n db.session.commit()\n session['username'] = new_user.username\n\n flash(f'Created {username} user.')\n return redirect('/users/<username>')\n\n else:\n return render_template('users/register.html', form=form)", "def registered_form():\n # print \"Hello POST\"\n # if request.method == \"POST\":\n reg_email = request.form.get(\"email\")\n\n reg_password = request.form.get(\"password\")\n\n # Get age value, or assign as None.\n if request.form.get(\"age\"):\n age = request.form.get(\"age\")\n else:\n age = None\n\n # Get zipcode value, or assign as None.\n if request.form.get(\"zipcode\"):\n zipcode = request.form.get(\"zipcode\")\n else:\n zipcode = None\n\n print reg_email\n\n if User.query.filter(User.email == reg_email):\n flash(\"There is already an account for that email address.\")\n return redirect('/')\n else:\n new_user = User(email=reg_email, password=reg_password, age=age, zipcode=zipcode)\n print new_user\n db.session.add(new_user)\n db.session.commit()\n \n return redirect(\"/\")", "def register():\n if request.method == 'POST':\n required_fields = ('name', 'username', 'password', 'email')\n for field in required_fields:\n if field not in request.form:\n return f'{field} is required!'\n name = request.form['name']\n username = request.form['username']\n password = request.form['password']\n email = request.form['email']\n\n # Create user object\n u = Users()\n u.name = name\n u.username = username\n u.generate_password_hash(password)\n u.email = email\n\n # Add user object to list of objects to be inserted\n objects = [u]\n\n # If you're a TSG member, you get some access by default\n if is_user_tsg(email):\n objects.append(Access(event='tsg', user=username))\n objects.append(Access(event='test_users', user=username))\n\n success, reason = insert(objects)\n\n if not success:\n return f'Error occurred, {reason}', 400\n log(f'User <code>{u.name}</code> has been registered!')\n\n # Login to the new user account!\n login_user(u)\n\n return (\n f\"Hello {username}, your account has been successfully created.<br/>You're logged into your account, feel \"\n f\"free to browse around \"\n )\n\n # Logout current user before trying to register a new account\n if not current_user.is_anonymous:\n logout_user()\n return render_template('register.html')" ]
[ "0.82840896", "0.82833594", "0.8074069", "0.79622245", "0.7919632", "0.7732329", "0.76864487", "0.7676215", "0.7661683", "0.7627001", "0.7598874", "0.7574743", "0.7571235", "0.7540624", "0.75386494", "0.75366706", "0.75335234", "0.75069493", "0.75018096", "0.7460415", "0.7452393", "0.7416859", "0.7408049", "0.7404077", "0.7388891", "0.73829633", "0.7376546", "0.7373179", "0.7369515", "0.7348243", "0.7341478", "0.734094", "0.7329672", "0.73026776", "0.7286652", "0.72755283", "0.7273622", "0.7273412", "0.7261144", "0.7257152", "0.7253936", "0.72414196", "0.7241115", "0.72332686", "0.722447", "0.7214578", "0.72080845", "0.7198129", "0.71929854", "0.7184044", "0.71663874", "0.7158702", "0.7152264", "0.7148238", "0.71233034", "0.7114835", "0.7105109", "0.70998347", "0.70894593", "0.7077246", "0.707625", "0.70726126", "0.70705515", "0.70669657", "0.70606154", "0.7057276", "0.7056604", "0.705208", "0.70377725", "0.7028117", "0.7015629", "0.70152736", "0.7014297", "0.6976343", "0.69737035", "0.69718504", "0.69710153", "0.6970078", "0.6969613", "0.6965653", "0.6964033", "0.6961946", "0.696094", "0.6954426", "0.69470584", "0.6933007", "0.6931788", "0.6931653", "0.6920289", "0.6896572", "0.6893813", "0.6893284", "0.6888571", "0.6880422", "0.6868251", "0.6862989", "0.685572", "0.6850515", "0.68431836", "0.68427265", "0.68354976" ]
0.0
-1
wanderlist list interactive list where users can add new items to their travel bucket list
def passport():
    user_id = session['user_id']
    # query users list items and load list items when user access passport page
    places = db.session.query(AdventureList.adventure_item).filter(AdventureList.user_id == user_id).all()
    # take users wanderlist and loads list items
    # create empty list bind list to variable name new_place_list
    new_place_list = []
    # iterate over items and append them to empty list starting from index[0]
    for item in places:
        new_place_list.append(item[0])
    print new_place_list
    return render_template('passport.html', places=new_place_list)
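Editor's aside, not part of the stored record: the view above builds the list with an explicit loop and a Python 2 print statement. A minimal Python 3 sketch of the same idea follows; the module paths app and app.models are assumptions for illustration, while db, session, AdventureList, and render_template are the names used in the snippet itself.

from flask import session, render_template
# Assumed locations for the app objects referenced in the snippet above.
from app import db
from app.models import AdventureList

def passport():
    user_id = session['user_id']
    # Each result row is a 1-tuple (adventure_item,), so unpack index 0.
    rows = (db.session.query(AdventureList.adventure_item)
            .filter(AdventureList.user_id == user_id)
            .all())
    places = [row[0] for row in rows]
    return render_template('passport.html', places=places)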
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_new_item():\n\n lst = item_list()\n return render_template('index.html', sell_flag=1, items=lst)", "def NewItems(self) -> _n_1_t_7:", "def view_list():\n # an HTML representation of the user shopping list\n printed_list = user[\"name\"]\n printed_list += \"<form>\"\n printed_list += '<br>'\n printed_list += 'New Item:<br>'\n printed_list += '<input type=\"text\" name=\"newitem\">'\n printed_list += '<br>'\n printed_list += '<input type=\"submit\" value=\"Submit\">'\n printed_list += \"</form>\"\n printed_list += list_to_html(user[\"shopping_list\"])\n\n return printed_list", "def add_element_function(listt):\n\n\tservice_name = str(input(\"Enter web site/app name: \"))\n\tuser_name = str(input(\"Enter user name: \"))\n\tuser_password = str(input(\"Enter user password: \"))\n\n\tnew_item = {\n\t\t\"service\": service_name, \n\t\t\"user\": user_name,\n\t\t\"password\": user_password\n\t\t}\n\n\tlistt.append(new_item)\n\tprint(\"Status: Element added\")\n\tprint(f\"Current elements: {len(listt)}\")\n\tprint()\n\treturn listt", "def new():\n list_new()", "def list(self):", "def manipulate_bucketlist():\n pass", "def DoAdd(self,event):\r\n newItem = self.data.add()\r\n if newItem and newItem not in self.items:\r\n self.items = self.data.getItemList()\r\n index = self.items.index(newItem)\r\n self.list.InsertItems([newItem],index)", "def add_item(todo_list):\r\n text = input(\"Please enter the name of the new item\\n\")\r\n priority = check_priority_overlap(\r\n int(clean_input(\"Please enter the priority of this item\")), todo_list)\r\n # group = int(clean_input(\"Please enter the group number of this item\"))\r\n group = 0 # Set the group value to zero, group system NYI\r\n visible = True\r\n todo_list.insert(0, ListItem(text, priority, group, visible)) # Join\r\n # the inputs to be added to the overall list\r\n return", "def list():", "def list():", "def handle_list_items(self, object, name, old, new):\n raise NotImplementedError", "def addItems(*args):", "def add_list(self):\n the_list = models.List(user_id=1,\n list_name=self.test_list,\n description=self.test_list_desc)\n the_list.add()", "def add_list(action, user):\n \n userprofile = user.get_profile()\n \n board = userprofile.get_board(action['boardId'])\n \n # Create the list\n l = List()\n l.title = action['what']['title']\n l.color = action['what']['color']\n l.creator = user\n l.save()\n \n # Add the list to the user's lists\n \n board.lists.append(l.id)\n userprofile.save()\n \n return l;", "def add_bucketlist_item(self, email, password, buckelist_id, item_name):\r\n test_date = str(date(2020, 9, 22))\r\n headers = self.authentication_headers(email=email, password=password)\r\n return self.client.post(\r\n '/api/v1/bucketlist/{}/items/'.format(buckelist_id),\r\n data=json.dumps({\"name\": item_name, \"finished_by\": test_date}),\r\n content_type=\"application/json\",\r\n headers=headers,\r\n follow_redirects=True\r\n )", "def ShowCurrentItemsInList(list_of_rows):\r\n print(\"******* The current items ToDo are: *******\")\r\n for row in list_of_rows:\r\n print(row.product_name + \" (\" + str(row.product_price) + \")\")\r\n print(\"*******************************************\")\r\n print() # Add an extra line for looks\r", "def build_item_list(client, word=''):\n\titem_group = client.search_metadata('collection_name:cooee')\n\titems = item_group.get_all()\n\titem_list_name = word + '_list'\n\titem_urls = []\n\tfor item in items:\n\t\tprimary_text = item.get_primary_text()\n\t\tif word in primary_text:\n\t\t\tprint 
item.url()\n\t\t\titem_urls.append(item.url())\n\t\t\t#client.add_to_item_list_by_name([item.url()], item_list_name)\n\n\tfor url in item_urls:\n\t\tclient.add_to_item_list_by_name(item_urls, item_list_name)", "def update_list_view(self):\n # Clear the list/tree view.\n self.list_view.clear()\n\n # Find all the selected things in Maya.\n selected = cmds.ls(selection=True)\n\n # For each of the selected things, create a widget item.\n for thing in selected:\n item = QtGui.QListWidgetItem(thing)\n item.setFlags(item.flags() | QtCore.Qt.ItemIsEditable)\n self.list_view.addItem(item)\n # Set the flags on the widget item so it is editable.", "def _add_item(self, item):\n for i in range(self.list_widget.count()):\n if item <= self.list_widget.item(i).text():\n self.list_widget.insertItem(i, item)\n return\n self.list_widget.addItem(item)", "def put_on_wish_list():\n book = request.form\n flash(\"The Wish list feature is under construction! Please check back soon!\")\n return render_template('book_details.html', list_of_books=book)", "def post_bucketlist():\n pass", "def _link_items(self):\n pass", "def addWebPageListToLayout(self, gridLayout, startingRow):\n\n from consider.notifications import options\n\n webPages = self.model.getWebPages()\n\n if verbose:\n print('DEBUG: current web pages: ' + str(webPages))\n row = startingRow\n\n webPageLabel = QLabel('WebPage:')\n gridLayout.addWidget(webPageLabel, row, 0, 1, 4)\n\n clientLabel = QLabel('Client')\n gridLayout.addWidget(clientLabel, row, 4)\n emailLabel = QLabel('Email')\n gridLayout.addWidget(emailLabel, row, 5)\n #smsLabel = QLabel('SMS')\n #gridLayout.addWidget(smsLabel, row, 6)\n frequencyLabel = QLabel('Frequency')\n gridLayout.addWidget(frequencyLabel, row, 7)\n minWordLabel = QLabel('Sensitivity')\n gridLayout.addWidget(minWordLabel, row, 8)\n\n for webPage in webPages:\n row = row + 1\n linkLineEdit = QLineEdit(webPage)\n gridLayout.addWidget(linkLineEdit, row, 0, 1, 4)\n\n clientCheck = QCheckBox()\n if options.NOTIFICATION_TYPE_CLIENT in webPages[webPage].getNotificationTypes():\n clientCheck.setChecked(1)\n self.connect(clientCheck,\n SIGNAL('stateChanged(int)'),\n self.checkBoxHandlerBuilder(webPage, options.NOTIFICATION_TYPE_CLIENT))\n gridLayout.addWidget(clientCheck, row, 4)\n\n emailCheck = QCheckBox()\n if options.NOTIFICATION_TYPE_EMAIL in webPages[webPage].getNotificationTypes():\n emailCheck.setChecked(1)\n self.connect(emailCheck,\n SIGNAL('stateChanged(int)'),\n self.checkBoxHandlerBuilder(webPage, options.NOTIFICATION_TYPE_EMAIL))\n gridLayout.addWidget(emailCheck, row, 5)\n\n #smsCheck = QCheckBox()\n #if options.NOTIFICATION_TYPE_SMS in webPages[webPage].getNotificationTypes():\n # smsCheck.setChecked(1)\n #self.connect(smsCheck,\n # SIGNAL('stateChanged(int)'),\n # self.checkBoxHandlerBuilder(webPage, options.NOTIFICATION_TYPE_SMS))\n #gridLayout.addWidget(smsCheck, row, 6)\n\n frequencySlider = QSlider(Qt.Horizontal)\n frequencySlider.setTracking(False)\n frequencySlider.setMaximum(options.MAX_FREQUENCY)\n frequencySlider.setMinimum(options.MIN_FREQUENCY)\n frequencySlider.setValue(webPages[webPage].getFrequency())\n self.connect(frequencySlider, SIGNAL('valueChanged(int)'), self.sliderChangeBuilder(webPage) )\n gridLayout.addWidget(frequencySlider, row, 7)\n\n wordCountSpinBox = QSpinBox()\n wordCountSpinBox.setMinimum(options.MIN_WC_THRESHOLD)\n wordCountSpinBox.setMaximum(options.MAX_WC_THRESHOLD)\n wordCountSpinBox.setValue(webPages[webPage].getWCThreshold())\n self.connect(wordCountSpinBox, 
SIGNAL('valueChanged(int)'), self.spinboxChangeBuilder(webPage))\n gridLayout.addWidget(wordCountSpinBox, row, 8)\n\n removeButton = QPushButton('Remove')\n self.connect(removeButton, SIGNAL('clicked()'), self.removeWebPageBuilder((webPage)))\n gridLayout.addWidget(removeButton, row, 10)\n \n # add a blank line for adding new entries\n row = row + 1\n self.newWebPageLink = QLineEdit(\"<Location>\")\n gridLayout.addWidget(self.newWebPageLink, row, 0, 1, 4)\n # FIXME\n #clientCheck = QCheckBox()\n #gridLayout.addWidget(clientCheck, row, 2)\n #emailCheck = QCheckBox()\n #gridLayout.addWidget(emailCheck, row, 3)\n #smsCheck = QCheckBox()\n #gridLayout.addWidget(smsCheck, row, 4)\n\n addButton = QPushButton(\"Add\")\n self.connect(addButton, SIGNAL(\"clicked()\"), self.addNewWebPage)\n gridLayout.addWidget(addButton, row, 10)\n return row+1", "def items():", "def items(self):", "def leader_list():\n\n add_trainee_form = AddTraineeForm()\n return render_template(\n \"leaders_list.html\",\n add_trainee_form=add_trainee_form,\n title=\"Encadrants\",\n )", "def add_command():\n backend.insert(title_text.get(),\n author_text.get(),\n year_text.get(), \n isbn_text.get())\n \n # listing.delete(0, END)\n listing.insert(END, \n (title_text.get(),\n author_text.get(), \n year_text.get(), \n isbn_text.get()))", "def update_bucketlist():\n pass", "def add(table):\n\n list_to_add=ui.get_inputs(list_labels,\"\")\n \n list_to_add.insert(0,common.generate_random(table))\n\n table.append(list_to_add)\n return table", "def update(self):\n brains = self.query\n items_with_bodytext = ['Document', 'News Item']\n folderish_items = ['Folder', 'nva.flexfolder.flexfolder']\n counter = 1\n objectlist = []\n for i in brains:\n entry = {}\n if i.portal_type in items_with_bodytext:\n obj = i.getObject()\n entry['title'] = obj.Title()\n entry['desc'] = obj.Description()\n entry['text'] = obj.getText()\n entry['marker'] = 'collapse-%s' % counter\n if i.portal_type in folderish_items:\n info = self.createHtmlSnippet(i.getObject())\n if not info:\n info = u'<p>Für weitere Informationen klicken Sie bitte <a class=\"internal-link\" href=\"%s\">hier.</a></p>' %i.getURL() \n entry['title'] = i.Title\n entry['desc'] = i.Description\n entry['text'] = info\n entry['marker'] = 'collapse-%s' % counter\n else:\n info = u'<p>Für weitere Informationen klicken Sie bitte <a class=\"internal-link\" href=\"%s\">hier.</a></p>' %i.getURL() \n entry['title'] = i.Title\n entry['desc'] = i.Description\n entry['text'] = info\n entry['marker'] = 'collapse-%s' % counter\n objectlist.append(entry)\n counter += 1\n self.objectlist = objectlist", "def list(\n self,\n name,\n ):\n pass", "async def __list(self, ctx):\n server = ctx.message.server\n if server.id not in self.db:\n self.db[server.id] = {}\n self.save_db()\n else:\n db = self.db[server.id]\n if \"bookkeeper\" not in self.db[server.id]:\n self.db[server.id][\"bookkeeper\"] = []\n self.save_db()\n await self.bot.say(\"Bookkeeper list is currently empty, add new bookkeepers using points keeper add\"\n \" <Discord name or nickname>\")\n return\n else:\n bookkeeper = db[\"bookkeeper\"][:]\n msg = \"\"\n for x in bookkeeper:\n bookkeeper[bookkeeper.index(x)] = discord.utils.find(lambda N: N.id == x, server.members).display_name\n bookkeeper = sorted(bookkeeper, key=lambda item: (int(item.partition(' ')[0])\n if item[0].isdigit() else float('inf'), item))\n msg = \", \".join(bookkeeper[:-2] + [\" and \".join(bookkeeper[-2:])])\n await self.bot.say(\"Current bookkeepers assigned are: 
{}\".format(msg))", "def create_work_item(self):", "def add_joint_to_list(list_widget, combo_box, add_btn, del_btn, forward):\n\n global ftm_list # Forward transition matrices list\n global btm_list # Backward transition matrices list\n global robot_obj\n\n # Getting the current item\n ind = combo_box.currentIndex()\n\n # Finding the associated joint\n i_joint = 0\n for _, _, node in robot_obj.tree:\n type_, nb = node.name.split('_')\n nb = int(nb)\n\n if type_ == 'joint':\n if forward:\n if 'joint_' + str(nb) in ftm_list:\n i_joint += 1\n continue\n else:\n if 'joint_' + str(nb) in btm_list:\n i_joint += 1\n continue\n if ind == nb:\n text = robot_obj.joints[nb].name\n list_widget.addItem(text)\n\n # Disabling the item in the combo box\n combo_box.model().item(i_joint).setEnabled(False)\n\n # If all the joints are added\n if list_widget.count() == combo_box.count():\n add_btn.setEnabled(False)\n del_btn.setEnabled(True)\n\n if forward:\n ftm_list.append(\"joint_\" + str(nb))\n else:\n btm_list.append(\"joint_\" + str(nb))\n\n i_joint += 1", "def __init__(self):\n self.item_list = []", "def wants_new(self, name):\n self.wants[name] = []\n self.db.wants_new(name)\n util.log(\"Want list '\" + name + \"' created\", util.LogLevel.Info)\n self.push_status(\"Created want list '\" + name + \"'\")", "def _create_list_item(self, str):\n para = nodes.paragraph()\n para += nodes.strong('', str)\n\n item = nodes.list_item()\n item += para\n\n return item", "def addItem(list,item):\n print \"I added this item: \", item\n list.append(item)", "def setup_lists(self):\n pass", "def add_task(action, user):\n \n item = Item()\n item.description = action['what'].get('description', '')\n item.id = action['what']['id']\n item.position = action['what']['position']\n \n l = List.objects.get(id=action['listId'])\n verify_permission(l, user)\n \n l.items.append(item)\n l.save()\n \n return l", "async def _list(self, ctx):\n server = ctx.message.server\n if server.id not in self.db:\n self.db[server.id] = {}\n self.save_db()\n await self.bot.say(\"List is empty, please add members first using [p]points add <Discord name or nickname>\")\n return\n else:\n db = self.db[server.id]\n try:\n columns = [sorted([y for y in self.db[server.id][x].keys()], reverse=True)\n for x in db if x != \"bookkeeper\"][0]\n i, j = columns.index(columns[1]), columns.index(columns[2])\n columns[i], columns[j] = columns[j], columns[i]\n rows = sorted([[db[x][\"Name\"], db[x][\"Lifetime Gain\"], db[x][\"Lifetime Loss\"], db[x][\"Balance\"]]\n for x in db if x != \"bookkeeper\"], key=itemgetter(3, 0), reverse=True)\n except IndexError:\n await self.bot.say(\"No one has been added to the list, please use points member add\"\n \" <Discord name or nickname> to do so first.\")\n return\n if len(rows) > 15:\n n = 14\n l = 15\n m = 0\n\n for x in range(n, len(rows)+15, l):\n if x == n:\n await self.bot.say(box(tabulate(rows[:x], headers=columns), lang=\"prolog\"))\n else:\n await self.bot.say(box(tabulate(rows[m:x], headers=columns), lang=\"prolog\"))\n m = x\n else:\n await self.bot.say(box(tabulate(rows, headers=columns), lang=\"prolog\"))", "def handle_list_items(self, object, name, old, new):\n self.handle_list(object, name, new.removed, new.added)", "def add_shoppinglist(self, user_id, name):\n new_shoppinglist = ShoppingList(name)\n new_shoppinglist_details = new_shoppinglist.get_details()\n user = self.get_single_user(user_id)\n new_shoppinglist_details['id'] = len(user['shopping_lists']) + 1\n for item in user['shopping_lists']:\n if 
item['name'].lower() == name.lower():\n return \"Shopping list \" + str(name) + \" exits. Try editing it\"\n if new_shoppinglist_details['id'] == item['id']:\n new_shoppinglist_details['id'] = (\n new_shoppinglist_details['id'] + 1\n )\n user['shopping_lists'].append(new_shoppinglist_details)\n return \"Shopping list \" + str(name) + \" Created\"", "def do_list_items(self, arg):\n try:\n cprint (\"These are your items: \\n\", 'blue')\n my_items = arg[\"<all_items>\"]\n choice = arg[\"--choice\"]\n if choice == \"name\":\n my_items_str = \" \".join(my_items)\n print(my_items_str)\n elif choice == \"id\":\n my_items_str = int(\" \".join(my_items))\n print (my_items_str)\n app.ToDoApp.to_view_items(my_items_str)\n \n\n\n \n except ValueError as e:\n cprint((e), 'red')", "def _additems(self, w,h):\n for idx in range(len(self.data['items'])):\n default={\n 'color': self.data['itemscolor'],\n 'textscale': self.data['itemsscale'],\n 'textfont': self.data['textfont'],\n 'width': w-(self.data['margin'][0]*2.),\n }\n self.data['items'][idx].update(default)\n self.addItem(idx, **self.data['items'][idx])", "def add_plan_item(self, plan_container, plan_li):\n new_steps_li = []\n new_plan_frame = ttk.Frame(plan_container)\n new_plan_frame.pack(side=tk.TOP, expand=tk.YES, fill=tk.X)\n\n \"\"\"Description\"\"\"\n description_row = ttk.Frame(new_plan_frame)\n description_row.pack(expand=tk.YES, fill=tk.X)\n\n description_lab = ttk.Label(\n description_row,\n text=\"Description\",\n font=ANNOTATE_FONT)\n description_lab.pack(side=tk.LEFT, anchor='nw',\n pady=SMALL_PAD, padx=(0, SMALL_PAD))\n\n \"\"\"Delete Button\"\"\"\n TRASH_ICON = PhotoImage(file=\"Icon/trash.png\").subsample(4, 4)\n\n new_delete_button = ttk.Button(\n description_row,\n text=\"Delete\",\n image=TRASH_ICON,\n command=lambda: self.delete_item(full_plan_entry, plan_li,\n plan_container, item_type=\"plan\"))\n new_delete_button.image = TRASH_ICON\n new_delete_button.pack(side=tk.LEFT)\n new_delete_button.config(width=SMALL_BUTTON_WIDTH)\n\n \"\"\"Description Entry Box\"\"\"\n new_plan_box = ttk.Entry(description_row)\n new_plan_box.pack(side=tk.LEFT, expand=tk.YES, fill=tk.X)\n\n \"\"\"Aux Info\"\"\"\n aux_row = ttk.Frame(new_plan_frame)\n aux_row.pack(expand=tk.YES, fill=tk.X)\n\n \"\"\"Status\"\"\"\n\n status_title_lab = ttk.Label(\n aux_row,\n text=\"Status\\t\",\n font=ANNOTATE_FONT)\n status_title_lab.pack(side=tk.LEFT, anchor='nw',\n pady=SMALL_PAD, padx=(0, 0))\n\n status_menu = ttk.Combobox(aux_row,\n values=PLAN_STATUSES,\n style=COMBOBOX_STYLE,\n state=\"readonly\",\n justify=tk.CENTER, width=10)\n status_menu.current([0])\n status_menu.pack(side=tk.LEFT)\n\n status_title_lab = ttk.Label(\n aux_row,\n text=\"\\tPriority\\t\",\n font=ANNOTATE_FONT)\n status_title_lab.pack(side=tk.LEFT, anchor='nw',\n pady=SMALL_PAD, padx=(0, SMALL_PAD))\n\n \"\"\"Priority\"\"\"\n priority_menu = ttk.Combobox(aux_row,\n values=PRIORITY_LVL,\n style=COMBOBOX_STYLE,\n state=\"readonly\",\n justify=tk.CENTER, width=10)\n priority_menu.pack(side=tk.LEFT)\n\n \"\"\"\n Steps\n \"\"\"\n steps_row = ttk.Frame(new_plan_frame)\n steps_row.pack(fill=tk.X, pady=(0, SMALL_PAD))\n\n steps_lab = ttk.Label(steps_row,\n text=\"Steps\\t\",\n font=ANNOTATE_FONT)\n steps_lab.pack(side=tk.LEFT, anchor='nw',\n pady=SMALL_PAD, padx=(0, SMALL_PAD))\n\n add_step_button = ttk.Button(steps_row, text=\"+ step\",\n command=lambda: self.add_step_item(steps_row, new_steps_li))\n add_step_button.pack(side=tk.TOP, pady=(SMALL_PAD, 0))\n\n \"\"\"Draft\"\"\"\n draft_row = 
ttk.Frame(new_plan_frame)\n draft_row.pack(side=tk.TOP, expand=tk.YES, fill=tk.X,\n pady=(0, SMALL_PAD))\n\n draft_title_lab = ttk.Label(\n draft_row,\n text=\"Draft\\t\",\n font=ANNOTATE_FONT)\n draft_title_lab.pack(side=tk.LEFT, anchor='nw',\n pady=SMALL_PAD)\n draft_check_var = tk.StringVar()\n draft_check_button = ttk.Checkbutton(\n draft_row, variable=draft_check_var,\n onvalue=\"Draft\", offvalue=\"Log\")\n draft_check_var.set(\"Log\")\n\n draft_check_button.pack(side=tk.LEFT, padx=(SMALL_PAD, 0))\n\n \"\"\"Append full plan entry\"\"\"\n\n full_plan_entry = {\"Description\": new_plan_box,\n \"Plan_Type\": draft_check_var,\n \"Status\": status_menu,\n \"Priority\": priority_menu,\n \"Steps\": new_steps_li}\n plan_li.append(full_plan_entry)", "def add_list_data(self, data):\n\n # TODO: I'd like to use, say, a QListWidget or something, but controlling the widget\n # height on those was annoying, and I wanted the items to be easily copy+pasteable.\n # In the end I'm just going with a multiline QLabel inside a QScrollArea\n\n if len(data) == 0:\n return None\n\n scroll = QtWidgets.QScrollArea(self)\n scroll.setFrameShadow(QtWidgets.QFrame.Sunken)\n scroll.setFrameShape(QtWidgets.QFrame.Panel)\n w = QtWidgets.QLabel('<tt>{}</tt>'.format('<br/>'.join(data)), self)\n w.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse)\n scroll.setWidget(w)\n self.grid.addWidget(scroll, self.cur_row, 1)\n return w", "def test_create_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n response = self.client.get(\n \"/bucketlists/1\", headers={\n \"Authorization\": self.token})\n self.assertEqual(response.status_code, 200)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item.data)\n self.assertEqual(result[\"message\"],\n \"Bucket list item added successfully.\")\n self.assertEqual(resp.status_code, 201)", "def push_addr_reservation_list(self, lst_new):\n self.__not_implemented()", "def get_wish_lists():\n flash(\"The Wish list feature is under construction! 
Please check back soon!\")\n return render_template('index.html')", "def handle_list(self, object, name, old, new):\n raise NotImplementedError", "def add(self, item):", "def update_cloud_watch_obj_list(old_list, new_list):\n\n # Add new.\n for new_item in new_list:\n if new_item not in old_list:\n new_item.added = True\n old_list.append(new_item)\n\n # Remove deleted.\n for old_item in old_list:\n if old_item not in new_list:\n old_list.remove(old_item)\n\n return old_list", "def create_dashboard(wlist):\n wb = openpyxl.Workbook()\n page = wb.active\n page.title = 'Dashboard'\n # TODO add in other tabs per project\n # TODO change WO # to be a URL\n # TODO formatting\n # TODO names ranges, so this can just feed in the raw data, and charts & pivots can be done off that?\n\n # populate dashboard tab with a header\n wb['Dashboard'].append(wlist[0].get_dashboard_header())\n # populate dashboard tab with each WO\n counter = 2 #offset to find the first cell with a WO in it \"A2\"\n for row in wlist:\n # add the meat\n wb['Dashboard'].append(row.get_dashboard_content())\n # convert the WO number into a link\n wb['Dashboard'].cell('A' + str(counter)).hyperlink = row.wo_link\n counter += 1\n\n # add in autofilter, automatically determining the last column to use\n wb['Dashboard'].auto_filter.ref = 'A1:' + chr(ord('A') -1 + len(wlist[0].get_dashboard_header())) + '1'\n print wb['Dashboard'].auto_filter.ref\n wb['Dashboard'].auto_filter.add_sort_condition('D1:D1')\n\n # commit sheet to file, beware - this silently overwrites!\n wb.save('EVM Dashboard.xlsx')", "def add_com(ui):\n global com_list\n\n content = content_fk_jac_loops(ui, \"com\")\n if content in com_list:\n return\n com_list.append(content)\n ui.listWidget_com.addItem(f\"Center of Mass {parse_content(content)}\")", "def create_bucketlist(self, title, intro):\n bucketlist_ = Bucketlist(owner_id=self._id,\n title=title,\n intro=intro,\n owner=self.username)\n bucketlist_.save_to_bucketlists()", "def update_lst(button_1):\r\n\r\n if head_lst:\r\n\r\n if check_click_legal(head_lst[-1],\r\n button_1) and button_1 not in head_lst:\r\n button_1['bg'] = 'Purple'\r\n wrd_lst.append(button_1['text'])\r\n head_lst.append(button_1)\r\n word_display.configure(text=\"\".join(wrd_lst))\r\n for i in head_lst:\r\n if i != head_lst[-1]:\r\n i['bg'] = 'Cyan'", "def __addList(self, node, name):\n\n if name == \"OrderedList\":\n listId = self.letter.addList(List.ARABIC)\n else:\n listId = List.BULLETED\n self.pieces.append(f\"{{\\\\li580{{\\\\ls{listId:d} \")\n for item in node.findall(\"ListItem\"):\n self.pieces.append(self.__getText(item).strip())\n self.pieces.append(\"\\\\par\\n\")\n self.pieces.append(\"}}\\n\\\\par\\n\")", "def change_list(ctx, start, count):\n data = ctx.obj.get_all_changes(start=start, count=count)\n output_json_data(data)", "def addItem(*args):", "def addItem(*args):", "def addItem(*args):", "def __append_to_item_list(self):\n Item.get_item_list().append(self)", "def showlist(liste):\n lines = []\n for configuration in liste:\n for line in gentikz(configuration.copy()):\n lines.append(line)\n lines.append(r\"\\newpage\")\n return lines", "def do_list(self, arg):\n try:\n cprint (\"Here are your todo lists: \\n\", 'blue')\n app.ToDoApp.to_view_todo()\n\n except ValueError as e:\n cprint(e, 'red')", "def handleList(self, _): # pylint: disable=invalid-name", "def _button_generate_list_clicked(self, *a):\r\n _debug('GUISignalGenerator: _button_generate_list_clicked()', a)\r\n \r\n N = self.settings['Generate-List/Steps']\r\n \r\n # 
Generate a list in the direction we wish to step\r\n if self.settings['Generate-List/Direction'] == '1->2':\r\n f1 = self.settings['Generate-List/f1']\r\n f2 = self.settings['Generate-List/f2']\r\n d1 = self.settings['Generate-List/P1']\r\n d2 = self.settings['Generate-List/P2']\r\n else:\r\n f1 = self.settings['Generate-List/f2']\r\n f2 = self.settings['Generate-List/f1']\r\n d1 = self.settings['Generate-List/P2']\r\n d2 = self.settings['Generate-List/P1']\r\n \r\n # Get the powers in mW\r\n P1 = 10**(d1/10.0)\r\n P2 = 10**(d2/10.0)\r\n \r\n # Clear the plot\r\n self.plot_list.clear()\r\n self.settings.send_to_databox_header(self.plot_list)\r\n\r\n # Indices\r\n self.plot_list['n'] = _n.linspace(0, N-1, N)\r\n \r\n # Create the sweep in either linear or log space.\r\n if self.settings['Generate-List/Mode'] == 'Linear':\r\n self.plot_list['f_Hz'] = _n.linspace(f1, f2, N)\r\n self.plot_list['P_dBm'] = 10*_n.log10(_n.linspace(P1, P2, N))\r\n \r\n # Log steps\r\n else:\r\n self.plot_list['f_Hz'] = _s.fun.erange(f1, f2, N)\r\n self.plot_list['P_dBm'] = _n.linspace (d1, d2, N)\r\n \r\n # Plot it\r\n self.plot_list.plot()\r\n self.window.process_events()\r\n \r\n # Enable send list\r\n self.button_send_list.enable()\r\n \r\n # Remove visible knowledge\r\n self.label_list_status.set_text('Shown list does not match device list.')", "def addToWatchlist(self, items):\n if not isinstance(items, list):\n items = [items]\n\n for item in items:\n if self.onWatchlist(item):\n raise BadRequest(f'\"{item.title}\" is already on the watchlist')\n ratingKey = item.guid.rsplit('/', 1)[-1]\n self.query(f'{self.METADATA}/actions/addToWatchlist?ratingKey={ratingKey}', method=self._session.put)\n return self", "def add_new_item(self):\n self.recs += 1\n self.add_rec_fields(self.recs)\n vbar = self.scrl.verticalScrollBar()\n vbar.setMaximum(vbar.maximum() + 36)\n vbar.setValue(vbar.maximum())", "def list():\n data = getInstaData()\n return render_template(\"list.html\", data=data)", "def getListItem(*args):", "def getListItem(*args):", "def getListItem(*args):", "def swing_list(self):\n return None", "def showList(parent,header,items,maxItems=0,title=''):\r\n numItems = len(items)\r\n if maxItems <= 0: maxItems = numItems\r\n message = string.Template(header).substitute(count=numItems)\r\n message += '\\n* '+'\\n* '.join(items[:min(numItems,maxItems)])\r\n if numItems > maxItems:\r\n message += _('\\n(And %d others.)') % (numItems - maxItems,)\r\n return askStyled(parent,message,title,wx.OK)", "def add_step_item(self, steps_container, steps_li):\n if len(steps_li) == 0:\n add_step_button_name = steps_container.winfo_children()[1]\n add_step_button = steps_container._nametowidget(\n add_step_button_name)\n add_step_button.destroy()\n\n new_step_row = ttk.Frame(steps_container)\n new_step_row.pack(\n side=tk.TOP, expand=tk.YES, fill=tk.X)\n\n check_var = tk.StringVar()\n new_step_checkb = ttk.Checkbutton(\n new_step_row, variable=check_var,\n onvalue=\"Completed\", offvalue=\"Incomplete\")\n check_var.set(\"Incomplete\")\n\n new_step_checkb.pack(side=tk.LEFT)\n\n new_step_box = ttk.Entry(new_step_row)\n new_step_box.pack(side=tk.LEFT, expand=tk.YES, fill=tk.X)\n\n full_step_entry = {\"Description\": new_step_box, \"Status\": check_var}\n steps_li.append(full_step_entry)\n\n \"\"\"Add Button\"\"\"\n\n new_button_add_new = ttk.Button(\n new_step_row, text=\"+\",\n command=lambda: self.add_step_item(steps_container, steps_li))\n new_button_add_new.pack(side=tk.LEFT)\n 
new_button_add_new.config(width=SMALL_BUTTON_WIDTH)\n\n \"\"\"Delete Button\"\"\"\n new_delete_button = ttk.Button(\n new_step_row, text=\"-\",\n command=lambda: self.delete_item(\n full_step_entry, steps_li, steps_container, item_type=\"step\")\n\n )\n new_delete_button.pack(side=tk.LEFT)\n new_delete_button.config(width=SMALL_BUTTON_WIDTH)", "def wishlist(request):\n items = []\n user = get_object_or_404(UserProfile, user=request.user)\n wishlist = Wishlist.objects.get_or_create(user=user)\n wishlist_user = wishlist[0]\n existingWishlist = WishlistItem.objects.filter(\n wishlist=wishlist_user).exists()\n\n if existingWishlist:\n user_wishlist = get_list_or_404(WishlistItem, wishlist=wishlist_user)\n for obj in user_wishlist:\n product = get_object_or_404(Product, name=obj)\n items.append(product)\n context = {\n 'wishlist': True,\n 'products': items\n }\n return render(request, 'wishlist/wishlist.html', context)\n\n else:\n context = {\n 'wishlist': False,\n }\n return render(request, 'wishlist/wishlist.html', context)", "def add_items(list_id):\n\n item_title = request.form[\"item_title\"]\n item_description = request.form[\"item_description\"]\n user_id = session.get(\"user_id\")\n\n if not user_id:\n raise Exception(\"No user logged in.\")\n\n to_do_list = ToDoList.query.get(list_id)\n\n new_item = ToDoItem(item_title=item_title,\n item_description=item_description)\n to_do_list.to_do_items.append(new_item)\n db.session.add(new_item)\n db.session.commit()\n\n return redirect(f\"/lists/{list_id}\")", "def create_banner_list():\n template_vars = {\n 'title' : 'Banners - ' + sitesettings.SITE_NAME,\n 'siteurl' : sitesettings.SITE_URL,\n 'sitename' : sitesettings.SITE_NAME,\n 'meta_desc' : 'List of step-up banners in Final Fantasy Brave Exvius (FFBE)',\n 'last_four_banners' : nav.get_last_four_banners('all'),\n 'all_banner_info' : get_all_banner_info(),\n }\n\n bn_path = os.path.join(sitesettings.LOCAL_FILE_PATH, 'banner')\n\n if not os.path.exists(bn_path):\n os.makedirs(bn_path)\n\n template_file = 'bannerlist.html'\n html_file_loc = os.path.join(bn_path, 'index.html')\n generatehtml.generate_html(\n html_file_loc, template_file, template_vars, os.path.join(os.getcwd(), 'templates'))", "def add_list(user_id):\n\n list_title = request.form[\"list_title\"]\n user_id = session.get(\"user_id\")\n\n if not user_id:\n raise Exception(\"No user logged in.\")\n\n to_do_list = ToDoList.query.filter_by(list_title=list_title).first()\n\n if to_do_list:\n flash(\"List name already exists. 
Please select a new name.\")\n return redirect(\"/dashboard\")\n\n new_list = ToDoList(list_title=list_title, user_id=user_id)\n \n db.session.add(new_list)\n db.session.commit()\n \n return redirect(\"/dashboard\")", "def post(self, user):\n # parse request data\n bucketlist_name = self.request.form['name']\n\n # validate bucketlist\n if not bucketlist_name:\n return \"Name cannot be empty\", 401\n\n # create bucketlist and save bucketlist\n bucketlist = Bucketlist(name=bucketlist_name, date_created=datetime.utcnow(\n ), created_by=user.username, author=user)\n bucketlist.save()\n\n return \"Successfully created bucketlist\", 201", "def create_and_update_menu(self, list_of_news):\n self.create_menu(list_of_news)", "def _add_todo_items(self):\n\n todo_list = ToDoList(day=self.day, user=self.user.user.rolllistuser)\n todo_list.save()\n\n items = [\n 'feed the cats',\n 'drive to work',\n 'read a book',\n 'eat some food',\n ]\n todo_items = []\n for item in items:\n new_item = ToDoItem(\n title=item,\n to_do_list=todo_list,\n priority=1\n )\n new_item.save()\n todo_items.append(new_item)\n return todo_items", "def add_wishlist_ajax(request, template_name=\"picks/list.html\"):\n\n if request.method == 'POST':\n name = request.POST.get('name')\n wishlist = Wishlist.objects.get_or_create(customer=request.user,name=name)\n\n return render_to_response(template_name, {\n \"wishlists\": wishlist, \n }, context_instance=RequestContext(request))", "def test_add_bucketlist_items(self):\r\n email = \"[email protected]\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(email, _pword, bucketlist.id, \"bucketlist item name\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(result['message'], 'Bucket list item added')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertLess(item_no, new_item_no)", "def updateGrooveList(n):\n\n global mkGrooveList\n\n mkGrooveList.append(n)", "def list_viewer(listt):\n\tif len(listt) == 0:\n\t\tprint(\"There are no elements\")\n\t\tprint()\n\telse:\n\t\ti = 0\n\t\tfor dictionary in listt:\n\t\t\ti += 1\n\t\t\tprint(f\"Account #{i} »»\")\n\t\t\tprint(\n\t\t\t\t\"\\tService Name: \", dictionary[\"service\"], \"\\n\",\n\t\t\t\t\"\\tUser Name: \", dictionary[\"user\"], \"\\n\",\n\t\t\t\t\"\\tPassword: \", dictionary[\"password\"], \"\\n\",\n\t\t\t\t)", "def repopulate(self):\n new_items = self._list_populate_function()\n\n new_set = set(new_items.values() if isinstance(new_items, dict) else new_items)\n\n if len(new_items) != len(self._display_list):\n if isinstance(new_items, dict):\n # for dictionaries store the key as user role data\n for key in sorted(new_items.keys()):\n item = new_items[key]\n if item not in self._display_list:\n self.list_widget.addItem(item)\n self.list_widget.item(self.list_widget.count() - 1).setData(Qt.UserRole, key)\n else:\n for item in new_items:\n if item not in self._display_list:\n self._add_item(item)\n self._display_list = sorted(set(new_set) | set(self._display_list))", "def create_list(starting, ending):\n pass # remove this line when starting your function", "def view_wishlist(request):\n result = {'wishes':[]}\n\n u = request.user\n\n wishes = 
Wishlist.objects.filter(party=u).exclude(fulfilled=True).order_by('-added') \n\n result['wishes'] = [w.get_json(level=1, me=u) for w in wishes]\n\n return JSONHttpResponse(result)", "def add_to_list(my_list):\n\n list_item = raw_input(\"What would you like to add to the list? \")\n my_list.append(list_item)", "def public_list(request, username, list):\n if request.user.is_authenticated:\n return HttpResponseRedirect('/person/%s/%s/' % (username, list))\n else:\n person = get_object_or_404(UserProfile, username=username).user\n args = fetch_profile(request, person)\n\n if list == 'listings':\n listings = []\n for copy in person.own_books.all().order_by('-add_time'):\n item = {}\n item['book'] = copy.book\n item['courses'] = [course for course in copy.book.courses.all()]\n item['copy'] = copy\n listings.append(item)\n args['listings'] = listings\n\n elif list == 'wishlist':\n wish_books = []\n for book in person.wish_books.all():\n item = {}\n item['book'] = book\n item['courses'] = [course for course in book.courses.all()]\n wish_books.append(item)\n\n args['wish_books'] = wish_books\n\n return render_to_response('profile/public_profile_%s.html'%list, args, context_instance=RequestContext(request))", "def edit_list(action, user):\n \n editable_attributes = ('title', 'color', 'collapse')\n \n l = List.objects.get(id=action['listId'])\n verify_permission(l, user)\n \n for key, value in action['what'].iteritems():\n if key in editable_attributes:\n l.__setattr__(key, value)\n l.save()\n \n return l", "def _list(self):\n raise NotImplementedError", "def Push(self, item):\n self.list.append(item)", "def _apply(self, list_type):\n self.editor.web.eval(\"\"\"\n document.execCommand('insertUnorderedList');\n var ulElem = window.getSelection().focusNode.parentNode;\n if (ulElem !== null) {\n var setAttrs = true;\n while (ulElem.toString() !== \"[object HTMLUListElement]\") {\n ulElem = ulElem.parentNode;\n if (ulElem === null) {\n setAttrs = false;\n break;\n }\n }\n if (setAttrs) {\n ulElem.style.listStyleType = \"%s\";\n }\n }\n \"\"\" % list_type)", "def collect(item):\n inventory.append(item)\n print(f'You now have: {inventory}')", "def watchlists(request):\n \"\"\"\n Alternative to @login_required decorator: manually test with:\n request.user.is_authenticated\n \"\"\"\n assert isinstance(request, HttpRequest)\n\n # Get all of the user's watchlists\n watchlists = WatchList.objects.filter(user=request.user).all()\n \n # Store the stocks in each watchlist in a dictionary\n # Each key is the watchList_name from the user's watchlists\n # Each value is a list of Stocks (as StockList model objects) \n # present in the watchlist\n stocks = []\n counter = 0\n\n for w in watchlists:\n stocks.append([])\n for stock in w.stockResults.all():\n # No need to check if key is in the dict, since \n # it is added above\n stocks[counter].append(stock)\n counter += 1\n\n print(f'Watchlists:{watchlists}\\tStocks:{stocks}')\n\n if watchlists.count() != 0 and len(stocks) != 0:\n watchlist_stocks = zip(watchlists, stocks)\n else:\n watchlist_stocks = None\n\n context = {\n 'title':'Watchlists',\n 'message':'Your Watchlist page.',\n 'year':datetime.now().year,\n 'user': request.user,\n 'data': watchlist_stocks,\n }\n\n return render(\n request,\n 'app/watchlists.html',\n context\n )", "def build_watchlist(wl_cnt=15, dir=\"gainer\"):\n\n print(\"Running build_watchlist function\")\n # Ping WeBull\n l_watchlist = []\n premkt_gnrs = wb.active_gainer_loser(direction=dir, rank_type=rank_type, count=wl_cnt)\n ### Must dig 
into list within list\n l_tickers = [r[\"ticker\"][\"symbol\"] for r in premkt_gnrs[\"data\"]]\n print(f\"Watchlist: {l_tickers}\")\n for ticker in l_tickers:\n print(f\"Getting fundamentals for {ticker}\")\n ticker_dct = fundamentals(ticker)\n #### Add to full DF to be reported at end of processing\n l_watchlist.append(ticker_dct)\n\n print(\"Built watchlist. Writing data out to files.\")\n # Write data out to file for storage\n with open(f\"{cur_wd}/bigbeta/stocks/history/{rank_type}/dt_{cur_dt}__tm__{cur_tm}.json\", \"w\") as f:\n json.dump(l_watchlist, f)\n # Write it out again to an overwritten file for easy retrieval\n with open(f\"{cur_wd}/bigbeta/stocks/current_run/{rank_type}/current_data.json\", \"w\") as f:\n json.dump(l_watchlist, f)\n # Writes out time of last run\n with open(f\"{cur_wd}/bigbeta/stocks/current_run/{rank_type}/last_run.txt\", \"w\") as f:\n f.write(f\"{rank_type} last ran at {cur_tm_log} EST\")\n\n print(\"Finished build_watchlist.\")\n return l_watchlist" ]
[ "0.63726956", "0.5953915", "0.58900404", "0.56818676", "0.5578206", "0.5571558", "0.5569564", "0.55676425", "0.55432063", "0.5523418", "0.5523418", "0.55003536", "0.5476887", "0.5428944", "0.5371039", "0.53561133", "0.53371036", "0.53357625", "0.5332503", "0.53266263", "0.5324393", "0.5323742", "0.5319222", "0.5315521", "0.53095496", "0.5307342", "0.5307015", "0.5305015", "0.5301567", "0.5293908", "0.52860206", "0.5282536", "0.5279462", "0.52672315", "0.52658296", "0.5256508", "0.524911", "0.5248305", "0.52458125", "0.52270573", "0.5212655", "0.5209978", "0.5206353", "0.51977557", "0.5194451", "0.5189935", "0.5171588", "0.51652676", "0.5164177", "0.51546", "0.5149827", "0.51472", "0.51459", "0.51388234", "0.5138412", "0.513459", "0.51298183", "0.5121901", "0.51178944", "0.51073086", "0.5106495", "0.5106495", "0.5106495", "0.51059556", "0.5103931", "0.5102793", "0.5102586", "0.51013696", "0.5100721", "0.509821", "0.5097191", "0.5095742", "0.5095742", "0.5095742", "0.5094754", "0.50835836", "0.50803536", "0.50746334", "0.5060354", "0.5056915", "0.50545305", "0.50518584", "0.5044235", "0.5039495", "0.5036997", "0.5025819", "0.5021927", "0.5011102", "0.5006463", "0.5006403", "0.5001578", "0.49957335", "0.49940124", "0.49908426", "0.49879748", "0.49876148", "0.49866852", "0.4979348", "0.49758568", "0.4974915" ]
0.54712594
13
user clicks on state, when color changes, state is stored as a visit.
def state_map():
    # get current user from session
    user_id = session["user_id"]
    print user_id
    # inputs from state map in console.log [feature.id] = state_id feature = state
    state_id = request.form['feature_id']
    print state_id
    state = db.session.query(State).filter_by(state_id=state_id).one()
    user_state_obj = User_State(state_id=state_id, user_id=user_id, visited_at=datetime.now())
    db.session.add(user_state_obj)
    db.session.commit()
    user_state_json_data = {"state_id": state.state_id, "state_name": state.state_name, "visited_at": user_state_obj.visited_at}
    return jsonify(user_state_json_data)

################## REMOVING ##########################
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_click_state(self, event):\n raise NotImplementedError", "def select_action(self, state):", "def state_changed(self, oldstate, newstate, event, *args, **kwargs):", "def state_changed(self, oldstate, newstate, event, *args, **kwargs):", "def on_state_change(self, new_state):\n self.state = new_state", "def act(self, state):\n return", "def state(self, state: str) -> None:", "def take_action(self, state):", "def set_state( self ):", "def change_state(self):\n transitions = self.transition_map[self.current_state]\n self.current_state = select_from_probability_dict(random(),transitions)", "def clicked(self, event=None):\n\t\tif event.widget == self.history:\n\t\t\tselection = self.history.curselection()\n\t\t\tif selection:\n\t\t\t\tself.next_generation(self.history.get(selection))\n\t\telse:\n\t\t\tself.next_generation(event.widget['bg'] )", "def state_chosen_do(cfg, app, win, events):", "def changeState(self, node, name, state):", "def select_action(self, state):\n pass", "def set_state(self, state: int):", "def highlight(self, number, state):\n\n marker = game.markers[number]\n link = game.markers[marker.link]\n board = self.ids.board\n ui_link = board.children[-link.index - 1]\n\n # Toggle highlighting on\n if state == 'on':\n ui_link.old_color = ui_link.color\n ui_link.color = scheme.white\n\n # Toggle highlighting off\n elif state == 'off':\n ui_link.color = ui_link.old_color", "def colour_press(self):\n global last_button\n if last_button is None:\n # If there is no \"last button press\", set this as the latest one\n last_button = self\n else:\n # Another button has been pressed before. Switch the colours of the two\n last_button.background_color, self.background_color = self.background_color, last_button.background_color\n # Set their states back to normal and reset the last button pressed\n last_button.state = 'normal'\n self.state = 'normal'\n last_button = None\n # Check if the switch removed any blocks\n points = self.screen.check_removal()\n if points == 0:\n # If nothing has been removed, the player gets one step closer to losing\n self.screen.misses += 1\n else:\n # Give the player the points\n self.screen.points += points\n if self.screen.misses > 3:\n # Player has lost, leave the game\n self.screen.leave()", "def state_choose_do(cfg, app, win, events):", "def state_changed(self, old_state, new_state, target_state):\n pass", "def __change_state(self, state):\n self.state = state", "def _get_click_state(self, event):\n return Quartz.CGEventGetIntegerValueField(\n event, Quartz.kCGMouseEventClickState)", "def __state_cb(self, data):\n self.state = data", "def on_state_change(self, state):\n return state", "def state(self):\n pass", "def on_stateico_clicked(self, *a):\n\t\tself.window1.set_property('visible', True)\n\t\tself.stateico.set_visible(False)\n\t\tself.window1.present()", "def on_selected(self):\n self.colour = self.selected_colour\n self.is_selected = True\n self.redraw()", "def on_go(state):\n pass", "def storeState(self):\n\n self.action_history[self.trial] = self.action\n self.ball_history[self.trial] = self.ballcolor", "def mouseClicked(self,mouseEvent):\n\t\tself.canvas.nodeColorize(self)", "def set_state(self,s):\n self.state = s", "def state_changed(self):\n if self.goto and self.get_position() != self.c.p:\n self.goto_node()\n\n if self.mode == 'edit':\n self.edit_frame.show()\n self.view_frame.hide()\n elif self.mode == 'view':\n self.edit_frame.hide()\n self.view_frame.show()\n else:\n self.edit_frame.show()\n self.view_frame.show()\n\n 
self.update_position(self.c.p)", "def switch_to_state(self, state):\n self.switch_state = state", "def act(self, state):\n raise NotImplementedError()", "def update_to_state(self, game_state):\n pass", "def event_m20_11_x78():\n \"\"\"State 0,1: State\"\"\"\n ChangeOwnObjState(30)\n \"\"\"State 2: End state\"\"\"\n return 0", "def state(self, state_id, state):\n\n self.fire('source-state', {\n \"stateId\": state_id,\n \"state\": state\n })", "def state(self, state):\n self._state = state", "def eventHandler(self, event: pygame.event):\n # change selected color if this button's rectangle was clicked\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n if self.rect.collidepoint(event.pos): # is mouse over button\n self.image = self._images[ButtonImages.CLICKING_IMAGE.value]\n self.beingClicked = True\n for func, *args in self.functionsToInvokeWhenClicked:\n func(*args)\n elif event.type == pygame.MOUSEBUTTONUP and self.beingClicked:\n if event.button == 1:\n self.beingClicked = False\n self.image = self._images[ButtonImages.DEFAULT_IMAGE.value]", "def state_preview_do(cfg, app, win, events):", "def on_click():\n action = self.screens[self.curr_screen].on_click()\n\n if screen_actions.CHANGE_SCREEN == action[\"screen action\"]:\n self.curr_screen = action[\"value\"]\n self.screens[self.curr_screen].update_screen()\n self.lcd_display.show()\n elif screen_actions.UPDATE_REDIS == action[\"screen action\"]:\n self.redis_client.set(action[\"redis key\"], action[\"value\"])\n self.redis_dict[action[\"redis key\"]] = action[\"value\"]\n print(\n \"Key: {}, Value: {}\".format(\n action[\"redis key\"],\n self.redis_client.get(action[\"redis key\"]).decode(\"UTF-8\"),\n )\n )", "def get_new_state(self, curstate: int, event: str) -> int:\n isdetected = self.isdetected_lab.is_A_state()\n evtup = (self.isexpected, isdetected, curstate, event)\n newstate = self.dd.get(evtup, curstate)\n print(\"FSM transition {} : {}\".format(evtup, newstate))\n return newstate", "def set_state(self, new_state):\n self.state = new_state", "def _see_state(self, new_state: State) -> None:\n entity_id = new_state.entity_id\n domain = new_state.domain\n state = new_state.state\n registry: GroupIntegrationRegistry = self.hass.data[REG_KEY]\n self._assumed[entity_id] = bool(new_state.attributes.get(ATTR_ASSUMED_STATE))\n\n if domain not in registry.on_states_by_domain:\n # Handle the group of a group case\n if state in registry.on_off_mapping:\n self._on_states.add(state)\n elif state in registry.off_on_mapping:\n self._on_states.add(registry.off_on_mapping[state])\n self._on_off[entity_id] = state in registry.on_off_mapping\n else:\n entity_on_state = registry.on_states_by_domain[domain]\n if domain in registry.on_states_by_domain:\n self._on_states.update(entity_on_state)\n self._on_off[entity_id] = state in entity_on_state", "def switch_to_state(self, Rover, name):\n name.execute(Rover)\n self.curr_state = name", "def step(self, state):", "def state_capture_do(cfg, app, win, events):", "def set_state(self, value):\n self.state = value", "def onColorMenu(self, item):\n self.canvas.color = item.color\n return 1", "def change_equals_state(self, state):\n self.equals['state'] = state", "def state_transition(self, curr_state, curr_action):\n #The state transition happens from the current state to the next state based on agent's action\n curr_state[curr_action[0]]=curr_action[1]\n return curr_state", "def debug_state_change(self, old_state, new_state):\n self.emit(QtCore.SIGNAL(\"debug_state_change(int,int)\"), 
old_state, new_state)", "def switch_state():\n\tDmg.OpenWindow()", "def button_clicked(self, event_name, data, kwargs):\n if data[\"entity_id\"] == self.button:\n if data[\"click_type\"] == \"single\":\n if float(self.cached_fade_in_time) > 0:\n self.log(\n \"Turning on {}\".format(self.friendly_name(self.wakeup_light))\n )\n self.call_service(\n \"light/turn_on\", entity_id=self.wakeup_light, brightness_pct=1\n )\n transition = int(\n float(self.cached_fade_in_time)\n * int(self.fade_in_time_multiplicator)\n )\n self.log(\n \"Transitioning light in over {} seconds\".format(transition)\n )\n self.timer_handle_list.append(\n self.run_in(\n self.run_fade_in, 1, transition=transition, brightness_pct=1\n )\n )", "def press(self, number, state):\n\n number = int(number)\n marker = game.markers[number]\n\n # Highlight already linked markers\n if marker.symbol is not None and marker.symbol != 'R' and marker.symbol != 'H':\n\n toggle = ''\n\n # On press\n if state == 'press':\n toggle = 'on'\n if state == 'release':\n toggle = 'off'\n\n # Highlight marker's link\n if marker.symbol is not None and marker.symbol != 'R' and marker.symbol != 'H':\n sm.get_screen('game_screen').highlight(marker.number, toggle)\n\n # On release\n elif state == 'release':\n\n # Marker has not been used before\n if marker.symbol is None:\n\n # Send ray\n game.beam(number)\n\n # Update marker symbol\n self.text = str(marker.symbol)\n\n # Update color\n if self.text == 'H':\n self.color = scheme.hit\n elif self.text == 'R':\n self.color = scheme.reflection\n else:\n self.color = scheme.black\n self.color = scheme.next()\n\n # Update linked marker symbol, if applicable\n if marker.link is not None:\n sm.get_screen('game_screen').symbol(marker.link, self.color)\n\n # Update\n sm.get_screen('game_screen').update()", "def click(self):\r\n pass", "def change_state(self,state):\n if self.__currentState:\n self.__currentState.stop()\n \n try:\n idler=self[state]\n except KeyError:\n raise \"%s is not a state of %s\" % (state,self)\n \n self.__currentState=idler()\n self.__currentState.idle()\n self.__currentState=None", "def SetState(self, new_state):\r\n\r\n self.state = new_state", "def callback_selectstate(self, attrname, old, new):\n self._update_chart(self.selectstate.value)", "def target_state(self, s):\n raise NotImplementedError()", "def state_transition(self, curr_state, curr_action):\n curr_state[curr_action[0]] = curr_action[1]\n return curr_state", "def click(self, buttons, x, y):\n cur_state = self._state.query(x, y)\n action_name = self._button_action_map[buttons][0]\n action = self.__class__.actions[action_name]\n action_args = self._button_action_map[buttons][1]\n new_state = action(cur_state, action_args)\n self._state.redefine(x, y, new_state)", "def get_next_state(self, state, action):\n pass", "def click(self, event):\n x, y = self.canvas.invert([event.x, event.y])\n i, j = int(floor(x)), int(floor(y))\n patch = self.get_cell(i, j)\n if patch and patch.state == \"green\":\n cluster = self.get_cluster(patch)\n self.show_cluster(cluster)", "def gotoState(self, newStateName, layerName):\n layer = StateManager.layers.get(layerName)\n if not layer:\n raise StateException(\"No layer %s\" % layerName) \n currentState = self.currentStates.get(layerName)\n if not currentState:\n raise StateException(\"No curreState for layer %s\" % layerName)\n newState = layer.get(newStateName, None)\n if not newState:\n raise StateException(\"New state %s doesnt exist\" % newStateName) \n currentState.leave(self.actor)\n 
self.currentStates[layerName] = newState\n interval = newState.enter(self.actor)\n self.updateTimes[layerName] = pyui.readTimer() + interval\n return 1", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def mouse_hover(self):\n self.color1 = self.color # Color changes\n position = pygame.mouse.get_pos() # Get mouse position\n if self.rect.collidepoint(position): # If the mouse is inside the button rect\n self.color1 = LIGHT_GREEN # Change color to light green", "def btnHandler(val):\n print(\"Button State Changed. Value={}\".format(val) )", "def _onEdit(self, event):\n index = self.colorlist.GetSelection()\n icol = self._indexTupleToColor(index)\n icd = wx.ColourData()\n icd.SetColour(icol)\n dialog = wx.ColourDialog(self, icd)\n\n if dialog.ShowModal() == wx.ID_OK:\n tup = _colorDataToTuple(dialog.GetColourData())\n self.graphColors[index] = tup\n self._tupleListToStrings()\n self._updateButtons(None)", "def state_change(\n self,\n cb: CircuitBreaker,\n old_state: CircuitBreakerState | None,\n new_state: CircuitBreakerState,\n ) -> None:", "def go_to_next_state(self):\n pass", "def mouse_in(self, event):\r\n self['background'] = '#E5F3FF'", "def click(self, x, y, button, press):\n\n if self.is_in_screen(x, y) and not self.pause:\n self.get_color(x, y)\n self.record(x, y, button, press)", "def setState(self, state):\n self.state = state", "def pickColor(colorWindow, colorPick):\n picked = False #False until choice is made\n while not picked:\n inputClick = colorWindow.getMouse() #gets input on where click has been made\n for i in range(4): #Runs all 4 choices to see if any match\n sqr_to_check = colorPick[i] #Compares to new square\n #Compares X and Y of square to see if click matches. 
If True square i has been clicked\n if sqr_to_check.p1.x < inputClick.x < sqr_to_check.p2.x and sqr_to_check.p1.y < inputClick.y < sqr_to_check.p2.y: \n if i == 0: \n return \"Blue\"\n if i == 1:\n return \"Green\"\n if i == 2:\n return \"Yellow\"\n if i == 3:\n return \"Red\"", "def input(self, event: pygame.event) -> None:\n if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:\n self.user_clicked = True", "def set_state(self, state):\n self.state = state", "def debug_state_change(self, old_state, new_state):\n raise NotImplementedError", "def clickCell(self, row, col):\n self.clicked[row, col] = 1", "def change_color(self, x, y, state):\n if state == 1:\n color = self.tile_color\n else:\n color = self.background_color\n self.canvas.itemconfig(self.board[(x, y)], fill=color)", "def handle_state(self):\r\n if self.state == 'walk':\r\n self.walking()\r\n elif self.state == 'fall':\r\n self.falling()\r\n elif self.state == 'jumped on':\r\n self.jumped_on()\r\n elif self.state == 'shell slide':\r\n self.shell_sliding()\r\n elif self.state == 'death jump':\r\n self.death_jumping()", "def set_state(self, state):\n self.history = state", "def set_state(self, value):\n _LOGGER.debug(\"%s: Set state to %d\", self.entity_id, value)\n self._flag_state = True\n\n params = {ATTR_ENTITY_ID: self.entity_id}\n if value == 0:\n if self.char_current_state.value != value:\n self.char_current_state.set_value(3)\n self.call_service(DOMAIN, SERVICE_OPEN_COVER, params)\n elif value == 1:\n if self.char_current_state.value != value:\n self.char_current_state.set_value(2)\n self.call_service(DOMAIN, SERVICE_CLOSE_COVER, params)", "def next_state(self, action):\n self.state = self.states[action][self.state]", "def _draw_state(self, game_states, surface, viewscreen_size, space_radius):\n \n self.selection += 1\n if self.selection >= len(game_states): \n self.selection = 0\n input()\n\n print(self.selection)\n\n game_state = game_states[self.selection]\n\n colors = [(0, 0, 0), (255, 255, 255), (100, 100, 100)]\n\n spacing_x = viewscreen_size[0] / 10\n spacing_y = (viewscreen_size[1]* 0.866025403784) / 10 #hardcoded sqrt(3)/2 aka sin(pi/3)\n offset_y = viewscreen_size[1] * ((1-0.866025403784)/2)\n\n for coord in all_coords():\n x, y = coord\n\n offset_x = (5-y) * (spacing_x/2)\n\n draw_y = int(viewscreen_size[1] - (spacing_y * y)) - offset_y\n draw_x = int(spacing_x * x) + offset_x\n\n color = colors[game_state[(x, y)]]\n\n pygame.draw.circle(surface, color, (draw_x, draw_y), space_radius)", "def actions(self, state):\n raise NotImplementedError # Override this!", "def click_cell(self, event):\n if (self.world_setable):\n x, y = event.x, event.y\n row = y / self.cell_size\n col = x / self.cell_size\n if ((row in range(self.cell_row)) and\n (col in range(self.cell_col))):\n status_now = not self.world_status.now[row, col]\n if (status_now):\n color = self.color_alive\n else:\n color = self.color_dead\n item_id = self.world[row, col]\n self.canvas.itemconfig(item_id, fill=color)\n self.world_status.now[row, col] = status_now\n self.world_status.next = self.world_status.now.copy()\n self.init_world = self.world_status.now.copy()", "def update_state(self, context):\n pass", "def advance_state_machine():\r\n global state_num\r\n if state_num == 0:\r\n red.color(\"black\")\r\n orange.color(\"black\")\r\n green.color(\"green\")\r\n state_num = 1\r\n wn.ontimer(advance_state_machine, 3000)\r\n elif state_num == 1:\r\n red.color(\"black\")\r\n orange.color(\"orange\")\r\n green.color(\"green\")\r\n state_num = 2\r\n 
wn.ontimer(advance_state_machine, 1000)\r\n elif state_num == 2:\r\n red.color(\"black\")\r\n orange.color(\"orange\")\r\n green.color(\"black\")\r\n state_num = 3\r\n wn.ontimer(advance_state_machine, 1000)\r\n else:\r\n red.color(\"red\")\r\n orange.color(\"black\")\r\n green.color(\"black\") # Show up our green\r\n state_num = 0\r\n wn.ontimer(advance_state_machine, 2000)", "def state(self):\n raise NotImplementedError" ]
[ "0.63449645", "0.63005996", "0.62316686", "0.62316686", "0.6165384", "0.61550564", "0.608084", "0.6026097", "0.59596413", "0.5938203", "0.5936117", "0.59297", "0.5917869", "0.5911632", "0.5898261", "0.58883375", "0.5822766", "0.58192563", "0.57938737", "0.5782378", "0.5760959", "0.5738214", "0.5731165", "0.5718634", "0.57136333", "0.5654898", "0.56301636", "0.56284136", "0.56194866", "0.55969673", "0.55649143", "0.55565965", "0.5549944", "0.5541159", "0.55328405", "0.5524672", "0.55241805", "0.55239224", "0.5514119", "0.551394", "0.5511564", "0.55081546", "0.5499734", "0.5499504", "0.54990935", "0.54936117", "0.5487652", "0.54670495", "0.5450488", "0.5442328", "0.54344994", "0.5419577", "0.5415407", "0.54024875", "0.5401534", "0.53998655", "0.5399813", "0.5389868", "0.5359535", "0.5349735", "0.53255373", "0.53227156", "0.5322405", "0.5320912", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.531101", "0.53056324", "0.53051007", "0.5304829", "0.5291512", "0.5290135", "0.52892774", "0.52785873", "0.5276567", "0.52684176", "0.5267964", "0.5266222", "0.5261837", "0.5261175", "0.52611464", "0.52605456", "0.5256523", "0.5244508", "0.5234779", "0.52263725", "0.52189577", "0.52161115", "0.5216047", "0.5215579", "0.5212558" ]
0.0
-1
delete function for removing state visit
def removeStateVisit():
    user_id = session["user_id"]
    print user_id
    state_id = request.form.get('feature_id')
    state = db.session.query(State).filter_by(state_id=state_id).one()
    user_state_obj = db.session.query(User_State).filter(User_State.user_id == user_id, User_State.state_id == state_id).first()
    print user_state_obj
    user_state_json_data = "error"
    if user_state_obj:
        db.session.delete(user_state_obj)
        db.session.commit()
        user_state_json_data = {"state_id": state_id, "state_name": state.state_name, "visited_at": user_state_obj.visited_at}
    return jsonify(user_state_json_data)

################### LOAD USERS STATE VISITS #######################
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_state(self, s):\n state = self.state(s)\n for transition in self.transitions():\n if transition.to_state == state:\n self.delete_transition(transition)\n self._states_.remove(state)\n try:\n del self._states_dict_[state.label()]\n except AttributeError:\n pass", "def delete_instigator_state(self, origin_id: str):", "def delete_state(state_id):\n state = storage.get(State, state_id)\n if state:\n state.delete(), storage.save()\n return {}\n else:\n abort(404)", "def del_state(state_id):\n state_objs = storage.all(State).values()\n for state in state_objs:\n if state_id == state.id:\n state.delete()\n storage.save()\n return jsonify({}), 200\n abort(404)", "def remove(self):", "def remove_state(self, state):\n self._validate_state(state)\n self._state.remove(state)", "def remove():", "def delete_state(state_id):\n state = storage.get(State, state_id)\n if not state:\n abort(404)\n storage.delete(state)\n storage.save()\n return jsonify({}), 200", "def delete_state(state_id):\n obj = storage.get('State', state_id)\n if obj is not None:\n storage.delete(obj)\n storage.save()\n return jsonify({})\n else:\n abort(404)", "def __delete__(self):\n pass", "def delete_state(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n storage.delete(state)\n storage.save()\n return jsonify({}), 200", "def delete_state(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n state.delete()\n storage.save()\n return jsonify({}), 200", "def src_delete(state):\n _lib.src_delete(state)", "def delete_state(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n storage.delete(state)\n storage.save()\n empty = jsonify({})\n return (empty), 200", "def delete_state(state_id):\n for key, values in storage.all(\"State\").items():\n if state_id in key:\n storage.delete(values)\n storage.save()\n storage.close()\n return jsonify({}), 200\n abort(404)", "def delete_state(state_id):\n state = storage.get(\"State\", id=state_id)\n if state:\n storage.delete(state)\n storage.save()\n return jsonify({}), 200\n abort(404)", "def delete():", "def delete_job_state(self, job_origin_id):", "def __del__(self) -> None:\n self.map.ent_id.discard(self.id)", "def delete_transition(self, t):\n transition = self.transition(t)\n transition.from_state.transitions.remove(transition)", "def remove_state(self, state):\n if isinstance(self.options, list):\n self.options.remove(state)\n else:\n temp = list(self.options)\n temp.remove(state)\n self.options = tuple(temp)", "def remove():\n pass", "def delete(self):\n ...", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def del_states_id(state_id):\n thing = storage.all('State')\n muricanState = \"State.\" + state_id\n state = thing.get(muricanState)\n if state is None:\n abort(404)\n else:\n state.delete()\n storage.save()\n return (jsonify({}), 200)", "def remove(self):\n pass", "def remove(self):\n pass", "def remove(self):\n pass", "def statesDelete(state_id):\n obj = storage.get(State, state_id)\n if obj:\n storage.delete(obj)\n storage.save()\n return jsonify({}), 200\n return jsonify({\"error\": \"Not found\"}), 404", "def __delitem__(name):", "def delete(self):\n self.graph._del(handle=self.handle)", "def remove(self):\r\n\t\tself._delete()", "def __delitem__(self, key):\n\t\tdel self.__dStore[key]", "def _remove(self):\n pass", "def __del__(self):\r\n self.clearList()", "def __del__(self) -> None:\n 
self.map.solid_id.discard(self.id)", "def delete_by_id(state_id):\n delete_state = storage.get('State', state_id)\n if not delete_state:\n abort(404)\n else:\n delete_state.delete()\n storage.save()\n return jsonify({}), 200", "def delete_state_model(exploration_id, state_id):\n state_memcache_key = _get_state_memcache_key(exploration_id, state_id)\n memcache_services.delete(state_memcache_key)\n state_model = exp_models.StateModel.get(exploration_id, state_id)\n state_model.delete()", "def del_step_hook(hindex):\n del_hook(step, hindex)", "def __delitem__(self, feature):\n self[feature] = None", "def delete(self):\n\t\tself.canvas.delete('node_'+self.identifier)\n\t\tself.canvas.tag_unbind('node_'+self.identifier,\"<Any>\")", "def __delete__(self, instance):\r\n self._set_instance_tag_cache(instance, '')", "def __del__(self) -> None:\n self.map.face_id.discard(self.id)", "def delete(self) -> None:\n self.pop()", "def delete_this_region(self):", "def delete(self):\n del self.shx.atoms[self.index]", "def clearState(self):\n self.physicalState = (None for unused in self.indVars)", "def _del(self, name):\n raise NotImplementedError", "def __delitem__(self,key):\n if key in self.changed: self.changed.remove(key)\n if key not in self.deleted: self.deleted.append(key)\n del self.data[key]", "def __del__(self):\n del self.board_\n del self.children_edges_\n self.board_ = None\n del self.parent_edge_\n # print(\"destruct node\")", "def __delitem__(self, i):\n # An element of a policy function can't be deleted", "def __delitem__(self, index):\n del self.chromosome_list[index]", "def delete(self):\n raise NotImplementedError", "def delete(self):\n self._vertex_list.delete()\n self._vertex_list = None", "def drop(self):\n pass", "def drop(self):\n pass", "def del_stkpnt(*args):\n return _ida_frame.del_stkpnt(*args)", "def _del(self, entry):\n entry.key = dummy\n entry.value = None\n self.used -= 1", "def _delete(self, current_node):\n pass", "def remove_states(self, keys: list):\n if self.spec.graph:\n self.spec.graph.clear_children(keys)", "def onDestroy(self, event):\n self.fsm.inspecting = 0\n for si in self.stateInspectorDict.values():\n self.ignore(self.name + '_' + si.getName() + '_entered')\n self.ignore(self.name + '_' + si.getName() + '_exited')", "def clear_repeat(self):\n\t\tself.visitedStates.clear()", "def destroy(self):", "def destroy(self):", "def destroy(self):", "def on_unassign(self):", "def destroy(self):\n del self.nodes\n self.nodes = {}", "def deassert_state(self, state):\n return DeassertStateVariable(self, state)", "def __del__(self):\n\t\tif self.ispersist() and self._input:\n\t\t\t\tpservlet.pipe_push_state(self._pipe_desc, self._state)", "def __del__(self):\n # Only an integer is passed to the call\n self.ph.remove(self.ID)\n # No new references were created, nothing retained", "def _del(self) -> None:\n self.variables.pop(prop_name, None)", "def _delete(self):\n self.prev.next = self.next\n self.next.prev = self.prev", "def test_delete_page_state(self):\n response = self.client.open(\n '/rui_support/page-state/{tempIdentifier}'.format(tempIdentifier='tempIdentifier_example'),\n method='DELETE',\n content_type='application/ld+json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def __del__(self):\n\t\tself._pc.gid_clear()", "def remove(self):\n raise NotImplementedError", "def remove(self):\n raise NotImplementedError", "def remove(func):", "def removeNeighbor(self, neighborID):", "def delete_activity():\n pass", "def 
__delitem__(self, key):\n\n del self._vertices[key]", "def delete_state(committer_id, exploration_id, state_id):\n exploration = get_exploration_by_id(exploration_id)\n if state_id not in exploration.state_ids:\n raise ValueError('Invalid state id %s for exploration %s' %\n (state_id, exploration.id))\n\n # Do not allow deletion of initial states.\n if exploration.state_ids[0] == state_id:\n raise ValueError('Cannot delete initial state of an exploration.')\n\n def _delete_state_transaction(committer_id, exploration_id, state_id):\n exploration = get_exploration_by_id(exploration_id)\n\n # Find all destinations in the exploration which equal the deleted\n # state, and change them to loop back to their containing state.\n for other_state_id in exploration.state_ids:\n other_state = get_state_by_id(exploration_id, other_state_id)\n changed = False\n for handler in other_state.widget.handlers:\n for rule in handler.rule_specs:\n if rule.dest == state_id:\n rule.dest = other_state_id\n changed = True\n if changed:\n save_state(committer_id, exploration_id, other_state)\n\n # Delete the state with id state_id.\n exploration_memcache_key = _get_exploration_memcache_key(\n exploration_id)\n memcache_services.delete(exploration_memcache_key)\n\n delete_state_model(exploration_id, state_id)\n exploration.state_ids.remove(state_id)\n save_exploration(committer_id, exploration)\n\n transaction_services.run_in_transaction(\n _delete_state_transaction, committer_id, exploration_id, state_id)", "def delete_instance_state(self, instance_id):\n if instance_id in self._state_info:\n del self._state_info[instance_id]\n if instance_id in self._instances_to_purge:\n self._instances_to_purge.remove(instance_id)\n self._dirty = True", "def deleteStep( self ):\n assert isinstance( self._env, Env )\n assert isinstance( self._steps, list )\n\n # Save the stuff we need\n oldSteps = self._steps\n oldSteps.pop( )\n\n # Reinitialize this instance\n self._initialize( oldSteps )", "def __del__(self):\n \n pass", "def __value_del(self):\n self.delete()", "def checkpoint_unset():\n unwind(checkpoints.pop())", "def __del__(self):\n return", "def flowdetail_delete(fd):\n return IMPL.flowdetail_delete(fd)", "def _del(self, *args):\n return _ida_hexrays.qvector_history_t__del(self, *args)", "def __delitem__(self, key: tuple):\n s, a = key\n del self.store[s][a]", "def delete(self):\n raise NotImplementedError()", "def delete(self):\n raise NotImplementedError()", "def _delete_conntrack_state(self, ip):\n\n # If no flow entries are deleted, `conntrack -D` will return 1\n try:\n utils.execute(['conntrack', '-D', '-d', str(ip)], self.root_helper)\n except RuntimeError:\n LOG.debug(\n 'Failed deleting ingress connection state of %s' % ip\n )\n try:\n utils.execute(['conntrack', '-D', '-q', str(ip)], self.root_helper)\n except RuntimeError:\n LOG.debug(\n 'Failed deleting egress connection state of %s' % ip\n )", "def removeNode(self, node):", "def del_ly(self):\r\n del self._ly", "def __del__(self):\r\n pass", "def delete(self):\n\n lod_history = self.repo._get_lod_history(self.lod)\n assert lod_history.exists()\n lod_history.update(self.repo._youngest, None)\n self._mark_deleted()" ]
[ "0.7199102", "0.69216", "0.6769232", "0.6697931", "0.66519755", "0.663655", "0.6609245", "0.65962636", "0.659605", "0.6585508", "0.65570766", "0.65462", "0.65451986", "0.6523356", "0.65081394", "0.6460999", "0.64571273", "0.64526856", "0.63653034", "0.635942", "0.63002914", "0.62902683", "0.6277267", "0.62552285", "0.62552285", "0.62552285", "0.62552285", "0.62236434", "0.6216604", "0.6216604", "0.6216604", "0.6179038", "0.61694646", "0.61679953", "0.6162125", "0.6134654", "0.6132116", "0.6117163", "0.61097854", "0.6108413", "0.61046815", "0.6095338", "0.60883665", "0.60673225", "0.6065598", "0.6058281", "0.6042986", "0.59849405", "0.5977804", "0.5970359", "0.5967529", "0.5963306", "0.59523493", "0.5928794", "0.5928535", "0.59253854", "0.5919781", "0.59108377", "0.59108377", "0.59028095", "0.58961755", "0.58901674", "0.5888426", "0.58830404", "0.58792037", "0.5874724", "0.5874724", "0.5874724", "0.5871159", "0.58701354", "0.58610743", "0.5860375", "0.5856933", "0.58458555", "0.58407027", "0.5837257", "0.58258903", "0.58157706", "0.58157706", "0.58151823", "0.5810572", "0.5806292", "0.5804984", "0.58041644", "0.580158", "0.58009386", "0.57977265", "0.57956815", "0.57848966", "0.57804686", "0.5775989", "0.57678527", "0.5765441", "0.57613856", "0.57613856", "0.5760211", "0.5754197", "0.5749242", "0.57483536", "0.5743939" ]
0.6547938
11
RETURN user profile information
def profile():
    user_id = session["user_id"]
    # from HTML form getting inputs from ajax call
    first = request.form.get('first', None)
    last = request.form.get('last', None)
    username = request.form.get('username', None)
    city = request.form.get('city', None)
    state = request.form.get('state', None)
    quote = request.form.get('quote', None)
    about = request.form.get('about', None)
    print "profile", first, last, city, state, quote, about
    # query db for current user
    user = db.session.query(User).filter_by(user_id=user_id).one()
    # ajax request inputs
    user.first_name = first
    user.last_name = last
    user.username = username
    user.city = city
    user.state = state
    user.quote = quote
    user.about = about
    db.session.commit()
    # profile_info_data = {"key": value}
    profile_info_data = {"first": first, "last": last, "username": username, "city": city, "state": state, "quote": quote, "about": about}
    # query DB for this user if the unser is none
    print "Profile been has been stored in DB"
    return jsonify(profile_info_data)

# @app.route('/disply-profile-info', methods=['POST'])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_user_profile(self):\n return self.request('get', 'id/users')", "def get_profile():\n logger.debug(\"entering function get_profile\")\n response = read_user_profile()\n logger.debug(\"exiting function get_profile\")\n return jsonify(response)", "def user_info(self):\n response = self.query('user_info')\n return response", "def profile(self) -> dict:\n endpoint = \"/api/users/profile/\"\n ret = self._request(endpoint=endpoint)\n return ret", "def read_user_profile():\n logger.debug(\"entering function read_profile\")\n find_query = {\"user_id\": current_user.id}\n project_query = {\"_id\": 0, \"user_id\": 0, \"password\": 0}\n result = run_find_one_query(config.USERS_COL, find_query, project_query, error=True,\n error_msg=NO_USER_ERR_MSG)\n logger.info(\"fetched user profile for %s\", current_user.id)\n response = get_success_response(data=result)\n logger.debug(\"exiting function read_profile\")\n return response", "def describe_my_user_profile():\n pass", "def getUserInfo(data):\n\tusername = data[\"session_username\"]\n\tuser = Users.objects.filter(username=username).first()\n\n\tresponse = {}\n\n\tif not user:\n\t\treturn {\"Success\": False, \"Error\": \"Unable to retrieve the user information from database\"}\n\n\tresponse[\"Success\"] = True\n\tresponse[\"Username\"] = user.username\n\tresponse[\"Email\"] = user.email\n\tresponse[\"Verified\"] = user.verified\n\tresponse[\"Level\"] = user.level\n\tresponse[\"Experience\"] = user.experience\n\tresponse[\"Coins\"] = user.coins\n\tresponse[\"Preferences\"] = {\"Grid Opacity\": user.pref_grid}\n\n\treturn response", "def get_user_profile_info(token):\n user_profile_endpoint = 'https://api.spotify.com/v1/me'\n headers = {'Authorization': 'Bearer %s' % token}\n\n r = requests.get(user_profile_endpoint, headers=headers)\n if r.status_code != 200:\n return None\n profile = r.json()\n\n display_name = profile['display_name']\n profile_id = profile['id']\n email = profile['email']\n\n return User(display_name=display_name,\n profile_id=profile_id,\n email=email)", "def get_users_info(): \n \n data = user_obj.get_users_info()\n return data", "def get_my_profile(self):\n\n url = self.api_base_url + \"user/profile\"\n\n try:\n raw_response = self.request_handler.make_request(ApiRequestHandler.GET, url)\n except RequestFailed:\n raise\n\n jsonified_response = json.loads(raw_response.text)\n user_profile = jsonified_response\n\n return user_profile", "def profile() -> str:\n session_id = flask.request.cookies.get('session_id')\n user = AUTH.get_user_from_session_id(session_id)\n if user:\n return flask.jsonify({\"email\": user.email}), 200\n else:\n flask.abort(403)", "def getProfile(self):\n # GET /profile\n debugMain('getProfile')\n return self._genericGet('/profile')", "def getProfileInfo(self):\n doc = minidom.parse(urllib.urlopen(serverString + \"/rest/user/\" + self.name))\n for element in doc.getElementsByTagName(\"user\")[0].childNodes:\n if element.nodeType != minidom.Node.ELEMENT_NODE:\n continue\n elif element.tagName == \"status\" and int(element.firstChild.data) != 1:\n raise ServerError(element.firstChild.data)\n elif element.tagName == \"input\":\n self.name = element.firstChild.data\n elif element.tagName == \"id\":\n self.id = element.firstChild.data\n elif element.tagName == \"image\":\n self.image = element.firstChild.data\n elif element.tagName == \"tagline\":\n if element.firstChild == None:\n self.tagline = None\n else:\n self.tagline = element.firstChild.data\n elif element.tagName == \"creation\":\n self.created = 
datetime.datetime.strptime(element.firstChild.data[:element.firstChild.data.rfind(\".\")]+\".GMT\", \"%Y-%m-%d %H:%M:%S.%Z\")", "def get_profile(self):\n endpoint = '/profile'\n return self.get_request(endpoint)", "def profile():\n # Check if user is loggedin\n if 'loggedin' in session:\n # We need all the account info for the user so we can display it on the profile page\n response = requests.get(\n \"http://localhost:8080/api/userbyid/\"+str(session['userid']))\n acc = json.loads(response.text)\n # Show the profile page with account info\n return render_template('profile.html', account=acc)\n # users is not loggedin redirect to login page\n return redirect(url_for('site.login'))", "def get(self, username):\n\t\tdb = getattr(g, 'db', None)\n\n\t\tqry = \"SELECT username,email,active,steamid FROM\\\n\t\t\tprofiles WHERE username = %s;\"\n\t\twith db as cursor:\n\t\t\tcursor.execute(qry, (username,))\n\n\t\treturn {'profile':cursor.fetchone()}", "def get_user_info(self) -> str:\n return self._searcher.get_user_info()", "def get_user_profile(self):\n return self.user.profile", "def list_user_info(service):\n profile = service.users().getProfile(userId='me').execute()\n return profile", "def get_profile():\n\n if request['user_id']:\n\n user = User.select().where(User.id == request['user_id']).get()\n uSchema = UserSchema()\n jsonUser = uSchema.dumps(user)\n\n del request['user_id']\n return jsonUser.data\n\n return", "def user_info(self):\n return self.auth.get_user_by_session()", "def _request_user_info(credentials):\n http = httplib2.Http()\n\n # User information stored here\n credentials.authorize(http)\n resp, content = http.request('https://www.googleapis.com/plus/v1/people/me')\n\n if resp.status != 200:\n current_app.logger.error(\"Error while obtaining user profile: %s\" % resp)\n return None\n\n # Check whether user is authenticating with the allowed domain.\n if (current_app.config['RESTRICT_DOMAIN'] is True and \n credentials.id_token.get('hd') != current_app.config['REQUIRED_DOMAIN']):\n\n # Replace with logging for a real app\n print(\"\\n------------------------------------------------------\")\n print(\"User attempted to authenticate with disallowed domain.\")\n print(\"------------------------------------------------------\\n\")\n\n # User information deleted here\n oauth2.storage.delete()\n return None\n\n # Stores the users information in the session profile dictionary\n session['profile'] = json.loads(content.decode('utf-8'))\n\n # Remove this in production. 
It's here so you can see what information is stored.\n print(\"\\n------------------------------------------------------\")\n print(\"SESSION PROFILE INFORMATION\")\n print(\"------------------------------------------------------\")\n for k,v in session['profile'].items():\n print(k,\"--->\",v)\n print(\"------------------------------------------------------\\n\")", "def profile():\r\n user_data = load_user(current_user.id, current_user)\r\n if user_data is not None:\r\n user, followers, following = user_data\r\n\r\n return render_base_template(\"profile.html\", profile=user, followers=followers,\r\n following=following,\r\n os_projects=[])\r\n\r\n return abort(404)", "def profile():\n token = request.json['token']\n u = user.User.query.filter(user.User.token == token).first()\n if u is None:\n abort(404)\n return jsonify(u.to_dict())", "def read_user(self, username):\n\n self.c.execute(\"SELECT * FROM profiles WHERE name=?\", (username,))\n user_profile = self.c.fetchone()\n print user_profile\n return user_profile", "def userinfo(self, **kwargs):\n metadata = self.load_server_metadata()\n resp = self.get(metadata['userinfo_endpoint'], **kwargs)\n resp.raise_for_status()\n data = resp.json()\n return UserInfo(data)", "def user_profile():\n access_token = _request_ctx_stack.top.current_user_token\n message_log(\"Got access token for user profile\")\n user_profile = get_user_profile(access_token)\n user_profile['access_token'] = access_token\n return json.dumps(user_profile)", "def user_info(self):\n resp = self._get(get_url('user'))\n raise_on_error(resp)\n ret = resp.json()\n return UserInfo(ret)", "def get_my_profile_info(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = GetMyProfileInfo.create(\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def user(request):\n\tprofile = {}\n\tif (request.user.is_authenticated()==True) and(request.user is not None):\n\t\tprofile = UserProfile.objects.get(user_id=request.user)\n\treturn {\n\t\t'user': request.user,\n\t\t'profile':profile\n\t}", "def profile(self, name=\"johndoe\"):\r\n url = \"/account/%s\" % name\r\n return self.app.get(url, follow_redirects=True)", "def get_user_info():\n if session and session.get(\"email\") and session.get(\"display_name\"):\n email = session.get(\"email\")\n display_name = session.get(\"display_name\")\n data = dict(email=email, displayName=display_name)\n app.logger.debug(\"Success in getting log information on user: {} at email: {}\".format(display_name, email))\n return jsonify(data)\n else:\n return jsonify(dict(email=\"error\", display_name=\"Could not get info for this user\"))", "def extra_data(self, user, uid, response, details):\n try:\n return self.get_steam_profile(response)\n except:\n return \"\"", "def show_user_profile(username):\n\n name = USERS[username]\n return f\"<h1>Profile for {name}</h1>\"", "def get_profile(user):\n if user.is_authenticated():\n # Return the PootleProfile associated with authenticated users\n return user.get_profile()\n else:\n # Anonymous users get the PootleProfile associated with the 'nobody' user\n return User.objects.get(username='nobody').get_profile()", "def get_user_details(self, response):\n # Build the username with the team $username@$team_url\n # Necessary to get unique names for all of slack\n username = response.get('user')\n if 
self.setting('USERNAME_WITH_TEAM', True):\n match = re.search(r'//([^.]+)\\.slack\\.com', response['url'])\n username = '{0}@{1}'.format(username, match.group(1))\n\n out = {'username': username}\n if 'profile' in response:\n out.update({\n 'email': response['profile'].get('email'),\n 'fullname': response['profile'].get('real_name'),\n 'first_name': response['profile'].get('first_name'),\n 'last_name': response['profile'].get('last_name'),\n 'team_name': response.get('team_name')\n })\n return out", "def user_info(self):\r\n param = {}\r\n param['appid'] = self.apiKey\r\n param['nonce'] = int(time.time()*1000)\r\n param['timestamp'] = int(time.time())\r\n return self.__signed_GET('/api/v1/users/me', param, self.timeout)", "def user_info(username):\n print(json.dumps(client.user_info(username)))", "def getdat(user):\r\n profile = user.profile\r\n return [user.username, user.email] + [getattr(profile, xkey, '') for xkey in profkeys]", "def get_profile(request):\n collected_values = {}\n\n # Only allow GET requests on this endpoint\n if request.method != 'GET':\n collected_values[\"success\"] = False\n collected_values[\"errmsg\"] = \"Wrong HTTP verb\"\n return JsonResponse(collected_values, status=400)\n\n # Extract params\n uid = request.GET['uid']\n key = request.GET['key']\n\n # Hardcoded key for security\n if key != SUPER_SECURE_STRING:\n collected_values[\"success\"] = False\n collected_values[\"errmsg\"] = \"Invalid Key\"\n return JsonResponse(collected_values, status=400)\n\n # Grab the user's profile information\n users = LUser.objects.filter(user_id=uid)\n user = users[0]\n\n # Collect values\n collected_values[\"user_info\"] = user.get_map()\n collected_values[\"success\"] = True\n\n LOGGER.info(\"Get Profile Result: %s\", user)\n return JsonResponse(collected_values, status=200)", "def get_user_info_by_name(self, username: str) -> dict:", "def GetUserInformation(self):\n if _USER_INFO_KEY in self._session:\n return self._session[_USER_INFO_KEY]\n return self._ReCreateUserInfo()", "def get_user_info(self):\n user_info = self.data_source.get_user_info(self.user_id)\n\n return user_info", "def GetUserInfo(self):\n user = users.get_current_user()\n user_info = GetInfoForUser(user)\n if user:\n # Check to see if the user has auxiliary info for Swyzl, and if not\n # then create it.\n if not user_info:\n user_info = models.UserInfo()\n user_info.user = user\n user_info.put()\n\n url = users.create_logout_url(self.request.uri)\n url_link_text = 'Logout'\n else:\n url = users.create_login_url(self.request.uri)\n url_link_text = 'Login'\n return (user, url, url_link_text)", "def get_user_profile(self):\n\t\treturn Job(SDK.PrlSrv_GetUserProfile(self.handle)[0])", "def get_user_info(self):\n\n if self._access_token is None:\n raise RequiresAccessTokenError()\n response = self.__make_oauth_request(USER_INFO_URL, token=self._access_token, signed=True)\n return simplejson.loads(response.read())", "def user_profile_data(id):\n user = User.query.get(id)\n return user.to_dict_profile()", "def get_facebook_user_info(access_token):\n required_data_list = []\n for per in settings.FACEBOOK_EXTENDED_PERMISSIONS:\n required_data_list.append(per.replace(\"user_\",\"\"))\n \n required_data_list.append(\"picture.type(large)\")\n required_data = (\", \").join([data for data in required_data_list])\n \n graph_url = \"https://graph.facebook.com/me?access_token=%s&fields=%s\" % (access_token,required_data)\n public_info_url = \"https://graph.facebook.com/me?access_token=%s\" % access_token\n \n profile = 
json.load(urllib.urlopen(graph_url))\n profile_info = json.load(urllib.urlopen(public_info_url))\n \n profile_response_dict = {}\n profile_response_dict.update(profile)\n profile_response_dict.update(profile_info)\n profile_response_json = json.dumps(profile_response_dict)\n\n return (profile_response_json, profile_response_dict)", "def getUserInfo(self):\r\n userJson = self.httpGet(ReaderUrl.USER_INFO_URL)\r\n result = json.loads(userJson, strict=False)\r\n self.userId = result['userId']\r\n return result", "def user_info(user_id):\n return User.query.filter_by(id=user_id).first()", "def userinfo(self):\n return self._userinfo", "def user_profile(first, last, **add_info):\n profile = {}\n profile['firstname'] = first\n profile['lastname'] = last\n\n for key, value in add_info.items():\n profile[key] = value\n \n return profile", "def get_user_details(self, response):\n token = response.get('access_token')\n headers = {\"Authorization\": \"Bearer %s\" % token}\n endpoint = self.USER_INFO_URL\n response = requests.get(endpoint, headers=headers)\n return {'email': response.json()['email'] or '',\n # We'll need sub, the unique ID, for get_user_id.\n 'sub': response.json()['sub']}", "def userinfo(self, access_token: str) -> dict[str, Any]:\n data: dict[str, Any] = self.client.get(\n url=f\"{self.protocol}://{self.domain}/userinfo\",\n headers={\"Authorization\": f\"Bearer {access_token}\"},\n )\n return data", "def get(self, request):\n current_user = request.user\n user_profile = self.serializer_class(current_user.profile)\n return Response({\n \"message\": PROFILE_MSGS['MY_PROFILE'],\n \"profile\": user_profile.data\n }, status=status.HTTP_200_OK)", "def print_user_profile(self):\n print ('User Name: {name}').format(name=self.name)\n print ('User Email: {email}').format(email=self.email)\n print ('User Phone Number: {birthday}').format(birthday=self.phone_number)\n print ('User Country: {country}').format(country=self.country)\n print ('User Address: {address}').format(address=self.address)", "def get_user_details(self, response):\n return {\n \"username\": response.get(\"username\"),\n \"email\": response.get(\"email\"),\n \"fullname\": response.get(\"username\"),\n }", "def get_user_details():\n rv = query_db('select * from user')\n return rv[0] if rv else None", "def profile_details(self, profile_name):\n url = get_url('profile details', profile=profile_name)\n response = self._get(url)\n if response.status_code == 404:\n return None\n raise_on_error(response)\n return Profile(response.json())", "def profile(request):\n user = request.user\n characters = get_all_characters(user)\n if get_logged_in_char(characters):\n context = get_profile_context(user, characters)\n return render(request, 'coreapp/individual.html', context)\n else:\n family_members = characters\n userlname = user.last_name\n context = {'family_members' : family_members,\n 'lastname' : userlname,\n }\n return render(request, 'coreapp/profile.html', context)", "def profile():\n \n return render_template(\"profile.html\")", "async def get_my_profile_info_async(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = GetMyProfileInfo.create(\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def user_info(self) -> UserInfo:\n return self.__userInfo", "def get_user_details(self, 
response):\n first_name, last_name = response['first-name'], response['last-name']\n email = response.get('email-address', '')\n return {'username': first_name + last_name,\n 'fullname': first_name + ' ' + last_name,\n 'first_name': first_name,\n 'last_name': last_name,\n 'email': email}", "def profile():\n\n user_id = session.get(\"user_id\")\n userbuses = crud.show_all_userbus(user_id)\n\n \n\n if user_id:\n user = crud.get_user_by_id(user_id)\n return render_template('user_profile.html', user=user, userbuses=userbuses)\n \n else:\n flash('Please sign in')\n return render_template('login.html')", "def get_profile_info(self):\n\n drill_path = str(Path.home())+\"/Documents/ball_e_profiles/drill_profiles/{drill_name}/{drill_name}.csv\".format(\n drill_name=self.drill_name)\n with open(drill_path) as file:\n csv_reader = csv.reader(file, delimiter=',')\n row_count = 0\n info_dict = dict()\n for row in csv_reader:\n if row_count == 0:\n row_count += 1\n else:\n info_dict[row[0]] = [row[1], row[2], row[3]]\n row_count += 1\n\n return info_dict", "def get_profile(username):\n if username not in Profiles.keys():\n return jsonify({'message': 'User {} not found'.format(username)}), 404\n\n return jsonify(Profiles[username]), 200", "def get_user_details(self, response):\n values = {\n 'username': unquote(response['nick']),\n 'email': unquote(response['email']),\n 'first_name': unquote(response['first_name']),\n 'last_name': unquote(response['last_name'])\n }\n\n if values['first_name'] and values['last_name']:\n values['fullname'] = '%s %s' % (values['first_name'],\n values['last_name'])\n return values", "def user_info(self):\n \n return self.auth.get_user_by_session()", "def getUserInfo(UserId):\n url = f\"https://users.roblox.com/v1/users/{UserId}\"\n r = requests.get(url)\n j = json.loads(r.text)\n displayName = j['displayName']\n name = j['name']\n uid = j['id']\n isBanned = j['isBanned']\n joinDate = j['created']\n description = j['description']\n return displayName,name,uid,isBanned,joinDate,description", "def _getProfileFromUser(self):\n # Make sure user is authenticated\n user = endpoints.get_current_user()\n if not user:\n raise endpoints.UnauthorizedException('Authorization required')\n # Get Profile from datastore\n user_id = user.email()\n p_key = ndb.Key(Profile, user_id)\n profile = p_key.get()\n # Create new Profile if not there\n if not profile:\n profile = Profile(\n key = p_key,\n displayName = user.nickname(),\n mainEmail= user.email(),\n teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),\n )\n profile.put()\n return profile", "def get_user_details(self, response):\n name = response.get(\"name\")\n return {\n \"username\": str(response.get(\"account_id\")),\n \"email\": response.get(\"email\"),\n \"fullname\": name.get(\"display_name\"),\n \"first_name\": name.get(\"given_name\"),\n \"last_name\": name.get(\"surname\"),\n }", "def get_user_profile(email): # GET\n # NOTE: This method previously called LCS with director credentials in order to retrieve the user's name\n # We will update TeamRU to store names along with our user objects, saving the need to call LCS again\n user_profile = coll(\"users\").find_one({\"_id\": email})\n if not user_profile:\n return {\"message\": \"User not found\"}, 404\n user_profile[\"user_id\"] = user_profile.pop(\"_id\")\n return user_profile, 200", "def display_profile(self):\n print(f\"Id: {self._id}\")\n print(f\"username: {self.username}\")\n print(f\"name: {self.name}\")\n print(f\"contact: {self.contact}\")\n print(f\"address: 
{self.address}\")", "def __str__(self):\n profile = {\n 'address': self.address,\n 'bio': self.bio,\n 'website': self.website,\n 'hireable': self.hireable,\n 'travel_radius': self.travel_radius,\n 'phone number': self.phone_number,\n 'camera type': self.camera_type,\n 'type of photography': self.photography_type\n }\n return str(self.user.username)", "def profile():\n from flickrAPI import FlickrAPI\n #flickr = FlickrAPI(key=session['resource_owner_key'], secret=session['resource_owner_secret'])\n flickr = FlickrAPI(key=request.cookies.get('oauth_token'), secret=request.cookies.get('oauth_token_secret'))\n faves = flickr.favorites_getList(user_id=\"44124394781@N01\", page=1, per_page=5, extras='owner_name')\n return str(faves)", "def profile(request):\n profile = request.user.profile\n return render(request, 'accounts/profile.html', {'profile': profile})", "def get_user_details(self, response):\n fullname, first_name, last_name = self.get_user_names(\n response.get(\"fullName\"),\n response.get(\"firstName\"),\n response.get(\"lastName\"),\n )\n return {\n \"username\": response.get(\"username\"),\n \"email\": response.get(\"email\") or \"\",\n \"fullname\": fullname,\n \"first_name\": first_name,\n \"last_name\": last_name,\n }", "def current_user_info():\n\n return current_user", "def userProfile(userid):\n images = get_uploaded_images()\n record = UserProfile.query.filter_by(id=userid).first()\n return render_template('userProfile.html', images=images, record =record)", "def _get_user_details():\n with open(USER_DETAILS_FILE) as f:\n fitbit_user = json.load(f)\n access_token = fitbit_user['access_token']\n refresh_token = fitbit_user['refresh_token']\n expires_at = fitbit_user['expires_at']\n\n return access_token, refresh_token, expires_at", "def _get_user_info(self):\n\n if not self._refresh_token:\n raise ValueError(\"Refresh Token not set\")\n\n # Add access token to the headers\n add_headers = dict(self._default_headers)\n add_headers['Authorization'] = self._access_token\n\n resp = requests.get(BASE_URL + \"user/{}\".format(self._user_id), headers=add_headers, verify=False)\n if resp.status_code >= 300:\n raise Exception(\"Failed to retrieve user info: {}\".format(resp))\n\n vals = etree_to_dict(ET.XML(resp.content.decode('utf-8')))\n\n # Print generic user info\n print(\"\")\n print(\"== USER INFO ==\")\n print(\"Username: {}\".format(vals.get('user').get('username')))\n print(\"Nickname: {}\".format(vals.get('user').get('nickname')))\n print(\"Usage: {} MB / {} MB\".format(int(int(vals.get('user').get('quota').get('usage')) / (1024*1024)),\n int(int(vals.get('user').get('quota').get('limit')) / (1024*1024))))\n print(\"\")\n\n # Grab folder ids we care about\n self._user_sync_folders_url = vals.get('user').get('syncfolders')", "def get_user_info(self, access_token, openid):\n url = get_config(\"login.wechat.user_info_url\") % (access_token, openid)\n return self._access_wxapi_or_raise(url)", "def _get_userinfo(self):\n if not hasattr(self, \"_userinfo\"):\n self._userinfo = {\n \"name\" : self.user_name,\n \"email\" : self.user_email\n }\n if self.user_id:\n u = self.user\n if u.email:\n self._userinfo[\"email\"] = u.email\n\n # If the user has a full name, use that for the user name.\n # However, a given user_name overrides the raw user.username,\n # so only use that if this review has no associated name.\n if u.get_full_name():\n self._userinfo[\"name\"] = self.user.get_full_name()\n elif not self.user_name:\n self._userinfo[\"name\"] = u.username\n return self._userinfo", 
"def profile(username):\n username = mongo.db.users.find_one(\n {\"username\": session[\"user\"]})[\"username\"]\n return render_template(\"profile.html\", username=username)", "def getprofile(self, *args, **kwargs):\n return _image.image_getprofile(self, *args, **kwargs)", "def profile():\n freesound = OAuth2Session(client_id, token=session['oauth_token'])\n return jsonify(freesound.get('https://freesound.org/apiv2/me').json())", "def get_user_details(self, response):\n\n kaccount_email = \"\"\n kakao_account = response.get(\"kakao_account\", \"\")\n if kakao_account:\n kaccount_email = kakao_account.get(\"email\", \"\")\n properties = response.get(\"properties\", \"\")\n nickname = properties.get(\"nickname\") if properties else \"\"\n return {\n \"username\": nickname,\n \"email\": kaccount_email,\n \"fullname\": nickname,\n \"first_name\": nickname[1:] if nickname else \"\",\n \"last_name\": nickname[0] if nickname else \"\",\n }", "def user_details():\n url = 'https://api.github.com/orgs/facebook/repos'\n json_obj = urllib2.urlopen(url)\n userdata = json.load(json_obj)\n if 'error' in userdata:\n print 'errors are scanned in data'\n for data in userdata:\n if 'name' in data:\n if data['name'] == 'codemod':\n print 'language used'\n print data['language']\n print 'number of watchers'\n print data['watchers']\n print 'git url'\n print data['git_url']\n print 'open issues'\n print data['open_issues']\n print 'permissions for user'\n print 'push'\n print data['permissions']['push']\n print 'pull'\n print data['permissions']['pull']", "def get_login_user_profile(uid):\n # fetch the user info from db,\n # just in case the info has been updated somewhere\n json_user = User.find(uid).to_dict_with_mobile()\n json_user['work_experiences'] = work_service.get_work_experiences(uid)\n return json_user", "def userinfo(message):\n info = \"\"\n post_info = \"\"\n for filename in os.listdir(\"userdata/\"):\n with open(\"userdata/{}\".format(filename)) as data_file:\n userdata = json.load(data_file)\n\n if userdata[\"access\"]:\n db_list = []\n for instance in userdata[\"access\"]:\n db_list.append(instance[\"db\"])\n\n info += \"\" + userdata[\"name\"] + \" - \" + \", \".join(db_list) + \"\\n\"\n\n elif \"servers\" in userdata and userdata[\"servers\"]:\n post_info += userdata[\"name\"] + \" - \" + \", \".join(userdata[\"servers\"]) + \"\\n\"\n\n if not info:\n info = \"None!\"\n\n if not post_info:\n post_info = \"None!\"\n\n message.reply(\"Current user access:\\n```{}```\\nCurrently expired users that are still logged in:\\n```{}```\".format(info, post_info))", "def get_user_info_by_id(self, user_id: int) -> dict:", "def getUserInfo(user_id):\r\n user = session.query(User_info).filter_by(id=user_id).one()\r\n return user", "def user_profile(token, u_id):\n # pylint: disable=unused-argument\n # NB: Supressed this warning because token is in fact used in\n # the decorator, however pylint doesn't check for this.\n user = database.get_user_data(u_id)\n return {\"user\": user}", "def getInfo(self):\n request = self._connection.get('bookmarklet')\n userdata = self._userinfo_regex.search(request.text)\n if userdata is None: userdata = self._userinfo_regex_2.search(request.text)\n if userdata is None: raise errors.DiaspyError('cannot find user data')\n userdata = userdata.group(1)\n return json.loads(userdata)", "def get_user_profile(self, username: str = None) -> Profile:\n if username:\n res: dict[str, Any] = self.api.users.get(user_ids=username).pop()\n else:\n res = self.api.users.get().pop()\n return 
VkontakteConnector.build_profile(res)", "def user_profile():\n if CURR_USER_KEY in session:\n return render_template('/profile/detail.html')\n else:\n return redirect('/login')", "def get_profile(self,fields=('id','first-name','last-name','headline','summary')):\n\n if not self._access_token:\n raise FBError(\"Authentication needed!\")\n \n token = oauth.Token(self._access_token['oauth_token'], self._access_token['oauth_token_secret'])\n client = oauth.Client(self.consumer, token)\n profile_url = self.profile_url % \",\".join(fields)\n resp, content = client.request(profile_url,headers={\"x-li-format\":'json'})\n \n if resp['status'] != '200':\n print resp\n raise FBError(\"Invalid response %s.\" % resp['status'])\n \n try:\n return json.loads(content)\n except Exception, e:\n raise FBError(\"Invalid json %s.\" % unicode(e))", "def get_user_info(uid):\r\n session = tables.get_session()\r\n account_name = ''\r\n description = ''\r\n if session is None:\r\n return account_name, description\r\n try:\r\n user_account = UserAccount()\r\n account_name = user_account.get_field_by_key(UserAccount.account_name, UserAccount.user_id, uid,\r\n session)\r\n description = user_account.get_field_by_key(UserAccount.description, UserAccount.user_id, uid,\r\n session)\r\n except SQLAlchemyError as err:\r\n LOGGER.error('User login failed: %s', err)\r\n return account_name, description\r\n finally:\r\n session.close()\r\n return account_name, description", "def __str__(self):\n return self.user.username + \"'s Profile\"" ]
[ "0.79147726", "0.76836777", "0.7588437", "0.7583951", "0.75469744", "0.7536266", "0.74995303", "0.74964917", "0.7494386", "0.7476675", "0.74665", "0.74247897", "0.7411005", "0.7400891", "0.73773926", "0.73690546", "0.7363023", "0.73458177", "0.73306763", "0.73088706", "0.73054224", "0.73026615", "0.7292453", "0.72804105", "0.7255451", "0.724474", "0.724456", "0.7231989", "0.7162134", "0.7151556", "0.713262", "0.71175015", "0.70912254", "0.7070726", "0.70611644", "0.705813", "0.70569015", "0.70503765", "0.7048788", "0.7034491", "0.7032265", "0.7023922", "0.7020531", "0.7019899", "0.701507", "0.7012122", "0.70093864", "0.6998405", "0.6988417", "0.69728774", "0.6972742", "0.6964587", "0.6964286", "0.69594765", "0.6933268", "0.693238", "0.69319504", "0.69317913", "0.69278973", "0.6925567", "0.6921072", "0.691801", "0.6917568", "0.69146234", "0.69145435", "0.69050777", "0.69005436", "0.68941784", "0.6889629", "0.6888591", "0.68857783", "0.6881785", "0.6879689", "0.6872972", "0.68669164", "0.6862327", "0.6860903", "0.6856349", "0.68526614", "0.6821831", "0.6820235", "0.6814423", "0.6812135", "0.6799982", "0.6798971", "0.67756855", "0.67731506", "0.67699623", "0.6767965", "0.6760815", "0.6754735", "0.67496085", "0.6742481", "0.67411214", "0.6737864", "0.6736501", "0.67271084", "0.6721437", "0.6719247", "0.67132115" ]
0.67085403
100
google address form that prepopulates address
def google_postcard_form_ajax(): user_id = session["user_id"] # input from ajax call from HTML form street_number = request.form.get('street_number') route_address = request.form.get('route') city = request.form.get('locality') postal_code = request.form.get('postal_code') state = request.form.get("state") country = request.form.get('country') message = request.form.get('message', None) print "google-postcard-ajax", street_number, route_address, city, postal_code, state, country, message # commit form information to Database db.session.commit() postcard_data = {"street_number": street_number, "route": route_address, "city": city, "state": state, "country": country, "message": message} return jsonify(postcard_data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_autofill_address():\n try:\n # get data sent by client\n client_data = request.get_json()\n print(' ')\n print('\\n------ getting autofill_address ------')\n print(f\"recived: input:{client_data['text']}\")\n\n place = gmaps.place(client_data['value'])\n address = place['result']['address_components']\n data = {}\n for field in address:\n if 'street_number' in field['types']:\n data['street_number'] = field['short_name']\n continue\n if 'route' in field['types']:\n data['route'] = field['long_name']\n continue\n if 'locality' in field['types']:\n data['locality'] = field['long_name']\n continue\n if 'administrative_area_level_1' in field['types']:\n data['administrative_area_level_1'] = field['short_name']\n continue\n if 'postal_code' in field['types']:\n data['postal_code'] = field['short_name']\n continue\n\n # Pass data to the front end\n print(f'returning: {data}')\n return jsonify(data)\n\n except Exception as e:\n print(\"AJAX excepted \" + str(e))\n return str(e)", "def process_address():\n #get address info from form\n user_details = request.form\n #validate address with google geocoding\n update_details = apiapijoyjoy.validate_address(user_details)\n #update ino in db\n dbwrangler.newaddress(update_details)\n \n return redirect(\"/\")", "def build_address(record):\n pass", "def add_new_address(self, address: dict) -> None:\n self.new_address_button.click()\n\n self.address_form.select_location(address['address'])\n self.address_form.label_input.fill(address['name'])\n\n self.address_form.save_button.click()", "def get_address():\r\n address = input(\"What is the customer's address?: \")\r\n\r\n return address", "def getAddress(user):", "def generateNewAddress(self, currency):\n pass", "def add_address(intent, session):\n slots = intent.get('slots')\n sess_data = session.setdefault('attributes', {})\n sess_data['add_address'] = True\n sess_data.setdefault('next_step', 'which')\n if sess_data['next_step'] == 'which':\n if slots['which_address'].get('value') in ORIGIN_NAMES:\n sess_data['which'] = 'origin'\n sess_data['next_step'] = 'num_and_name'\n return reply.build(\"Okay, storing your origin address. \"\n \"What's the street number and name?\",\n reprompt=\"What's the street number and name?\",\n persist=sess_data,\n is_end=False)\n elif slots['which_address'].get('value') in DEST_NAMES:\n sess_data['which'] = 'destination'\n sess_data['next_step'] = 'num_and_name'\n return reply.build(\"Okay, storing your destination address. \"\n \"What's the street number and name?\",\n reprompt=\"What's the street number and name?\",\n persist=sess_data,\n is_end=False)\n else:\n sess_data['next_step'] = 'which'\n return reply.build(\"Would you like to set the address here or at \"\n \"your destination?\",\n reprompt='You can say \"here\" or \"destination\".',\n persist=sess_data,\n is_end=False)\n elif sess_data['next_step'] == 'num_and_name':\n if slots['address_street'].get('value'):\n num = slots.get('address_number', {}).get('value', '')\n direction = slots.get('direction', {}).get('value', '')\n st = slots.get('address_street', {}).get('value', '')\n sess_data['spoken_address'] = (('%s %s %s' %\n (num, direction, st))\n .replace(' ', ' ')\n .strip())\n sess_data['next_step'] = 'zip'\n return reply.build(\"Got it. Now what's the zip code? 
\"\n \"You can tell me \"\n \"to skip it if you don't know.\",\n reprompt=\"What's the zip code?\",\n persist=sess_data,\n is_end=False)\n else:\n return reply.build(\"Please say a street number and street name.\",\n reprompt=\"What's the street number and name?\",\n persist=sess_data,\n is_end=False)\n elif sess_data['next_step'] == 'zip':\n if not slots['address_number'].get('value'):\n return reply.build(\"I need the zip code now.\",\n reprompt=\"What's the zip code?\",\n persist=sess_data,\n is_end=False)\n sess_data['next_step'] = 'check_address'\n sess_data['zip_code'] = slots['address_number']['value']\n return add_address(intent, session)\n elif sess_data['next_step'] == 'check_address':\n if sess_data['zip_code']:\n # Assume that network subscribers are always interested\n # in in-state addresses, but not necessarily in the city.\n addr = '%s, %s, %s' % (sess_data['spoken_address'],\n config.default_state,\n sess_data['zip_code'])\n else:\n # Without a zip code, assume the network's home city\n # to add necessary specificity.\n addr = '%s, %s, %s' % (sess_data['spoken_address'],\n config.default_city,\n config.default_state)\n lat, lon, full_address = geocoding.get_lat_lon(addr)\n if full_address.endswith(\", USA\"):\n # We don't need to keep the country name.\n full_address = full_address[:-5]\n\n if full_address.lower().startswith(\"%s, %s\" %\n (config.default_city.lower(),\n config.default_state.lower())):\n # If the geocoding fails to find a specific address,\n # it will return a generic city location.\n sess_data['next_step'] = 'num_and_name'\n return reply.build(\"I'm sorry, I heard the address \\\"%s\\\", \"\n \"but I can't figure out where that is. \"\n \"Try a different address, something I can \"\n \"look up on the map.\" % addr,\n reprompt=\"What's the street number and name?\",\n persist=sess_data,\n is_end=False)\n\n sess_data['latitude'], sess_data['longitude'] = lat, lon\n sess_data['full_address'] = full_address\n sess_data['next_step'] = 'store_address'\n return reply.build(\"Thanks! Do you want to set \"\n \"your %s address to %s?\" %\n (sess_data['which'],\n location.text_to_speech(full_address)),\n reprompt=\"Is that the correct address?\",\n persist=sess_data,\n is_end=False)\n elif sess_data['next_step'] == 'store_address':\n # The user should have said \"yes\" or \"no\" after\n # being asked if we should store the address.\n # Only get here if they didn't.\n full_address = sess_data['full_address']\n return reply.build(\"Sorry, I didn't understand that. \"\n \"Do you want to set \"\n \"your %s address to %s?\" %\n (sess_data['which'],\n location.text_to_speech(full_address)),\n reprompt=\"Is that the correct address?\",\n persist=sess_data,\n is_end=False)\n else:\n return reply.build(\"I'm sorry, I got confused. What do you mean?\",\n persist=sess_data,\n is_end=False)", "def address(self, new_address):\n house_num, street_name, apt_num = new_address\n self._address.house_num = house_num\n self._address.street_name = street_name\n self._address.apt_num = apt_num", "def edit_address(self, new_label: str) -> None:\n self.address_form.label_input.fill(new_label)\n self.address_form.save_button.click()", "def address():\n # We start with generating the street name. 
For this we choose\n # between the most common prefixes and our own prefixes\n prefix = dice.randint(1, 100)\n if prefix <= 10: # 10%\n prefix = \"Haupt\"\n elif prefix <= 18: # 8%\n prefix = \"Schul\"\n elif prefix <= 25: # 7%\n prefix = \"Garten\"\n elif prefix <= 32: # 7%\n prefix = \"Dorf\"\n elif prefix <= 39: # 7%\n prefix = \"Bahnhof\"\n elif prefix <= 46: # 7%\n prefix = \"Wiesen\"\n elif prefix <= 52: # 6%\n prefix = \"Berg\"\n elif prefix <= 56: # 4%\n prefix = \"Kirch\"\n elif prefix <= 60: # 4%\n prefix = \"Wald\"\n elif prefix <= 64: # 4%\n prefix = \"Ring\"\n else:\n prefix = dice.choice(names.prefix)\n\n # Now we can add the suffix\n suffix = dice.randint(1, 100)\n if suffix <= 78:\n suffix = \"straße\"\n elif suffix <= 96:\n suffix = \"weg\"\n elif suffix <= 98:\n suffix = \"allee\"\n elif suffix == 99:\n suffix = \"ring\"\n elif suffix == 100:\n suffix = \"platz\"\n\n # When we have a city name as prefix, we need to capitalize the\n # suffix since it will be two words\n if prefix[-1] == \" \":\n suffix = suffix.capitalize()\n\n # Now we can add them together\n street = prefix + suffix\n\n # We need a house number as well. In Germany most numbers have\n # between one and four digits, so we will use this as base. Lower\n # numbers are more common, so we'll give it a 10% probability of\n # using 3 digits and 1% of using 4 digits\n digits = dice.randint(1, 100)\n if digits == 100:\n house_number = str(dice.randint(1000, 9999))\n elif digits >= 90:\n house_number = str(dice.randint(100, 999))\n else:\n house_number = str(dice.randint(1, 99))\n address_full = street + \" \" + house_number\n return address_full", "def address(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"address\")", "def concat_address_full(**kwargs):\r\n result = \"{concat_address} {city_name}, {state_code}\".format(**kwargs)\r\n if kwargs[\"five_digit_zip_code\"]:\r\n result += \" {five_digit_zip_code}\".format(**kwargs)\r\n if kwargs[\"four_digit_zip_code\"]:\r\n result += \"-{four_digit_zip_code}\".format(**kwargs)\r\n return result", "def test_valid_address_go_through(self):\n form = forms.GroupInviteForm({'emails': '[email protected]'})\n self.assertTrue(form.is_valid())", "def validate_address(self, value):\n # It seems we want delete address\n if not value:\n return None\n\n\n results = google_places.text_search(query=value)\n\n # We use google places autocomplite but we need recheck data\n # (we do not believe the data from frontend) and get first result\n if len(results.places) > 0:\n p = results.places[0]\n try:\n place = Place.objects.get(api_id=p.id)\n except Place.DoesNotExist:\n place = Place()\n place.populate_from_api(p)\n return place\n raise serializers.ValidationError('Please enter correct address')", "def format_address(**args):\n #Begin with the organisation and PO Box number, if applicable.\n address = ''.join([args[entry] + '\\n' \n for entry in ['organisation', 'PO box']\n if args.get(entry)])\n #Format building name/number components.\n address += format_building_components(*[args.get(x) for x in \n ['sub-building name', \n 'building name', \n 'building number',\n 'concatenation indicator']])\n #Add thoroughfare (if present), locality/town and postcode.\n address += ''.join([args[entry] + '\\n' \n for entry in ['dependent thoroughfare', \n 'thoroughfare',\n 'double dependent locality',\n 'dependent locality',\n 'town',\n 'postcode']\n if args.get(entry)])\n return address.strip()", "async def google(self, query):\r\n g_api = 
\"http://maps.googleapis.com/maps/api/geocode/json?\"\r\n url = g_api + urllib.parse.urlencode({'address': query})\r\n json_data = requests.get(url).json()\r\n formatted_address=json_data['results'][0]['formatted_address']\r\n sat1=json_data['results'][0]['geometry']['location']['lat']\r\n sat2 = json_data['results'][0]['geometry']['location']['lng']\r\n info = discord.Embed(title=query, color=0xefefef)\r\n info.add_field(name=\"\\u200b\", value=formatted_address, inline=False)\r\n info.add_field(name=\"\\u200b\", value=\"Lat:\"+str(sat1), inline=False)\r\n info.add_field(name=\"\\u200b\", value=\"Lng:\"+str(sat2), inline=False)\r\n await self.bot.say(embed=info)", "def edit_address(self) -> object:\n self.edit_button.click()\n\n if 'admin' not in self.driver.current_url:\n return WebAddressForm(self).wait_for_component_to_be_present()\n return AdminAddressForm(self).wait_for_component_to_be_present()", "def add_search_form():\n g.form = forms.SearchPlaces(formdata=None)\n g.action = url_for(\"page.search_query\")", "def set_address(self, address):\n if address == \"\":\n self.address = Address(\"\", \"\", \"\")\n else:\n self.address = address", "def address(self):\n ...", "def _clean_address(self, field):\n data = self.cleaned_data[field]\n if data != \"\" and not is_valid_address(data):\n raise ValidationError(\"Provided value is not a valid Algorand address!\")\n return data", "def form_params(self, lat, long):\n data = {'mode': 'retrieveAddresses',\n 'prox': \"{0},{1}\".format(lat,long),\n 'app_id': self._app_id,\n 'app_code': self._app_code}\n return data", "def makeAddressToGeocodeRequest(address):\n global headersGlobal, URL_addressToGeocode # get global variables\n\n key = variables.bingMapsAPIKey # api key\n\n # construct the url\n url = URL_addressToGeocode + str(address[0]) + \"/\" + str(address[1]) + \"/\" + str(address[2]) + \"/\" + str(\n address[3]) + \"/\" + str(address[4]) + \"?key=\" + key\n\n request = requests.get(url, headers=headersGlobal) # make the request\n return request # return the request", "def get_address(query):\n address = \"Dis-moi, quel endroit tu cherches ?\"\n data = get_data(query)\n try:\n address_data = data[\"results\"][0][\"formatted_address\"]\n address = (\"Si je ne me trompe pas,\"\n \" l'adresse que tu cherche, c'est ... \" + address_data + \". Sinon\"\n \", dis-moi le nom de lieu exact\")\n except IndexError:\n address = \"Désolé, je n'ai pas compris quel endroit tu cherches ?\"\n finally:\n return address", "def set_address(self, new_address, ):\n self.address.append(new_address)\n self.save()", "def get_address(self):\n\n return \"{}\\n{}\\n{},\\n{},\\n{}\".format(\n self.address_line_1, self.city, self.state, self.postal_code, self.country\n )", "def _compute_adress(self):\r\n\t\tfor leads in self:\r\n\t\t\tleads.address = leads.street + \" \" + leads.street2", "def address(self):\n return str(self.street) + str(self.city) + str(self.state) + str(self.zipcode)", "def street_address(self):\n\t\tif self.address2:\n\t\t\treturn '{}, {}'.format(self.address, self.address2)\n\t\treturn self.address", "def address1(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"address1\")", "def address1(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"address1\")", "def street_address():\r\n\r\n return _random.choice(\r\n [\r\n '%d-%d %s' % (\r\n _random.randrange(999),\r\n _random.randrange(999),\r\n street_name()\r\n ),\r\n '%d %s' % (\r\n _random.randrange(999),\r\n street_name()\r\n ),\r\n '%s %d, %s' % (\r\n 'P.O. 
Box',\r\n _random.randrange(999),\r\n street_name()\r\n )\r\n ]\r\n )", "def choose_new_address(self) -> \"CheckoutPage\":\n self.accordion = BillingDetailsUser(self.driver)\n self.accordion.btn_new_address.click()\n return self", "def get_apartment_address(self, soup, apartment_dict):\n\n info_class = soup.find_all('div', {'class': 'info'})\n if info_class and len(info_class) > 0:\n info_class = info_class[0]\n address = info_class.find('h2').text.strip()\n\n from parse import parse\n address = parse(\"Location: {}\", address)[0]\n apartment_dict['address'] = address\n else:\n logging.warning(\"Failed to parse apartment address\")\n return", "def get_address(post_content):\n post_address = post_content.find(\"div\", {\"class\": \"mapaddress\"})\n address_attr = {\"address\": \"\"}\n if post_address is not None:\n address_attr[\"address\"] = post_address.text\n return address_attr", "def get_address(self):\n return self.address.line[0]+\", \"+self.address.city+\", \"+self.address.state+\", \"+self.address.country", "def test_address_other_parameters():\n address = lob.Address.create(name='Siddharth Saha', address_line1='104, Printing Boulevard',\n address_line2='Sunset Town', email='[email protected]',\n address_city='Boston', address_state='MA', address_country='US',\n address_zip='12345')\n print address.to_dict()", "def address_line_1(self):\n return \"{} {} {}\".format(\n self.fake.randomize_nb_elements(1000),\n self.fake.last_name(),\n self.fake.random_element(elements=STREET_SUFFIX)\n )", "def build_search_url(query):\n google_url = []\n # Build URL to query Google\n google_url.append('https://www.google.com/search?')\n # I'm feeling lucky: go to first result\n google_url.append('btnI=1')\n # Limit results to only this specific website\n google_url.append('&as_sitesearch=docs.aws.amazon.com')\n # Build query\n query = \"aws cloudformation \" + query\n # This line escapes spaces and the like\n query = urllib.quote_plus(query.strip())\n # Attach query to URL\n google_url.append(\"&q=\")\n google_url.append(query)\n return \"\".join(google_url)", "def get_label(self):\r\n return _(\"Address:\")", "def create_or_update_address(address, customer):\n\tname = frappe.db.get_value('Address', { 'entity_id': address.get('entity_id') })\n\tif not name:\n\t\taddr = frappe.new_doc('Address')\n\t\taddr.address_title = \"{} {} {}\".format(\n\t\t\taddress.get(\"firstname\"),\n\t\t\taddress.get(\"lastname\"),\n\t\t\taddress.get(\"entity_id\")\n\t\t)\n\telse:\n\t\taddr = frappe.get_doc(\"Address\", name)\n\n\taddr.address_type = get_address_type(address).get('type')\n\taddr.entity_id = address.get('entity_id')\n\taddr.address_line1 = address.get('street')[0]\n\taddr.address_line2 = address.get('street')[1] if len(address.get('street')) > 1 else \"\"\n\taddr.city = address.get('city')\n\taddr.country = frappe.db.get_value('Country', { 'code': address.get('country_id') })\n\taddr.state = address.get('region')\n\taddr.pincode = address.get('postcode')\n\taddr.phone = address.get('telephone') or '00000'\n\taddr.fax = address.get('fax')\n\taddr.customer = customer\n\taddr.customer_name = address.get('firstname')+' '+address.get('lastname')\n\taddr.is_primary_address = get_address_type(address).get('is_primary_address')\n\taddr.is_shipping_address = get_address_type(address).get('is_shipping_address')\n\n\taddr.save(ignore_permissions=True)", "def __init__(self, **address):\n self.sub_building_name = address.get('sub-building name','')\n self.building_name = address.get('building name', '')\n 
self.building_number = address.get('building number', '')\n self.dependent_thoroughfare = address.get('dependent thoroughfare', '')\n self.thoroughfare = address.get('thoroughfare', '')\n self.postcode = address.get('postcode', '')\n self.double_dependent_locality = address.get('double dependent locality', '')\n self.dependent_locality = address.get('dependent locality', '')\n self.town = address.get('post town', '')\n self.department = address.get('department name', '')\n self.organisation = address.get('organisation name', '')\n self.concatenation_indicator = address.get('concatenation indicator', False)\n self.po_box_num = address.get('po box', '')", "def form_valid(self, ppform, address_form,cuform):\n addr = address_form.save()\n cuformo = cuform.save()\n ppform.save()\n self.object.address = addr\n self.object.user = cuformo\n self.object.save()\n\n return HttpResponseRedirect(self.get_success_url())", "def street_address1(self) -> str:\n return pulumi.get(self, \"street_address1\")", "def _format_address(address):\n if 'country' in address and address['country']:\n country = address['country']\n if country == 'CA':\n address['country'] = 'CANADA'\n elif country == 'US':\n address['country'] = 'UNITED STATES OF AMERICA'\n else:\n try:\n country: str = pycountry.countries.search_fuzzy(country)[0].name\n address['country'] = country.upper()\n except (AttributeError, TypeError):\n address['country'] = country\n\n return address", "def set_Street(self, value):\n super(AddressValidationInputSet, self)._set_input('Street', value)", "def google(self):\r\n prefix ='https://maps.googleapis.com/maps/api/staticmap?center='\r\n middle = '&zoom=14&size=400x400&markers='\r\n suffix = '&key=AIzaSyD5nqmDGFH1SUZxJAYVtFHP7RNjjFE9CHg'\r\n marker = '+'.join(self.placeToSearch) # marker in google format, no space but + separator\r\n request = prefix + marker+middle+marker+suffix\r\n\r\n return request", "def __str__(self):\n return format_address(**self._get_elements())", "def nomad_address():\n\n print(nomad.get_address())", "def address1(self, instance):\r\n return instance.user.profile.address1", "def add_url_form(request, template_name=\"us/url_form.html\"):\n if request.method == \"GET\":\n form = UrlForm()\n else:\n form = UrlForm(data=request.POST)\n if form.is_valid():\n if form.cleaned_data.get(\"short_url\").strip() != \"\":\n new_short_url = form.cleaned_data.get(\"short_url\")\n else:\n while 1:\n new_short_url = ''.join(random.choice(string.ascii_letters) for _ in range(4))\n try:\n Url.objects.get(short_url=new_short_url)\n except Url.DoesNotExist:\n break\n\n url, created = Url.objects.get_or_create(\n short_url=new_short_url,\n )\n\n url.url = form.cleaned_data.get(\"url\")\n url.save()\n\n return HttpResponse(url.short_url)\n\n return render_to_response(template_name, RequestContext(request, {\n \"form\": form,\n }))", "def process_address(text):\n return sanitize(text[9:])", "def _get_address(self, address1, address2):\n return f'{address1}\\n{address2}' if address2 else address1", "def resolve_addressing(parsed_template: parse_templates.ParsedTemplateRefined,\n grpd: parse_pronoun_data.GRPD) -> (parse_templates.ParsedTemplateRefined,\n parse_pronoun_data.GRPD):\n\n new_template = copy.deepcopy(parsed_template)\n new_grpd = copy.deepcopy(grpd)\n for i in range(1, len(new_template), 2):\n id_value = new_template[i][\"id\"]\n if new_template[i][\"context\"] == \"address\":\n if ContextValues.get_value(grpd, id_value, \"gender-addressing\") in (\"f\", \"false\"):\n 
new_template[i][\"context\"] = \"personal-name\"\n\n return new_template, new_grpd", "def test_address_other_parameters():\n address = lob.Address.create(name = 'Siddharth Saha', address_line1 = '104, Printing Boulevard',\n address_line2 = 'Sunset Town', email = '[email protected]', \n address_city = 'Boston', address_state = 'MA', address_country = 'US',\n address_zip = '12345')\n print address.to_dict()", "def address(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"address\")", "def address_regex(self) -> Any:", "def forward_geocode(self, params, address_input_data ):\n processed_address_list = []\n # check avoids redundancy for combined 'forward geocode and validate' \n # option as API does both by default\n if self.__is_address_list_processed:\n processed_address_list = address_input_data\n else:\n request_list = self.__prepare_smarty_request_list(address_input_data)\n processed_address_list = self.__process_smarty_request_list(request_list, \n address_input_data )\n self.__is_address_list_processed = True\n print(f'< {self.num_addresses_processed} addresses processed >')\n return processed_address_list", "def format_single_address(address: Address | str) -> str:\n address = coerce_address(address)\n name = address.display_name\n if not name:\n return address.addr_spec\n\n if not needs_qp_encode(name):\n if specials_regex.search(name):\n # simple quoting works here, since we disallow\n # backslash escaping double quotes.\n name = f'\"{name}\"'\n return f'{name} <{address.addr_spec}>'\n\n name = qp_encode_display_name(name)\n return f'{name} <{address.addr_spec}>'", "def show_fresh_address(self):\n\t\treturn self.__fresh_account()[\"address\"]", "def _formatting_address_fields(self):\n return self._address_fields()", "def get_address(self):\n if self.get_entity: # needs an entity to work\n if self.building:\n address = self.get_entity.get_institutional_address()\n address.extend(self.building.get_postal_address())\n return address\n else:\n return self.get_entity.get_address()", "def _get_address(self, soup):\n street, city, state, zipcode = None, None, None, None\n try:\n # property detail tag\n street = soup.find('div', class_='main-address').get_text().strip()\n # find address tag\n address = soup.find('div', class_='c-address')\n \n # pattern for the address in this website\n locality = address.find_all('span', class_='locality')\n city = locality[0].get_text().strip()\n if len(locality) > 1:\n city = locality[1].get_text().strip()\n state = address.find('span', class_='region').get_text().strip()\n zipcode = address.find('span', class_='postal-code').get_text().strip()\n return street, city, state, zipcode\n except:\n return street, city, state, zipcode", "def postcode(full_address):\n return capture_address_element(POSTCODE_PATTERN, full_address)", "def url_construction(company):\n postcode = company[\"registered_address\"].strip()\n postcode = postcode.split(\" \")\n for i in range(len(postcode) - 1, 0, -1): # loop backwards in the obtained string\n if postcode[i].strip().isdigit(): # if the obtained string is fully a number\n postcode = postcode[i].strip()\n break\n\n keyword = company[\"name\"].strip().replace(\" \",\n \"%20\").strip() # gets the name and replaces empty spaces with \"%20\" in order to be used as a keyword in the url\n keyword = keyword.replace(\"&\",\n \"%26\").strip() # gets the name and replaces & symbols with \"%26\" in order to be used as a keyword in the url\n\n url = \"https://www.xing.com/search/companies?zip_code=\" + postcode + 
\"&keywords=\" + keyword # making the full url of the search operation\n return url", "def __str__(self):\n if self._street_name != self.DEFAULT_STREET_NAME and \\\n self._house_num != self.DEFAULT_HOUSE_NUM and \\\n self._apt_num != self.DEFAULT_APT_NUM:\n address = f\"\\n{self._house_num} {self._street_name} Street, \" \\\n f\"#{self._apt_num}\"\n return address\n else:\n return \"<None>\"", "def address_str(self):\n return self._plrevgeoloc.addressString", "def format_address(line1, line2, city, state, zipcode):\n\t\n\tstreetlines = line1\n\tcityline = city\n\t\n\tif len(streetlines) > 0 and len(line2) > 0:\n\t\tstreetlines += \"\\n\"\n\t\n\tif len(cityline) > 0 and len(state) > 0:\n\t\tcityline += \", \"\n\t\n\tstreetlines += line2\n\tcityline += state\n\t\n\treturn \"\\n\".join([streetlines, cityline, zipcode])", "def normalize_address(patched_address: OrderedDict[str, str]) -> location.Address:\n\n address_kwargs = {\n # \"street1\",\n # \"city\",\n # \"state\",\n # \"zip\"\n }\n street_buffer: List[str] = []\n suite_buffer: List[str] = []\n while len(patched_address) > 0:\n component, value = patched_address.popitem(last=False)\n if component == \"PlaceName\":\n address_kwargs[\"city\"] = value\n elif component == \"StateName\":\n address_kwargs[\"state\"] = value\n elif component == \"ZipCode\":\n address_kwargs[\"zip\"] = value\n elif component == \"OccupancyType\":\n suite_buffer.append(value)\n elif component == \"OccupancyIdentifier\":\n suite_buffer.append(value)\n else:\n street_buffer.append(value)\n address_kwargs[\"street1\"] = \" \".join(street_buffer)\n if len(suite_buffer) > 0:\n address_kwargs[\"street2\"] = \" \".join(suite_buffer)\n\n return location.Address(**address_kwargs)", "def add_address(self, **kwargs):\n addressitem = AddressItem(**kwargs)\n self.addresses.append(addressitem)\n # TODO check uniqueness of email addresses", "def integrated_address_regex(self) -> Any:", "def set_query_string(self):\n\n if self.search_by == 'by-postal-code':\n self.querystring = {'postalCode': self.search_input, 'countryCode': \"US\"}\n else :\n self.querystring = {'city': self.search_input}", "def get_address(self):\n entity = self\n if entity.abstract_entity:\n entity = self.get_real_ancestor()\n if entity:\n address = entity.get_institutional_address()\n building = entity.get_building()\n if building:\n if entity.building_recapitulates_entity_name: \n address.extend(building.get_postal_address()[1:])\n else:\n address.extend(building.get_postal_address())\n return address", "def store_address(intent, session):\n sess_data = session.setdefault('attributes', {})\n if not sess_data.get('add_address') and \\\n not sess_data['next_step'] == 'store_address':\n raise RuntimeError('Something went wrong.')\n\n data = {sess_data['which']: dict(latitude=str(sess_data['latitude']),\n longitude=str(sess_data['longitude']),\n address=str(sess_data['full_address']))}\n success = database.update_user_data(session['user']['userId'], **data)\n if not success:\n return reply.build(\"I'm sorry, something went wrong and I could't \"\n \"store the address.\", is_end=True)\n else:\n return reply.build(\"Okay, I've saved your %s \"\n \"address.\" % sess_data['which'],\n is_end=True)", "def address2(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"address2\")", "def address2(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"address2\")", "def update_address(cls, address_data):\n address_instance = cls.objects.get(email=address_data['customer']['email'])\n address_data 
= address_data.get('addresses')\n for field_name, values in address_data:\n setattr(address_instance, field_name, values)\n address_instance.save()\n return address_instance.save()", "def get_address_string(self):\n output = ''\n if self.address_line_1:\n output += '{}'.format(self.address_line_1)\n if self.address_line_2:\n output += ', {}'.format(self.address_line_2)\n if self.city:\n output += ', {}'.format(self.city)\n if self.state:\n output += ', {}'.format(self.state)\n if self.zipcode:\n output += ' {}'.format(self.zipcode)\n return output", "def on_GenerateRandomAccountAddress_2_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def produce_link_google(self, value_search:str) -> str:\n # https://www.google.com/search?channel=fs&client=ubuntu&q=a%24ap+rocky+testing\n for replace, replaced in [(' ', '+'), \n ('$', '%24')]:\n value_search = value_search.replace(replace, replaced)\n return ''.join(['https://www.google.com/search?',\n 'channel=fs&client=ubuntu&',\n f'q={value_search}'])", "def as_address(self, base_clazz):\n address = base_clazz()\n address.name = self.name\n address.line1 = self.line1\n address.line2 = self.line2\n address.city = self.city\n address.region = self.region\n address.country = self.country\n address.post_code = self.post_code\n address.phone = self.phone\n return address", "def get_address(address: str) -> Tuple[str, str, str]:\n\n # Try to geocode the address as given\n g = geocoder.osm(address)\n\n if g.json is not None:\n\n # TODO this is inefficient and hacky\n\n # First thing we attempt if the result isn't complete is just to\n # add the housenumber (often the issue).\n if not good_geocoder_result(g.json):\n g.json['housenumber'] = usaddress.tag(address)[0]['AddressNumber']\n\n # If the result is now good, return it\n if good_geocoder_result(g.json):\n\n # Geocoding was successful. Return the result\n return (\n # First part is a nicely formatted address\n f\"{g.json['housenumber']} {g.json['street']}, {g.json['city']}, {g.json['state']} {g.json['postal']}\",\n # Second is the latitude\n g.json['lat'],\n # And third is the longitude\n g.json['lng']\n )\n\n # Geocoding was unsuccessful.\n # Let's try to create a cleaner address by first parsing out the pieces we need, then try again.\n \n # Parsing the address components...\n parsed, addr_type = usaddress.tag(address)\n if addr_type != \"Street Address\":\n raise ValueError(f\"Address could not be properly parsed. Resulting type: {addr_type}. 
Result: \\n{parsed}\")\n \n # Trim off any whitespace from the parsed components.\n for part in parsed:\n parsed[part] = parsed[part].strip()\n\n reqd_address_parts = ['AddressNumber', 'StreetName', 'PlaceName']\n if any(address_part not in parsed for address_part in reqd_address_parts):\n raise ValueError(f\"The address must have at least a house number, street, and city.\")\n \n # Initialize the resulting address string with the address number (aka house/street number)\n new_address = parsed['AddressNumber']\n \n # If the streetname is just a number, make it ordinal\n if parsed['StreetName'].isnumeric():\n parsed['StreetName'] = ordinal(parsed['StreetName'])\n \n # Get the whole street name\n for k, v in [(k, v) for k, v in parsed.items() if k.startswith(\"StreetName\")]:\n new_address += f\" {v}\"\n \n # Add the city...\n new_address += f\", {parsed['PlaceName']}\"\n # Add the state, if it exists\n if 'StateName' in parsed:\n new_address += f\", {parsed['StateName']}\"\n # And the zip code, if it exists\n if 'ZipCode' in parsed:\n new_address += f\" {parsed['ZipCode']}\"\n \n # Now try to geocode this improved address\n g = geocoder.osm(new_address)\n\n if g.json is not None:\n\n # Geocoding was successful. Return the result\n return (\n # First part is a nicely formatted address\n f\"{g.json['housenumber']} {g.json['street']}, {g.json['city']}, {g.json['state']} {g.json['postal']}\",\n # Second is the latitude\n g.json['lat'],\n # And third is the longitude\n g.json['lng']\n )\n \n # Still can't geocode the address. Throw an error\n else:\n raise ValueError(f\"Could not geocode this address: {address}\")", "def __unicode__(self):\n return smart_unicode(\"%s %s %s %s\" % (self.address1, self.address2 if self.address2 else '', self.zipcode, self.city))", "def test_address_correct(self):\n tester = app.test_client(self)\n response = tester.post(\"/result\",\n data = dict(location=\"Chennai\"),\n follow_redirects=True)\n self.assertIn(b\"Chennai, Chennai District, Tamil Nadu, 600001, India\", response.data)", "def on_GenerateRandomAccountAddress_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def geolocate_address(self):\n self.geolocator = Nominatim(user_agent=\"fundaft\")\n\n # If latitude / longitude are missing, try to geocode them on the basis\n # of the address \n self.coords = [self.get_coords(address) if np.isnan(lat)\n else (lat, lon) for address, lat, lon in\n zip(self.df_ads['property_title'], \n self.df_ads['latitude'], \n self.df_ads['longitude'])]\n \n df = pd.DataFrame(self.coords, columns=['latitude', 'longitude'])\n \n # If new coordinates are not in Dublin, change to na again\n df = self.is_in_dublin(df)\n\n self.df_ads[[\"latitude\",\"longitude\"]] = df", "def clean_address(self, s):\n # The letter \"O\" instead of the numeral \"0\" is a common mistake.\n s = re.sub(\n r\"\\b[A-Z][O0-9][A-Z]\\s?[O0-9][A-Z][O0-9]\\b\", lambda x: x.group(0).replace(\"O\", \"0\"), clean_string(s)\n )\n for k, v in province_or_territory_abbreviations().items():\n # Replace a province/territory name with its abbreviation.\n s = re.sub(\n r\"[,\\n ]+\"\n r\"\\(?\" + k + r\"\\)?\"\n r\"(?=(?:[,\\n ]+Canada)?(?:[,\\n ]+[A-Z][0-9][A-Z]\\s?[0-9][A-Z][0-9])?\\Z)\",\n \" \" + v,\n s,\n )\n # Add spaces between province/territory abbreviation, FSA and LDU and remove \"Canada\".\n return re.sub(\n r\"[,\\n ]+\" r\"([A-Z]{2})\" r\"(?:[,\\n ]+Canada)?\" r\"[,\\n ]+([A-Z][0-9][A-Z])\\s?([0-9][A-Z][0-9])\" r\"\\Z\",\n r\" \\1 \\2 \\3\",\n s,\n )", "def add_address(self, 
address_item):\r\n self.addresses_to_validate.append(address_item)", "def get_address(self, ):\n return self.get_parameter('address')", "def form(update, context):\n update.message.reply_text(\"\"\"Fill out the form 👇 👇 👇\n https://forms.gle/VREhdtCNqJ6rZNfQ7\"\"\")", "def normalize_address(self, address, domain):\n if address is not None and not self.address_regex.match(address):\n if domain is not None:\n address = \"{address}@{domain}\".format(address=address, domain=domain)\n else:\n address = None\n\n return address", "def address2(self, instance):\r\n return instance.user.profile.address2", "def format_address(value):\n if type(value) in (tuple, list):\n return ', '.join([format_address(v) for v in value])\n name, addr = parseaddr(value)\n return formataddr((encode_header(name), addr.encode('ascii')))", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def address(self) -> str:\n return pulumi.get(self, \"address\")", "def get_address(self, code, title) -> str:\n return f'{self.path(code)}?title={title}&redirect=no'", "def set_address(self, address):\n pass", "def address1(self, address1):\n\n self._address1 = address1", "def get_location_gecode_address_str(address):\n location = {\n 'Latitude': {\n 'Value': None\n },\n 'Longitude': {\n 'Value': None\n }\n }\n geo_res = []\n if bool(address): # Check if address is non-falsey \n geo_res = gmaps.geocode(address)\n if len(geo_res) != 0:\n latitude = geo_res[0]['geometry']['location']['lat']\n longitude = geo_res[0]['geometry']['location']['lng']\n location['Latitude']['Value'] = latitude\n location['Longitude']['Value'] = longitude\n return location" ]
[ "0.63437706", "0.6269668", "0.6109527", "0.5922518", "0.5916802", "0.57907397", "0.57753795", "0.5754547", "0.57486415", "0.5744301", "0.57018614", "0.5698119", "0.5649869", "0.56383955", "0.559803", "0.55916196", "0.55846936", "0.55796117", "0.55482423", "0.554614", "0.55070263", "0.5491783", "0.54625946", "0.54613537", "0.5443895", "0.54396963", "0.5434926", "0.5434183", "0.5419733", "0.5366783", "0.53532255", "0.53532255", "0.53350854", "0.5332192", "0.53102404", "0.5305409", "0.52992755", "0.5285387", "0.5260205", "0.52494526", "0.52477413", "0.5243341", "0.5242599", "0.52343297", "0.5217845", "0.5216959", "0.5214021", "0.52003324", "0.5199715", "0.51988876", "0.51988757", "0.5198697", "0.51854455", "0.51850915", "0.5183191", "0.51712716", "0.51681715", "0.5146285", "0.5146105", "0.514604", "0.5143209", "0.51369", "0.5135809", "0.51332736", "0.51285833", "0.5124522", "0.5120971", "0.5115402", "0.5083347", "0.5078267", "0.5076802", "0.50731814", "0.50634956", "0.50526744", "0.50404006", "0.5025167", "0.5025167", "0.50232553", "0.5008759", "0.49978647", "0.49916488", "0.49887082", "0.4984458", "0.49723795", "0.49721032", "0.49634945", "0.49551082", "0.49473375", "0.49453714", "0.49422708", "0.49376485", "0.49358034", "0.49347347", "0.49307266", "0.49293596", "0.49293596", "0.49293596", "0.49285695", "0.49255538", "0.4924827", "0.49233016" ]
0.0
-1
d3 state map where users can click on country and changes colors
def d3_world_map(): return render_template("world.html")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def states_traveled(person, color):\r\n folium.GeoJson(data=state_geo,\r\n name=person['Name'] + ' - '\r\n + str(len(person['States'])),\r\n style_function=state_style(person, color)\r\n ).add_to(us_map)", "def make_map(df):\n fig = px.choropleth(df, locations='iso', color=np.log10(df['value']),\n hover_data=[df['value'], df['country_region']],\n color_continuous_scale='Plasma_r',\n labels={'color': 'Active <br> cases'})\n fig.update_layout(title='Click on map to select a country',\n coloraxis_colorbar_tickprefix='1.e',\n margin=dict(l=0, r=0),\n height=FIRST_LINE_HEIGHT)\n fig.update_traces(\n hovertemplate='<b>Country</b>:%{customdata[1]}<br><b>Cases</b>:%{customdata[0]}',\n )\n return fig", "def do_stateplot(df: pd.DataFrame, thru: date):\n\tst_dict = dict({\"1\":\"AL\",\"2\":\"AK\",\"4\":\"AZ\",\"5\":\"AR\",\"6\":\"CA\",\"8\":\"CO\",\"9\":\"CT\",\"10\":\"DE\",\"11\":\"DC\",\"12\":\"FL\",\"13\":\"GA\",\"15\":\"HI\",\n\t\"16\":\"ID\",\"17\":\"IL\",\"18\":\"IN\",\"19\":\"IA\",\"20\":\"KS\",\"21\":\"KY\",\"22\":\"LA\",\"23\":\"ME\",\"24\":\"MD\",\"25\":\"MA\",\"26\":\"MI\",\"27\":\"MN\",\"28\":\"MS\",\n\t\"29\":\"MO\",\"29\":\"MO\",\"30\":\"MT\",\"31\":\"NE\",\"32\":\"NV\",\"33\":\"NH\",\"34\":\"NJ\",\"35\":\"NM\",\"36\":\"NY\",\"37\":\"NC\",\"38\":\"ND\",\"39\":\"OH\",\"40\":\"OK\",\n\t\"41\":\"OR\",\"42\":\"PA\",\"44\":\"RI\",\"45\":\"SC\",\"46\":\"SD\",\"47\":\"TN\",\"48\":\"TX\",\"49\":\"UT\",\"50\":\"VT\",\"51\":\"VA\",\"53\":\"WA\",\"54\":\"WV\",\"55\":\"WI\",\n\t\"56\":\"WY\"})\n\tlocs = []\n\tfor x in iter(df.fips):\n\t\tlocs.append(st_dict[x])\n\tdf['text'] = \"Total Deaths: \"+ str(df['Deaths'].astype('int'))\n\n\tfig = go.Figure(data=go.Choropleth(locations=locs,\n\t\tlocationmode='USA-states', z=df.fatalityrate.round(2),\n\t\tcolorscale='Viridis', hovertext=df['text'],\n\t\tcolorbar_title=\"Deaths per 100 residents\"\n\t\t))\n\n\tfig.update_layout(hovermode=\"x unified\"\n\t\t)\n\tfig.update_layout(title_text='covid mortality by State thru ' +\n\t\tthru.strftime('%m-%d-%Y')+ \" -custom data analysis by Brian Herbert\", geo_scope='usa'\n\t\t)\n\treturn fig", "def plot_choropleth(_type, state_id=6):\n if _type == 'states':\n return states_choropleth.plot_map(DATA)\n elif _type == 'state':\n return state_choropleth.plot_map(DATA, state_id)", "def add_raster_of_country2ds(ds, country='South Africa',\n set_all_regions2one=True,\n test_plot=False, dpi=320):\n # Get shapes for country\n shapes = get_shapes4country(country=country)\n # Add country's states as a layer\n ds['states'] = rasterize(shapes, ds.coords)\n # Test plot of this?\n if test_plot:\n from . plotting import quick_map_plot\n savename = 'spatial_plot_of_shapes4country_{}'.format(country)\n quick_map_plot(ds, var2plot='states', savename=savename)\n\n # set all the regions (e.g. 
counties/states) in a country to 1\n if set_all_regions2one:\n arr = ds['states'].values\n arr[np.where(~np.isnan(arr))] = 1\n ds['states'].values = arr\n return ds", "def test_choropleth_pass():\n m = view(world, column=\"pop_est\")", "def update_state_call(state_id):\n update_state = plot_choropleth('state', state_id).to_html()\n return update_state", "def state_style(person, color):\r\n return lambda x: {'fillColor': color if x['id']\r\n in person['States'] else 'white',\r\n 'color': 'black',\r\n 'weight': 0.3,\r\n 'fillOpacity': 0.5 if x['id']\r\n in person['States'] else 0.0\r\n }", "def plot_p2_states(data, axs):\r\n vmin, vmax = (data[\"MD_EARN_WNE_P10\"].min(),\r\n data[\"MD_EARN_WNE_P10\"].nlargest(2).iloc[1])\r\n plot_args = {\"vmin\": vmin, \"vmax\": vmax}\r\n plot_states(data[~data[\"name\"].isin([\"Alaska\", \"Hawaii\"])], axs[0],\r\n \"MD_EARN_WNE_P10\", \"Mainland\", draw_bound=False,\r\n plot_args={\"legend\": True, \"cax\": axs[-1], **plot_args})\r\n plot_states(data[data[\"name\"] == \"Alaska\"], axs[1], \"MD_EARN_WNE_P10\",\r\n \"Alaska\", plot_args=plot_args)\r\n plot_states(data[data[\"name\"] == \"Hawaii\"], axs[2], \"MD_EARN_WNE_P10\",\r\n \"Hawaii\", plot_args=plot_args)", "def get_colors_st(top_cities_reviews):\n unique_states = top_cities_reviews['state'].unique()\n\n st = {}\n for state in unique_states:\n r = random.uniform(0, 1)\n g = random.uniform(0, 1)\n b = random.uniform(0, 1)\n st[state] = [r, g, b]\n\n return st", "def map_plot(iso3_codes, countries_organisations_amount,countries_list):\n d = {'ISO-3': iso3_codes, 'spending': countries_organisations_amount, 'countries': countries_list}\n df = pd.DataFrame(data=d)\n fig = px.choropleth(df,\n locations='ISO-3',\n color=\"spending\",\n scope=\"world\",\n labels={'spending': 'Amount of organisations'},\n height=500,\n hover_name=df['countries'],\n hover_data=['spending'],\n custom_data=['spending','countries']\n )\n\n fig.update_layout(\n title_text='Number of organisations lobbying in the EU',\n geo=dict(\n showframe=False,\n showcoastlines=False,\n projection_type='equirectangular'))\n fig.update_traces(hovertemplate=\"<b> %{customdata[1]} </b> : Number of organisations: %{customdata[0]}\")\n return fig", "def plotly_map():\n df = process_life_expectancy_dataset(\"regression\")\n\n selected_df = convert_ohe_columns_into_one(df, \"x0\", \"country\")\n\n # Choosing year 1800 for map plots\n selected_df = selected_df[selected_df[\"year\"] == \"1800\"]\n\n # Plotting on Map\n fig = px.choropleth(selected_df, locations=\"country\", locationmode=\"country names\", color=\"value\",\n hover_name=\"country\", color_continuous_scale = px.colors.sequential.Plasma)\n\n return fig", "def create_map_coloring_csp():\n csp = CSP()\n states = [ 'WA', 'NT', 'Q', 'NSW', 'V', 'SA', 'T' ]\n edges = { 'SA': [ 'WA', 'NT', 'Q', 'NSW', 'V' ], 'NT': [ 'WA', 'Q' ], 'NSW': [ 'Q', 'V' ] }\n colors = [ 'red', 'green', 'blue' ]\n for state in states:\n csp.add_variable(state, colors)\n for state, other_states in edges.items():\n for other_state in other_states:\n csp.add_constraint_one_way(state, other_state, lambda i, j: i != j)\n csp.add_constraint_one_way(other_state, state, lambda i, j: i != j)\n return csp", "def plot_states(data, ax, column, title=None, draw_bound=True,\r\n bound_args=None, plot_args=None):\r\n bound_args = bound_args if bound_args else {}\r\n plot_args = plot_args if plot_args else {}\r\n if draw_bound:\r\n draw_geo_bound(data, ax, **bound_args)\r\n if title:\r\n ax.set_title(title)\r\n data.plot(column=column, 
ax=ax, **plot_args)", "def map_county_data_compare(dem_16, rep_16, outname, county_svg):\n\n county_svg = open(county_svg, 'r').read()\n county_data = csv.reader(open('processed_data.csv'), delimiter = \",\")\n \n soup = BeautifulSoup(county_svg, \"html.parser\")\n paths = soup.findAll('path')\n blue_red = ['#FF0000','#EC0517','#DA092E','#C70E46','#B5135D','#A21774','#901C8B','#7D20A2','#6B25B9','#582AD1','#462EE8','#3333FF', '#FFFFFF']\n # County style\n path_style = 'font-size:12px;fill-rule:nonzero;stroke:#FFFFFF;stroke-opacity:1; stroke-width:0.1;stroke-miterlimit:4;stroke-dasharray:none;stroke-linecap:butt; marker-start:none;stroke-linejoin:bevel;fill:'\n\n for p in paths:\n if p['id'] not in ['State_Lines', 'separator']:\n #print(str(p['id']))\n try:\n rate = rep_16[int(p['id'])]\n rate2 = dem_16[int(p['id'])]\n except: \n continue\n if rate < 0.1 or rate2 > 0.9:\n color_class = 9\n elif rate < 0.2 or rate2 > 0.8: \n color_class = 8\n elif rate < 0.3 or rate2 > 0.7: \n color_class = 7\n elif rate < 0.4 or rate2 > 0.6: \n color_class = 6\n elif rate < 0.5 or rate2 > 0.5: \n color_class = 5\n elif rate < 0.6 or rate2 > 0.4: \n color_class = 4\n elif rate < 0.7 or rate2 > 0.3: \n color_class = 3\n elif rate < 0.8 or rate2 > 0.2: \n color_class = 2\n elif rate < 0.9 or rate2 > 0.1: \n color_class = 0\n elif rate < 1.0: \n color_class = 1\n else: color_class = 10\n\n color = blue_red[color_class]\n p['style'] = path_style + color\n \n\n f = open(outname, 'w')\n f.write(soup.prettify())\n f.close()", "def add_raster_of_oceans2ds(ds, featurecla='ocean', set_all_regions2one=False,\n test_plot=False, dpi=320):\n # Get shapes for country\n shapes = get_shapes4oceans(featurecla=featurecla)\n # Add country's states as a layer\n ds[featurecla] = rasterize(shapes, ds.coords)\n # Test plot of this?\n if test_plot:\n from . plotting import quick_map_plot\n savename = 'spatial_plot_of_shapes4oceans_{}'.format(featurecla)\n quick_map_plot(ds, var2plot=featurecla, savename=savename)\n # set all the regions (e.g. 
counties/states) in a country to 1\n if set_all_regions2one:\n arr = ds[featurecla].values\n arr[np.where(~np.isnan(arr))] = 1\n ds[featurecla].values = arr\n return ds", "def generate_map(chart_data):\n fig = px.choropleth(chart_data, locations=\"country_code\",\n color=\"Confirmed\",\n hover_name=\"Country/Region\",\n color_continuous_scale='Reds',\n projection= 'equirectangular',\n labels= {'Confirmed':'Confirmed Cases'},\n width = 700,\n height = 300\n )\n \n fig.update_layout(\n geo=dict(\n showframe=False,\n showcoastlines=False,\n projection_type='equirectangular'),\n margin={\"r\":0,\"t\":20,\"l\":0,\"b\":0}) \n\n return fig", "def world_map():\n\n # AJAX CALL FOR USER STATE VISIT\n\n # get current user from session\n user_id = session[\"user_id\"]\n print user_id\n\n # inputs from state map in console.log [feature.id] = state_id feature = state\n country_id = request.form['mapData.id']\n print country_id\n\n\n\n country = db.session.query(Country).filter_by(country_id=country_id).one()\n\n\n user_country_obj = User_Country(country_id=country_id, user_id=user_id, visited_at=datetime.now())\n\n\n # TODO: make the object be added\n db.session.add(user_country_obj)\n db.session.commit()\n\n\n# # # TODO: query datbase for the information to go into this json\n\n user_country_json_data = {\"country_id\": country.country_id, \"country_name\": country.country_name, \"visited_at\": user_country_obj.visited_at}\n\n\n return jsonify(user_country_json_data)", "def map_county_data(percentage, outname, county_svg, dem = False):\n\n county_svg = open(county_svg, 'r').read()\n county_data = csv.reader(open('processed_data.csv'), delimiter = \",\")\n \n soup = BeautifulSoup(county_svg, \"html.parser\")\n paths = soup.findAll('path')\n red_colors = ['#FF0033', \"#FF4066\", \"#FF8099\", '#FFBFCC', '#FFFFFF']\n blue_colors = ['#3333FF', \"#6666FF\", \"#9999FF\", \"#CCCCFF\", \"#FFFFFF\"]\n \n path_style = 'font-size:12px;fill-rule:nonzero;stroke:#FFFFFF;stroke-opacity:1; stroke-width:0.1;stroke-miterlimit:4;stroke-dasharray:none;stroke-linecap:butt; marker-start:none;stroke-linejoin:bevel;fill:' \n \n if dem:\n colors = blue_colors\n else:\n colors = red_colors\n \n for p in paths:\n if p['id'] not in ['State_Lines', 'separator']:\n #print(str(p['id']))\n try:\n rate = percentage[int(p['id'])]\n except: \n continue\n \n if rate < 0.1:\n color_class = 4\n elif rate < 0.3: \n color_class = 3\n elif rate < 0.5: \n color_class = 2\n elif rate < 0.7: \n color_class = 1\n elif rate < 0.9: \n color_class = 0\n \n color = colors[color_class]\n p['style'] = path_style + color\n \n\n f = open(outname, 'w')\n f.write(soup.prettify())\n f.close()", "def drought_state_risk_map(request):\n \n view_center = [-105.2, 39.0]\n view_options = MVView(\n projection='EPSG:4326',\n center=view_center,\n zoom=7.0,\n maxZoom=12,\n minZoom=5\n )\n\n # TIGER state/county mapserver\n tiger_boundaries = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/State_County/MapServer'},\n legend_title='States & Counties',\n layer_options={'visible':True,'opacity':0.2},\n legend_extent=[-112, 36.3, -98.5, 41.66]) \n \n ##### WMS Layers - Ryan\n usdm_legend = MVLegendImageClass(value='Drought Category',\n image_url='http://ndmc-001.unl.edu:8080/cgi-bin/mapserv.exe?map=/ms4w/apps/usdm/service/usdm_current_wms.map&version=1.3.0&service=WMS&request=GetLegendGraphic&sld_version=1.1.0&layer=usdm_current&format=image/png&STYLE=default')\n usdm_current = MVLayer(\n 
source='ImageWMS',\n options={'url': 'http://ndmc-001.unl.edu:8080/cgi-bin/mapserv.exe?',\n 'params': {'LAYERS':'usdm_current','FORMAT':'image/png','VERSION':'1.1.1','STYLES':'default','MAP':'/ms4w/apps/usdm/service/usdm_current_wms.map'}},\n layer_options={'visible':False,'opacity':0.25},\n legend_title='USDM',\n legend_classes=[usdm_legend],\n legend_extent=[-126, 24.5, -66.2, 49])\n \n # USGS Rest server for HUC watersheds \n watersheds = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://hydro.nationalmap.gov/arcgis/rest/services/wbd/MapServer'},\n legend_title='HUC Watersheds',\n layer_options={'visible':False,'opacity':0.4},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n \n # Sector drought vulnerability county risk score maps -> from 2018 CO Drought Plan update\n vuln_legend = MVLegendImageClass(value='Risk Score',\n image_url='/static/tethys_gizmos/data/ag_vuln_legend.jpg')\n state_vuln_kml = MVLayer(\n source='KML',\n options={'url': '/static/tethys_gizmos/data/CO_StateAssets_vuln_score_2018.kml'},\n layer_options={'visible':True,'opacity':0.75},\n legend_title='State Assets Risk Score',\n feature_selection=True,\n legend_classes=[vuln_legend],\n legend_extent=[-109.5, 36.5, -101.5, 41.6])\n \n # Define GeoJSON layer\n # Data from CoCoRaHS Condition Monitoring: https://www.cocorahs.org/maps/conditionmonitoring/\n with open(como_cocorahs) as f:\n data = json.load(f)\n \n # the section below is grouping data by 'scalebar' drought condition\n # this is a work around for displaying each drought report classification with a unique colored icon\n data_sd = {}; data_md ={}; data_ml={}\n data_sd[u'type'] = data['type']; data_md[u'type'] = data['type']; data_ml[u'type'] = data['type']\n data_sd[u'features'] = [];data_md[u'features'] = [];data_ml[u'features'] = []\n for element in data['features']:\n if 'Severely Dry' in element['properties']['scalebar']:\n rdate = datetime.datetime.strptime(element['properties']['reportdate'][:10],\"%Y-%m-%d\")\n if rdate >= week20:\n data_sd[u'features'].append(element)\n if 'Moderately Dry' in element['properties']['scalebar']:\n rdate = datetime.datetime.strptime(element['properties']['reportdate'][:10],\"%Y-%m-%d\")\n if rdate >= week20:\n data_md[u'features'].append(element)\n if 'Mildly Dry' in element['properties']['scalebar']:\n rdate = datetime.datetime.strptime(element['properties']['reportdate'][:10],\"%Y-%m-%d\")\n if rdate >= week20:\n data_ml[u'features'].append(element)\n \n cocojson_sevdry = MVLayer(\n source='GeoJSON',\n options=data_sd,\n legend_title='CoCoRaHS Condition Monitor',\n legend_extent=[-112, 36.3, -98.5, 41.66],\n feature_selection=False,\n legend_classes=[MVLegendClass('point', 'Severely Dry', fill='#67000d')],\n layer_options={'style': {'image': {'circle': {'radius': 6,'fill': {'color': '#67000d'},'stroke': {'color': '#ffffff', 'width': 1},}}}})\n\n cocojson_moddry = MVLayer(\n source='GeoJSON',\n options=data_md,\n legend_title='',\n legend_extent=[-112, 36.3, -98.5, 41.66],\n feature_selection=False,\n legend_classes=[MVLegendClass('point', 'Moderately Dry', fill='#a8190d')],\n layer_options={'style': {'image': {'circle': {'radius': 6,'fill': {'color': '#a8190d'},'stroke': {'color': '#ffffff', 'width': 1},}}}})\n\n cocojson_mildry = MVLayer(\n source='GeoJSON',\n options=data_ml,\n legend_title='',\n legend_extent=[-112, 36.3, -98.5, 41.66],\n feature_selection=False,\n legend_classes=[MVLegendClass('point', 'Mildly Dry', fill='#f17d44')],\n layer_options={'style': {'image': {'circle': {'radius': 6,'fill': 
{'color': '#f17d44'},'stroke': {'color': '#ffffff', 'width': 1},}}}})\n\n \n # Define map view options\n drought_state_risk_map_view_options = MapView(\n height='100%',\n width='100%',\n controls=['ZoomSlider', 'Rotate', 'ScaleLine', 'FullScreen',\n {'MousePosition': {'projection': 'EPSG:4326'}},\n {'ZoomToExtent': {'projection': 'EPSG:4326', 'extent': [-130, 22, -65, 54]}}],\n layers=[tiger_boundaries,cocojson_sevdry,cocojson_moddry,cocojson_mildry,state_vuln_kml,usdm_current,watersheds],\n view=view_options,\n basemap='OpenStreetMap',\n legend=True\n )\n\n context = {\n 'drought_state_risk_map_view_options':drought_state_risk_map_view_options,\n }\n\n return render(request, 'co_drought/drought_state_risk.html', context)", "def draw_state(subplot, name, **kwargs):\n global _color_idx\n if name not in state2poly:\n if get_statename(name) in state2poly:\n name = get_statename(name)\n else:\n print \"state %s not found\" % name\n return\n\n kwargs['color'] = \"#FFFFFF\"\n for polygon in state2poly[name]:\n draw_polygon(subplot, polygon, **kwargs)", "def load_country_code_data():\n name_conversion = {\n 'East Timor': 'Timor-Leste',\n 'Republic of the Congo': 'Congo (Kinshasa)',\n 'Ivory Coast': 'Cote d\\'Ivoire',\n 'Macedonia': 'North Macedonia',\n 'Myanmar': 'Burma',\n 'Republic of Serbia': 'Serbia',\n 'Taiwan': 'Taiwan*',\n 'The Bahamas': 'Bahamas',\n 'United Republic of Tanzania': 'Tanzania',\n 'United States of America': 'US'\n }\n\n shapefile = os.path.join('data', 'ne_110m_admin_0_countries.shp')\n\n gdf = gpd.read_file(shapefile)[['ADMIN', 'ADM0_A3', 'geometry']]\n gdf.columns = ['country', 'country_code', 'geometry']\n\n gdf.loc[gdf['country'].isin(name_conversion.keys()), 'country'] = gdf['country'].map(name_conversion)\n\n return gdf", "def get_state_colors():\n state_colors = []\n state_cases = []\n state_active = []\n for i in get_covid_stats_for_all_states():\n state_colors.append(i.color)\n state_cases.append(i.cases)\n state_active.append(i.activeCases)\n socketio.emit(\n \"colors\", {\"colors\": state_colors, \"cases\": state_cases, \"active\": state_active}\n )", "def draw_states(self):\n drawing = self.tree.draw(\n width=400,\n height=300,\n layout='d',\n node_labels=(\"idx\", 1, 1),\n node_sizes=15,\n node_style={\"stroke\": \"black\", \"stroke-width\": 2},\n node_colors=[\n toytree.colors[int(round(i[1]))] if isinstance(i, (list, np.ndarray))\n else \"white\" \n for i in self.tree.get_node_values(\"likelihood\", True, True)\n ],\n )\n return drawing", "def write():\n with st.spinner(\"Loading Map ...\"):\n\n # read CSV\n\n # CSV for Choropleth Map\n df = pd.read_csv(\"https://raw.githubusercontent.com/hannahkruck/visuasyl/master/src/datasets/Map.csv\", encoding =\"utf8\", sep=\";\")\n # CSV for Line Map\n df2 = pd.read_csv(\"https://raw.githubusercontent.com/hannahkruck/visuasyl/master/src/datasets/Map.csv\", encoding =\"utf8\", sep=\";\")\n\n # Title\n st.title(\"Map view\")\n\n#----------------- Side bar (filter options) -------------------\n\n # Select map (Choropleth or Line Map)\n selectedMapType = st.sidebar.radio(\"Map\",('Choropleth Map', 'Line Map'))\n if selectedMapType == 'Choropleth Map':\n showChoropleth = True\n showLine = False\n else:\n showLine = True\n showChoropleth = False\n\n # General filter (Age, Gender)\n st.sidebar.header(\"Filters\")\n selectedAge = st.sidebar.multiselect(\"Select Age\", (\"under 18\", \"18 - 34\", \"35 - 64\", \"over 65\"))\n selectedGender = st.sidebar.selectbox(\"Select Gender\", (\"All\", \"Male\", \"Female\"))\n\n # --- 
Special filter for Choropleth Map --\n st.sidebar.header(\"Filter for Choropleth Map\")\n # Drop down menu for Choropleth Map Information\n selectedMapChoropleth = st.sidebar.selectbox(\"Select Map Information\",('Applications to target countries','Applicants by country of origin'))\n # Information for Choropleth Map based on the chosen map information\n if 'target' in selectedMapChoropleth:\n selectedMapChoropleth = 'destinationCountry'\n selectedCode = 'geoCodeDC'\n mapColor = 'Blues'\n else:\n selectedMapChoropleth = 'homeCountry'\n selectedCode = 'geoCodeHC'\n mapColor = 'Reds'\n\n # --- Special filter for Line Map ---\n st.sidebar.header(\"Filter for Line Map\")\n # Select type (show routes of asylum seeker from a particular origin country or to a particular target country)\n selectedType = st.sidebar.radio(\"Select type\",('Target country','Origin country'))\n if selectedType == 'Target country':\n selectedType = df.destinationCountry.unique()\n countryCategory = 'destinationCountry'\n namesToShow = 'homeCountry'\n selectedLon = 'lonDC'\n selectedLat = 'latDC'\n else:\n selectedType = df.homeCountry.unique()\n countryCategory = 'homeCountry'\n namesToShow = 'destinationCountry'\n selectedLon = 'lonHC'\n selectedLat = 'latHC'\n # Drop down menu for selected country\n selectedCountryMapLine = st.sidebar.selectbox(\"Select country\",(selectedType))\n\n\n#----------------- Website content (Year slider, i-Button) -------------------\n\n # --- Markdown for Info icon ---\n # CSS and HTML Code\n st.markdown('''\n <!-- https://www.w3schools.com/css/tryit.asp?filename=trycss_tooltip_transition & https://www.w3schools.com/css/tryit.asp?filename=trycss_tooltip_right-->\n <style>\n .tooltip {\n position: relative;\n display: inline-block;\n font-size:1.6rem;\n \n }\n \n .tooltip .tooltiptext {\n visibility: hidden;\n width: 50vw;\n background-color: #f1f3f7;\n color: #262730;\n text-align: justify;\n border-radius: 6px;\n padding: 5px;\n font-size:0.9rem;\n \n /* Position the tooltip */\n position: absolute;\n z-index: 1;\n top: -5px;\n left: 105%;\n \n opacity: 0;\n transition: opacity 0.8s;\n }\n \n .tooltip:hover .tooltiptext {\n visibility: visible;\n opacity: 1;\n }\n </style>\n ''', unsafe_allow_html=True)\n\n # Text for tooltip\n st.markdown('''\n <div class=\"tooltip\">&#x24D8\n <span class=\"tooltiptext\">\n <b>Choropleth Map</b><br>The Choropleth Map shows the number of asylum applications per country in Europe and the number of refugees per country worldwide for the selected year (see filter 'Select Map Information' for Choropleth Map).\n <br><br>\n <b>Line Map</b><br>The Line Map presents the routes of the refugees depending on the selected type. The type 'target country' shows from which countries the asylum seekers originate based on a specific target country. The type 'origin country' indicates where the asylum seekers are fleeing to from a specific country of origin.\n <br><br>\n <b>Colour gradient</b><br> It should be noted here that the colour gradient adjusts to the maximum and minimum value, i.e. 
the colour changes with each filtering.\n \n </span></div>\n ''', unsafe_allow_html=True)\n\n # Slider to choose the year\n selected_year = st.slider(\"\", (int(df[\"year\"].min())),(int(df[\"year\"].max())))\n\n # Title for map regarding the chosen year\n st.subheader('Asylum seekers in the year %s' % selected_year)\n\n\n#----------------- Data preparation (general) -------------------\n\n # Remove 'overall' and 'Überseeische Länder und Hoheitsgebiet' for both CSV\n indexNames = df[ df['destinationCountry'] == 'Overall' ].index\n df.drop(indexNames , inplace=True)\n indexNames = df[ df['homeCountry'] == 'Overall' ].index\n df.drop(indexNames , inplace=True)\n\n indexNames = df[ df['destinationCountry'] == 'Überseeische Länder und Hoheitsgebiete' ].index\n df.drop(indexNames , inplace=True)\n indexNames = df[ df['homeCountry'] == 'Überseeische Länder und Hoheitsgebiete' ].index\n df.drop(indexNames , inplace=True)\n\n indexNames = df2[ df2['destinationCountry'] == 'Overall' ].index\n df2.drop(indexNames , inplace=True)\n indexNames = df2[ df2['homeCountry'] == 'Overall' ].index\n df2.drop(indexNames , inplace=True)\n\n indexNames = df2[ df2['destinationCountry'] == 'Überseeische Länder und Hoheitsgebiete' ].index\n df2.drop(indexNames , inplace=True)\n indexNames = df2[ df2['homeCountry'] == 'Überseeische Länder und Hoheitsgebiete' ].index\n df2.drop(indexNames , inplace=True)\n\n # Delete all cells, except one year (both maps)\n indexNames = df[ df['year'] != selected_year ].index\n df.drop(indexNames , inplace=True)\n\n indexNames = df2[ df2['year'] != selected_year ].index\n df2.drop(indexNames , inplace=True)\n\n\n#----------------- Data preparation (Choropleth Map) -------------------\n\n # Information for Choropleth Map (df) based on the chosen gender and age\n df['subtotal']=0\n # Check selected gender\n if selectedGender == 'Female':\n # if an age is selected\n if selectedAge:\n # selectedAge is a list of strings\n # Therefore, we have to check every entry in the list and sum up partial results in new column subtotal\n for i in selectedAge:\n if i == 'under 18':\n df['subtotal']=df['subtotal']+df['fu18']\n elif i == '18 - 34':\n df['subtotal']=df['subtotal']+df['f18']\n elif i == '35 - 64':\n df['subtotal']=df['subtotal']+df['f35']\n elif i == 'over 65':\n df['subtotal']=df['subtotal']+df['fo65']\n else: # no age is selected, that means the user wants to see all women\n df['subtotal'] = df['subtotal']+df['womenTotal']\n a = 'subtotal'\n elif selectedGender == 'Male':\n if selectedAge:\n for i in selectedAge:\n if i == 'under 18':\n df['subtotal']=df['subtotal']+df['mu18']\n elif i == '18 - 34':\n df['subtotal']=df['subtotal']+df['m18']\n elif i == '35 - 64':\n df['subtotal']=df['subtotal']+df['m35']\n elif i == 'over 65':\n df['subtotal']=df['subtotal']+df['mo65']\n else:\n df['subtotal'] = df['subtotal']+df['menTotal']\n a = 'subtotal'\n else: # if no gender is selected, that means the user wants to see all\n if selectedAge:\n for i in selectedAge:\n if i == 'under 18':\n df['subtotal']=df['subtotal']+df['mu18']+df['fu18']\n elif i == '18 - 34':\n df['subtotal']=df['subtotal']+df['m18']+df['f18']\n elif i == '35 - 64':\n df['subtotal']=df['subtotal']+df['m35']+df['f35']\n elif i == 'over 65':\n df['subtotal']=df['subtotal']+df['fo65']+df['mo65']\n a = 'subtotal'\n else:\n a = 'total'\n\n # Group the countries by year and sum up the number (total) in a new column sum (df['sum']\n df['sum']=df.groupby([selectedMapChoropleth,'year'])[a].transform('sum')\n\n\n\n#----------------- Data 
preparation (Line Map) -------------------\n\n # countryCategory = homeCountry or destinationCountry\n # selectedCountryMapLine is the selected country for the map line (for example Syria (homeCountry))\n indexNames = df2[ df2[countryCategory] != selectedCountryMapLine ].index\n df2.drop(indexNames , inplace=True)\n\n df2['subtotal'] = 0\n\n if selectedGender == 'Female':\n # if an age is selected\n if selectedAge:\n # selectedAge is a list of strings\n # Therefore, we have to check every entry in the list and delete the row if the value in the column for the age is null\n for i in selectedAge:\n if i == 'under 18':\n indexNames = df2[ df2['fu18'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['fu18']\n elif i == '18 - 34':\n indexNames = df2[ df2['f18'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['f18']\n elif i == '35 - 64':\n indexNames = df2[ df2['f35'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['f35']\n elif i == 'over 65':\n indexNames = df2[ df2['fo65'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['fo65']\n else:\n indexNames = df2[ df2['womenTotal'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['womenTotal']\n elif selectedGender == 'Male':\n if selectedAge:\n # selectedAge is a list of strings\n # Therefore, we have to check every entry in the list and delete the row if the value in the column for the age is null\n for i in selectedAge:\n if i == 'under 18':\n indexNames = df2[ df2['mu18'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['mu18']\n elif i == '18 - 34':\n indexNames = df2[ df2['m18'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['m18']\n elif i == '35 - 64':\n indexNames = df2[ df2['m35'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['m35']\n elif i == 'over 65':\n indexNames = df2[ df2['mo65'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['mo65']\n else:\n indexNames = df2[ df2['menTotal'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['menTotal']\n else: # if no gender is selected, that means the user wants to see all\n if selectedAge:\n for i in selectedAge:\n if i == 'under 18':\n indexNames = df2[ df2['mu18'] == 0].index\n df2.drop(indexNames , inplace=True)\n indexNames = df2[ df2['fu18'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['mu18']+df2['fu18']\n elif i == '18 - 34':\n indexNames = df2[ df2['m18'] == 0].index\n df2.drop(indexNames , inplace=True)\n indexNames = df2[ df2['f18'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['m18']+df2['f18']\n elif i == '35 - 64':\n indexNames = df2[ df2['m35'] == 0].index\n df2.drop(indexNames , inplace=True)\n indexNames = df2[ df2['f35'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['m35']+df2['f35']\n elif i == 'over 65':\n indexNames = df2[ df2['mo65'] == 0].index\n df2.drop(indexNames , inplace=True)\n indexNames = df2[ df2['fo65'] == 0].index\n df2.drop(indexNames , inplace=True)\n df2['subtotal']=df2['subtotal']+df2['mo65']+df2['fo65']\n else: # all people are considered\n indexNames = df2[ df2['total'] == 0 ].index\n df2.drop(indexNames , inplace=True)\n\n # Create list of 
origin or target countries to display them in hover text\n # Every second index must contain the country name, so a placeholder is necessary in front of it\n # Structur: [placeholder,name+number,placeholder,name+number,...]\n # name = listPlaceholderNames\n # number = listPlaceholderNumber\n \n listPlaceholderNames = df2[namesToShow].values.tolist()\n listPlaceholderNumber = df2[a].values.tolist()\n\n nameList = []\n i = 0\n if namesToShow == 'homeCountry':\n for x in listPlaceholderNames:\n nameList.append(i)\n x = x +': '+ str(listPlaceholderNumber[i])\n nameList.append(x)\n i = i+1\n if len(nameList) != 0:\n nameList[-2]=None\n else:\n for x in listPlaceholderNames:\n x = x +': '+ str(listPlaceholderNumber[i])\n nameList.append(x)\n nameList.append(i)\n i = i+1\n if len(nameList) != 0:\n nameList[-1]=None\n\n\n st.write('<style>div.Widget.row-widget.stRadio > div{flex-direction:row;}</style>', unsafe_allow_html=True)\n\n \n#----------------Create Maps with Plotly (Choropleth and Line Map)---------------------------\n\n fig = go.Figure()\n\n # Choropleth Map\n fig.add_trace(\n go.Choropleth(\n locations = df[selectedCode],\n visible=showChoropleth,\n z = df['sum'],\n text = df[selectedMapChoropleth],\n colorscale = mapColor,\n autocolorscale=False,\n reversescale=False,\n name=\"\",\n marker_line_color='darkgray',\n marker_line_width=0.5,\n colorbar_tickprefix = '',\n colorbar_title = 'Number of<br>asylum<br>applications<br>',\n ))\n\n #--------- Line Map --------------\n # Set selected country\n fig.add_trace(\n go.Scattergeo(\n locationmode = 'country names',\n lon = df2[selectedLon],\n lat = df2[selectedLat],\n hoverinfo = 'text',\n name= selectedCountryMapLine,\n text = df2[countryCategory],\n line = dict(width = 1,color = 'red'),\n opacity = 0.510,\n visible = showLine,\n mode = 'markers',\n )\n )\n\n # NumPy Array Slicing\n # Longitude and Latitude\n lons = []\n lats = []\n lons = np.empty(2 * len(df2))\n lons[::2] = df2['lonDC']\n lons[1::2] = df2['lonHC']\n lats = np.empty(2 * len(df2))\n lats[::2] = df2['latDC']\n lats[1::2] = df2['latHC']\n\n # Set lines\n fig.add_trace(\n go.Scattergeo(\n locationmode = 'country names',\n visible= showLine,\n name='route and number <br>of asylum seekers',\n hovertemplate = nameList,\n lon = lons,\n lat = lats,\n mode = 'markers+lines',\n line = dict(width = 1,color = 'red'),\n opacity = 0.5\n )\n )\n\n # Update layout choropleth map\n fig.update_layout(\n showlegend = True,\n geo = go.layout.Geo(\n scope = 'world',\n #projection_type = 'azimuthal equal area',\n showland = True,\n showcountries=True,\n landcolor = 'rgb(243, 243, 243)',\n countrycolor = 'rgb(105,105,105)',\n ),\n\n )\n\n # Update layout line map\n fig.update_layout(\n geo=dict(\n showframe=False,\n showcoastlines=False,\n projection_type='equirectangular'\n ),\n autosize=True,\n margin=dict(\n l=0,\n r=0,\n b=0,\n t=20,\n ),\n )\n\n # Display figure\n st.plotly_chart(fig,use_container_width=True, config={'modeBarButtonsToRemove': ['lasso2d','select2d', 'pan2d', 'hoverClosestGeo']})\n\n # Hide the whole modebar\n # config=dict(displayModeBar=False)\n\n # CSS to hide plotly icon in modebar\n #.modebar-btn--logo{\n # display:none;\n #}", "def __setstate__(self,state):\n self.__dict__.update(state)\n self.KDTreeFinder = spatial.KDTree(self.featureVals)", "def _color_field_states(map_f, samp_ids, field, field_states, color_by_field):\r\n colors = []\r\n color_pool = [matplotlib_rgb_color(data_colors[color].toRGB())\r\n for color in data_color_order]\r\n metadata_map = 
MetadataMap.parseMetadataMap(map_f)\r\n\r\n for field_to_check in field, color_by_field:\r\n if field_to_check not in metadata_map.CategoryNames:\r\n raise ValueError(\"The field '%s' is not in the metadata mapping \"\r\n \"file's column headers.\" % field_to_check)\r\n\r\n all_field_states = metadata_map.getCategoryValues(samp_ids, field)\r\n all_color_by_states = metadata_map.getCategoryValues(samp_ids,\r\n color_by_field)\r\n\r\n if len(set(field_states) - set(all_field_states)) != 0:\r\n raise ValueError(\"Encountered unrecognizable field state(s) in %r \"\r\n \"for field '%s'.\" % (field_states, field))\r\n\r\n # Build mapping from one field to the other.\r\n field_mapping = defaultdict(list)\r\n for field_state, color_by_state in zip(all_field_states,\r\n all_color_by_states):\r\n if field_state in field_states:\r\n field_mapping[field_state].append(color_by_state)\r\n\r\n # For each of the specified input field states, find its corresponding\r\n # \"color by\" field state and give it a color if it hasn't been assigned one\r\n # yet. Make sure we have enough colors and there is a one-to-one mapping.\r\n color_mapping = {}\r\n for field_state in field_states:\r\n color_by_states = set(field_mapping[field_state])\r\n\r\n if len(color_by_states) != 1:\r\n raise ValueError(\"The field '%s' to color by does not have a \"\r\n \"one-to-one mapping with field '%s'. Coloring \"\r\n \"would be ambiguous.\" % (color_by_field, field))\r\n\r\n color_by_state = list(color_by_states)[0]\r\n if color_by_state not in color_mapping:\r\n if len(color_pool) > 0:\r\n color_mapping[color_by_state] = color_pool.pop(0)\r\n else:\r\n raise ValueError(\"There are not enough available QIIME colors \"\r\n \"to color each of the field states in field \"\r\n \"'%s'. Coloring would be ambiguous.\" %\r\n color_by_field)\r\n\r\n colors.append(color_mapping[color_by_state])\r\n\r\n return colors, color_mapping", "def states(self):\n from geoid.core import names\n from geoid.censusnames import geo_names, stusab\n\n states = {}\n\n for state_no, stusab in stusab.items():\n states[stusab] = {\n 'name': geo_names[(state_no,0)],\n 'stusab': stusab,\n 'number' : state_no\n }\n\n states['US'] = {\n 'name': 'United States',\n 'stusab': 'US',\n 'number' : 0\n }\n\n return states", "def DT(time_lvl = 0, date = 160924 ):\n \n #-------Customised color in RGB ------------\n C = [[232,232,230],#grey\n [203,203,203], #grey\n [161,161,161], #grey\n [130,130,130], #grey\n [149,53,229], #lillac, 39\t64\t197\t149,53,229\n [39,64,197], #blue dark,7,67,194\n [15,110,229], #blue\n [80,149,240], #blue\n [74,192,243], #blue\n [152,219,248], #blue\n [183,237,247], #blue\n [251,217,198], #redish\n [255,197,166], #redish\n [255,172,164], #redish\n [253,139,142], #redish\n [253,101,105], #redish\n [255,66,74], #redish\n [238,13,28], #red\n [214,78,166], #pink\n [214,102,201], \n [217,155,210],\n [216,181,211]]\n C = np.array( C )\n C = np.divide( C, 255. 
) # RGB has to be between 0 and 1 in python\n #-----------------------------------------------------------\n \n fig = plt.figure()\n \n \n #-----Setting our map area and projection of interest-------\n m = Basemap( llcrnrlon = -90., llcrnrlat = 0., urcrnrlon = 50., urcrnrlat=70.,\\\n resolution = 'l', area_thresh = 10000., projection = 'merc' )\n #m = Basemap(width=11500000,height=8500000,resolution='l',projection='eqdc',\\\n # lat_1=07.,lat_2=40,lat_0=44,lon_0=-30.)\n #m = Basemap(width=190000,height=2200000,resolution='l', projection='tmerc',lon_0=-30,lat_0=44)\n \n map_area( m ) # ploting background\n path = \"gribs/\"\n file = path +\"DT_var.grib\"\n obj = pygrib.open( file )\n \n #-FETCHING ALL THE VALUES----------------------------------------\n #-----Potential temperature---------------------------------------\n lat, lon, data = get_data( obj,'Potential temperature', 2000, date, timelevel = time_lvl )\n contour_val = np.linspace( 264, 384, 22 ) #contours for potential tempeature\n plot_contourf( m, lat, lon, data, C, contour_val )\n \n #-----Relative vorticity, diff level------------------------------\n contour=[ 2.8E-4, 3.5E-4, 4.5E-4, 6.5E-4, 7.E-4, 7.5E-4, 8.E-4 ] #1.5E-4,2.5E-4]#\n lat, lon, data925 = get_data( obj, 'Vorticity (relative)', 925, date, timelevel = time_lvl )\n lat, lon, data900 = get_data( obj, 'Vorticity (relative)', 900, date, timelevel = time_lvl )\n lat, lon, data850 = get_data( obj, 'Vorticity (relative)', 850, date, timelevel = time_lvl )\n \n #->--->---->--mean value over height and filtering----------------\n data = np.sqrt( data900**2 + 2*data850**2 + data925**2 ) #Vertical \"average\", weightet values at 850hpa double.\n footprint = np.array([[0,0,0,1,1,1,1,0,0,0], #footprint=np.ones((3,10))\n [0,0,1,1,1,2,1,1,0,0],\n [1,1,1,2,2,1,2,1,1,1],\n [0,1,1,1,1,2,1,1,1,0],\n [0,0,1,1,1,1,1,1,0,0]])\n \n data = ndimage.generic_filter( data, np.mean, footprint = footprint, mode='wrap' )\n plot_contour( m, lat,lon, data,contour, clr = 'k' )\n \n #-----Wind barbs----------------------------------------------------\n lat, lon, data_u = get_data( obj , 'U component of wind', 2000, date, timelevel = time_lvl )\n lat, lon, data_v = get_data( obj , 'V component of wind', 2000, date, timelevel = time_lvl )\n plot_wind_bar( m, lat, lon, data_u, data_v )\n #-----------------------------------------------\n #-----------------------------------------------\n \n \n \n #-SAVE AND CLOSE----------------------------------------------------\n #------------------------------------------------------------------\n obj.close()\n if time_lvl == 0:\n t = \"0000\"\n elif time_lvl == 1:\n t = \"1200\"\n elif time_lvl == 2:\n t = \"1800\"\n else: \n t = \"t_not_set\"\n \n fig_name = \"DT/DT_\" + str( date ) + \"_\" + str( t )+ \".TIFF\" \n \n ax = plt.gca( )\n plt.rc( 'font', size = 6 )\n fig.set_size_inches( 12.80, 7.15 )\n \n fig.savefig( fig_name, dpi = 600 )\n plt.close( )\n #plt.show()\n #--------------------------\n #----------------------------", "def loop_plot_dendrogram_by_state():\n for state in states:\n if state != 'District of Columbia':\n plot_dendrogram_by_state(state)", "def _color_field_states(map_f, samp_ids, field, field_states, color_by_field):\n colors = []\n color_pool = [matplotlib_rgb_color(data_colors[color].toRGB())\n for color in data_color_order]\n metadata_map = MetadataMap.parseMetadataMap(map_f)\n\n for field_to_check in field, color_by_field:\n if field_to_check not in metadata_map.CategoryNames:\n raise ValueError(\"The field '%s' is not in the metadata 
mapping \"\n \"file's column headers.\" % field_to_check)\n\n all_field_states = metadata_map.getCategoryValues(samp_ids, field)\n all_color_by_states = metadata_map.getCategoryValues(samp_ids,\n color_by_field)\n\n if len(set(field_states) - set(all_field_states)) != 0:\n raise ValueError(\"Encountered unrecognizable field state(s) in %r \"\n \"for field '%s'.\" % (field_states, field))\n\n # Build mapping from one field to the other.\n field_mapping = defaultdict(list)\n for field_state, color_by_state in zip(all_field_states,\n all_color_by_states):\n if field_state in field_states:\n field_mapping[field_state].append(color_by_state)\n\n # For each of the specified input field states, find its corresponding\n # \"color by\" field state and give it a color if it hasn't been assigned one\n # yet. Make sure we have enough colors and there is a one-to-one mapping.\n color_mapping = {}\n for field_state in field_states:\n color_by_states = set(field_mapping[field_state])\n\n if len(color_by_states) != 1:\n raise ValueError(\"The field '%s' to color by does not have a \"\n \"one-to-one mapping with field '%s'. Coloring \"\n \"would be ambiguous.\" % (color_by_field, field))\n\n color_by_state = list(color_by_states)[0]\n if color_by_state not in color_mapping:\n if len(color_pool) > 0:\n color_mapping[color_by_state] = color_pool.pop(0)\n else:\n raise ValueError(\"There are not enough available QIIME colors \"\n \"to color each of the field states in field \"\n \"'%s'. Coloring would be ambiguous.\" %\n color_by_field)\n\n colors.append(color_mapping[color_by_state])\n\n return colors, color_mapping", "def get_state_pop(api_key, year = str(), state_fip = str(), map = bool()):\n try:\n pop_url = f'http://api.census.gov/data/{year}/pep/population?get=POP&for=COUNTY&in=state:*&key={api_key}'\n r = requests.get(pop_url)\n data = json.loads(r.content) \n pop_df = pd.DataFrame(data[1:], columns=data[0]).\\\n rename(columns={\"POP\": \"Pop_Count\", \"state\": \"STATEFP\", \"county\": \"COUNTYFP\"})\n pop_df['Pop_Count'] = pop_df['Pop_Count'].astype(str).astype(int)\n pop_df = pop_df[pop_df.STATEFP == state_fip]\n geodata_url = f\"https://raw.githubusercontent.com/uscensusbureau/citysdk/master/v2/GeoJSON/20m/{year}/county.json\"\n geo_df = gpd.read_file(geodata_url)\n geo_df = geo_df[geo_df.STATEFP == state_fip]\n geo_df = geo_df.merge(pop_df, on = 'COUNTYFP')\n geo_df.drop(geo_df.filter(regex='_y$').columns.tolist(),axis=1, inplace=True)\n geo_df = geo_df.rename(columns = {'STATEFP_x':'STATE_FIP'})\n if map == True:\n return geo_df.plot(column = 'Pop_Count')\n else:\n return geo_df\n r.raise_for_status()\n except HTTPError as http_err:\n print(f'HTTP error occurred: {http_err}')\n except Exception as err:\n print(f'An error occured. All parameters must exist in the Census GeoJSON database and API. 
Please check https://github.com/uscensusbureau/citysdk/tree/master/v2/GeoJSON: {err}')", "def drawMap(self):\n world_map = folium.Map(location=[25, 10], zoom_start=3)\n totals_column = 'total_' + self.map_type.lower()\n top10 = self.covid_df.sort_values(totals_column, axis=0, ascending=False)['location'][:10]\n scale, units = self.unitsDetector(self.covid_df[totals_column].max())\n \n color_scheme = {'Cases': 'YlOrRd', 'Deaths': 'PuRd'}[self.map_type]\n bins = list(np.linspace(0, np.ceil(self.covid_df[totals_column].max() / scale) * scale, 6))\n legend_name = 'Total Number of COVID-19 ' + self.map_type\n map_file_name = self.generateFileName()\n \n folium.Choropleth(geo_data=self.geo_data,\n data=self.covid_df,\n columns=['location', totals_column],\n key_on='feature.properties.ADMIN',\n fill_color=color_scheme,\n bins=bins,\n legend_name=legend_name,\n highlight=True\n ).add_to(world_map)\n \n for i in range(10):\n country = top10.iloc[i]\n cases = self.covid_df[self.covid_df['location'] == country][totals_column] / scale\n \n # Centroid coordinates for each country labelled by its ISO-2 code\n lat = self.countries_centroids.loc[self.name_iso2_mapping[country]]['latitude']\n long = self.countries_centroids.loc[self.name_iso2_mapping[country]]['longitude']\n popup = f\"{country}: {cases.values[0]:.2f}{units} total {self.map_type.lower()}\"\n \n folium.Marker(location=[lat, long],\n popup=folium.Popup(popup, \n max_width=1000)\n ).add_to(world_map)\n \n world_map.save(map_file_name)", "def test_render_world_map():\n gdpinfo = {\n \"gdpfile\": \"isp_gdp.csv\",\n \"separator\": \",\",\n \"quote\": '\"',\n \"min_year\": 1960,\n \"max_year\": 2015,\n \"country_name\": \"Country Name\",\n \"country_code\": \"Country Code\"\n }\n\n # Get pygal country code map\n pygal_countries = pygal.maps.world.COUNTRIES\n #pygal_countries = {'KEN':'Kenya', 'IDN':'Indonesia', 'IND':'India', \\\n #'USA':'United States of America'}\n\n # 1960\n #render_world_map(gdpinfo, pygal_countries, \"1960\", \"isp_gdp_world_name_1960.svg\")\n\n # 1980\n #render_world_map(gdpinfo, pygal_countries, \"1980\", \"isp_gdp_world_name_1980.svg\")\n\n # 2000\n #render_world_map(gdpinfo, pygal_countries, \"2000\", \"isp_gdp_world_name_2000.svg\")\n\n # 2010\n render_world_map(gdpinfo, pygal_countries, \"2010\", \"isp_gdp_world_name_2010.svg\")", "def cities_by_states():\n new_dict = storage.all(State)\n return render_template('8-cities_by_states.html', states=new_dict)", "def calculate_world_daywise(countries_daywise_df):", "def get_usa_states_geo_df() -> geopandas.GeoDataFrame:\n\n geo_df: geopandas.GeoDataFrame = geopandas.read_file(\n GEO_DATA_DIR / \"cb_2017_us_state_20m\" / \"cb_2017_us_state_20m.shp\"\n ).rename(columns={\"STUSPS\": CODE}, errors=\"raise\")\n\n geo_df = geo_df[\n [\n \"STATEFP\",\n # \"STATENS\",\n # \"AFFGEOID\",\n # \"GEOID\",\n CODE,\n # \"NAME\",\n \"LSAD\",\n # \"ALAND\",\n # \"AWATER\",\n \"geometry\",\n ]\n ]\n\n return geo_df", "def plot_active_cases(data, country, province):\n # Create a data frame with number of active cases\n active = (data['confirmed'].iloc[:, 4:] -\n data['recovered'].iloc[:, 4:] -\n data['death'].iloc[:, 4:])\n\n # Copy the identifying columns on geography\n identifier = data['confirmed'][['province/state',\n 'country/region', 'lat', 'long']]\n\n # Append two dataframes\n active = pd.concat([identifier, active], axis=1)\n\n # Append active cases into master data\n data['active'] = active\n\n # Plot active cases by country\n fig, ax = plt.subplots(1, 1)\n case_type = 'active'\n\n # 
Choose color scheme\n color_active = get_rgb((188, 189, 34))\n\n dates = get_dates(data, case_type)\n num_cases = get_num_cases(data, case_type, country, province)\n ax.plot(dates, num_cases, color=color_active)\n\n ax.text(dates[-1], num_cases[-1], '{:,.0f}'.format(num_cases[-1]),\n color=color_active, ha='left', va='center')\n\n # x axis\n ax.set_xlabel('End of month')\n ax.set_xticks(get_end_months(dates))\n ax.set_xticklabels([format_datetime(end_month)\n for end_month in get_end_months(dates)])\n ax.xaxis.set_tick_params(direction='in')\n\n # y axis\n ax.set_ylabel('Number of active cases')\n ax.yaxis.set_tick_params(direction='in')\n ax.set_yscale('log')\n\n # Set graph title\n ax.set_title(get_title(country, province))\n\n sns.despine(ax=ax)\n\n fig.tight_layout()\n path = 'plots/active_case_by_country.pdf'\n fig.savefig(path, bbox_inches='tight')\n print('Saved to {}'.format(path))", "def set_district_sprayed_visited():\n queryset = Location.objects.filter(level=\"ta\", target=True)\n for location in queryset.iterator():\n set_sprayed_visited(location)\n\n for location in Location.objects.filter(level=\"RHC\"):\n set_sprayed_visited(location)\n\n for location in Location.objects.filter(level=\"district\"):\n set_sprayed_visited(location)", "def data(pd_edit_series):\n country_dict = {\n 'europe': ['United Kingdom', 'France', 'Spain', 'Belgium',\n 'Finland', 'Sweden', 'Germany', 'Croatia',\n 'Switzerland', 'Austria', 'Greece', 'Hungary',\n 'Slovenia', 'Poland', 'Bosnia and Herzegovina',\n 'Denmark', 'Liechtenstein', 'Ukraine',\n 'North Macedonia', 'Latvia', 'Andorra',\n 'Norway', 'Portugal', 'Romania', 'Estonia',\n 'Netherlands', 'San Marino', 'Belarus',\n 'Iceland', 'Lithuania', 'Ireland', 'Luxembourg',\n 'Monaco', 'Czechia', 'Slovakia', 'Holy See',\n 'Serbia', 'Malta', 'Bulgaria', 'Albania',\n 'Cyprus', 'Moldova', 'Andorra', 'Armenia',\n 'Austria', 'Cyprus', 'Estonia', 'Georgia',\n 'Gibraltar', 'Greenland', 'Croatia',\n 'Israel', 'Iceland', 'Luxembourg',\n 'Latvia', 'Monaco', 'Portugal', 'Romania',\n 'Svalbard and Jan Mayen', 'Slovakia',\n 'Turkey', 'Serbia', 'Montenegro',\n 'Aland Islands', 'Guernsey',\n 'Island of Man', 'Jersey', 'Kosovo'],\n\n 'asia': ['Thailand', 'Japan', 'Singapore', 'Mongolia',\n 'Nepal', 'Malaysia', 'Sri Lanka', 'Philippines',\n 'India', 'Cambodia', 'Pakistan',\n 'Indonesia', 'United Arab Emirates', 'Lebanon',\n 'Iraq', 'Oman', 'Afghanistan', 'Bahrain',\n 'Kuwait', 'Qatar', 'Saudi Arabia',\n 'Jordan', 'Azerbaijan', 'Bhutan', 'Maldives',\n 'Bangladesh', 'Brunei', 'Korea, South', 'Vietnam',\n 'Russia', 'Iran', 'Reunion', 'Taiwan*', 'Yemen',\n 'American Samoa', 'Brunei Darussalam',\n 'Guam', 'Hong Kong',\n 'Heard Island and McDonald Islands',\n 'British Indian Ocean Territory',\n 'Kyrgystan', 'Kiribati', 'Korea, North',\n 'Kazakhstan', 'Sri Lanka', 'Marshall Islands',\n 'Lao People\\'s Democratic Republic',\n 'Myanmar', 'Mongolia', 'Macau', 'Macao SAR',\n 'North Mariana Islands', 'Maldives',\n 'Malaysia', 'Papua New Guinea', 'Palau',\n 'Singapore', 'Syrian Arab Republic',\n 'Tajikistan', 'Turkmenistan', 'Timor-Leste',\n 'United States Minor Outlying Islands',\n 'Uzbekistan', 'Kyrgyzstan',\n 'occupied Palestinian territory', 'Mauritania',\n 'Comoros', 'Djibouti', 'Bahrain', 'Nepal',\n 'Malaysia', 'Singapore', 'East Timor', 'Syria'],\n\n 'africa': ['Egypt', 'Algeria', 'Nigeria',\n 'Morocco', 'Senegal', 'Tunisia',\n 'South Africa', 'Togo', 'Cameroon',\n 'Burkina Faso', 'Cote d\\'Ivoire',\n 'Congo (Kinshasa)', 'Congo (Brazzaville)',\n 'Republic of the Congo',\n 
'Central African Republic', 'Eswatini',\n 'Eswatini', 'Ethiopia', 'Gabon', 'Ghana',\n 'Guinea', 'Equatorial Guinea', 'Kenya',\n 'Namibia', 'Rwanda', 'Sudan', 'Seychelles',\n 'Republic of Congo', 'Tanzania', 'Mayotte',\n 'Benin', 'Liberia', 'Somalia', 'The Gambia',\n 'Gambia, The', 'Zambia', 'Mauritius', 'Chad',\n 'Madagascar', 'Cabo Verde', 'Niger', 'Zimbabwe',\n 'Cape Verde', 'Angola', 'Eritrea', 'Uganda',\n 'Mozambique'],\n\n 'americas': ['Brazil', 'Mexico', 'Ecuador',\n 'Dominican Republic', 'Argentina',\n 'Chile', 'Peru', 'Netherlands Antilles',\n 'Costa Rica', 'Colombia', 'French Guiana',\n 'Martinique', 'Paraguay', 'Panama',\n 'Canada', 'US', 'Jamaica', 'Honduras',\n 'Bolivia', 'Antigua and Barbuda', 'Anguilla',\n 'Argentina', 'Aruba', 'Barbados',\n 'Bouvet Island', 'Belize', 'Cuba', 'Dominica',\n 'Equador', 'Falkland Islands', 'Malvinas',\n 'Grenada', 'Guadeloupe', 'Guyana',\n 'South Georgia and the South Sandwich '\n 'Islands', 'US',\n 'Guatemala', 'Haiti', 'Saint Kitts and Nevis',\n 'Cayman Islands', 'Saint Lucia', 'Montserrat',\n 'Mexico', 'Nicaragua', 'Puerto Rico',\n 'Paraguay', 'Suriname', 'El Salvador',\n 'Turks and Caicos Islands',\n 'Trinidad and Tobago', 'Uruguay',\n 'Saint Vincent and the Grenadines',\n 'Venezuela', 'Virgin Islands (British)',\n 'Virgin Islands (US)', 'Saint Martin',\n 'Saint Berthelemy', 'Bermuda',\n 'Saint Pierre and Miquelon', 'Cuba', 'Guyana',\n 'Curacao', 'The Bahamas', 'Bahamas, The'],\n\n 'oceania': ['Australia', 'New Zealand', 'New Caledonia',\n 'Norfolk Island', 'Nauru', 'Niue',\n 'Micronesia (federated States of)', 'Fiji',\n 'Cook Islands', 'Christmas Island',\n 'Cocos (Keeling) Islands', 'French Polynesia',\n 'Pitcairn Islands', 'Solomon Islands',\n 'French Southern Territories',\n 'American Samoa', 'Tokelau', 'Tonga', 'Tuvalu',\n 'Vanuatu', 'Wallis and Futuna', 'Samoa']}\n\n europe = []\n asia = []\n oceania = []\n americas = []\n africa = []\n uk_list = []\n italy = []\n china = []\n others = []\n ship = []\n all_lists = [europe, asia, oceania, americas, africa, uk_list,\n italy, china, ship, others]\n for_total = [europe, asia, oceania, americas, africa, china,\n others, ship, italy]\n\n for region, countries in country_dict.items():\n for column in pd_edit_series:\n if column in countries:\n if region == 'europe':\n if column == 'United Kingdom':\n if column not in uk_list:\n uk_list.append(column)\n if column not in europe:\n europe.append(column)\n\n elif region == 'asia':\n if column not in asia:\n asia.append(column)\n elif region == 'africa':\n if column not in africa:\n africa.append(column)\n elif region == 'americas':\n if column not in americas:\n americas.append(column)\n elif region == 'oceania':\n if column not in oceania:\n oceania.append(column)\n\n else:\n if column == 'Italy':\n if column not in italy:\n italy.append(column)\n\n elif column == 'China':\n if column not in china:\n china.append(column)\n\n elif column == 'Cruise Ship':\n if column not in ship:\n ship.append(column)\n\n else:\n others.append(column)\n\n # -----------------------------------------------------------\n # Segment of code it to catch any straggler countries not\n # accounted for in the country_dict\n remove_list = []\n for region in all_lists:\n for countries in region:\n if countries in others:\n if countries not in remove_list:\n remove_list.append(countries)\n\n others_final = [item for item in others\n if item not in remove_list]\n\n if len(others_final) > 0:\n logging.debug(others_final)\n print('Exiting due to unaccounted countries')\n 
sys.exit()\n\n total_count_list = []\n for region in for_total:\n for country in region:\n if country not in total_count_list:\n total_count_list.append(country)\n # -----------------------------------------------------------\n\n diamond_csv = pd_edit_series[ship].copy()\n main_china_csv = pd_edit_series[china].copy()\n europe_csv = pd_edit_series[europe].copy()\n americas_csv = pd_edit_series[americas].copy()\n asia_csv = pd_edit_series[asia].copy()\n africa_csv = pd_edit_series[africa].copy()\n uk_csv = pd_edit_series[uk_list].copy()\n italy_csv = pd_edit_series[italy].copy()\n oceania_csv = pd_edit_series[oceania].copy()\n\n csv_list = {'europe': europe_csv, 'america': americas_csv,\n 'asia': asia_csv, 'main_china': main_china_csv,\n 'UK': uk_csv, 'diamond': diamond_csv,\n 'italy': italy_csv, 'oceania': oceania_csv,\n 'africa': africa_csv}\n\n backup_frame = pd_edit_series.copy()\n backup_frame['Global_Cases'] = \\\n backup_frame.sum(axis=1)\n\n pd_edit_series['Mainland_China_Total'] = \\\n pd_edit_series[china].sum(axis=1)\n\n pd_edit_series['Oceania_Total'] = \\\n pd_edit_series[oceania].sum(axis=1)\n\n pd_edit_series['Europe_Total'] = \\\n pd_edit_series[europe + ['Italy']].sum(axis=1)\n\n pd_edit_series['Diamond_Princess'] = \\\n pd_edit_series[ship]\n\n pd_edit_series['UK_Total'] = \\\n pd_edit_series[uk_list].sum(axis=1)\n\n pd_edit_series['Asian_Total'] = \\\n pd_edit_series[asia].sum(axis=1)\n\n pd_edit_series['Americas_Total'] = \\\n pd_edit_series[americas].sum(axis=1)\n\n pd_edit_series['African_Total'] = \\\n pd_edit_series[africa].sum(axis=1)\n\n # As China is being kept separate\n pd_edit_series = pd_edit_series.drop('China', axis=1)\n pd_edit_series = pd_edit_series.drop('Cruise Ship', axis=1)\n\n for place in asia:\n pd_edit_series = pd_edit_series.drop(place, axis=1)\n for place in europe:\n pd_edit_series = pd_edit_series.drop(place, axis=1)\n for place in americas:\n pd_edit_series = pd_edit_series.drop(place, axis=1)\n for place in africa:\n pd_edit_series = pd_edit_series.drop(place, axis=1)\n for place in oceania:\n pd_edit_series = pd_edit_series.drop(place, axis=1)\n\n return csv_list, pd_edit_series, backup_frame", "def Map(data):\n lon = [loc[1] for loc in data[\"geo\"]]\n lat = [loc[0] for loc in data[\"geo\"]]\n\n return dcc.Graph(id=\"MapGraph\", figure=dict(\n data=[dict(\n type='scattergeo',\n # mode='markers',\n lon=lon,\n lat=lat,\n text=data[\"names\"],\n hoverinfo='text',\n marker=dict(\n symbol='circle',\n color=\"#B22234\",\n opacity=0.8,\n size=data['frequencies'],\n sizemode='area',\n sizeref=max(data['frequencies']) / (5.**3),\n sizemin=1,\n line=dict(width=0)\n )\n )],\n layout=dict(\n title='<b>Most common Places</b>',\n font=dict(family='Soria, Times New Roman, Times, serif', color='#B22234', size=19),\n dragmode=\"pan\",\n geo=dict(\n showocean=True,\n oceancolor=\"rgba(0, 44, 119, 0.7)\",\n showland=True,\n landcolor=\"#ededed\", # c4c4c4, #0ba340\n lonaxis=dict(range=[min(lon) - 10, max(lon) + 10]),\n lataxis=dict(range=[min(lat) - 10, max(lat) + 10]),\n showcountries=True,\n countrywidth=0.5,\n subunitwidth=0.5,\n projection=dict(type=\"equirectangular\")\n ),\n margin=dict(l=0, r=0, t=50, b=30),\n hovermode=\"closest\",\n paper_bgcolor='rgba(0,0,0,0)',\n plot_bgcolor='rgba(0,0,0,0)',\n autosize=True,\n )\n ))", "def plot_states_graph(G, color_labels):\r\n pos = nx.spring_layout(G, k=0.1)\r\n plt.rcParams.update({'figure.figsize': (7, 7)})\r\n nx.draw_networkx(\r\n G, \r\n pos=pos, \r\n node_size=20, \r\n node_color=color_labels ,\r\n 
arrowsize=0.001,\r\n edge_color=\"#C0C0C0\", \r\n alpha=0.3, \r\n with_labels=False)\r\n plt.gca().set_facecolor(\"white\")", "def color_coding_neurons(Coordinates, Color_Code):\r\n \r\n keys=np.unique(Color_Code)\r\n values=[\"grey\",\"red\",\"y\",\"k\",\"r\",\"c\",\"m\"]\r\n dictionary = dict(zip(keys, values))\r\n print(dictionary)\r\n \r\n col=[]\r\n for i in range(0,np.shape(Color_Code)[0]):\r\n col.append(dictionary.get(Color_Code[i]))\r\n \r\n fig = plt.figure(figsize=(5,5))\r\n ax = fig.add_subplot(111, projection='3d')\r\n ax.scatter(Coordinates[:,0],Coordinates[:,1],Coordinates[:,2], c=col, s=10)\r\n # plot the point (2,3,4) on the figure\r\n plt.show()", "def create_cityscapes_label_colormap():\r\n colormap = np.zeros((256, 3), dtype=np.uint8)\r\n colormap[0] = [128, 64, 128]\r\n colormap[1] = [244, 35, 232]\r\n colormap[2] = [70, 70, 70]\r\n colormap[3] = [102, 102, 156]\r\n colormap[4] = [190, 153, 153]\r\n colormap[5] = [153, 153, 153]\r\n colormap[6] = [250, 170, 30]\r\n colormap[7] = [220, 220, 0]\r\n colormap[8] = [107, 142, 35]\r\n colormap[9] = [152, 251, 152]\r\n colormap[10] = [70, 130, 180]\r\n colormap[11] = [220, 20, 60]\r\n colormap[12] = [255, 0, 0]\r\n colormap[13] = [0, 0, 142]\r\n colormap[14] = [0, 0, 70]\r\n colormap[15] = [0, 60, 100]\r\n colormap[16] = [0, 80, 100]\r\n colormap[17] = [0, 0, 230]\r\n colormap[18] = [119, 11, 32]\r\n return colormap", "def test_get_country_states(self):\n pass", "def get_house_est(api_key, year = str(), map = bool()):\n try:\n house_url = f'http://api.census.gov/data/{year}/pep/housing?get=HUEST&for=state:*&key={api_key}'\n r = requests.get(house_url)\n data = json.loads(r.content) \n house_df = pd.DataFrame(data[1:], columns=data[0]).\\\n rename(columns={\"HUEST\": \"Housing_Estimates\", \"state\": \"STATEFP\"})\n house_df['Housing_Estimates'] = house_df['Housing_Estimates'].astype(str).astype(int)\n geodata_url = f\"https://raw.githubusercontent.com/uscensusbureau/citysdk/master/v2/GeoJSON/20m/{year}/state.json\"\n geo_df = gpd.read_file(geodata_url)\n geo_df = geo_df.merge(house_df, on = 'STATEFP')\n if map == True:\n return geo_df.plot(column = 'Housing_Estimates')\n else:\n return geo_df\n r.raise_for_status()\n except HTTPError as http_err:\n print(f'HTTP error occurred: {http_err}')\n except Exception as err:\n print(f'An error occured. All parameters must exist in the Census GeoJSON database and API. 
Please check https://github.com/uscensusbureau/citysdk/tree/master/v2/GeoJSON: {err}')", "def show_cities_of_state(id):\n return render_template('9-states.html', id=id,\n storage=storage.all(\"State\"))", "def test_color_field_states(self):\r\n # All sample IDs and field states.\r\n exp = ([(1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (1.0, 0.0, 0.0)],\r\n {'y': (0.0, 0.0, 1.0), 'x': (1.0, 0.0, 0.0)})\r\n obs = _color_field_states(self.map_f, ['1', '2', '3', '4', '5', '6'],\r\n 'Foo', ['a', 'b', 'c'], 'Bar')\r\n self.assertEqual(exp[0], obs[0])\r\n assert_almost_equal(obs[1]['x'], exp[1]['x'])\r\n assert_almost_equal(obs[1]['y'], exp[1]['y'])\r\n\r\n # Subset of sample IDs and field states.\r\n exp = ([(1.0, 0.0, 0.0)], {'x': (1.0, 0.0, 0.0)})\r\n obs = _color_field_states(self.map_f, ['1', '2'], 'Foo', ['a'], 'Bar')\r\n self.assertEqual(exp[0], obs[0])\r\n assert_almost_equal(obs[1]['x'], exp[1]['x'])\r\n\r\n # Color field by itself (useless but still allowed).\r\n exp = ([(1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.9490196078431372,\r\n 0.45098039215686275, 0.01568627450980392)], {'a':\r\n (1.0, 0.0, 0.0),\r\n 'c': (0.9490196078431372, 0.45098039215686275,\r\n 0.01568627450980392), 'b': (0.0, 0.0, 1.0)})\r\n obs = _color_field_states(self.map_f, ['1', '2', '3', '4', '5', '6'],\r\n 'Foo', ['a', 'b', 'c'], 'Foo')\r\n self.assertEqual(exp[0], obs[0])\r\n assert_almost_equal(obs[1]['a'], exp[1]['a'])\r\n assert_almost_equal(obs[1]['b'], exp[1]['b'])\r\n assert_almost_equal(obs[1]['c'], exp[1]['c'])", "def set_state(canvas, state):\n for key, value in state.items():\n set_attribute(canvas, key, value)", "def plot_map(\n self,\n variable,\n title=None,\n ax=None,\n figsize=None,\n **kwargs,\n ):\n turbines = self.results[FC.TURBINE].to_numpy()\n states = self.results[FC.STATE].to_numpy()\n\n if ax is None:\n __, ax = plt.subplots(figsize=figsize)\n fig = ax.get_figure()\n\n ds = states[-1] - states[-2]\n states = np.append(states, states[-1] + ds)\n turbines = np.arange(len(turbines) + 1)\n\n y, x = np.meshgrid(states, turbines)\n z = self.results[variable].to_numpy()\n\n prgs = {\"shading\": \"flat\"}\n prgs.update(kwargs)\n\n c = ax.pcolormesh(x, y, z.T, **prgs)\n\n ax.set_xticks(turbines[:-1] + 0.5)\n ax.set_xticklabels(turbines[:-1])\n yt = ax.get_yticks()\n ytl = ax.get_yticklabels()\n ax.set_yticks(yt[:-1] + 0.5 * (yt[-1] - yt[-2]), ytl[:-1])\n if len(turbines) > 10:\n xt = ax.get_xticks()\n xtl = [None for t in xt]\n xtl[::5] = ax.get_xticklabels()[::5]\n ax.set_xticks(xt, xtl)\n fig.colorbar(c, ax=ax)\n\n t = title if title is not None else variable\n ax.set_title(t)\n ax.set_xlabel(\"Turbine index\")\n ax.set_ylabel(\"State\")\n\n return ax", "def test_render_world_map():\n\n gdpinfo = {\n \"gdpfile\": \"isp_gdp.csv\",\n \"separator\": \",\",\n \"quote\": '\"',\n \"min_year\": 1960,\n \"max_year\": 2015,\n \"country_name\": \"Country Name\",\n \"country_code\": \"Country Code\"\n }\n \n# gdpinfo = {\n# \"gdpfile\": \"gdptable1.csv\",\n# \"separator\": \",\",\n# \"quote\": '\"',\n# \"min_year\": 2000,\n# \"max_year\": 2005,\n# \"country_name\": \"Country Name\",\n# \"country_code\": \"Code\"\n# }\n\n \n # Get pygal country code map\n pygal_countries = pygal.maps.world.COUNTRIES\n \n #test\n #render_world_map(gdpinfo, pygal_countries, \"2002\", \"isp_gdp_world_name_1960.svg\")\n\n # 1960\n render_world_map(gdpinfo, pygal_countries, \"1960\", \"isp_gdp_world_name_1960.svg\")\n\n # 1980\n #render_world_map(gdpinfo, pygal_countries, \"1980\", \"isp_gdp_world_name_1980.svg\")\n\n # 2000\n 
#render_world_map(gdpinfo, pygal_countries, \"2000\", \"isp_gdp_world_name_2000.svg\")\n\n # 2010\n #render_world_map(gdpinfo, pygal_countries, \"2010\", \"isp_gdp_world_name_2010.svg\")", "def transfer_map(file_path='~/Desktop/vizcovidfr_files/',\n file_name='Covid_transfer_map',\n color_d=[243, 31, 44, 80], color_a=[230, 190, 37, 80]):\n start = time.time()\n # ---------- covid file ----------\n transfer = load_datasets.Load_transfer().save_as_df()\n # Keep trace of transfer order\n # because rows get mixed up when merging.\n # number transfer from first to last\n transfer_order = np.arange(0, len(transfer), 1)\n # add transfer_order column\n transfer['order'] = transfer_order\n # ---------- geo files ----------\n # only need regions here\n reg_path = os.path.join(\n os.path.dirname(\n os.path.realpath(__file__)),\n \"geodata\", \"regions.geojson\")\n regions = gpd.read_file(reg_path)\n # grab region's centroids (lat and lon)\n region_points = regions.copy()\n # set Europe Coordinate Reference System for geographic accuracy purpose\n region_points = region_points.set_crs(epsg=3035, allow_override=True)\n region_points['geometry'] = region_points['geometry'].centroid\n # extract departure information\n departure = transfer[['region_depart', 'order', 'debut_transfert']]\n departure['nom'] = departure['region_depart']\n # extract departure information\n arrival = transfer[['region_arrivee',\n 'nombre_patients_transferes',\n 'order']]\n arrival['nom'] = arrival['region_arrivee']\n # get departure and arrival geographic coordinates\n D = pd.merge(departure, region_points, on=\"nom\")\n A = pd.merge(arrival, region_points, on=\"nom\")\n # extract latitude and longitude\n # for departure\n D['lon_d'] = D.geometry.apply(lambda p: p.x)\n D['lat_d'] = D.geometry.apply(lambda p: p.y)\n # for arrival\n A['lon_a'] = A.geometry.apply(lambda p: p.x)\n A['lat_a'] = A.geometry.apply(lambda p: p.y)\n # delete not-useful-anymore columns for clarity purpose\n del D['nom']\n del D['geometry']\n del A['nom']\n del A['geometry']\n # merge these new dataframes together\n # (on order so that we have our chronology back!)\n DA = pd.merge(A, D, on='order')\n # save for sparse matrix purpose ?\n # DA.to_csv('departure_arrival.csv')\n # ---------- map time! ----------\n # initialize view (centered on Paris!)\n view = pdk.ViewState(latitude=46.2322, longitude=2.20967, pitch=50, zoom=5)\n # make arc layers from departure to arrival points\n arc_layer = pdk.Layer('ArcLayer',\n data=DA,\n get_source_position=['lon_d', 'lat_d'],\n get_target_position=['lon_a', 'lat_a'],\n get_width=5,\n get_tilt=15,\n get_source_color=color_d,\n get_target_color=color_a,\n # interactivity\n pickable=True,\n auto_highlight=True)\n # add tooltip\n tooltip = {\n \"html\": \"<b>Date:\\\n </b> {debut_transfert} <br />\\\n <b>Number of transfered patient:\\\n </b> {nombre_patients_transferes} <br />\\\n <b>Departure region:</b> {region_depart} <br />\\\n <b>Arrival region:</b> {region_arrivee}\\\n \"}\n # add view and layer to map\n arc_layer_map = pdk.Deck(layers=arc_layer,\n initial_view_state=view,\n tooltip=tooltip)\n # save map\n file_path = preprocess_maps.map_save_path_routine(file_path)\n suffix = '.html'\n save_path = os.path.join(file_path, file_name + suffix)\n arc_layer_map.to_html(save_path)\n sms1 = f\"\\nThat's it! \\n{file_name + suffix} has been successfully saved\"\n sms2 = f\" in {file_path}! 
\\nYou can go ahead and open it with your\"\n sms3 = \" favorite web browser!\"\n print(sms1 + sms2 + sms3)\n end = time.time()\n print(\"Time to execute: {0:.5f} s.\".format(end - start))", "def region_graph(t0, t1, t2, t3):\r\n medians0 = sorted(sub_saharan_africa_countries())\r\n t0.goto(-250, ((medians0[0])-50))\r\n t0.rt(90)\r\n for idx in range(1, len(medians0)):\r\n t0.pencolor(\"blue\")\r\n t0.pd()\r\n t0.setpos((-250+(idx*10.5)), ((medians0[idx])))\r\n\r\n medians1 = sorted(south_asia_countries())\r\n t1.goto(-250, ((medians1[0]) - 60))\r\n t1.rt(90)\r\n for idx in range(1, len(medians1)):\r\n t1.pencolor(\"red\")\r\n t1.pd()\r\n t1.setpos((-250 + (idx * 68.5)), (2*(medians1[idx])+10))\r\n\r\n medians2 = sorted(europe_central_asia_countries())\r\n t2.goto(-250, (medians2[0])+60)\r\n t2.rt(90)\r\n for idx in range(1, len(medians2)):\r\n t2.pencolor(\"green\")\r\n t2.pd()\r\n t2.setpos((-250 + (idx*10.19)), (2 * (medians2[idx]) + 10))\r\n\r\n medians3 = sorted(latin_america_countries())\r\n t3.goto(-250, (medians3[0]) + 20)\r\n t3.rt(90)\r\n for idx in range(1, len(medians3)):\r\n t3.pencolor(\"gold\")\r\n t3.pd()\r\n t3.setpos((-250 + (idx * 14.39)), (2 * (medians3[idx]) + 10))\r\n\r\n t0.pu()\r\n t0.goto(initialCoordinates())\r\n t1.pu()\r\n t1.goto(initialCoordinates())\r\n t2.pu()\r\n t2.goto(initialCoordinates())\r\n t3.pu()\r\n t3.goto(initialCoordinates())\r\n\r\n medians4 = sorted(middle_east_countries())\r\n t0.goto(-250, (medians4[0])-40)\r\n t0.rt(90)\r\n for idx in range(1, len(medians4)):\r\n t0.pencolor(\"BLACK\")\r\n t0.pd()\r\n t0.setpos((-250 + (idx * 26.3)), (2 * (medians4[idx]) + 10))\r\n t0.rt(180)\r\n\r\n medians5 = sorted(north_america_countries())\r\n t1.goto(-250, (medians5[0])+60)\r\n t1.rt(90)\r\n for idx in range(1, len(medians5)):\r\n t1.pencolor(\"YELLOW\")\r\n t1.pd()\r\n t1.setpos((-250 + (idx * 485.3)), (2 * (medians5[idx]) + 10))\r\n t1.rt(180)\r\n\r\n medians6 = sorted(east_asia_pacific_countries())\r\n t2.goto(-250, (medians6[0]))\r\n t2.rt(90)\r\n for idx in range(1, len(medians6)):\r\n t2.pencolor(\"purple\")\r\n t2.pd()\r\n t2.setpos((-250 + (idx * 16)), (2 * (medians6[idx]) + 10))\r\n t0.pu()\r\n t0.goto(initialCoordinates())\r\n t1.pu()\r\n t1.goto(initialCoordinates())\r\n t2.pu()\r\n t2.goto(initialCoordinates())", "def plot_countryperskill(data_df, **args):\n name = args.get('name', 'VARIABLE NAME')\n idx = args.get('idx', data_df.index.values)\n order = args.get('order', np.array([9, 0, 1, 2, 3, 4, 5, 6, 8, 7], int))\n dd = args.get('dd', .7) # 3.3\n wdth = args.get('wdth', 8) # 7\n hght = args.get('hght', 4)\n markersize = 60\n target_y = args.get('target_y', 1)\n label_y = args.get('label_y', r'$\\rho$')\n colors14 = args.get('colors14', ['#a6cee3', '#1f78b4', '#b2df8a', '#33a02c', \\\n '#fb9a99', '#e31a1c', '#fdbf6f', '#ff7f00', \\\n '#cab2d6', '#6a3d9a', '#ffff99', '#b15928', \\\n '#dd1c77', '#8dd3c7'])\n plt.figure(facecolor='w', figsize=(wdth, hght))\n meth_labels = [r'$Lit$', r'$Lit^2$', r'$Lit^3$', r'$Lit^4$', r'$Lit^5$', \\\n r'$Pop$', r'$Pop^2$', r'$Lit^3Pop$', r'$Lit^2Pop$', r'$LitPop$']\n idx = idx[order]\n meth_labels = [meth_labels[i] for i in order]\n # empty plots for legend handlers:\n for i in np.arange(0, len(countries_sel)): # country\n plt.scatter([], [], marker='o', s=markersize, edgecolor='black', linewidth='.4',\\\n c=colors14[i], label=countries[countries_sel[i]])\n plt.legend()\n\n plt.scatter([0, len(idx)+dd], [0.7, 0.7], marker='.', lw=1, c='white') # legendspace\n\n # actual plotting:\n for i in np.arange(0, 
len(countries_sel)): # country\n for j in np.arange(0, len(idx)):\n # rp - pearson correlation:\n plt.scatter([j], data_df[countries[countries_sel[i]]][idx[j]], marker='o', \\\n s=markersize, edgecolor='black', linewidth='.4',\\\n alpha=1., c=colors14[i], zorder=j+10)\n if not target_y == 'none':\n plt.plot([0, j], [target_y, target_y], c='#d3d3d3', lw=5, ls='-', zorder=1)\n\n plt.xticks(np.arange(0, len(idx)), meth_labels, color='black', rotation=30)\n plt.grid(axis='y')\n # plt.xlabel('Method')\n plt.ylabel(label_y)\n plt.title(name)\n\n plt.savefig(os.path.join(output_path, experiment_name + '_' + 'allcountries_perScore_v4_' + name + '.pdf'),\\\n dpi=600, facecolor='w', edgecolor='w',\n orientation='portrait', papertype=None, format='pdf',\n transparent=False, bbox_inches=None, pad_inches=0.1,\n frameon=None, metadata=None)\n plt.show()", "def visualize_count(df: pd.DataFrame = None, small_data: bool = False):\n if df is None:\n df = data_cleaning.pd_load_data(small_data)\n df = df.reset_index()\n df = df[['STATE', 'FPA_ID']]\n count_df = df.groupby('STATE').count()\n count_df = count_df.rename(columns={'FPA_ID': 'Count'})\n fig = go.Figure(data=go.Choropleth(\n locations=count_df.index,\n z=count_df['Count'].astype(float),\n locationmode='USA-states',\n colorscale='Reds',\n colorbar_title=\"# of fires\",\n ))\n\n fig.update_layout(\n title_text='Number of wildfires across US from 1992 to 2015',\n geo_scope='usa',\n )\n fig.write_image('figures/fire_counts.svg')\n fig.show()", "def render_state_dict(self, target_state, dmx_universe_target):\n if not target_state:\n return\n # Copy the alias over this bytearray\n if isinstance(target_state, str):\n target_state = {'use': target_state}\n alias_name = target_state.get('use')\n if alias_name:\n assert alias_name in self.dmx_universe_alias, \"alias '{0}' not defined\".format(alias_name)\n dmx_universe_target[:] = self.dmx_universe_alias[alias_name]\n\n # Render items\n for dmx_device_name, color_value in target_state.items():\n self.config.render_device(dmx_universe_target, dmx_device_name, color_value)\n # Mute items\n for dmx_device_name in self.mute_devices:\n self.config.render_device(dmx_universe_target, dmx_device_name, None)\n\n # Add an alias for this state if a name is provided\n if target_state.get('name'):\n self.dmx_universe_alias[target_state.get('name')] = dmx_universe_target", "def highlight(self, number, state):\n\n marker = game.markers[number]\n link = game.markers[marker.link]\n board = self.ids.board\n ui_link = board.children[-link.index - 1]\n\n # Toggle highlighting on\n if state == 'on':\n ui_link.old_color = ui_link.color\n ui_link.color = scheme.white\n\n # Toggle highlighting off\n elif state == 'off':\n ui_link.color = ui_link.old_color", "def find_dark_states(excited_state, ground_states):", "def OnNodeActivated( self, event ):\n self.activated_node = self.selected_node = event.node \n self.squareMap.SetModel( event.node, self.adapter )\n self.RecordHistory()", "def state_map():\n\n # get current user from session\n user_id = session[\"user_id\"]\n\n print user_id\n\n # inputs from state map in console.log [feature.id] = state_id feature = state\n state_id = request.form['feature_id']\n\n print state_id\n\n state = db.session.query(State).filter_by(state_id=state_id).one()\n\n user_state_obj = User_State(state_id=state_id, user_id=user_id, visited_at=datetime.now())\n\n db.session.add(user_state_obj)\n\n db.session.commit()\n\n user_state_json_data = {\"state_id\": state.state_id, \"state_name\": 
state.state_name, \"visited_at\": user_state_obj.visited_at}\n\n return jsonify(user_state_json_data)\n\n ################## REMOVING ##########################", "def i94_us_states_demographic(spark, df):\n df.createOrReplaceTempView('i94_us_states_demographics')\n states = spark.sql(\"\"\"\n SELECT\n DISTINCT\n State AS state,\n DOUBLE(`Median Age`) AS median_age,\n INT(`Male Population`) AS male_population,\n INT(`Female Population`) AS female_population,\n INT(`Number of Veterans`) AS num_veterans,\n INT(`Foreign-born`) AS num_foreign_born,\n DOUBLE(`Average Household Size`) AS avg_household_size,\n `State Code` AS state_code\n FROM\n i94_us_states_demographics\n \"\"\")\n return states", "def filter_plot(mode, country, continent, start_date, end_date, options):\n # Default is World mode\n chart_data = world_daywise_df\n map_data = countries_daywise_df\n print(country, continent)\n if mode == SelectionMode.Continents.value:\n #Continents mode\n if not isinstance(continent, list):\n continent = [continent]\n\n chart_data = continents_daywise_df[continents_daywise_df['WHO Region'].isin(continent)]\n map_data = map_data[map_data['WHO Region'].isin(continent)]\n elif mode == SelectionMode.Countries.value:\n # Countries mode\n if not isinstance(country, list):\n country = [country]\n\n chart_data = countries_daywise_df[countries_daywise_df['Country/Region'].isin(country)]\n map_data = chart_data\n\n chart_data = chart_data.query('Date >= @start_date & Date <= @end_date')\n map_data = map_data.query('Date >= @start_date & Date <= @end_date')\n\n # fix error when groupby geometry or put it in the aggregate column\n temp = map_data.drop(['geometry', 'country_code', 'Date'], axis=1).groupby(['Country/Region']).agg(metrics).reset_index()\n map_data = join_country_code_data(temp, country_code_data)\n\n if is_perCapita(options):\n for metric in ['Confirmed', 'Deaths', 'Recovered']:\n chart_data[metric + '_per_capita'] = chart_data[metric] / chart_data['Population']\n map_data[metric + '_per_capita'] = map_data[metric] / map_data['Population']\n \n if is_perCapita(options):\n return plot(chart_data, 'Confirmed_per_capita', 'Confirmed Cases Per Capita'), \\\n plot(chart_data, 'Deaths_per_capita', 'Confirmed Deaths Per Capita'), \\\n plot(chart_data, 'Recovered_per_capita', 'Confirmed Recoveries Per Capita'), \\\n generate_map(map_data)\n\n return plot(chart_data, 'Confirmed', 'Confirmed Cases'), \\\n plot(chart_data, 'Deaths', 'Confirmed Deaths'), \\\n plot(chart_data, 'Recovered', 'Confirmed Recoveries'), \\\n generate_map(map_data)", "def cities_aibnb():\n objetos = storage.all(\"State\")\n c_dit = {}\n s_dit = {}\n for key, values in objetos.items():\n if \"State\" in key:\n s_dit[key] = values\n if \"City\" in key:\n c_dit[key] = values\n return render_template(\"8-cities_by_states.html\", city=c_dit, state=s_dit)", "def plot_map(log= None, df= None, geojson= None, color= 'precio', hover_name= 'CCAA', title= '',\n hover_data= ['volumen', 'valor', 'penetracion', 'consumo_per_capita', 'gasto_per_capita']):\n df = df.copy()\n if log:\n df = df[df[log] > 0]\n df[color] = np.log10(df[color])\n \n fig = px.choropleth(\n df, \n locations= 'id',\n geojson= geojson,\n color= color,\n range_color= (0, df[color].max()),\n animation_frame= 'mes',\n animation_group= color,\n hover_name= hover_name,\n hover_data= hover_data\n )\n fig.update_geos(fitbounds= 'locations', visible= False)\n fig.update_layout(title= title)\n fig.show()", "def plot_country(name, case):\n click.echo(click.style(\n \"Generating 
Plot....\", fg='cyan', bold='true'))\n plot_time_series.TimeSeriesPloTs.plot_country(case, name)\n click.echo(click.style(\n \"Done....\", fg='green', bold='true'))", "def add_region_feature(data):\n\n data.loc[:, 'region'] = data.loc[:, 'district'].apply(\n lambda x: mapping.SOFIA_NEIGHBOURHOOD_TO_REGION_MAPPING[x]\n )\n\n return data", "def main():\n \n \"\"\" Download and load data\"\"\"\n dfs = get_data()\n \n \"\"\" Preprocess data, combine rows for country provinces\"\"\"\n combine_list = [\"Australia\", \"US\", \"Canada\", \"Mainland China\", \"China\"]\n for key in dfs.keys():\n dfs[key] = preprocess(df=dfs[key], combine_list=combine_list)\n \n \"\"\" Compute additional variables\"\"\"\n dfs = compute_deaths_over_closed(dfs)\n dfs = compute_active_cases(dfs)\n dfs = compute_death_rate(dfs)\n dfs = compute_df_reindexed(dfs, \"active_cases\")\n dfs = compute_df_reindexed(dfs, \"death_rate\")\n \n \"\"\"Remove 0 and 1 from rate variables\"\"\"\n for keys in [\"death_rate\", \"death_rate_reindexed\", \"deaths_over_closed\"]:\n dfs[keys] = remove_corner_values(dfs[keys])\n \n \"\"\" Set parameters for plotting\"\"\"\n titles = {\"active_cases\": \"COVID-19 Active Cases\", \"active_cases_reindexed\": \"COVID-19 Active Cases (Days from the Start of the Outbreak)\", \"deaths_over_closed\": \"COVID-19 Deaths over (Deaths + Recovered)\", \"death_rate\": \"COVID-19 Death Rate\", \"death_rate_reindexed\": \"COVID-19 Death Rate (Days from the Start of the Outbreak)\"}\n filenames = {\"active_cases\": \"covid19_active.png\", \"active_cases_reindexed\": \"covid19_active_ri.png\", \"deaths_over_closed\": \"covid19_death_over_closed.png\", \"death_rate\": \"covid19_death_rate.png\", \"death_rate_reindexed\": \"covid19_death_rate_ri.png\"}\n row_inclusion_index_threasholds = {\"active_cases\": 770, \"active_cases_reindexed\": 500, \"deaths_over_closed\": 770, \"death_rate\": 770, \"death_rate_reindexed\": 500}\n row_inclusion_indices = {}\n #row_inclusion_indices.get(x) is None:\n # row_inclusion_indices = dfs[\"cases\"].iloc[:,-1] > x\n\n \"\"\" Plot\"\"\"\n for key in row_inclusion_index_threasholds.keys():\n row_inclusion_indices[key] = dfs[\"cases\"].iloc[:,-1] > row_inclusion_index_threasholds[key]\n if key in [\"active_cases_reindexed\", \"death_rate_reindexed\"]:\n row_inclusion_indices[key] = dfs[\"cases\"].iloc[:,-5] > row_inclusion_index_threasholds[key]\n plot(dfs[key], row_inclusion_indices.get(key), titles[key], filenames[key])", "def _OnSelect(self, event, name=None):\n self.selected = name\n if self.mapper:\n self.mapper.set_cmap(cm.get_cmap(name))\n if self.canvas:\n self.canvas.draw()\n if self.callback:\n self.callback(name)", "def __draw(self, state:dict):\n _, ax = plt.subplots()\n ax.set_axis_off()\n tb = Table(ax, bbox=[0,0,1,1])\n\n width = height = 1.0 /9 \n\n\n for key in self.state.keys():\n # Add cells\n i,j = self.__display_table_map[key]\n tb.add_cell(i, j, width, height, text='{}'.format(state[key]), \n loc='center',facecolor= self.__color_map[key])\n\n ax.add_table(tb)\n plt.show()", "def __init__(self, gdf_map):\n\n self.fig = plt.figure(figsize=(13,13))\n self.ax = self.fig.add_subplot(1,1,1)\n self.fontsize = 18\n\n self.city_markersize = 6\n self.city_marker = 'o'\n self.city_markercolor = 'k'\n self.cmap = 'Reds'\n\n self.map = gdf_map", "def preprocess_state(states):\n states_pp = np.copy(states)\n \n # Paint black over the sum of rewards\n states_pp[:, 85:, :15] = [0.0, 0.0, 0.0]\n\n # Replace the colors defined bellow\n def replace_color(old_color, 
new_color):\n mask = np.all(states_pp == old_color, axis=3)\n states_pp[mask] = new_color\n\n # Black bar\n replace_color([000., 000., 000.], [120.0, 120.0, 120.0])\n\n # Road\n #new_road_color = [255.0, 255.0, 255.0]\n new_road_color = [102.0, 102.0, 102.0]\n replace_color([102., 102., 102.], new_road_color)\n replace_color([105., 105., 105.], new_road_color)\n replace_color([107., 107., 107.], new_road_color)\n # Curbs\n replace_color([255., 000., 000.], new_road_color)\n replace_color([255., 255., 255.], new_road_color)\n # Grass\n #new_grass_color = [0.0, 0.0, 0.0]\n new_grass_color = [102., 229., 102.]\n replace_color([102., 229., 102.], new_grass_color)\n replace_color([102., 204., 102.], new_grass_color)\n\n # Float RGB represenattion\n states_pp /= 255.\n\n # Converting to gray scale\n states_pp = rgb2gray(states_pp)\n\n return states_pp", "def filter_region_graph(data, region):\r\n MetaDct = data[1]\r\n f_MetaDct = {}\r\n for idx in MetaDct:\r\n if idx != ',':\r\n if MetaDct[idx].region == region:\r\n f_MetaDct[idx] = MetaDct[idx].country\r\n return f_MetaDct", "def add_to_state_dict(all_states_dict: dict, state: str) -> None:\n if state not in all_states_dict:\n url = 'https://www.vaccinespotter.org/api/v0/states/' +\\\n f'{state}.json'\n all_states_dict[state] = get_json_dict(url)", "def plot_map(locations, sel_street):\n\n kortteli = get_gml_data('Data/Paikkatieto/akaava-kortteli.gml')\n kiinteistot = get_gml_data('Data/Paikkatieto/kanta-kiinteisto.gml')\n\n base = kortteli.plot()\n kiinteistot.plot(ax=base, color='lightblue')\n locations.plot(ax=base, color='red', markersize=20)\n\n # selected location with a bigger and more red\n loc_df = pd.DataFrame()\n loc_df = loc_df.append(locations.iloc[int(sel_street)-1])\n loc_df = make_geodf(loc_df, lat_col_name='lat', lon_col_name='long')\n loc_df.plot(ax=base, color='darkred', markersize=100)\n\n plt.show()", "def do_countyplot(df: pd.DataFrame, thru: date):\n\twith urlopen('https://raw.githubusercontent.com/plotly/datasets/master/geojson-counties-fips.json') as response:\n\t\tcounties = json.load(response)\n\tfig = go.Figure(go.Choroplethmapbox(geojson=counties,\n\t\tlocations=list(df['FIPS']), z=df.DeathstoPop,\n\t\tcustomdata=df.DeathstoPop.round(2),\n\t\thovertext=df.County, marker_opacity=0.5, marker_line_width=0.1,\n\t\tcolorscale=\"ylorrd\",\n\t\tcolorbar_title=\"Fatalities per 100 people\"))\n\n\tfig.update_layout(mapbox_style=\"open-street-map\", mapbox_zoom=3,\n\t\tmapbox_center={\"lat\": 37.0902, \"lon\": -95.7129})\n\tfig.update_layout(title_text='Covid-19 local mortality as of ' +\n\t\tthru.strftime('%m-%d-%Y')+ \" -python code and analysis by Brian Herbert\")\n\tfig.update_layout(margin={\"r\": 0, \"t\": 32, \"l\": 0, \"b\": 0})\n\treturn fig", "def cities_by_states():\n states = storage.all(State).values()\n return render_template('8-cities_by_states.html', states=states)", "def __current_state(self, country='ETH', year=2017):\n values = [self.features.indicator_value(i, country, year) for i in indicators]\n return {i: (v, pred_api.features.value_to_category(i, v)) for (i, v) in zip(indicators, values)}", "def generateStationPlot(dir_path, traj_list, color_scheme='light'):\n\n\n # Choose the color scheme\n cs = MapColorScheme()\n \n if color_scheme == 'light':\n cs.light()\n\n else:\n cs.dark()\n\n\n plt.figure(figsize=(19.2, 10.8))\n\n # Init the map\n m = Basemap(projection='cyl', resolution='i')\n\n # Draw the coast boundary and fill the oceans with the given color\n 
m.drawmapboundary(fill_color=cs.map_background)\n\n # Fill continents, set lake color same as ocean color\n m.fillcontinents(color=cs.continents, lake_color=cs.lakes, zorder=1)\n\n # Draw country borders\n m.drawcountries(color=cs.countries)\n m.drawstates(color=cs.states, linestyle='--')\n\n\n\n ### PLOT WORLD MAP ###\n\n # Group stations into countries\n country_dict = {}\n for traj in traj_list:\n\n for obs in traj.observations:\n\n # Extract country code\n country_code = obs.station_id[:2]\n\n if country_code not in country_dict:\n country_dict[country_code] = {}\n \n\n if obs.station_id not in country_dict[country_code]:\n country_dict[country_code][obs.station_id] = [obs.lat, obs.lon]\n\n\n\n # Plot stations in all countries\n for country_code in country_dict:\n\n station_dict = country_dict[country_code]\n\n # Extract lat/lon\n lat = np.degrees([station_dict[station_id][0] for station_id in station_dict])\n lon = np.degrees([station_dict[station_id][1] for station_id in station_dict])\n\n # Convert lat/lon to x/y\n x, y = m(lon, lat)\n\n plt.scatter(x, y, s=0.75, zorder=5, label=\"{:s}: {:d}\".format(country_code, len(lat)))\n\n\n plt.legend(loc='lower left')\n\n plt.tight_layout()\n\n plt.savefig(os.path.join(dir_path, \"world_map.png\"), dpi=100)\n\n plt.close()\n\n ### ###", "def draw_observation(data, date_obj, map_region):\n\n # set mapbox token\n px.set_mapbox_access_token(CONFIG.CONFIG['MAPBOX']['token'])\n\n # create figures\n map_center = {'lat':(map_region[2] + map_region[3]) * 0.5,\n 'lon':(map_region[0] + map_region[1]) * 0.5}\n figs = collections.OrderedDict()\n\n # draw precipitation\n bins = [0.1, 10, 25, 50, 100, 250, 1200]\n keys = ['0.1~10', '10~25', '25~50', '50~100', '100~250', '>=250']\n cols = ['lightgreen', 'yellow', 'lightskyblue', 'blue', 'magenta','maroon']\n cols_map = dict(zip(keys, cols))\n data['rain'] = pd.cut(data['PRE_Time_0808'], bins=bins, labels=keys)\n data['Rainfall'] = '['+data['Lon'].round(2).astype(str) + ',' + data['Lat'].round(2).astype(str) + ']: ' + \\\n data['PRE_Time_0808'].astype(str)\n data['rain_size'] = data['PRE_Time_0808'] + data['PRE_Time_0808'].mean()\n df = data[data['rain'].notna()]\n if df.shape[0] >= 2:\n figs['Rainfall'] = px.scatter_mapbox(\n df, lat=\"Lat\", lon=\"Lon\", color=\"rain\", category_orders={'rain': keys}, color_discrete_map = cols_map,\n hover_data={'Rainfall':True, 'Lon':False, 'Lat':False, 'rain':False, 'rain_size':False},\n mapbox_style='satellite-streets', size=\"rain_size\", center=map_center, size_max=10, zoom=4,\n title = 'Accumulated precipitation ({})'.format(date_obj.strftime(\"%Y%m%d 08-08\")),\n width=900, height=700)\n\n # draw maximum temperature\n bins = [35, 37, 40, 60]\n keys = ['35~37', '37~40', '>=40']\n cols = ['rgb(255,191,187)', 'rgb(250,89,0)', 'rgb(230,0,8)']\n cols_map = dict(zip(keys, cols))\n data['max_temp_warning'] = pd.cut(data['TEM_Max'], bins=bins, labels=keys)\n data['max_temp'] = '['+data['Lon'].round(2).astype(str) + ',' + data['Lat'].round(2).astype(str) + ']: ' + \\\n data['TEM_Max'].astype(str)\n df = data[data['max_temp_warning'].notna()]\n if df.shape[0] >= 2:\n figs['Max_temperature'] = px.scatter_mapbox(\n df, lat=\"Lat\", lon=\"Lon\", color=\"max_temp_warning\", category_orders={'max_temp_warning': keys}, \n color_discrete_map = cols_map,\n hover_data={'max_temp':True, 'Lon':False, 'Lat':False, 'max_temp_warning':False, 'TEM_Max':False},\n mapbox_style='satellite-streets', size=\"TEM_Max\", center=map_center, size_max=10, zoom=4,\n title = 'Maximum temperature 
({})'.format(date_obj.strftime(\"%Y%m%d 08-08\")),\n width=900, height=700)\n\n # draw minimum temperature\n bins = [-120, -40, -30, -20, -10, 0]\n keys = ['<=-40','-40~-30', '-30~-20', '-20~-10', '-10~0']\n cols = ['rgb(178,1,223)', 'rgb(8,7,249)', 'rgb(5,71,162)', 'rgb(5,109,250)', 'rgb(111,176,248)']\n cols_map = dict(zip(keys, cols))\n data['min_temp_warning'] = pd.cut(data['TEM_Min'], bins=bins, labels=keys)\n data['min_temp'] = '['+data['Lon'].round(2).astype(str) + ',' + data['Lat'].round(2).astype(str) + ']: ' + \\\n data['TEM_Min'].astype(str)\n df = data[data['min_temp_warning'].notna()]\n if df.shape[0] >= 2:\n figs['Min_temprature'] = px.scatter_mapbox(\n df, lat=\"Lat\", lon=\"Lon\", color=\"min_temp_warning\", category_orders={'min_temp_warning': keys}, \n color_discrete_map = cols_map,\n hover_data={'min_temp':True, 'Lon':False, 'Lat':False, 'min_temp_warning':False, 'TEM_Min':False},\n mapbox_style='satellite-streets', size=-1.0*df[\"TEM_Min\"], center=map_center, size_max=10, zoom=4,\n title = 'Minimum temperature ({})'.format(date_obj.strftime(\"%Y%m%d 08-08\")),\n width=900, height=700)\n\n # draw low visibility\n data['VIS_Min'] /= 1000.0\n bins = [0, 0.05, 0.2, 0.5, 1]\n keys = ['<=0.05','0.05~0.2', '0.2~0.5', '0.5~1']\n cols = ['rgb(0,82,77)', 'rgb(0,153,160)', 'rgb(0,210,204)', 'rgb(95,255,252)']\n cols_map = dict(zip(keys, cols))\n data['min_vis_warning'] = pd.cut(data['VIS_Min'], bins=bins, labels=keys)\n data['VIS_Min_size'] = 2.0-data[\"VIS_Min\"]\n data['min_vis'] = '['+data['Lon'].round(2).astype(str) + ',' + data['Lat'].round(2).astype(str) + ']: ' + \\\n data['VIS_Min'].astype(str)\n df = data[data['min_vis_warning'].notna()]\n if df.shape[0] >= 2:\n figs['Low_visibility'] = px.scatter_mapbox(\n df, lat=\"Lat\", lon=\"Lon\", color=\"min_vis_warning\", category_orders={'min_vis_warning': keys}, \n color_discrete_map = cols_map,\n hover_data={'min_vis':True, 'Lon':False, 'Lat':False, 'min_vis_warning':False, 'VIS_Min_size':False},\n mapbox_style='satellite-streets', size=\"VIS_Min_size\", center=map_center, size_max=10, zoom=4,\n title = 'Low visibility ({})'.format(date_obj.strftime(\"%Y%m%d 08-08\")),\n width=900, height=700)\n\n # draw high wind\n bins = [10.8, 13.9, 17.2, 20.8, 24.5, 28.5, 32.7, 37.0, 120]\n keys = ['10.8~13.8','13.9~17.1', '17.2~20.7', '20.8~24.4', '24.5~28.4', '28.5~32.6', '32.7~36.9', '>=37.0']\n cols = ['rgb(0,210,244)', 'rgb(0,125,255)', 'rgb(253,255,0)', 'rgb(247,213,0)',\n 'rgb(255,141,0)', 'rgb(251,89,91)', 'rgb(255,3,0)', 'rgb(178,1,223)']\n cols_map = dict(zip(keys, cols))\n data['max_win_warning'] = pd.cut(data['WIN_S_Max'], bins=bins, labels=keys)\n data['max_win'] = '['+data['Lon'].round(2).astype(str) + ',' + data['Lat'].round(2).astype(str) + ']: ' + \\\n data['WIN_S_Max'].astype(str)\n df = data[data['max_win_warning'].notna()]\n if df.shape[0] >= 2:\n figs['High_wind'] = px.scatter_mapbox(\n df, lat=\"Lat\", lon=\"Lon\", color=\"max_win_warning\", category_orders={'max_win_warning': keys}, \n color_discrete_map = cols_map,\n hover_data={'max_win':True, 'Lon':False, 'Lat':False, 'max_win_warning':False, 'WIN_S_Max':False},\n mapbox_style='satellite-streets', size=\"WIN_S_Max\", center=map_center, size_max=10, zoom=4,\n title = 'Maximum wind speed ({})'.format(date_obj.strftime(\"%Y%m%d 08-08\")),\n width=1000, height=800)\n\n return figs", "def __init__(self, transition_matrix, states):\n self.transition_matrix = np.atleast_2d(transition_matrix)\n self.states = states\n self.index_dict = {self.states[index]: index for index 
in\n range(len(self.states))}\n self.state_dict = {index: self.states[index] for index in\n range(len(self.states))}", "def worldplot(data):\n \n plt.rcParams['font.size'] = 18\n world_df= geopandas.read_file(geopandas.datasets.get_path('naturalearth_lowres'));\n\n world_df = world_df[world_df[\"iso_a3\"].isin(data[\"recipient_iso3\"])];\n\n #world_2df.[\"OFa_all_con\"] = np.nan;\n #world_2df.sort_values(by=\"iso_a3\").head()\n for i in world_df.index:\n for j in data.index:\n if world_df.loc[i,\"iso_a3\"] == data.loc[j,\"recipient_iso3\"]:\n world_df.loc[i,\"OFa_all_con\"] = data.loc[j, \"OFa_all_con\"];\n\n\n fig, ax = plt.subplots(1,1, figsize=(22,14))\n ax.axis('off')\n fig.suptitle('Chinese Development Finance', fontsize=25)\n \n world_df.plot(column='OFa_all_con', ax = ax, legend=True, legend_kwds={\"label\":\"\\n Chinese Development Finance in $10 bln.\",\n \"orientation\": \"horizontal\"}, \n missing_kwds={\"color\": \"lightgrey\",\n \"edgecolor\": \"red\",\n \"hatch\": \"///\",\n \"label\": \"Missing values\"});", "def plot_2d (cities):\n LON_s = [ xpath(city, 'city/coord/lon') for city in cities ]\n LAT_s = [ xpath(city, 'city/coord/lat') for city in cities ]\n # emphasize selected cities with a specific size and color\n # 'r' stands for red, 'b' is black\n colors = [ 'r' if 'selected' in city else 'b' for city in cities ]\n sizes = [ 100 if 'selected' in city else .1 for city in cities ]\n plot.scatter(LON_s, LAT_s, c=colors, s=sizes)\n\n # to exit the drawing, close related window\n plot.show()", "def callback_selectstate(self, attrname, old, new):\n self._update_chart(self.selectstate.value)", "def get_custom_states(self, *args, **kwargs):\n pass", "def get_shapes4country(country='South Africa'):\n # location of data\n URL = \"http://www.naturalearthdata.com/downloads/10m-cultural-vectors\"\n URL += \"/10m-admin-1-states-provinces/\"\n # Shapefiles locally?\n # TODO - update to download automatically and store in AC_tools' data directory\n shapefiles = 'ne_10m_admin_1_states_provinces_lakes'\n# shapefiles = 'ne_10m_admin_1_states_provinces'\n folder = '/mnt/lustre/users/ts551/labbook/Python_progs/'\n folder += '/AC_tools/data/shapefiles/{}'.format(shapefiles, shapefiles)\n states = geopandas.read_file(folder)\n # Just select state of interest\n choosen_states = states.query(\"admin == '{}'\".format(country))\n choosen_states = choosen_states.reset_index(drop=True)\n # Get the shapes\n shapes = zip(choosen_states.geometry, range(len(choosen_states)))\n return shapes", "def build(self):\n states = WOFRegion.query.filter(WOFRegion.country_iso=='US')\n\n logger.info('Indexing US states.')\n\n for row in tqdm(states):\n\n # Key -> id(s)\n for key in map(keyify, state_key_iter(row)):\n self.add_key(key, row.wof_id)\n\n # ID -> state\n self.add_location(row.wof_id, StateMatch(row))", "def view_state():\n global adresses\n pos = int(request.args.get(\"position\", default=-1))\n with open('data/states.json', 'r') as f:\n states = json.loads(f.read())\n if not pos == -1 and pos < len(states):\n print \"Pos valid\"\n for adress in states[pos].keys():\n if not adress == \"name\":\n print str(adress) + \":\" + str(states[pos][adress])\n adresses[int(adress)] = states[pos][adress]\n else:\n print \"Property Name\"\n dmxsender.send(adresses)\n return json_back()\n return \"INVALID KEY\"", "def State(**variables):\n return pd.Series(variables, name='state')", "def index():\n global states\n with open('data/states.json', 'r') as f:\n states = json.loads(f.read())\n # COLORS: 
https://www.w3schools.com/w3css/w3css_colors.asp\n return render_template('UI.html', main_color=\"orange\", adresses=map(str, adresses), channels=channels, options=all_lights.keys(), states=states)", "def __basemap_ancillary(m, latvalues=None, lonvalues=None, drawparallels=True, drawcountries=True, land_color=0.8):\n\n if latvalues is None:\n latvalues = np.arange(-90., 120., 30.)\n if lonvalues is None:\n lonvalues = np.arange(-180., 180., 90.)\n if drawcountries:\n m.drawcountries()\n m.drawcoastlines()\n m.drawlsmask(lakes=True, land_color=land_color)\n m.drawmapboundary() # draw a line around the map region\n if drawparallels:\n m.drawparallels(latvalues, labels=[1, 0, 0, 0])\n m.drawmeridians(lonvalues, labels=[0, 0, 0, 1]) # draw meridians", "def _create_color_map(self):\n unique_labels = np.unique(self.out_labels)\n color_map = {}\n for unique_label in unique_labels:\n color_map[unique_label] = self._random_color()\n\n return color_map", "def draw_on(self, folium_map):", "def worldplot_2(data, cc, pc):\n # define the columns of input\n # cc = data.columns[checkcol]\n #pc = data.columns[plotcol]\n \n plt.rcParams['font.size'] = 18\n # generate standart geopandas dataframe\n world_df = geopandas.read_file(geopandas.datasets.get_path('naturalearth_lowres'));\n #check indicies of the input dataframe and modify standart geopandas df\n world_df = world_df[world_df[\"iso_a3\"].isin(data[cc])];\n\n #world_2df.[\"OFa_all_con\"] = np.nan;\n #world_2df.sort_values(by=\"iso_a3\").head()\n for i in world_df.index:\n for j in data.index:\n if world_df.loc[i,\"iso_a3\"] == data.loc[j, cc]:\n try:\n world_df.loc[i,pc] = data.loc[j, pc];\n except: \n print(\"\\nError! Invalid Input. Example for input: OFa_all_con\")\n return\n \n\n fig, ax = plt.subplots(1,1, figsize=(22,12))\n ax.axis('off')\n \n \n if pc == \"OFa_all_con\":\n fig.suptitle('Chinese Development Finance (financial amount)', fontsize=25)\n world_df.plot(column=pc, ax = ax, legend=True, cmap='jet', legend_kwds={\"label\":\"\\n Chinese Development Finance in $10 bln (2000-2014)\",\n \"orientation\": \"horizontal\"}, \n missing_kwds={\"color\": \"lightgrey\",\n \"edgecolor\": \"red\",\n \"hatch\": \"///\",\n \"label\": \"Missing values\"});\n else:\n fig.suptitle('Chinese Development Finance (probability)', fontsize=25)\n world_df.plot(column=pc, ax = ax, legend=True, cmap='jet', legend_kwds={\"label\":\"\\n Probability of receiving Chinese Development Finance (2000-2014)\",###ADDDDJUST!!!!!\n \"orientation\": \"horizontal\"}, \n missing_kwds={\"color\": \"lightgrey\",\n \"edgecolor\": \"red\",\n \"hatch\": \"///\",\n \"label\": \"Missing values\"});", "def uk_map(fig1, indata, clevs, datlons, datlats, mtitle, munits, maskswitch):\n\t\n\tfrom mpl_toolkits import basemap as bm\n\timport matplotlib.cm as cm\n\tfrom mpl_toolkits.basemap import shiftgrid \n\tfrom netCDF4 import Dataset\n\tfrom matplotlib.colors import LightSource\n\timport matplotlib.pyplot as plt\n\timport numpy as np\n\timport hillshade\n\timport set_shade\n\timport colour_map\n\t\n\tif maskswitch==1:\n\t\t# import missing data map for masking out of oceans \n\t\tmissdata = Dataset('/exports/work/geos_cxc/users/ahardin4/output/amibatch/afixa/miss.nc', 'r', format='NETCDF3_CLASSIC')\n\t\t\n\t# create the figure and axes instances.\n\tax = fig1.add_axes([0.1,0.1,0.8,0.8])\n\tm = bm.Basemap(llcrnrlon=-9.5,llcrnrlat=49.5,urcrnrlon=2.5,urcrnrlat=59,rsphere=(6378137.00,6356752.3142),\\\n \tresolution='f',area_thresh=1000.,projection='laea', 
lat_0=54.5,lon_0=-2.75,ax=ax)\n\tm.drawcoastlines()\n\t\n\t# read in etopo5 topography/bathymetry.\n\turl = 'http://ferret.pmel.noaa.gov/thredds/dodsC/data/PMEL/etopo5.nc'\n\tetopodata = Dataset(url)\n\ttopoin = etopodata.variables['ROSE'][:]\n\tlons = etopodata.variables['ETOPO05_X'][:]\n\tlats = etopodata.variables['ETOPO05_Y'][:]\n\t\n\t# shift data so lons go from -180 to 180 instead of 00 to 360.\n\ttopoin,lons = shiftgrid(180.,topoin,lons,start=False)\n\n\t# transform coordinates\n\tx,y=m(datlons[:,:],datlats[:,:])\n\t# transform to nx x ny regularly spaced 5km native projection grid\n\tnx = int((m.xmax-m.xmin)/5000.)+1; ny = int((m.ymax-m.ymin)/5000.)+1\n\ttopodat = m.transform_scalar(topoin,lons,lats,nx,ny)\n\t\n\t# create light source object for topography\n\tls = LightSource(azdeg = 0, altdeg = 2)\n\t# use set_shade function (also available)\n\trgb = set_shade(topodat)\n\n\t# plot image over map with imshow.\n\tim = m.imshow(rgb)\n\t\n\t# apply function to colormap pointers, can be any function at all, as long as\n\t# 0 remains 0, 1 remains 1, and values increase from one to the other.\n\t\n\t# x^4 is good for pseudo-log plots of rainfall:\n\t#log_jet=cmap_xmap(lambda x: (x*x*x*x), cm.hsv)\n\t\n\t#set to lambda x: x for no change:\n\tlog_jet=cmap_xmap(lambda x: (x), cm.jet)\n\t\n\t#apply function to colormap if desired to make whole scale 'hotter' or 'colder'\n\t#example makes colourmap significantly hotter by confining values to upper quarter:\t\n\t#log_jet=cmap_map(lambda x: x/4+0.75, cm.gist_rainbow)\n\t\n\t# mask out oceans, but not lakes. Useful when plotting or comparing against observed\n\tif maskswitch==1:\n\t\tmissmap=missdata.variables['land_map']\n\t\tmissmap2=missdata.variables['land_map']\n\t\t# cut from big mask to small mask if necessary\n\t\t#smallmap=missmap[0,6:46,0:34]\n\t\tsmallmap=missmap[0,:,:]\n\t\tsmallmap2=missmap2[0,:,:]\n\t\t# expand out by one to take into account interpolation\n\t\t\n\t\tfor i in range(1,39):\n\t\t\tfor j in range(1,33):\n\t\t\t\tif smallmap[i,j] == 0.0:\n\t\t\t\t\tsmallmap2[i-1,j]=0.0 \n\t\t\t\t\tsmallmap2[i,j-1]=0.0\n\t\t\t\t\tsmallmap2[i+1,j]=0.0 \n\t\t\t\t\tsmallmap2[i,j+1]=0.0\n\t\t\n\t\t# perform masking\n\t\tindata=np.ma.masked_array(indata,mask=(smallmap2<-0.5))\n\t\tprint smallmap2[0,0], smallmap2[36,0], smallmap2[20,20]\n\t\t#indata[indata<=0.1]=np.nan\n\t# produce semi-transparent contour map\n\tcontourmap=m.contourf(x,y,indata,clevs,cmap=cm.get_cmap(log_jet,len(clevs)-1),extend='both',\n\t\talpha=0.5,origin='lower',rasterized=True)\n\t\t\n\t# produce simple block plot\n\t#contourmap=m.pcolor(x,y,indata,shading='interp',cmap=cm.get_cmap(log_jet,len(clevs)-1),\n\t#\talpha=0.5)\n\t\t\n\t# place colour bar on right\n\tcb = m.colorbar(contourmap,\"right\", size=\"5%\", pad='3%')\n\t# configure colour bar labeling\n\tcl = plt.getp(cb.ax, 'ymajorticklabels')\n\tcontourmap=plt.setp(cl, fontsize=14)\n\n\t# draw parallels and meridians so as not to clash with colour bar placement\n\t# labels = [left,right,top,bottom]\n\tm.drawparallels(np.arange(-70.,80,1.), labels=[1,0,0,1], fontsize=13)\n\tm.drawmeridians(np.arange(351.,362.,2.),labels=[1,0,0,1], fontsize=13)\n\t\n\t# configure title and units\n\tcb.ax.set_xlabel(munits, fontsize=12)\n\tcontourmap=plt.title(mtitle, fontsize=14)", "def ad_rep_city_state(obj):\n return '%s, %s' % (obj.ad_rep.geolocation_object.us_city.name,\n obj.ad_rep.geolocation_object.us_state.abbreviation)", "def labels_to_cityscapes_palette(image):\n classes=ZHANG_classes \n result =np.zeros((img.shape[0], 
img.shape[1], 3),dtype=np.uint8)\n for key, value in classes.items():\n result[np.where(img == key)] = value\n return result", "def add_city(g, code, name, country, continent, timezone, coordinates, population, region):\n port = Ports(code, name, country, continent, timezone, coordinates, population, region)\n g.city_dict[code] = port\n g.convert[name] = code \n return g", "def plotly_tree_map():\n df = process_life_expectancy_dataset(\"regression\")\n\n # Drop latitude and year column\n df.drop([\"latitude\", \"year\"], axis=1, inplace=True)\n\n df = convert_ohe_columns_into_one(df, \"x0\", \"country\")\n df = convert_ohe_columns_into_one(df, \"continent\", \"continent\")\n\n tree_df = df.groupby([\"continent\", \"country\", \"value\"]).sum().reset_index()\n\n # Plotting Treemap with Continent as parent, country as child, and values representing total size country-wise\n fig = px.treemap(tree_df, path=['continent', 'country'], values='value')\n\n return fig", "def color(self, values, ids=(), key_on='feature.id', palette='YlOrBr', **kwargs):\n # Set values and ids to both be simple sequences by inspecting values\n id_name, value_name = 'IDs', 'values'\n if isinstance(values, collections.abc.Mapping):\n assert not ids, 'IDs and a map cannot both be used together'\n if hasattr(values, 'columns') and len(values.columns) == 2:\n table = values\n ids, values = table.columns\n id_name, value_name = table.labels\n else:\n dictionary = values\n ids, values = list(dictionary.keys()), list(dictionary.values())\n if len(ids) != len(values):\n assert len(ids) == 0\n # Use indices as IDs\n ids = list(range(len(values)))\n\n m = self._create_map()\n data = pandas.DataFrame({id_name: ids, value_name: values})\n attrs = {\n 'geo_data': json.dumps(self.geojson()),\n 'data': data,\n 'columns': [id_name, value_name],\n 'key_on': key_on,\n 'fill_color': palette,\n }\n kwargs.update(attrs)\n folium.Choropleth(\n **kwargs,\n name='geojson'\n ).add_to(m)\n colored = self.format()\n colored._folium_map = m\n return colored", "def visualize_environment(self,env_state):\n fig=plt.figure(figsize=self.figsize)\n ax=plt.subplot(111)\n #Plot the targets\n plt.plot([i[0] for i in self.coordinates__targets],\\\n [i[1] for i in self.coordinates__targets],\\\n marker='x',markersize=15,linestyle='None',color='k',label='Target')\n plot_target_values = True\n if plot_target_values:\n for i ,t in enumerate(self.coordinates__targets):\n plt.text(t[0],t[1],self.target_values[i])\n #Plot the towers\n tower_colors = ['r','b','g']\n for tk in xrange(self.N_tower_kinds):\n plt.plot([i[0] for i in self.coordinates__tower_sites[tk]],\\\n [i[1] for i in self.coordinates__tower_sites[tk]],\\\n marker='o',markersize=10,linestyle='None',color=tower_colors[tk],alpha=.5,label='Tower {} Sites'.format(tk+1))\n if env_state == 'solved':\n for tk in xrange(self.N_tower_kinds):\n plt.plot([i[0] for i in self.coordinates__solved_towers[tk]],\\\n [i[1] for i in self.coordinates__solved_towers[tk]],\\\n marker='^',markersize=20,linestyle='None',color=tower_colors[tk],label='Tower {} Placed'.format(tk+1))\n for x,y,w,h in self.coordinates__obstacles:\n r = plt.Rectangle((x,y),w,h,fc='c')\n ax.add_patch(r)\n plt.xlim(0,self.map_dimensions[1])\n plt.ylim(0,self.map_dimensions[0])\n plt.legend(numpoints=1,loc='best')\n savename = 'SolvedMap.png' if env_state == 'solved' else 'InitialMap.png'\n plt.savefig(savename)", "def drought_index_map(request):\n \n view_center = [-105.2, 39.0]\n view_options = MVView(\n projection='EPSG:4326',\n 
center=view_center,\n zoom=7.0,\n maxZoom=12,\n minZoom=5\n )\n\n # TIGER state/county mapserver\n tiger_boundaries = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/State_County/MapServer'},\n legend_title='States & Counties',\n layer_options={'visible':True,'opacity':0.8},\n legend_extent=[-112, 36.3, -98.5, 41.66]) \n\n # NCDC Climate Divisions\n climo_divs = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://gis.ncdc.noaa.gov/arcgis/rest/services/backgrounds/MapServer',\n 'params': {'LAYERS': 'show:1'}},\n legend_title='Climate Divisions',\n layer_options={'visible':False,'opacity':0.8},\n legend_extent=[-112, 36.3, -98.5, 41.66]) \n \n # USGS Rest server for HUC watersheds \n watersheds = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://hydro.nationalmap.gov/arcgis/rest/services/wbd/MapServer'},\n legend_title='HUC Watersheds',\n layer_options={'visible':False,'opacity':0.4},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n \n ##### WMS Layers - Ryan\n usdm_legend = MVLegendImageClass(value='Drought Category',\n image_url='http://ndmc-001.unl.edu:8080/cgi-bin/mapserv.exe?map=/ms4w/apps/usdm/service/usdm_current_wms.map&version=1.3.0&service=WMS&request=GetLegendGraphic&sld_version=1.1.0&layer=usdm_current&format=image/png&STYLE=default')\n usdm_current = MVLayer(\n source='ImageWMS',\n options={'url': 'http://ndmc-001.unl.edu:8080/cgi-bin/mapserv.exe?',\n 'params': {'LAYERS':'usdm_current','FORMAT':'image/png','VERSION':'1.1.1','STYLES':'default','MAP':'/ms4w/apps/usdm/service/usdm_current_wms.map'}},\n layer_options={'visible':False,'opacity':0.3},\n legend_title='USDM',\n legend_classes=[usdm_legend],\n legend_extent=[-126, 24.5, -66.2, 49])\n \n usdm_kml = MVLayer(\n source='KML',\n options={'url': '/static/tethys_gizmos/data/usdm_current.kml'},\n layer_options={'visible':True,'opacity':0.5},\n legend_title='USDM',\n feature_selection=False,\n legend_classes=[usdm_legend],\n legend_extent=[-126, 24.5, -66.2, 49])\n \n # ESI Data from USDA\n esi_1 = MVLayer(\n source='ImageWMS',\n options={'url': 'https://hrsl.ba.ars.usda.gov/wms.esi.2012?',\n 'params': {'LAYERS': 'ESI_current_1month', 'VERSION':'1.1.3', 'CRS':'EPSG:4326'}},\n layer_options={'visible':False,'opacity':0.5},\n legend_title='ESI - 1 month',\n legend_extent=[-126, 24.5, -66.2, 49])\n\n # Define SWSI KML Layer\n swsi_legend = MVLegendImageClass(value='',\n image_url='/static/tethys_gizmos/data/swsi_legend.PNG')\n SWSI_kml = MVLayer(\n source='KML',\n options={'url': '/static/tethys_gizmos/data/SWSI_2018Current.kml'},\n legend_title='SWSI',\n layer_options={'visible':True,'opacity':0.7},\n feature_selection=True,\n legend_classes=[swsi_legend],\n legend_extent=[-109.5, 36.5, -101.5, 41.6])\n \n # NCDC/NIDIS precip index\n ncdc_pindex = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://gis.ncdc.noaa.gov/arcgis/rest/services/cdo/indices/MapServer',\n 'params': {'LAYERS': 'show:1'}},\n legend_title='Precipitation Index',\n layer_options={'visible':False,'opacity':0.7},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n \n # NCDC/NIDIS palmer drought severity index\n # NOTE: MONTH LOOKUP IS HARDCODED RIGHT NOW\n ncdc_pdsi = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://gis.ncdc.noaa.gov/arcgis/rest/services/cdo/indices/MapServer',\n 'params': {'LAYERS': 'show:2','layerDefs':'{\"2\":\"YEARMONTH='+str(yearnow)+str(prevmonth)+'\"}'}},\n legend_title='PDSI',\n layer_options={'visible':False,'opacity':0.7},\n 
legend_extent=[-112, 36.3, -98.5, 41.66])\n \n # NCDC/NIDIS palmer drought severity index\n # NOTE: MONTH LOOKUP IS HARDCODED RIGHT NOW\n ncdc_palmz = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://gis.ncdc.noaa.gov/arcgis/rest/services/cdo/indices/MapServer',\n 'params': {'LAYERS': 'show:8','layerDefs':'{\"8\":\"YEARMONTH='+str(yearnow)+str(prevmonth)+'\"}'}},\n legend_title='Palmer Z',\n layer_options={'visible':False,'opacity':0.7},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n \n # NCDC/NIDIS standardized precip index\n ncdc_spi_1 = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://gis.ncdc.noaa.gov/arcgis/rest/services/cdo/indices/MapServer',\n 'params': {'LAYERS': 'show:11','layerDefs':'{\"11\":\"YEARMONTH='+str(yearnow)+str(prevmonth)+'\"}'}},\n legend_title='SPI (1-month)',\n layer_options={'visible':False,'opacity':0.6},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n \n # NCDC/NIDIS standardized precip index\n ncdc_spi_3 = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://gis.ncdc.noaa.gov/arcgis/rest/services/cdo/indices/MapServer',\n 'params': {'LAYERS': 'show:13','layerDefs':'{\"13\":\"YEARMONTH='+str(yearnow)+str(prevmonth)+'\"}'}},\n legend_title='SPI (3-month)',\n layer_options={'visible':False,'opacity':0.6},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n \n # NCDC/NIDIS standardized precip index\n ncdc_spi_6 = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://gis.ncdc.noaa.gov/arcgis/rest/services/cdo/indices/MapServer',\n 'params': {'LAYERS': 'show:14','layerDefs':'{\"14\":\"YEARMONTH='+str(yearnow)+str(prevmonth)+'\"}'}},\n legend_title='SPI (6-month)',\n layer_options={'visible':False,'opacity':0.6},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n \n \n # Define map view options\n drought_index_map_view_options = MapView(\n height='100%',\n width='100%',\n controls=['ZoomSlider', 'Rotate', 'ScaleLine', 'FullScreen',\n {'MousePosition': {'projection': 'EPSG:4326'}},\n {'ZoomToExtent': {'projection': 'EPSG:4326', 'extent': [-112, 36.3, -98.5, 41.66]}}],\n layers=[tiger_boundaries,climo_divs,ncdc_pdsi,ncdc_palmz,ncdc_spi_1,ncdc_spi_3,ncdc_spi_6,SWSI_kml,watersheds],\n view=view_options,\n basemap='OpenStreetMap',\n legend=True\n )\n\n context = {\n 'drought_index_map_view_options':drought_index_map_view_options,\n }\n\n return render(request, 'co_drought/drought_index.html', context)" ]
[ "0.6094421", "0.60562986", "0.600109", "0.58970064", "0.58948016", "0.57658577", "0.56608766", "0.5599893", "0.55898225", "0.552269", "0.5476184", "0.5427226", "0.53729886", "0.53549206", "0.5288607", "0.52129906", "0.52069104", "0.51814765", "0.51725775", "0.51422197", "0.50980484", "0.5079439", "0.50733835", "0.50692004", "0.50489324", "0.50338554", "0.50228494", "0.49867666", "0.49624625", "0.49606118", "0.49569678", "0.49428734", "0.494048", "0.49371135", "0.49269682", "0.49249893", "0.48805737", "0.4863863", "0.48625252", "0.48597124", "0.48573363", "0.48560047", "0.48480126", "0.48463592", "0.483913", "0.4837189", "0.48324788", "0.48280695", "0.48163489", "0.4815785", "0.48118773", "0.48080158", "0.48047385", "0.47980297", "0.4781758", "0.4776528", "0.47765073", "0.47759005", "0.47533846", "0.47532338", "0.47497424", "0.47483155", "0.47408652", "0.47349098", "0.47297573", "0.47257897", "0.4723311", "0.47206843", "0.47166982", "0.47072044", "0.47043774", "0.4703754", "0.47031876", "0.4682333", "0.467887", "0.46755746", "0.46649313", "0.46615106", "0.46604788", "0.46590486", "0.46583733", "0.46543953", "0.46492687", "0.4645465", "0.4643798", "0.46429893", "0.46393937", "0.46392944", "0.4637196", "0.46327707", "0.46308935", "0.4630318", "0.4628778", "0.4627668", "0.4626055", "0.46254694", "0.46212846", "0.4619961", "0.46167472", "0.46094435", "0.45977992" ]
0.0
-1
state map where users can click on state and changes colors
def world_map():
    # AJAX CALL FOR USER STATE VISIT

    # get current user from session
    user_id = session["user_id"]

    print user_id

    # inputs from state map in console.log [feature.id] = state_id feature = state
    country_id = request.form['mapData.id']

    print country_id

    country = db.session.query(Country).filter_by(country_id=country_id).one()

    user_country_obj = User_Country(country_id=country_id, user_id=user_id, visited_at=datetime.now())

    # TODO: make the object be added
    db.session.add(user_country_obj)
    db.session.commit()

    # # # TODO: query database for the information to go into this json
    user_country_json_data = {"country_id": country.country_id, "country_name": country.country_name, "visited_at": user_country_obj.visited_at}

    return jsonify(user_country_json_data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_state(canvas, state):\n for key, value in state.items():\n set_attribute(canvas, key, value)", "def state_style(person, color):\r\n return lambda x: {'fillColor': color if x['id']\r\n in person['States'] else 'white',\r\n 'color': 'black',\r\n 'weight': 0.3,\r\n 'fillOpacity': 0.5 if x['id']\r\n in person['States'] else 0.0\r\n }", "def states_traveled(person, color):\r\n folium.GeoJson(data=state_geo,\r\n name=person['Name'] + ' - '\r\n + str(len(person['States'])),\r\n style_function=state_style(person, color)\r\n ).add_to(us_map)", "def highlight(self, number, state):\n\n marker = game.markers[number]\n link = game.markers[marker.link]\n board = self.ids.board\n ui_link = board.children[-link.index - 1]\n\n # Toggle highlighting on\n if state == 'on':\n ui_link.old_color = ui_link.color\n ui_link.color = scheme.white\n\n # Toggle highlighting off\n elif state == 'off':\n ui_link.color = ui_link.old_color", "def get_state_colors():\n state_colors = []\n state_cases = []\n state_active = []\n for i in get_covid_stats_for_all_states():\n state_colors.append(i.color)\n state_cases.append(i.cases)\n state_active.append(i.activeCases)\n socketio.emit(\n \"colors\", {\"colors\": state_colors, \"cases\": state_cases, \"active\": state_active}\n )", "def get_colors_st(top_cities_reviews):\n unique_states = top_cities_reviews['state'].unique()\n\n st = {}\n for state in unique_states:\n r = random.uniform(0, 1)\n g = random.uniform(0, 1)\n b = random.uniform(0, 1)\n st[state] = [r, g, b]\n\n return st", "def get_state(self):\n state_dict = OrderedDict()\n for key, target_object in self._map.items():\n state = self._get_single_state(target_object)\n if state is not None:\n # pushbuttons for example are not defined in the get function\n state_dict[key] = state\n return state_dict", "def draw_state(subplot, name, **kwargs):\n global _color_idx\n if name not in state2poly:\n if get_statename(name) in state2poly:\n name = get_statename(name)\n else:\n print \"state %s not found\" % name\n return\n\n kwargs['color'] = \"#FFFFFF\"\n for polygon in state2poly[name]:\n draw_polygon(subplot, polygon, **kwargs)", "def setup_states(self, state_dict, start_state):\n self.state_dict = state_dict\n self.state_name = start_state\n self.state = self.state_dict[self.state_name]()", "def change_color(self, x, y, state):\n if state == 1:\n color = self.tile_color\n else:\n color = self.background_color\n self.canvas.itemconfig(self.board[(x, y)], fill=color)", "def change_state(self):\n transitions = self.transition_map[self.current_state]\n self.current_state = select_from_probability_dict(random(),transitions)", "def update_state_call(state_id):\n update_state = plot_choropleth('state', state_id).to_html()\n return update_state", "def set_state( self ):", "def set_state(self, state: int):", "def __init__(self):\n\n super(ColorMap, self).__init__()\n self.by_id = dict()\n\n for color in [Color.white(), Color.black()]:\n self.push_color(color)\n\n # only black and white are added ny now\n self.black_and_white = True", "def get_custom_states(self, *args, **kwargs):\n pass", "def state(self, state: str) -> None:", "def update_cells(self, state):\n width = WIDTH / CELL_SIZE\n height = HEIGHT / CELL_SIZE\n\n for index in range(0, width * height):\n if state[index] != self.get_state(index):\n self.toggle_color(index)", "def states():\n states = storage.all(State).values()\n return render_template('9-states.html', states=states)", "def state(self):\n return {self._reverse_mapping[k]: v for k, v in 
enumerate(self._state)}", "def get_state_map(self):\n if self.is_po:\n state_map = self.curr_map\n else:\n state_map = self.grid_map\n state_map[self.curr_pos] = 0.5\n state_map[self.goal_pos] = 0.7\n return state_map", "def index():\n global states\n with open('data/states.json', 'r') as f:\n states = json.loads(f.read())\n # COLORS: https://www.w3schools.com/w3css/w3css_colors.asp\n return render_template('UI.html', main_color=\"orange\", adresses=map(str, adresses), channels=channels, options=all_lights.keys(), states=states)", "def set_state(self, state_dict):\n for key, target_object in self._map.items():\n self.set_single_state(target_object,\n value=state_dict.get(key, None))", "def control_lights(state):\n for led in (RED, AMBER, GREEN):\n GPIO.output(LED[led],state[led])", "def find_dark_states(excited_state, ground_states):", "def update_input_states(self, input_values):", "def state_map():\n\n # get current user from session\n user_id = session[\"user_id\"]\n\n print user_id\n\n # inputs from state map in console.log [feature.id] = state_id feature = state\n state_id = request.form['feature_id']\n\n print state_id\n\n state = db.session.query(State).filter_by(state_id=state_id).one()\n\n user_state_obj = User_State(state_id=state_id, user_id=user_id, visited_at=datetime.now())\n\n db.session.add(user_state_obj)\n\n db.session.commit()\n\n user_state_json_data = {\"state_id\": state.state_id, \"state_name\": state.state_name, \"visited_at\": user_state_obj.visited_at}\n\n return jsonify(user_state_json_data)\n\n ################## REMOVING ##########################", "def _color_field_states(map_f, samp_ids, field, field_states, color_by_field):\r\n colors = []\r\n color_pool = [matplotlib_rgb_color(data_colors[color].toRGB())\r\n for color in data_color_order]\r\n metadata_map = MetadataMap.parseMetadataMap(map_f)\r\n\r\n for field_to_check in field, color_by_field:\r\n if field_to_check not in metadata_map.CategoryNames:\r\n raise ValueError(\"The field '%s' is not in the metadata mapping \"\r\n \"file's column headers.\" % field_to_check)\r\n\r\n all_field_states = metadata_map.getCategoryValues(samp_ids, field)\r\n all_color_by_states = metadata_map.getCategoryValues(samp_ids,\r\n color_by_field)\r\n\r\n if len(set(field_states) - set(all_field_states)) != 0:\r\n raise ValueError(\"Encountered unrecognizable field state(s) in %r \"\r\n \"for field '%s'.\" % (field_states, field))\r\n\r\n # Build mapping from one field to the other.\r\n field_mapping = defaultdict(list)\r\n for field_state, color_by_state in zip(all_field_states,\r\n all_color_by_states):\r\n if field_state in field_states:\r\n field_mapping[field_state].append(color_by_state)\r\n\r\n # For each of the specified input field states, find its corresponding\r\n # \"color by\" field state and give it a color if it hasn't been assigned one\r\n # yet. Make sure we have enough colors and there is a one-to-one mapping.\r\n color_mapping = {}\r\n for field_state in field_states:\r\n color_by_states = set(field_mapping[field_state])\r\n\r\n if len(color_by_states) != 1:\r\n raise ValueError(\"The field '%s' to color by does not have a \"\r\n \"one-to-one mapping with field '%s'. 
Coloring \"\r\n \"would be ambiguous.\" % (color_by_field, field))\r\n\r\n color_by_state = list(color_by_states)[0]\r\n if color_by_state not in color_mapping:\r\n if len(color_pool) > 0:\r\n color_mapping[color_by_state] = color_pool.pop(0)\r\n else:\r\n raise ValueError(\"There are not enough available QIIME colors \"\r\n \"to color each of the field states in field \"\r\n \"'%s'. Coloring would be ambiguous.\" %\r\n color_by_field)\r\n\r\n colors.append(color_mapping[color_by_state])\r\n\r\n return colors, color_mapping", "def test_color_field_states(self):\r\n # All sample IDs and field states.\r\n exp = ([(1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (1.0, 0.0, 0.0)],\r\n {'y': (0.0, 0.0, 1.0), 'x': (1.0, 0.0, 0.0)})\r\n obs = _color_field_states(self.map_f, ['1', '2', '3', '4', '5', '6'],\r\n 'Foo', ['a', 'b', 'c'], 'Bar')\r\n self.assertEqual(exp[0], obs[0])\r\n assert_almost_equal(obs[1]['x'], exp[1]['x'])\r\n assert_almost_equal(obs[1]['y'], exp[1]['y'])\r\n\r\n # Subset of sample IDs and field states.\r\n exp = ([(1.0, 0.0, 0.0)], {'x': (1.0, 0.0, 0.0)})\r\n obs = _color_field_states(self.map_f, ['1', '2'], 'Foo', ['a'], 'Bar')\r\n self.assertEqual(exp[0], obs[0])\r\n assert_almost_equal(obs[1]['x'], exp[1]['x'])\r\n\r\n # Color field by itself (useless but still allowed).\r\n exp = ([(1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.9490196078431372,\r\n 0.45098039215686275, 0.01568627450980392)], {'a':\r\n (1.0, 0.0, 0.0),\r\n 'c': (0.9490196078431372, 0.45098039215686275,\r\n 0.01568627450980392), 'b': (0.0, 0.0, 1.0)})\r\n obs = _color_field_states(self.map_f, ['1', '2', '3', '4', '5', '6'],\r\n 'Foo', ['a', 'b', 'c'], 'Foo')\r\n self.assertEqual(exp[0], obs[0])\r\n assert_almost_equal(obs[1]['a'], exp[1]['a'])\r\n assert_almost_equal(obs[1]['b'], exp[1]['b'])\r\n assert_almost_equal(obs[1]['c'], exp[1]['c'])", "def map(s,dic):\n state=s.getstate()\n if not state in dic:raise Exception(\"the current state \"+str(state)+\" is not available to map to using the dictionary \"+str(dic))\n val=dic[state]\n if callable(val):\n return val()\n states=s.getstates()\n if val in states:\n return s.setstate(val)\n raise Exception(\"I dont know how to use this \"+str(state)+\" since it maps to a type of \"+str(type(val))+\" namely \"+str(val))", "def _color_field_states(map_f, samp_ids, field, field_states, color_by_field):\n colors = []\n color_pool = [matplotlib_rgb_color(data_colors[color].toRGB())\n for color in data_color_order]\n metadata_map = MetadataMap.parseMetadataMap(map_f)\n\n for field_to_check in field, color_by_field:\n if field_to_check not in metadata_map.CategoryNames:\n raise ValueError(\"The field '%s' is not in the metadata mapping \"\n \"file's column headers.\" % field_to_check)\n\n all_field_states = metadata_map.getCategoryValues(samp_ids, field)\n all_color_by_states = metadata_map.getCategoryValues(samp_ids,\n color_by_field)\n\n if len(set(field_states) - set(all_field_states)) != 0:\n raise ValueError(\"Encountered unrecognizable field state(s) in %r \"\n \"for field '%s'.\" % (field_states, field))\n\n # Build mapping from one field to the other.\n field_mapping = defaultdict(list)\n for field_state, color_by_state in zip(all_field_states,\n all_color_by_states):\n if field_state in field_states:\n field_mapping[field_state].append(color_by_state)\n\n # For each of the specified input field states, find its corresponding\n # \"color by\" field state and give it a color if it hasn't been assigned one\n # yet. 
Make sure we have enough colors and there is a one-to-one mapping.\n color_mapping = {}\n for field_state in field_states:\n color_by_states = set(field_mapping[field_state])\n\n if len(color_by_states) != 1:\n raise ValueError(\"The field '%s' to color by does not have a \"\n \"one-to-one mapping with field '%s'. Coloring \"\n \"would be ambiguous.\" % (color_by_field, field))\n\n color_by_state = list(color_by_states)[0]\n if color_by_state not in color_mapping:\n if len(color_pool) > 0:\n color_mapping[color_by_state] = color_pool.pop(0)\n else:\n raise ValueError(\"There are not enough available QIIME colors \"\n \"to color each of the field states in field \"\n \"'%s'. Coloring would be ambiguous.\" %\n color_by_field)\n\n colors.append(color_mapping[color_by_state])\n\n return colors, color_mapping", "def render_state_dict(self, target_state, dmx_universe_target):\n if not target_state:\n return\n # Copy the alias over this bytearray\n if isinstance(target_state, str):\n target_state = {'use': target_state}\n alias_name = target_state.get('use')\n if alias_name:\n assert alias_name in self.dmx_universe_alias, \"alias '{0}' not defined\".format(alias_name)\n dmx_universe_target[:] = self.dmx_universe_alias[alias_name]\n\n # Render items\n for dmx_device_name, color_value in target_state.items():\n self.config.render_device(dmx_universe_target, dmx_device_name, color_value)\n # Mute items\n for dmx_device_name in self.mute_devices:\n self.config.render_device(dmx_universe_target, dmx_device_name, None)\n\n # Add an alias for this state if a name is provided\n if target_state.get('name'):\n self.dmx_universe_alias[target_state.get('name')] = dmx_universe_target", "def _draw_state(self, game_states, surface, viewscreen_size, space_radius):\n \n self.selection += 1\n if self.selection >= len(game_states): \n self.selection = 0\n input()\n\n print(self.selection)\n\n game_state = game_states[self.selection]\n\n colors = [(0, 0, 0), (255, 255, 255), (100, 100, 100)]\n\n spacing_x = viewscreen_size[0] / 10\n spacing_y = (viewscreen_size[1]* 0.866025403784) / 10 #hardcoded sqrt(3)/2 aka sin(pi/3)\n offset_y = viewscreen_size[1] * ((1-0.866025403784)/2)\n\n for coord in all_coords():\n x, y = coord\n\n offset_x = (5-y) * (spacing_x/2)\n\n draw_y = int(viewscreen_size[1] - (spacing_y * y)) - offset_y\n draw_x = int(spacing_x * x) + offset_x\n\n color = colors[game_state[(x, y)]]\n\n pygame.draw.circle(surface, color, (draw_x, draw_y), space_radius)", "def show_states():\n return render_template('7-states_list.html',\n storage=storage.all(\"State\").values())", "def state_chosen_do(cfg, app, win, events):", "def set_state(self, state_dict):\n self.set_script_output(state_dict.get('script_text', ''))\n for key, target_object in self._map.items():\n self.set_single_state(target_object,\n value=state_dict.get(key, None))", "def push_color(self, color):\n self[color.name] = color\n # for every added new color, set the map as colored\n self.black_and_white = False", "def create_map_coloring_csp():\n csp = CSP()\n states = [ 'WA', 'NT', 'Q', 'NSW', 'V', 'SA', 'T' ]\n edges = { 'SA': [ 'WA', 'NT', 'Q', 'NSW', 'V' ], 'NT': [ 'WA', 'Q' ], 'NSW': [ 'Q', 'V' ] }\n colors = [ 'red', 'green', 'blue' ]\n for state in states:\n csp.add_variable(state, colors)\n for state, other_states in edges.items():\n for other_state in other_states:\n csp.add_constraint_one_way(state, other_state, lambda i, j: i != j)\n csp.add_constraint_one_way(other_state, state, lambda i, j: i != j)\n return csp", "def 
state_choose_do(cfg, app, win, events):", "def change(widget, colors): \n\t\n new_val = '#'\n for name in ('red', 'green', 'blue'):\n new_val += colors[name].get()\n widget['bg'] = new_val", "def get_state(self):\n return self.state_map", "def storeState(self):\n\n self.action_history[self.trial] = self.action\n self.ball_history[self.trial] = self.ballcolor", "def state_transition(self, curr_state, curr_action):\n curr_state[curr_action[0]] = curr_action[1]\n return curr_state", "def preprocess_state(states):\n states_pp = np.copy(states)\n \n # Paint black over the sum of rewards\n states_pp[:, 85:, :15] = [0.0, 0.0, 0.0]\n\n # Replace the colors defined bellow\n def replace_color(old_color, new_color):\n mask = np.all(states_pp == old_color, axis=3)\n states_pp[mask] = new_color\n\n # Black bar\n replace_color([000., 000., 000.], [120.0, 120.0, 120.0])\n\n # Road\n #new_road_color = [255.0, 255.0, 255.0]\n new_road_color = [102.0, 102.0, 102.0]\n replace_color([102., 102., 102.], new_road_color)\n replace_color([105., 105., 105.], new_road_color)\n replace_color([107., 107., 107.], new_road_color)\n # Curbs\n replace_color([255., 000., 000.], new_road_color)\n replace_color([255., 255., 255.], new_road_color)\n # Grass\n #new_grass_color = [0.0, 0.0, 0.0]\n new_grass_color = [102., 229., 102.]\n replace_color([102., 229., 102.], new_grass_color)\n replace_color([102., 204., 102.], new_grass_color)\n\n # Float RGB represenattion\n states_pp /= 255.\n\n # Converting to gray scale\n states_pp = rgb2gray(states_pp)\n\n return states_pp", "def getState():\n engine = create_engine(\n 'mysql+mysqldb://{}:{}@localhost:3306/{}'.format(\n sys.argv[1],\n sys.argv[2],\n sys.argv[3]),\n pool_pre_ping=True)\n Base.metadata.create_all(engine)\n\n Session = sessionmaker(bind=engine)\n session = Session()\n\n new_states = State(name='Louisiana')\n session.add(new_states)\n\n for state in session.query(State).order_by(State.id).all():\n if state.name == \"Louisiana\":\n print(\"{}\".format(state.id))\n\n session.commit()\n session.close()", "def ControlLights(state):\n for led in (RED,YELLOW,GREEN):\n GPIO.output(LED[led],state[led])\n time.sleep(FLASH_TIME)", "def switch_colors(mutated_genome):\n index1 = random.randint(0,max(0,len(mutated_genome)-1))\n index2 = random.randint(0,max(0,len(mutated_genome)-1))\n temp = mutated_genome[index1][0]\n mutated_genome[index1][0] = mutated_genome[index2][0]\n mutated_genome[index2][0] = temp", "def mutate_color(mutated_genome):\n seed = random.randint(0,2)\n if seed == 0:\n new_color(mutated_genome)\n elif seed == 1:\n change_color(mutated_genome)\n else: #seed == 2:\n switch_colors(mutated_genome)\n #else: seed == 3: # depricated\n # shuffle_colors(mutated_genome)", "def state_preview_do(cfg, app, win, events):", "def toGameState(self):\r\n level = self.mapselector.selectedmap\r\n const.toGameState(self,level)", "def draw_game_state(screen, gs):\n draw_board(screen)\n draw_pieces(screen, gs.board)", "def state(self, newState):\n for i in range(len(newState)):\n self._state[i] = newState[i]", "def states():\n all_states = storage.all(State)\n return render_template('9-states.html', States=all_states, ID=None,\n Stateobj=None)", "def map_state_info(port, nmap_store):\n state = port.find(\"state\")\n nmap_store[\"state\"] = state.get(\"state\")\n nmap_store[\"reason\"] = state.get(\"reason\")\n nmap_store[\"reason_ttl\"] = state.get(\"reason_ttl\")", "def set_state(self, state):\n #print(\"ComponentBase.set_state\")\n for k,v in state.items():\n #print(\" Set 
{:14s} to {:s}\".format(k,str(v)))\n if k == \"connectors\":\n for con_state in v:\n self.add_connector() \n self.connectors[-1].set_state(con_state)\n else:\n setattr(self, k, v)", "def change_ops_state(self, state):\n for op_button in self.operators.values():\n op_button['state'] = state", "def changeState(self, node, name, state):", "def select_action(self, state):", "def on_stateico_clicked(self, *a):\n\t\tself.window1.set_property('visible', True)\n\t\tself.stateico.set_visible(False)\n\t\tself.window1.present()", "def toState(attrs=ALL):", "def make_state_dict(self):\r\n state_dict = {c.TRANSITION_IN: self.transition_in,\r\n c.TRANSITION_OUT: self.transition_out,\r\n c.NORMAL: self.normal_update}\r\n\r\n return state_dict", "def states_list():\n return render_template('7-states_list.html',\n states=storage.all(State).values())", "def change_to_tasks(self):\n self.ids[\"shp_btn\"].color = 1, 1, 1, 0.5", "def __draw(self, state:dict):\n _, ax = plt.subplots()\n ax.set_axis_off()\n tb = Table(ax, bbox=[0,0,1,1])\n\n width = height = 1.0 /9 \n\n\n for key in self.state.keys():\n # Add cells\n i,j = self.__display_table_map[key]\n tb.add_cell(i, j, width, height, text='{}'.format(state[key]), \n loc='center',facecolor= self.__color_map[key])\n\n ax.add_table(tb)\n plt.show()", "def find_new_states(tree):\n\trow = tree.get_all_states()\n\t\n\tfor state in row:\n\t\tif state not in all_states_explored:\n\t\t\t\tall_states_explored.append(state)", "def state_changed(self, oldstate, newstate, event, *args, **kwargs):", "def state_changed(self, oldstate, newstate, event, *args, **kwargs):", "def get_states(self):\n raise NotImplementedError()", "def setstate(self,name,state):\n if (name not in KFNode.names):\n print ' state name ',name,' not in KNode!'\n self.states[name]=state.copy()\n self.status = name\n return", "def draw_states(self):\n drawing = self.tree.draw(\n width=400,\n height=300,\n layout='d',\n node_labels=(\"idx\", 1, 1),\n node_sizes=15,\n node_style={\"stroke\": \"black\", \"stroke-width\": 2},\n node_colors=[\n toytree.colors[int(round(i[1]))] if isinstance(i, (list, np.ndarray))\n else \"white\" \n for i in self.tree.get_node_values(\"likelihood\", True, True)\n ],\n )\n return drawing", "def changeTool(event):\n global active_colour, active_note, size\n\n if ((event.x > red_x1) & (event.x < red_x2) & (event.y > red_y1) & (event.y < red_y2)):\n active_colour = \"red\"\n active_note = 'A'\n elif ((event.x > orange_x1) & (event.x < orange_x2) & (event.y > orange_y1) & (event.y < orange_y2)):\n active_colour = \"orange\" \n active_note = 'B' \n elif ((event.x > yellow_x1) & (event.x < yellow_x2) & (event.y > yellow_y1) & (event.y < yellow_y2)):\n active_colour = \"yellow\"\n active_note = 'C'\n elif ((event.x > green_x1) & (event.x < green_x2) & (event.y > green_y1) & (event.y < green_y2)):\n active_colour = \"green\"\n active_note = 'D'\n elif ((event.x > blue_x1) & (event.x < blue_x2) & (event.y > blue_y1) & (event.y < blue_y2)):\n active_colour = \"blue\"\n active_note = 'E'\n elif ((event.x > indigo_x1) & (event.x < indigo_x2) & (event.y > indigo_y1) & (event.y < indigo_y2)):\n active_colour = \"indigo\"\n active_note = 'F'\n elif ((event.x > violet_x1) & (event.x < violet_x2) & (event.y > violet_y1) & (event.y < violet_y2)):\n active_colour = \"violet\"\n active_note = 'G'\n elif ((event.x > thin_x1) & (event.x < thin_x2) & (event.y > thin_y1 - 15) & (event.y < thin_y2 + 15)):\n size = 2\n elif ((event.x > medium_x1) & (event.x < medium_x2) & (event.y > medium_y1 - 10) & 
(event.y < medium_y2 + 10)):\n size = 5\n elif ((event.x > thick_x1) & (event.x < thick_x2) & (event.y > thick_y1 - 5) & (event.y < thick_y2 + 5)):\n size = 10\n\n t.coords(current_option, 133, 40 - size, 168, 41 + size)\n t.itemconfig(current_option , fill = active_colour, outline = active_colour)\n current_note.config(text=active_note)", "def _see_state(self, new_state: State) -> None:\n entity_id = new_state.entity_id\n domain = new_state.domain\n state = new_state.state\n registry: GroupIntegrationRegistry = self.hass.data[REG_KEY]\n self._assumed[entity_id] = bool(new_state.attributes.get(ATTR_ASSUMED_STATE))\n\n if domain not in registry.on_states_by_domain:\n # Handle the group of a group case\n if state in registry.on_off_mapping:\n self._on_states.add(state)\n elif state in registry.off_on_mapping:\n self._on_states.add(registry.off_on_mapping[state])\n self._on_off[entity_id] = state in registry.on_off_mapping\n else:\n entity_on_state = registry.on_states_by_domain[domain]\n if domain in registry.on_states_by_domain:\n self._on_states.update(entity_on_state)\n self._on_off[entity_id] = state in entity_on_state", "def add_to_state_dict(all_states_dict: dict, state: str) -> None:\n if state not in all_states_dict:\n url = 'https://www.vaccinespotter.org/api/v0/states/' +\\\n f'{state}.json'\n all_states_dict[state] = get_json_dict(url)", "def _state_actions(self) -> dict:\n return {}", "def exposed_get_state(self):\n return json.dumps(dict(state=random.choice(self.states)), indent=2)", "def get_segment_colour_map(self, features):\n\n hashList = {'1' : 'Grey',\n '2':'Red',\n '3':'Green',\n '4':'greenyellow',\n '5':'Pink',\n '6':'Orange',\n '7':'goldenrod',\n '8':'indianred',\n '9':'peachpuff',\n '10':'deepskyblue',\n '11':'firebrick',\n '12':'orchid',\n '13': 'moccasin',\n '14':'slateblue',\n '15':'turquoise',\n '16':'tomato',\n '17':'darkmagenta',\n '18':'olivedrab'}\n return hashList", "def state_encod_arch2(self, state, action):", "def __add_current_state_to_state_dict(self):\n board_fen = self.board_fen()\n if board_fen not in self.states:\n self.states[self.board_fen()] = GameState(self.board_array())", "def States_func():\n engine = create_engine('mysql+mysqldb://{}:{}@localhost/{}'.format(\n sys.argv[1], sys.argv[2], sys.argv[3]), pool_pre_ping=True)\n Base.metadata.create_all(engine)\n\n Session = sessionmaker(bind=engine)\n session = Session()\n x = session.query(State).get(2)\n x.name = 'New Mexico'\n session.commit()\n session.close()", "def get_state_actions_mapping(self):\n return None", "def update_to_state(self, game_state):\n pass", "def plot_choropleth(_type, state_id=6):\n if _type == 'states':\n return states_choropleth.plot_map(DATA)\n elif _type == 'state':\n return state_choropleth.plot_map(DATA, state_id)", "def states_list():\n state_dict = storage.all('State').values()\n return render_template('7-states_list.html', state_dict=state_dict)", "def _get_state(self):", "def fromState(state):", "def color(self, state: State) -> str:\n if state == State.HEALTHY:\n return \"#00ff00\" # green\n elif state == State.INFECTED:\n return \"#ff0000\" # red\n elif state == State.RECOVERED:\n return \"#0000ff\" # blue\n elif state == State.DEAD:\n return \"#000000\" # black", "def on_state_change(self, new_state):\n self.state = new_state", "def changeColor(self):\n self.layer.new_colormap()", "def state_processing_do(cfg, app, win, events):", "def _create_color_map(self):\n unique_labels = np.unique(self.out_labels)\n color_map = {}\n for unique_label in unique_labels:\n 
color_map[unique_label] = self._random_color()\n\n return color_map", "def state_dict(self, *args, **kwargs):\n return self.module.state_dict(*args, **kwargs)", "def print_state(id=None):\n data = storage.all(\"State\")\n return render_template('9-states.html', states=data, id=id)", "def updateShaderState(self):\n\n dopts = self.opts\n copts = self.canvas.opts\n lightPos = None\n flatColour = dopts.getConstantColour()\n useNegCmap = (not dopts.useLut) and dopts.useNegativeCmap\n\n if self.threedee:\n lightPos = np.array(copts.lightPos)\n lightPos *= (copts.zoom / 100.0)\n else:\n lightPos = None\n\n if dopts.useLut:\n delta = 1.0 / (dopts.lut.max() + 1)\n cmapXform = transform.scaleOffsetXform(delta, 0.5 * delta)\n else:\n cmapXform = self.cmapTexture.getCoordinateTransform()\n\n fslgl.glmesh_funcs.updateShaderState(\n self,\n useNegCmap=useNegCmap,\n cmapXform=cmapXform,\n flatColour=flatColour,\n lightPos=lightPos)", "def states(self):\n from geoid.core import names\n from geoid.censusnames import geo_names, stusab\n\n states = {}\n\n for state_no, stusab in stusab.items():\n states[stusab] = {\n 'name': geo_names[(state_no,0)],\n 'stusab': stusab,\n 'number' : state_no\n }\n\n states['US'] = {\n 'name': 'United States',\n 'stusab': 'US',\n 'number' : 0\n }\n\n return states", "def get_states(self):\n states = {}\n if hasattr(self, 'random_mask_state'):\n states['random_mask_state'] = self.random_mask_state.get_state()\n if hasattr(self, 'deformrandomstate'):\n states['deformrandomstate'] = self.deformrandomstate.get_state()\n states['randomstate'] = self.randomstate.get_state()\n return states", "def load_custom_states(self, states, *args, **kwargs):\n pass", "def view_state():\n global adresses\n pos = int(request.args.get(\"position\", default=-1))\n with open('data/states.json', 'r') as f:\n states = json.loads(f.read())\n if not pos == -1 and pos < len(states):\n print \"Pos valid\"\n for adress in states[pos].keys():\n if not adress == \"name\":\n print str(adress) + \":\" + str(states[pos][adress])\n adresses[int(adress)] = states[pos][adress]\n else:\n print \"Property Name\"\n dmxsender.send(adresses)\n return json_back()\n return \"INVALID KEY\"", "def _localSetState(self,pdict):\n self.low = pdict.pop('low' )\n self.high = pdict.pop('high' )\n self.alpha = pdict.pop('alpha')\n self.beta = pdict.pop('beta' )", "def draw_state_frequencies(state_frequencies):\n for name, shapes in us_states.items():\n frequency = state_frequencies.get(name, None)\n draw_state(shapes, frequency)", "def states_html(id=None):\n state_list = [value for key, value in storage.all(\"State\").items()]\n if id:\n state = None\n for x in state_list:\n if x.id == id:\n state = x\n return(render_template('9-states.html', state=state))\n return(render_template('7-states_list.html', states_list=state_list))", "def set_state(self, power, color, brightness, duration, infrared):\n payload = {\"duration\": duration}\n if power is not None:\n payload['power'] = power\n if color is not None:\n payload['color'] = color\n if brightness is not None:\n payload['brightness'] = brightness\n if infrared is not None:\n payload['duration'] = duration\n response = requests.put(self.__api_url('state'.format(self.name)), data=payload, headers=self.headers)\n time.sleep(duration)\n return response.text" ]
[ "0.6322096", "0.6314533", "0.62529945", "0.61717737", "0.6053243", "0.5946069", "0.583432", "0.582224", "0.58115244", "0.58107173", "0.57522345", "0.5743763", "0.5738496", "0.57296467", "0.5709835", "0.56997746", "0.56476825", "0.56447077", "0.55822253", "0.5581793", "0.5577193", "0.5567746", "0.55551976", "0.5544646", "0.55320996", "0.55167466", "0.5513263", "0.5503318", "0.54947954", "0.5485012", "0.54603285", "0.54592144", "0.5447204", "0.5404664", "0.54005903", "0.5398653", "0.5387094", "0.53844744", "0.5364946", "0.5361516", "0.5360499", "0.5355345", "0.5340684", "0.53336763", "0.5329543", "0.53258693", "0.5316442", "0.53005856", "0.5300371", "0.5296651", "0.52932924", "0.5286338", "0.5279677", "0.52759874", "0.5271394", "0.5259037", "0.52558124", "0.5214514", "0.5202047", "0.5196467", "0.5192405", "0.51918435", "0.51902694", "0.5190243", "0.51899725", "0.5180546", "0.5180546", "0.5173612", "0.51658165", "0.5160895", "0.5159883", "0.5150046", "0.51488864", "0.5137249", "0.5136633", "0.51342726", "0.51333076", "0.5132576", "0.51322734", "0.5124135", "0.5122585", "0.5122049", "0.5116764", "0.51143485", "0.5101528", "0.5101117", "0.50998056", "0.50981027", "0.5097182", "0.50944066", "0.50942576", "0.5092427", "0.50906265", "0.50851524", "0.5083778", "0.5082637", "0.5081533", "0.5080882", "0.5080489", "0.5075152", "0.50717884" ]
0.0
-1
Takes a full media url from Bandwidth and extracts the media id
def get_media_id(media_url): split_url = media_url.split("/") #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/file.png if split_url[-2] == "media": return split_url[-1] #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/abc/0/file.png else: #This is required for now due to the SDK parsing out the `/`s return "%2F".join(split_url[-3:])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def media_id(self):\n try:\n return Html.toId(self.content)\n except:\n Mp3Error(1)", "def _id_from_url(url):\n url = re.sub(r'\\?.*', '', url)\n video_id = url.split('/')[-2]\n return video_id", "def media_content_id(self):\n return self._media_uri_final", "def media_content_id(self) -> str | None:\n # The lovelace app loops media to prevent timing out, don't show that\n if self.app_id == CAST_APP_ID_HOMEASSISTANT_LOVELACE:\n return None\n media_status = self._media_status()[0]\n return media_status.content_id if media_status else None", "def get_single_media(media_id):\n return query_single(media_id, Media, media_schema)", "def get_media_id_from_post(media_obj):\n if media_obj:\n media_id = media_obj.get('id')\n return media_id\n return", "def unique_id(self):\n if self._uuid != '':\n return \"linkplay_media_\" + self._uuid", "def get_video_id(url):\n\n if not url:\n return \"\"\n\n # If URL is embedded\n if \"embed\" in url:\n return url.split(\"/\")[-1]\n\n parse_result = urlparse(url)\n query = parse_qs(parse_result.query)\n return query[\"v\"][0]", "def media_content_id(self) -> str | None:\n if self._device.movie.handle:\n return self._device.movie.handle\n return None", "def get_media_filename(media_url):\n return media_url.split(\"/\")[-1]", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n #logger.debug('DAILYMOTION VIDEO FOUND %s' % url)\n \n p = urlparse.urlparse(self.original_url)\n path = p.path\n if path.endswith('/'):\n path = path[:-1]\n path_list = path[1:].split('/')\n \n if len(path_list) == 3 and (p.path.startswith('/embed/video/') or p.path.startswith('/swf/video/')):\n # http://www.dailymotion.com/embed/video/xmp7zw\n return re.sub('_.+', '', path_list[2])\n elif len(path_list) == 2 and (p.path.startswith('/video/') or p.path.startswith('/swf/')):\n # http://www.dailymotion.com/video/xmp7zw_whatever\n # http://www.dailymotion.com/swf/xmp7zw\n return re.sub('_.+', '', path_list[1])\n \n return ''", "def fix_moviedb(url):\n assert url\n\n # get id from the title\n # e.g.: https://www.themoviedb.org/movie/482936-la-quietud\n path = url.split('/')[-1]\n movie_id = int(path.split('-')[0])\n return url, movie_id", "def media_content_id(self):\n return self._table.active_track.id if self._table.active_track else None", "def extract_item_id(url):\n m = re.search('/([0-9]+)\\.htm', url)\n if m is not None:\n return m.group(1)\n else:\n return None", "def media_entry_id(self):\n return self.getattr('media_entry_id')", "def parse_link_to_id(self, playlist_link: str) -> str:\n split_1 = playlist_link.split('/')[4]\n split_2 = split_1.split('?')\n return split_2[0]", "def extract_media_v1(data):\n user = data[\"user\"]\n location = data.get(\"location\")\n if location:\n location = {\"pk\": int(location.get(\"pk\")), \"name\": location.get(\"name\")}\n video_url = \"\"\n if \"video_versions\" in data:\n # Select Best Quality by Resolutiuon\n video_url = sorted(\n data[\"video_versions\"], key=lambda o: o[\"height\"] * o[\"width\"]\n ).pop()[\"url\"]\n product_type = data.get(\"product_type\", \"\")\n if data[\"media_type\"] == 2 and not product_type:\n product_type = \"feed\"\n thumbnail_url = ''\n if 'image_versions2' in data:\n thumbnail_url = sorted(\n data[\"image_versions2\"][\"candidates\"],\n key=lambda o: o[\"height\"] * o[\"width\"],\n ).pop()[\"url\"]\n return {\n \"pk\": int(data[\"pk\"]),\n \"taken_at\": int(data[\"taken_at\"]),\n \"id\": data[\"id\"],\n \"media_type\": data[\"media_type\"],\n 
\"product_type\": product_type,\n \"code\": data[\"code\"],\n \"thumbnail_url\": thumbnail_url,\n \"location\": location,\n \"user\": extract_user_short(user),\n \"comment_count\": int(data.get(\"comment_count\") or 0),\n \"like_count\": int(data.get(\"like_count\") or 0), # the media just published has no like_count\n \"caption_text\": json_value(data, \"caption\", \"text\", default=\"\"),\n \"usertags\": [\n extract_usertag(usertag)\n for usertag in data.get(\"usertags\", {}).get(\"in\", [])\n ],\n \"video_url\": video_url,\n \"view_count\": int(data.get('view_count') or 0),\n \"video_duration\": data.get('video_duration'),\n \"title\": data.get(\"title\") or None,\n \"resources\": [\n extract_resource_v1(edge)\n for edge in data.get('carousel_media', [])\n ]\n }", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n params = cgi.parse_qs(p.query)\n \n if p.path.endswith('/video'):\n # url type http://www.livestream.com/xprize/video?clipId=pla_1a25a2ba-9ca4-4c3b-b1b1-ebd7d79ef6d2\n if 'clipId' in params:\n return params['clipId'][0]\n if p.path.startswith('/embed'):\n # url type http://cdn.livestream.com/embed/xprize?layout=4&amp;clip=pla_1a25a2ba-9ca4-4c3b-b1b1-ebd7d79ef6d2&amp;width=560&amp;autoplay=false\n if 'clip' in params:\n return params['clip'][0]\n \n return ''", "def get_id(self, url):\n return url.split('/')[-1]", "def get_id(share_url):\n url = get_redirect_url(share_url)\n id_num = re.findall('(\\d*)\\?', url)[0]\n if id_num.isnumeric():\n return id_num\n else:\n print(\"Something wrong with id number\")", "def parse_image_id(image_ref):\n temp = image_ref.rsplit('/')\n #Return the last item, which is the image id\n return temp[len(temp) - 1]", "def get_lis_id(chamber, url):\n match = re.search(lis_id_patterns[chamber], url)\n if match.groups:\n return match.group(1)", "def get_id_regular_link(link = None):\n #Legacy compatibility\n choppedLink = legacy_check(link)\n # dont bother if we are none.\n if link == None:\n return link\n\n vid_url_params = choppedLink[3].split(\"&\")\n # Search the id in the list of elements of the url\n vid = search_video_id(vid_url_params)\n\n # And dont forget the links with hashtags #\n vid = vid.split(\"#\")[0]\n\n return vid # change this var names TODO", "def get_playlist_id_from_url(url):\n return match1(url, r'youku\\.com/playlist_show/id_([a-zA-Z0-9=]+)')", "def generate_media_source_id(domain: str, identifier: str) -> str:\n uri = f\"{URI_SCHEME}{domain or ''}\"\n if identifier:\n uri += f\"/{identifier}\"\n return uri", "def find_player_id(url):\r\n response = requests.get(url)\r\n result = PLAYER_ID_PATTERN.search(response.text)\r\n return result.group(1)", "def get_id_attribution(link = None):\n log.debug(\"attribution link: \" + repr(link))\n choppedLink = legacy_check(link)\n id = None\n try:\n # First try to get the relevant part, that is encoded\n step1 = choppedLink[3][choppedLink[3].find(\"watch\"):]\n # Then stplit the other encoded params\n step2 = step1[12:].split(\"%\")\n # and get the good part\n step3 = step2[0]\n id = step3 # choppedLink[3][choppedLink[3].find(\"watch\"):][12:].split(\"%\")[0]\n except Exception as e:\n raise e # dont care 'bout issues here. 
all will be NotImplementedError \n\n # If we havent found a match, then this is not implemented.\n if id == \"\":\n raise Exception(\"no recognised kind of link\")\n\n return id", "def parse_url_discl_id(cls, url):\n url_query = urlparse(url)[4]\n try:\n return parse_qs(url_query).get('Discl_id', None)[-1]\n except IndexError as e:\n print(e)\n return \"\"", "def find_id(href):\n ID = idRE.search(href)\n if ID:\n return ID.group(1)", "def get_id(html):\n\ttry:\n\t\tsong_id = re.findall('soundcloud://sounds:(.*?)\"', html)[0]\n\t\treturn song_id\n\texcept IndexError:\n\t\tprint(\"\\033[91m✘ Could not find song ID\\033[0m\")\n\t\tsys.exit()", "def _getURL(self, params):\n qs = Media.objects.filter(pk=params['id'], deleted=False)\n if not qs.exists():\n raise Http404\n response_data = list(qs.values(*MEDIA_PROPERTIES))\n # Use 24-hour URLS\n _presign(24*3600, response_data)\n\n element = params['element']\n if element == 'auto':\n if qs[0].meta.dtype == 'video':\n element = 'streaming'\n elif qs[0].meta.dtype == 'image':\n element = 'image'\n elif qs[0].meta.dtype == 'multi':\n return None\n if element == 'audio':\n return response_data[0].get('media_files',{}).get('audio',[])[0]['path']\n elif element == 'thumbnail':\n search_in = response_data[0].get('media_files',{}).get('thumbnail',[])\n elif element == 'thumbnail_gif':\n search_in = response_data[0].get('media_files',{}).get('thumbnail_gif',[])\n elif element == 'image':\n search_in = response_data[0].get('media_files',{}).get('image',[])\n elif element == 'streaming':\n search_in = response_data[0].get('media_files',{}).get('streaming',[])\n elif element == 'archival':\n search_in = response_data[0].get('media_files',{}).get('archival',[])\n elif element == 'attachment':\n search_in = response_data[0].get('media_files',{}).get('attachment',[])\n\n if not search_in:\n return None\n quality = params['quality']\n max_delta = sys.maxsize\n quality_idx = 0\n for idx, info in enumerate(search_in):\n delta = abs(quality-info['resolution'][0])\n if delta < max_delta:\n quality_idx = idx\n max_delta = delta\n return search_in[quality_idx]['path']", "def get_video_id(self):\n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n if p.netloc.endswith('vimeo.com') and 'hubnut/album/' in p.path:\n return ''\n \n if p.netloc.endswith('vimeo.com') and p.path.split('/')[-1:][0].isdigit():\n # Url of type http://vimeo.com/21347521\n # mobile type http://vimeo.com/m/21347521\n return p.path.split('/')[-1:][0]\n elif p.netloc.endswith('vimeo.com') and p.path == '/moogaloop.swf' and 'clip_id' in p.query:\n # Old embed code style url\n #params = dict([part.split('=') for part in p.query.split('&')])\n params = cgi.parse_qs(p.query)\n if 'clip_id' in params:\n return params['clip_id'][0]\n elif p.netloc == 'player.vimeo.com' and p.path.startswith('/video/'):\n # Url of type http://player.vimeo.com/video/21347521?title=0&amp;byline=0&amp;portrait=0\n path = p.path.split('/')\n return path[-1]\n \n return ''", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n \n if p.path.startswith('/v/') or p.path.startswith('/broadcast/'):\n path = p.path.split('/')\n if len(path) == 3:\n return p.path.split('/')[-1].replace('.live', '')\n \n return ''", "def get_video_id_from_link(link):\n query_string = urlparse.urlparse(link).query\n qs_params = urlparse.parse_qs(query_string)\n 
return qs_params['v'][0]", "def get_ch_id(share_url):\n url = get_redirect_url(share_url)\n id_num = re.findall('/(\\d*)/', url)[1]\n if id_num.isnumeric():\n return id_num\n else:\n print(\"Something wrong with id number\")", "def media_content_id(self):\n if 'current_title' in self._status:\n return self._status['current_title']", "def get_stream_id(self) -> str:", "def job_id(driver):\n elem = driver.find_element_by_xpath(\"//meta[@property='og:url']\")\n url = elem.get_attribute(\"content\")\n return url[url.find('/', 34) + 1:]", "def get_picture_id(path):\n\t\tif path is None:\n\t\t\treturn\n\t\tcon = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')\n\t\twith con:\n\t\t\tquery = \"SELECT id from fileuploader_picture WHERE file=%s\" % (path)\n\t\t\tcur = con.cursor()\n\t\t\tcur.execute(query)\n\t\t\tdata = cur.fetchall()\n\t\t\tprint \"len(data)\"\n\t\t\tprint data\n\t\t\tif len(data) > 0:\n\t\t\t\treturn data[0]\n\t\t\treturn None", "def alternative_media_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"alternative_media_id\")", "def parse_media_info(filename):\n print_info('Extracting hash from {0}'.format(filename))\n media_info = MediaInfo()\n for media_info_type in MEDIA_INFO_REGEXS:\n #print_info('Parsing for {0}'.format(media_info_type))\n for regex in MEDIA_INFO_REGEXS[media_info_type]:\n m = re.search(regex, filename)\n\n if m is None:\n continue\n\n extracted_data = m.group('MediaInfo').upper()\n print_info('Extracted {0}: {1}'.format(media_info_type, extracted_data))\n\n # Before we set, do any needed cleanup\n if media_info_type == 'resolution':\n if not extracted_data.endswith('p'):\n resolution = int(extracted_data)\n if resolution == 1280:\n extracted_data = '720'\n extracted_data = extracted_data + 'p'\n media_info.resolution = extracted_data\n if media_info_type == 'source':\n media_info.source = extracted_data.replace('-', '')\n elif media_info_type == 'audio_source':\n media_info.audio_source = extracted_data\n elif media_info_type == 'encoding':\n media_info.encoding = re.sub('X', 'H', extracted_data)\n elif media_info_type == 'color_bits':\n media_info.color_bits = extracted_data\n break\n \n \n return media_info", "def extract_media_gql(data):\n user = data[\"owner\"]\n media_id = \"%s_%s\" % (data[\"id\"], user[\"id\"])\n if \"full_name\" in user:\n # for hashtag user contain {'id': '2041641294'}\n user = extract_user_short(user)\n else:\n user[\"pk\"] = user.pop(\"id\")\n location = data.get(\"location\")\n if location:\n location = {\"pk\": int(location.get(\"id\")), \"name\": location.get(\"name\")}\n media_type = {\"GraphImage\": 1, \"GraphVideo\": 2, \"GraphSidecar\": 8}[data[\"__typename\"]]\n product_type = data.get(\"product_type\", \"\")\n video_url = \"\"\n if media_type == 2:\n video_url = data[\"video_url\"]\n if not product_type:\n product_type = \"feed\"\n shortcode = ''\n if 'shortcode' in data:\n shortcode = data[\"shortcode\"]\n return {\n \"pk\": int(data[\"id\"]),\n \"taken_at\": int(data[\"taken_at_timestamp\"]),\n \"id\": media_id,\n \"media_type\": media_type,\n \"product_type\": product_type,\n \"code\": shortcode,\n \"thumbnail_url\": sorted(\n data.get(\"display_resources\", data.get('thumbnail_resources')), # display_resources - user feed, thumbnail_resources - hashtag feed\n key=lambda o: o[\"config_width\"] * o[\"config_height\"],\n ).pop()[\"src\"],\n \"location\": location,\n \"user\": user,\n \"comment_count\": json_value(data, \"edge_media_to_comment\", \"count\"),\n \"like_count\": 
json_value(data, \"edge_media_preview_like\", \"count\"),\n \"caption_text\": json_value(\n data, \"edge_media_to_caption\", \"edges\", 0, \"node\", \"text\", default=\"\"\n ),\n \"usertags\": [\n extract_usertag(usertag['node'])\n for usertag in data.get(\"edge_media_to_tagged_user\", {}).get(\"edges\", [])\n ],\n \"video_url\": video_url,\n \"view_count\": int(data.get('video_view_count') or 0),\n \"video_duration\": data.get('video_duration'),\n \"title\": data.get(\"title\") or None,\n \"resources\": [\n extract_resource_gql(edge['node'])\n for edge in data.get('edge_sidecar_to_children', {}).get('edges', [])\n ]\n }", "def video_id_from_url(url):\n\n parsed_url = urlparse(url)\n url_params = dict(parse_qsl(parsed_url.query))\n return url_params.get(\"v\", parsed_url.path.split(\"/\")[-1])", "def get_media_source_id(self, source_name):\n\t\tvalidation.required(source_name, 'source_name')\n\n\t\treturn self.media_sources.get(source_name, 1)", "def search_video_id(broken_link):\n for param in broken_link:\n vid = regex_video_id(param)\n if vid:\n return vid", "def get_video_id(self):\n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n path = p.path\n if path.endswith('/'):\n path = path[:-1]\n path_list = path[1:].split('/')\n \n if path_list[0] == 'v':\n # https://vine.co/v/bjHh0zHdgZT\n return path_list[1]\n \n return ''", "def prepare_media(self, object):\n if object.media is not None:\n #return object.media.media_file.name\n return '/api/v1/media/{0}/'.format(object.media.id)\n else:\n return ''", "def _get_id(mf, url=None):\n\n\tprops = mf['properties']\n\n\tif 'uid' in props:\n\t\treturn props['uid'][0]\n\telif 'url' in props:\n\t\treturn props['url'][0]\n\telse:\n\t\treturn None", "def media_content_id(self):\n return int(self._gallery_status[\"current_item\"])", "def get_id_from_url(url):\n doc_id_regex = r'.*docsend.com/view/(?P<doc_id>.*)'\n search = re.search(doc_id_regex, url)\n if search:\n doc_id = search.group('doc_id')\n return doc_id", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n if self.res.get('slideshow_id'):\n return self.res.get('slideshow_id')\n \n p = urlparse.urlparse(self.original_url)\n path = p.path\n if path.endswith('/'):\n path = path[:-1]\n path_list = path[1:].split('/')\n \n if len(path_list) == 3 and (p.path.startswith('/slideshow/embed_code')):\n # http://www.slideshare.net/slideshow/embed_code/1293644\n return path_list[2]\n elif len(path_list) == 2 and p.path.startswith('/swf'):\n # return -1 when url is like : http://static.slideshare.net/swf/ssplayer2.swf?doc=working-dogs-1201800078341935-2\n # FixMe :slideshare oembed api doesnt support this kind of url\n return -1\n return ''", "def is_media_source_id(media_content_id: str) -> bool:\n return URI_SCHEME_REGEX.match(media_content_id) is not None", "def grab_playlist():\n sp = credentials()\n playlists = sp.current_user_playlists()\n for playlist in playlists['items']:\n if playlist['name'] == 'Billboard Hot 100':\n playlist_id = playlist['uri']\n return playlist_id", "def id_from_url(url):\n return url.split('-')[-1].split('.html')[0]", "def extract_id(url):\n trail_id = url.replace('https://www.trailforks.com/trails/','').replace('/','')\n return trail_id", "def _locate_media(self, media_id):\n\t\ttry:\n\t\t\tmedia_id = validation.media_id(media_id)\n\t\texcept errors.ValidationError, ex:\n\t\t\treturn 
utils.return_deferred_error(ex.value)\n\n\t\t@stack\n\t\tdef listify(rows):\n\t\t\tif not rows:\n\t\t\t\traise errors.NotFound, \"unable to locate media_id %s\" % media_id\n\t\t\thosts = []\n\t\t\tfor r in rows:\n\t\t\t\thosts.append(r['hostname'])\n\t\t\treturn hosts\n\t\td = self.app.db.query(\"\"\"\n\t\t\tSELECT\n\t\t\t\thostname\n\t\t\tFROM\n\t\t\t\tstorage_assignments\n\t\t\tWHERE\n\t\t\t\tmedia_id = %s\n\t\t\t\"\"\", (media_id,))\n\t\td.addCallback(listify)\t\n\t\td.addCallback(lambda _: (0, _))\n\t\td.addErrback(lambda _: (-1, _.getErrorMessage()))\n\t\treturn d", "def get_listing_id(url):\n match = re.search(r\"\\/([\\dA-Za-z]*)_zpid\", url)\n if match:\n return match.group(1)\n else:\n return \"\".join(random.choice(ascii_letters) for _ in range(10))", "def movie_identifier(self):\n return 'bluray_id'", "def id(artist,track):\n track_id = sp.search(q='artist:' + artist + ' track:' + track, type='track', limit = 1)\n if len(track_id['tracks']['items'])>0:\n s_id = track_id['tracks']['items'][0]['id']\n else:\n s_id = '0'\n return s_id", "def getMedia(media_type, media_id):\n\n mediaURL = BASE_URL + media_type + \"/\" + str(media_id) + API_KEY\n videoURL = BASE_URL + media_type + \"/\" + str(media_id) + '/videos' + API_KEY\n\n # get the data from the API\n headers = {'Accept': 'application/json'}\n media_request = requests.get(mediaURL, headers=headers)\n video_request = requests.get(videoURL, headers=headers)\n\n # parse to json array\n media_response = media_request.json()\n video_response = video_request.json()\n\n # pull out desired attributes from json data\n data = {\n 'poster': 'http://image.tmdb.org/t/p/w500' + media_response[\"poster_path\"],\n 'title': media_response[\"title\"],\n 'storyline': media_response[\"overview\"],\n 'trailer': 'https://www.youtube.com/watch?v=' + video_response[\"results\"][0][\"key\"]\n }\n\n return data", "def get_music_url(self, song_id, bit_rate=320000):\n url = netease_song_download_url\n csrf = ''\n params = {'ids': [song_id], 'br': bit_rate, 'csrf_token': csrf}\n result = self.post_request(url, params)\n song_url = result['data'][0]['url']\n return song_url", "def _extract_id(self, dirty_id):\n if dirty_id[:1] == \"/\":\n return dirty_id.split(\"/\")[-1]\n else:\n return dirty_id", "def get_media_path(self, filename):\n return join(settings.CMS_PAGE_MEDIA_PATH, \"%d\" % self.id, filename)", "def get_media_json_url(self, nuxeo_id):\n # https://s3.amazonaws.com/static.ucldc.cdlib.org/media_json/002130a5-e171-461b-a41b-28ab46af9652-media.json\n url = \"https://s3.amazonaws.com/static.ucldc.cdlib.org/media_json/{}-media.json\".format(nuxeo_id)\n\n return url", "def _parse_id(line):\n ablt_pat = re.compile('(?<=2014_)[0-9]{12}(?=.jpg)')\n orig_pat = re.compile('(?<=[0-9]{16}_)[0-9]+')\n mat = ablt_pat.search(line)\n if mat is None: #original image\n mat = orig_pat.search(line)\n assert not mat is None, (\"this line does not contain a COCO image id: {}\" % line )\n return line[mat.start(): mat.end()], 'orig'\n else: #ablated image\n num = line[mat.start(): mat.end()]\n return str(int(num)), 'ablt'", "def get_song_url(self, song_id, bit_rate=320000):\n url = song_download_url\n csrf = ''\n params = {'ids': [song_id], 'br': bit_rate, 'csrf_token': csrf}\n result = self.post_request(url, params)\n song_url = result['data'][0]['url']\n return song_url", "def get_chunk_id_for_link(self, link):\n try:\n conn = psycopg2.connect(\"dbname='{0}'\".format(DATABASE))\n cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)\n cur.execute(\"SELECT 
chunk_id FROM link WHERE link = %s;\", (link,))\n result = cur.fetchall()\n cur.close()\n return result\n except Exception as e:\n print(e)", "def parse_url(url):\n url_parts = url.split('/')\n webcam_name = url_parts[-3] + 'CAM' + url_parts[-2]\n file_ext = url[-5:-1]\n last_update = 0.\n return {\n 'url': url[:-1], # Skip end of line\n 'name': webcam_name,\n 'imgpath': os.path.join(WEBCAM_DIR, webcam_name, '%d' + file_ext),\n 'last_update': last_update\n }", "def get_current_record_id(self):\n url = self.selenium.get_location()\n for part in url.split(\"/\"):\n oid_match = re.match(OID_REGEX, part)\n if oid_match is not None:\n return oid_match.group(2)\n raise AssertionError(\"Could not parse record id from url: {}\".format(url))", "def media(self, media_id):\r\n return Media(self, media_id)", "def get_pid_from_url(url):\n return re.findall(r'store.lining.com/shop/goods-(\\w+).html\\w*', url)[0]", "def get_id_from_a(a):\n if a:\n # We split from that and take the rest\n id_ = a['href'].split(\"Id=\")[1]\n\n # We split one more time in case of there is more after the id\n # We take the first part this time\n id_ = id_.split(\"&\")[0]\n\n return id_", "def extract_medialive_channel_id(ml_channel_arn):\n ml_channel_id = None\n if is_valid_medialive_channel_arn(ml_channel_arn):\n ml_channel_id = ml_channel_arn.strip().split(\":\")[-1]\n return ml_channel_id", "def torrent_id(url, debug):\n id = url[url.find('tid=')+4:]\n\n if not debug:\n return id\n\n if debug == 'Y':\n print \"ID :\", id\n return id", "def info(self, media_id = \"\", shortcode = \"\"):\n\n if media_id:\n url = \"https://api.instagram.com/v1/media/{0}?access_token={1}\".format(media_id, self.access_token)\n else:\n url = \"https://api.instagram.com/v1/media/{0}/D?access_token={1}\".format(shortcode, self.access_token)\n request = requests.get(url)\n return request.content\n\n request = requests.get(url)\n return request.json()", "def getMp4Url(urlPartsDate, qNum, verboseLogs):\n urlPartsMp4 = urlPartsDate[:] # copy URL\n urlPartsMp4.append('MP4')\n files = readUrlDir(urlPartsMp4, verboseLogs, '.mp4')\n if verboseLogs:\n logging.warning('MP4s %s', files)\n qMp4Name = 'Q' + str(qNum) + '.mp4'\n if files and (qMp4Name in files):\n urlPartsMp4.append(qMp4Name)\n return '/'.join(urlPartsMp4)\n return None", "def get_loved_music_url_id(self, homepage_id: str):\n url = self.base_url + \"home?id=\" + homepage_id\n browser.get(url)\n browser.switch_to.frame(\"g_iframe\")\n\n all_links = browser.find_elements_by_xpath(\"//a[contains(@title, '喜欢的音乐')]\")\n while len(all_links) == 0: # find until all_links contains what we want.\n print(\"enter while\")\n all_links = browser.find_elements_by_xpath(\"//a[contains(@title, '喜欢的音乐')]\")\n _url_id = str(all_links[0].get_attribute(\"href\")).split(\"=\")[1]\n print(_url_id)\n try:\n assert len(_url_id) > 0\n except:\n print(\"Exception\", _url_id)\n return _url_id", "def get_media(self, max_id):\r\n url = 'https://instagram.com/' + self.username + '/media'\r\n\r\n if max_id is not None:\r\n url += '?&max_id=' + max_id\r\n resp = requests.get(url)\r\n\r\n if resp.status_code == 200:\r\n media = json.loads(resp.text)\r\n\r\n if not media['items']:\r\n raise ValueError('User %s is private' % self.username)\r\n\r\n return media\r\n else:\r\n raise ValueError('User %s does not exist' % self.username)", "def extract_id_from_uri(id_or_uri):\n if '/' in id_or_uri:\n return id_or_uri[id_or_uri.rindex('/') + 1:]\n else:\n return id_or_uri", "def get_mediatype_id(self, description):\n result = 
self.conn.mediatype.get(filter={'description': description})\n\n if result:\n mediatypeid = result[0]['mediatypeid']\n else:\n mediatypeid = None\n\n return mediatypeid", "def get_clean_url(url, unique_id):\n search = f\"(.*{unique_id})\"\n return re.findall(search,url)[0]", "def regex_video_id(param):\n miregex = '(.*)v=(.*)&?(.*)'\n vid = None\n #log.debug(\"get video id: \" + repr(param))\n try:\n rs = re.search(miregex, param)\n params = rs.group(2)\n #log.debug(\"params \" + params)\n vid = params\n #id = params.split(\"&\")[0] if params != None and len(params)>12 else params\n except Exception as e:\n #log.debug(\"HURU\")\n #log.exception(e)\n pass # yes, we pass\n return vid", "def get_zillow_property_id(url):\n pattern = re.compile(r\"(\\d+)_zpid\")\n zpid = re.search(pattern, url).group(1)\n\n return zpid", "def media_image_url(self) -> str:\n return self._device.movie.cover", "def get_yt_id(url):\n yt = re.search(_YT_PATTERN, url)\n if yt:\n return yt.group(1)", "def _extract_image_short_id(scan_result: dict[str, Any]) -> str:\n\n if \"id\" not in scan_result:\n return \"sha256:unknown\"\n\n image_id: str = scan_result[\"id\"]\n\n if image_id.startswith(\"sha256:\"):\n return image_id[:17]\n return image_id[:10]", "def guid(self):\n _, _, _, guid, _ = RPR.GetSetMediaItemTakeInfo_String(\n self.id, 'GUID', 'stringNeedBig', False\n )\n return guid", "def __ext_embed_id(self, youtube_url):\n youtube_id_match = re.search(r'(?<=v=)[^&#]+', youtube_url)\n youtube_id_match = youtube_id_match or re.search(\n r'(?<=be/)[^&#]+', youtube_url)\n trailer_youtube_id = (youtube_id_match.group(0) if youtube_id_match\n else None)\n return trailer_youtube_id", "def fileId_from_url(url):\r\n raw_fileId = re.findall(\"~[0-z.]+/[0-9]+\", url)[0][1: ]\r\n return raw_fileId.replace('/', ':')", "def request_id(self):\n select_id = input(\"\\n>>> \")\n select_dict = [format for format in self.result['formats']\n if format['format_id'] == select_id][0]\n filesize = size(select_dict['filesize']\n ) if select_dict['filesize'] else 0\n # url = select_dict['url']\n print(f\"Downloading {self.result['title']}, size={filesize}\")\n self.title = self.result['title']\n for item in [\"(\", \")\", \" \", \",\", \".\", \"'\"]:\n self.title = self.title.replace(item, '_')\n self.title = self.title.replace('__', '_')\n self.download_video(select_id)", "def _get_current_media(self):\n key = int(self.status.content_id.split(\"/\")[-1])\n media_item = self.pms.fetchItem(key).reload()\n media_idx = self.status.media_custom_data.get(\"mediaIndex\", 0)\n part_idx = self.status.media_custom_data.get(\"partIndex\", 0)\n media = media_item.media[media_idx]\n part = media.parts[part_idx]\n\n return media_item, media, part", "def media(self, path):\n path = \"/media/%s%s\" % (self.session.root, format_path(path))\n\n url, params, headers = self.request(path, method='GET')\n\n return self.rest_client.GET(url, headers)", "def mp4_url(self) -> str:\n\t\treturn 'video.mp4?oid={0}'.format(self._oid)", "def media_image_url(self):\n return self.coordinator.data.nowplaying[self.zone.SourceID].CurrSong.ArtworkURI", "def get_media_info(xmlsent):\n media = {}\n media_element = xmlsent.findall(\".//{%s}media\" % NS)\n\n if media_element:\n media_element = media_element[0]\n media = {'start': media_element.attrib['start'],\n 'end': media_element.attrib['end'],\n 'unit': media_element.attrib['unit']\n }\n return media", "def get_url(self):\n if not self.get_video_id():\n return ''\n \n return 'http://www.dailymotion.com/%s' % 
self.get_video_id()", "def id_from_url(url: str) -> str:\n parts = RedditBase._url_parts(url)\n try:\n comment_index = parts.index(\"comments\")\n except ValueError:\n raise InvalidURL(url) from None\n\n if len(parts) - 4 != comment_index:\n raise InvalidURL(url)\n return parts[-1]", "def download_media_from_bandwidth(media_urls):\n downloaded_media_files = []\n for media_url in media_urls:\n media_id = get_media_id(media_url)\n filename = get_media_filename(media_url)\n with open(filename, \"wb\") as f:\n try:\n downloaded_media = messaging_client.get_media(MESSAGING_ACCOUNT_ID, media_id)\n f.write(downloaded_media.body)\n except Exception as e:\n print(e)\n downloaded_media_files.append(filename)\n return downloaded_media_files", "def _get_file_path(self, url):\n try:\n row = ET.fromstring(self._session.get(url, headers={\"Access-Token\":self._token}).text)[1][2][1]\n data = [row[1].text, row[1].text, row[2].text]\n if \" - S\" in data[0]:\n data[0] = data[0][0:data[1].rfind(\" - S\")]\n elif \" (\" in data[0]:\n data[0] = data[0][0:data[1].rfind(\" (\")]\n return data\n except Exception as e:\n exception_type = type(e).__name__\n print(\"Unable to get media name.\")\n print(exception_type)\n print(e)\n return None", "def getid(data):\n return int(data.split('/')[-1])" ]
[ "0.65878916", "0.6518125", "0.6397448", "0.6382529", "0.61623603", "0.61505437", "0.6048567", "0.60049254", "0.60015476", "0.5999453", "0.5963927", "0.5951866", "0.59363306", "0.59259576", "0.59230775", "0.5900245", "0.5842724", "0.5811348", "0.57866263", "0.5779499", "0.57722807", "0.5754538", "0.575169", "0.5737208", "0.57291096", "0.5720975", "0.5688694", "0.56549746", "0.5641877", "0.5641138", "0.5634743", "0.56191885", "0.56164384", "0.5605402", "0.5600657", "0.5590244", "0.5580092", "0.5573524", "0.55640113", "0.5556397", "0.5542144", "0.55214536", "0.55111456", "0.5506374", "0.54999685", "0.54829705", "0.547076", "0.5468126", "0.54546976", "0.5453574", "0.54522216", "0.5445548", "0.54213077", "0.54138327", "0.54099816", "0.53760046", "0.53643876", "0.5346043", "0.53404796", "0.5323956", "0.532152", "0.5319392", "0.53160644", "0.5315641", "0.53094095", "0.5284126", "0.5278468", "0.52755475", "0.52677935", "0.525081", "0.52486765", "0.5237814", "0.52353853", "0.5232382", "0.5230049", "0.52249634", "0.52241546", "0.5222906", "0.5221686", "0.52157056", "0.5213288", "0.5185868", "0.51849604", "0.5180568", "0.51794165", "0.51791894", "0.5173916", "0.51727706", "0.51697934", "0.5163399", "0.51608795", "0.5154472", "0.5144871", "0.51440936", "0.5141601", "0.5141436", "0.51381093", "0.51380205", "0.51330805", "0.51316136" ]
0.79839915
0
Takes a full media url from Bandwidth and extracts the filename
def get_media_filename(media_url): return media_url.split("/")[-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_file_path(self, url):\n try:\n row = ET.fromstring(self._session.get(url, headers={\"Access-Token\":self._token}).text)[1][2][1]\n data = [row[1].text, row[1].text, row[2].text]\n if \" - S\" in data[0]:\n data[0] = data[0][0:data[1].rfind(\" - S\")]\n elif \" (\" in data[0]:\n data[0] = data[0][0:data[1].rfind(\" (\")]\n return data\n except Exception as e:\n exception_type = type(e).__name__\n print(\"Unable to get media name.\")\n print(exception_type)\n print(e)\n return None", "def get_content_name(self, content_url):\n endpoint = content_url.split('/')[-1]\n return re.match(r'(.+\\.(?:jpg|mp4))', endpoint).group(0)", "def filename_from(url):\n filename = url.split('/')[-1]\n return filename", "def get_media_id(media_url):\n split_url = media_url.split(\"/\")\n #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/file.png\n if split_url[-2] == \"media\":\n return split_url[-1]\n #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/abc/0/file.png\n else:\n #This is required for now due to the SDK parsing out the `/`s\n return \"%2F\".join(split_url[-3:])", "def filename(self,imgurl):\n if imgurl.find('/'):\n return imgurl.rsplit('/', 1)[1]", "def _get_filename_from_url(self) -> Optional[str]:\n file_name_portion = None\n\n right_portion = self.url.rsplit(\"/\", 1)\n if len(right_portion) == 2:\n # split any potential query params - these start with \"?\"\"\n file_name_portion = right_portion[1].split(\"?\")[0].strip()\n\n if len(file_name_portion) == 0:\n file_name_portion = None\n\n return file_name_portion", "def file_path(self, request, response=None, info=None):\n url = request.url\n media_guid = hashlib.sha1(to_bytes(url)).hexdigest()\n media_ext = os.path.splitext(url)[1]\n if not media_ext.isalnum():\n media_ext = os.path.splitext(urlparse(url).path)[1]\n return \"full/%s%s\" % (media_guid, media_ext)", "def _filename_from_url(url):\n file_name = url.split(\"/\")[-1]\n return file_name", "def url_file_name(url):\r\n return url[url.rfind('/') + 1:]", "def _get_file_name(url: str) -> str:\n url = url.strip('/')\n result = findall(r'/(\\w+\\.\\w+)[?|$]', url)\n if result:\n return result[-1]\n return url.split('/')[-1]", "def get_file_name(url: str):\n filename = os.path.basename(url)\n fname, extension = os.path.splitext(filename)\n if extension:\n if \"=\" in filename:\n return filename.split(\"=\")[-1]\n return filename\n header = requests.head(url).headers\n if \"Location\" in header:\n return os.path.basename(header[\"Location\"])\n return filename", "def get_filename(target_dir, filename_prefix):\n # this whole function is not the nicest thing, but isolating it makes\n # things clearer , a good refactoring would be to get\n # the info from the video_url or the current output, to avoid the\n # iteration from the current dir\n filenames = os.listdir(target_dir)\n subs_filename = filename_prefix\n for name in filenames: # Find the filename of the downloaded video\n if name.startswith(filename_prefix):\n (basename, ext) = os.path.splitext(name)\n return basename", "def get_filename(target_dir, filename_prefix):\n # This whole function is not the nicest thing, but isolating it makes\n # things clearer. 
A good refactoring would be to get the info from the\n # video_url or the current output, to avoid the iteration from the\n # current dir.\n filenames = os.listdir(target_dir)\n for name in filenames: # Find the filename of the downloaded video\n if name.startswith(filename_prefix):\n (basename, ext) = os.path.splitext(name)\n return basename\n return None", "def get_filename(self) -> str:\n fname = self.url.split(\"/\")[-1]\n if \",\" in fname:\n _fname, _i = fname.split(\",\")\n _split_fname = _fname.split(\".\")\n _name = _split_fname[0]\n _extension = _split_fname[-1]\n return _name + _i + \".\" + _extension\n else:\n return fname", "def get_filename(link):\r\n return link[link.rfind(\"/\") + 1:]", "def get_filename_from_url(url: str) -> str:\n return os.path.basename(urllib.parse.urlparse(urllib.parse.unquote_plus(url)).path)", "def url_filename(url):\n return os.path.basename(urlparse.urlparse(url).path)", "def get_filename(url: str) ->str:\n if 'drive.google.com' in url:\n return _extract_google_drive_file_id(url)\n url, filename = os.path.split(url)\n return filename or os.path.basename(url)", "def extractParticular(link):\n webpage = openWebsite(link).read()\n nameIndexStart = webpage.index('<title>') + 7\n nameIndexStop = webpage[nameIndexStart:].index('</title>') + nameIndexStart - 1\n name = webpage[nameIndexStart : nameIndexStop].split('-')[0]\n name = \" \".join(name.split())\n name = re.sub('/', '', name)\n\n avatarName = RESTAURANTPATH + '{}.png'.format(\"\".join(name.split()).lower())\n captureImage(link, avatarName)\n\n return name, avatarName", "def get_urifilename(uri):\n up=urlparse.urlparse(uri)\n return split(up[2],\"/\")[-1]", "def parse_filename(url):\n # extract the URL path\n url_path = urlparse.urlparse(url).path\n filename = url_path.split('/')[-1]\n\n # make loose assumption the file name is for an HTML page\n if len(filename) < 1:\n filename = 'index.html'\n\n return filename", "def get_url_filename(url, headers=None, strip=[]):\n filename = get_url_disposition_filename(url, headers)\n if filename:\n return filename\n return get_url_straight_filename(url, strip=[])", "def get_track_filename(self, url = None):\n track_file = urllib.urlopen(url)\n headers = track_file.info()\n track_file.close()\n return wget.filename_from_headers(headers)", "def extract_filename(str):\n regex = r\"([0-9_-]+).jpg\"\n matches = re.search(regex, str)\n if matches:\n return matches.group(1)", "def parse_url(url):\n url_parts = url.split('/')\n webcam_name = url_parts[-3] + 'CAM' + url_parts[-2]\n file_ext = url[-5:-1]\n last_update = 0.\n return {\n 'url': url[:-1], # Skip end of line\n 'name': webcam_name,\n 'imgpath': os.path.join(WEBCAM_DIR, webcam_name, '%d' + file_ext),\n 'last_update': last_update\n }", "def fileId_from_url(url):\r\n raw_fileId = re.findall(\"~[0-z.]+/[0-9]+\", url)[0][1: ]\r\n return raw_fileId.replace('/', ':')", "def fileId_from_url(url):\r\n raw_fileId = re.findall(\"~[A-z.]+/[0-9]+\", url)[0][1:]\r\n return raw_fileId.replace('/', ':')", "def prepare_media(self, object):\n if object.media is not None:\n #return object.media.media_file.name\n return '/api/v1/media/{0}/'.format(object.media.id)\n else:\n return ''", "def isolate_path_filename(self, uri, api_base=None):\n # Did we get an api_base\n api_base = api_base if api_base else self.api_base\n\n # Look for the part after the api_base\n url_parse = uri.lower().rpartition(api_base)\n\n # Take everything to the right of the api_base\n file_component = url_parse[2]\n\n # Remove any URL ? 
parameters\n if '?' in file_component:\n file_component = file_component.rpartition('?')[0]\n\n #: Remove URL encoding\n file_component = unquote(file_component)\n\n #: Remove any spaces in the filename\n file_component = file_component.replace(' ','')\n\n return file_component", "def best_filename(link, response):\n content_type = response.info().get('content-type', '')\n filename = link.filename # fallback\n # Have a look at the Content-Disposition header for a better guess:\n content_disposition = response.info().get('content-disposition')\n if content_disposition:\n type, params = cgi.parse_header(content_disposition)\n # We use ``or`` here because we don't want to use an \"empty\" value\n # from the filename param:\n filename = params.get('filename') or filename\n ext = splitext(filename)[1]\n if not ext:\n ext = mimetypes.guess_extension(content_type)\n if ext:\n filename += ext\n if not ext and link.url != response.geturl():\n ext = splitext(response.geturl())[1]\n if ext:\n filename += ext\n return filename", "def getMp4Url(urlPartsDate, qNum, verboseLogs):\n urlPartsMp4 = urlPartsDate[:] # copy URL\n urlPartsMp4.append('MP4')\n files = readUrlDir(urlPartsMp4, verboseLogs, '.mp4')\n if verboseLogs:\n logging.warning('MP4s %s', files)\n qMp4Name = 'Q' + str(qNum) + '.mp4'\n if files and (qMp4Name in files):\n urlPartsMp4.append(qMp4Name)\n return '/'.join(urlPartsMp4)\n return None", "def parse_filename(cls, filename):\n #from nose.tools import set_trace; set_trace()\n m = re.match(cls._pattern, os.path.basename(filename))\n basename = m.group(1)\n bandname = cls._bandmap.get(m.group(2), m.group(2))\n return basename, bandname", "def retrieveURL(mw, url):\n req = urllib2.Request(url, None, {'User-Agent': 'Mozilla/5.0 (compatible; Anki)'})\n resp = urllib2.urlopen(req)\n # ct = resp.info().getheader(\"content-type\")\n filecontents = resp.read()\n # strip off any query string\n url = re.sub(r\"\\?.*?$\", \"\", url)\n path = unicode(urllib2.unquote(url.encode(\"utf8\")), \"utf8\")\n fname = os.path.basename(path)\n if not fname:\n fname = checksum(filecontents)\n return mw.col.media.writeData(unicode(fname), filecontents)", "def _get_file_url (url, path) :\n path = path + \"/\" + url.replace (\"/\", \"!\").replace (\":\",\"\").replace (\".\",\"-\")\n spl = path.split (\"-\")\n if len (spl) >= 2 :\n ext = spl [len (spl)-1].lower ()\n if 2 <= len (ext) <= 3 and ext in [\"png\", \"jpg\", \"zip\", \"txt\", \"gif\", \"py\", \"cpp\", \"gz\", \"pdf\", \"tif\", \"py\", \"html\", \"h\"] :\n spl = path.split (\"-\")\n spl = spl [:len(spl)-1]\n path = \"-\".join (spl) + \".\" + ext\n return path", "def _UrlBaseName(url):\n return url.rstrip('/').rpartition('/')[-1]", "def get_filename(self, stream, media_type, parser_context):\n try:\n return parser_context['kwargs']['filename']\n except KeyError:\n pass\n\n try:\n meta = parser_context['request'].META\n disposition = parse_header(meta['HTTP_CONTENT_DISPOSITION'])\n return disposition[1]['filename']\n except (AttributeError, KeyError):\n pass", "def _get_file_name(self) -> str:\n headers = self._get_url_http_headers()\n file_type = self._check_url_file_type(headers)\n file_name = self._get_filename_from_headers(headers)\n\n if not file_name:\n file_name = self._get_filename_from_url()\n\n if file_name is None:\n raise FileNameCannotBeEvaluatedException\n\n if file_type:\n file_name = self._add_file_extension(file_name, file_type)\n\n return file_name", "def get_basename(file: Union[str, FileStorage]) -> str:\n filename = 
_retrieve_filename(file)\n # split will split at the final part of the path(image.jpg) and everything\n # before it is at index 0\n return os.path.split(filename)[1]", "def webfilename(url):\n headers = _get_headers_from_url(url)\n\n if _content_disposition(headers):\n result = filename_from_content_disposition(_content_disposition(headers))\n else:\n result = url.split(\"/\")[-1]\n\n return result.translate(REMOVE_PUNCTUATION)", "def get_file_name_from_resposne(r):\n if not r: \n return None\n return get_file_name_from_cd(r.headers.get())", "def download_filename_full(self, doc):\n # todo modify\n authors = \",\".join([x['name'] for x in doc.artists])\n author = re.sub(\"[\\\\\\\\/:*?\\\"<>|]\", '', authors.strip())\n mp3_name = re.sub(\"[\\\\\\\\/:*?\\\"<>|]\", '', doc['name'])\n name = os.path.join(author, \"%s - %s.mp4\" % (author, mp3_name))\n return name", "def get_bucket_name_from_url(file_url):\n\tparts = urlparse(file_url)\n\tpaths = parts.path.split(\"/\")\n\treturn paths[1]", "def getWaveformFileName(self):\n return self.waveform_info.split(\":\")[1][:20]", "def filenameAsURL(self):\n return \"/recordings/\" + self.filename + \".mp3\"", "def media_title(self):\n return self.source", "def _getURL(self, params):\n qs = Media.objects.filter(pk=params['id'], deleted=False)\n if not qs.exists():\n raise Http404\n response_data = list(qs.values(*MEDIA_PROPERTIES))\n # Use 24-hour URLS\n _presign(24*3600, response_data)\n\n element = params['element']\n if element == 'auto':\n if qs[0].meta.dtype == 'video':\n element = 'streaming'\n elif qs[0].meta.dtype == 'image':\n element = 'image'\n elif qs[0].meta.dtype == 'multi':\n return None\n if element == 'audio':\n return response_data[0].get('media_files',{}).get('audio',[])[0]['path']\n elif element == 'thumbnail':\n search_in = response_data[0].get('media_files',{}).get('thumbnail',[])\n elif element == 'thumbnail_gif':\n search_in = response_data[0].get('media_files',{}).get('thumbnail_gif',[])\n elif element == 'image':\n search_in = response_data[0].get('media_files',{}).get('image',[])\n elif element == 'streaming':\n search_in = response_data[0].get('media_files',{}).get('streaming',[])\n elif element == 'archival':\n search_in = response_data[0].get('media_files',{}).get('archival',[])\n elif element == 'attachment':\n search_in = response_data[0].get('media_files',{}).get('attachment',[])\n\n if not search_in:\n return None\n quality = params['quality']\n max_delta = sys.maxsize\n quality_idx = 0\n for idx, info in enumerate(search_in):\n delta = abs(quality-info['resolution'][0])\n if delta < max_delta:\n quality_idx = idx\n max_delta = delta\n return search_in[quality_idx]['path']", "def get_media_path(self, filename):\n return join(settings.CMS_PAGE_MEDIA_PATH, \"%d\" % self.id, filename)", "def normalize_filename(url):\n fname = url.replace('file://', '')\n if os.sep != '/' and not os.path.exists(fname):\n fname = fname.lstrip('/')\n return fname", "def frame_string(path):\n filename = os.path.split(path)[1]\n return os.path.splitext(filename)[0]", "def extract_file_extension(url_file):\n pattern = re.split(\"\\.\",url_file)\n return pattern[-1]", "def find_reddit_filename(wildcards):\n yearmonth = wildcards.year + '-' + wildcards.month\n if yearmonth <= '2017-11':\n ext = '.bz2'\n elif yearmonth <= '2018-10':\n ext = '.xz'\n else:\n ext = '.zst'\n return DATA + \"/downloaded/reddit/\" + yearmonth + ext", "def extract_file_name_from_source_full_path(source_full_path):\n destination_file_name = 
os.path.basename(source_full_path)\n return destination_file_name", "def basename(self):\n return self._getbyspec(\"basename\")[0]", "def _get_track_name(self, filename):\n return os.path.basename(filename)", "def get_extension(srcurl):\r\n if 'youtu' in srcurl:\r\n return 'video/youtube'\r\n else:\r\n disassembled = urlparse(srcurl)\r\n file_ext = splitext(basename(disassembled.path))[1]\r\n return 'video/' + file_ext.replace('.', '')", "def urlgrab(self, url, filename=None, **kwargs):\n blob_location = \"%s/%s\" % (self.base_path, url)\n self.verbose_logger.info(\"downloading gs://%s/%s to %s\" % (self.bucket.name, blob_location, filename))\n url = url.lstrip('/')\n if not filename:\n filename = url\n\n blob = storage.blob.Blob(name=blob_location,bucket = self.bucket)\n blob.download_to_filename(filename)\n return filename", "def getOriginalFile(url):\n # does url exist?\n if url is None or url is \"\":\n return", "def band_url(scene, band):\n\n img = scene + '_B' + str(band) + '.TIF'\n url_components = scene.split('_')\n sensor, level, path, row = url_components[0], url_components[5], url_components[2][:3], url_components[2][3:]\n \n return GOOGLE_STORAGE + sensor + '/' + level + '/' + path + '/' + row + '/' + scene + '/' + img", "def basename(self, filename):\n return filename.replace(self.remote_path, '', 1).lstrip(sep)", "def give_filename( url_rel ):\n filename = basename( url_rel )\n\t# Add time information\n now_datetime = datetime.datetime.now( )\n now_string = now_datetime.strftime( \"%Y-%m-%d-%H-%M-%S\" )\n if filename.endswith( '.pdf' ):\n\t\tfileno, ext_pdf = splitext( filename )\n\t\tpdf_filename = fileno + '-' + now_string + ext_pdf\n\t\treturn pdf_filename", "def get_file_name(self):\n return self.upload.name[6:]", "def parse_media_info(filename):\n print_info('Extracting hash from {0}'.format(filename))\n media_info = MediaInfo()\n for media_info_type in MEDIA_INFO_REGEXS:\n #print_info('Parsing for {0}'.format(media_info_type))\n for regex in MEDIA_INFO_REGEXS[media_info_type]:\n m = re.search(regex, filename)\n\n if m is None:\n continue\n\n extracted_data = m.group('MediaInfo').upper()\n print_info('Extracted {0}: {1}'.format(media_info_type, extracted_data))\n\n # Before we set, do any needed cleanup\n if media_info_type == 'resolution':\n if not extracted_data.endswith('p'):\n resolution = int(extracted_data)\n if resolution == 1280:\n extracted_data = '720'\n extracted_data = extracted_data + 'p'\n media_info.resolution = extracted_data\n if media_info_type == 'source':\n media_info.source = extracted_data.replace('-', '')\n elif media_info_type == 'audio_source':\n media_info.audio_source = extracted_data\n elif media_info_type == 'encoding':\n media_info.encoding = re.sub('X', 'H', extracted_data)\n elif media_info_type == 'color_bits':\n media_info.color_bits = extracted_data\n break\n \n \n return media_info", "def get_basename(absolute_file_path):\r\n return absolute_file_path.split('/')[-1]", "def get_video_parts(video_path):\n parts = video_path.split(os.path.sep)\n #print(parts)\n filename = parts[-1]\n filename_no_ext = filename.split('.')[0]\n return filename_no_ext, filename#('video6514', 'video6514.mp4')", "def getBaseName(filepath):\n return os.path.basename(filepath)", "def media_folder_name(self):\n raise NotImplementedError", "def standardized_filename(self, url, fname):\n #TODO:apply filename standardization logic\n # non-result pages/files use default urllib name conventions\n # result files need standardization logic (TBD)\n if fname:\n filename = 
join(self.cache_dir, fname)\n else:\n filename = self.filename_from_url(url)\n return filename", "def extractFileName(fileType, modelName, modelVersion, modelState):\n fileName = '{}_{}_{}'.format(modelName, modelVersion, fileType) if modelState == 'national' else '{}_{}_{}_{}'.format(modelName, modelVersion, modelState, fileType)\n return fileName", "def filename(self):\n _, tail = os.path.split(self.url)\n return self.folder + '/' + tail[:-4] + '/' + tail[:-3] + 'shp'", "def get_filename(path):\n return path.split('/')[-1]", "def __extractFileName(self, line):\n f = line.split(None, 1)[1]\n f = f.rsplit(None, 6)[0]\n if f == \"/dev/null\":\n f = \"__NULL__\"\n else:\n f = f.split(\"/\", 1)[1]\n return f", "def get_generated_image_name(full_image_url):\r\n\r\n logging.debug('get_generated_image_name({})'.format(full_image_url))\r\n\r\n image_name = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S\")\r\n image_extension = full_image_url.split(\".\")[-1]\r\n image_name = image_name + \".\" + image_extension\r\n logging.debug('get_generated_image_name - image_name = {}'.format(image_name))\r\n return image_name", "def get_filename(headers):\n content_disp = [x for x in headers if x[0] == 'Content-Disposition'][0][1]\n raw_filename = [x for x in content_disp.split(';') if x.startswith('filename=')][0]\n return raw_filename.replace('filename=', '').replace('\"', '')", "def get_media(self, url, out_filename=None, raw_data=False):\n if not raw_data:\n if not out_filename:\n out_filename = os.path.join(settings.BW_MMS_DIRECTORY,\n url.split('/')[-1])\n\n if not os.path.isdir(os.path.dirname(out_filename)):\n raise ValueError('Invalid output directory: {} - '\n 'unable to download MMS'.\n format(os.path.dirname(out_filename)))\n\n if os.path.isfile(out_filename):\n logging.info('filename {}, already exists - will be '\n 'overwritten.....'.format(out_filename))\n\n try:\n resp = requests.get(url, auth=(self.token, self.secret))\n except requests.exceptions.RequestException as e:\n logging.info('Error while fetching media: {}'.format(e))\n return\n\n if resp.status_code == requests.codes.ok:\n try:\n if raw_data:\n return resp.content\n else:\n with open(out_filename, 'wb') as fd:\n fd.write(resp.content)\n\n return out_filename\n except Exception as e:\n logging.info('Error: {} while writing file: {}'.\n format(e, out_filename))\n return\n\n logging.info('Invalid URI or an error occured, response: {}, '\n 'response content: {}'.format(resp.status_code,\n resp.text))", "def get_filename(img_path):\n filename = os.path.splitext(img_path)\n return os.path.basename(filename[0])", "def get_url_image(artist, track, size):\n s = size.lower()\n if s not in ['small', 'medium', 'large', 'extralarge']:\n return None\n track_infos = get_infos(artist, track)\n for image in track_infos['track']['album']['image']:\n if image['size'] == s:\n return image['#text']\n return None", "def filename(self):\n filename, ext = os.path.splitext(self.file.name)\n return filename.split('/')[-1]", "def get_ext(url):\r\n root, ext = splitext(url)\r\n return ext", "def _get_video_name(self, fname):\n csv_name_split = fname.split(\"_\")\n thirty_fps_loc = csv_name_split.index(\"30fps\")\n video_name = \"_\".join(csv_name_split[0:thirty_fps_loc+1])\n return video_name", "def get_url_straight_filename(url, strip=[], allowdir=False):\n path = urlunquote(urlsplit(url).path)\n path_parts = path.split('/')\n\n if allowdir:\n # strip empty ones\n while len(path_parts) > 1 and not path_parts[-1]:\n path_parts = path_parts[:-1]\n\n if 
strip:\n while path_parts and path_parts[-1] in strip:\n path_parts = path_parts[:-1]\n\n if path_parts:\n return path_parts[-1]\n else:\n return None", "def download_media_from_bandwidth(media_urls):\n downloaded_media_files = []\n for media_url in media_urls:\n media_id = get_media_id(media_url)\n filename = get_media_filename(media_url)\n with open(filename, \"wb\") as f:\n try:\n downloaded_media = messaging_client.get_media(MESSAGING_ACCOUNT_ID, media_id)\n f.write(downloaded_media.body)\n except Exception as e:\n print(e)\n downloaded_media_files.append(filename)\n return downloaded_media_files", "def media_path(self):\n return self._path", "def get_file_path(url, token):\n parsed = urllib_parse.urlsplit(url)\n upload_shards = os.getenv('UPLOAD_SHARDS')\n if upload_shards is None:\n # No media shards, the path is /uploads\n path = '/uploads'\n else:\n # Get metadata for this URL.\n upload_uid = TatorCache().get_upload_uid_cache(parsed.path)\n response = requests.head(f\"{urllib_parse.urljoin('http://nginx-internal-svc', parsed.path)}\",\n allow_redirects=True,\n headers={'Authorization': f'Token {token}',\n 'Upload-Uid': f'{upload_uid}'})\n upstream = response.headers['X-Upstream']\n logger.info(f\"Upstream for URL {url} is {upstream}\")\n hostname = socket.getfqdn(upstream.split(':')[0])\n logger.info(f\"Hostname for URL {url} is {hostname}\")\n path = f'/{hostname.split(\"-\")[1]}'\n path += f'/{parsed.path.split(\"/\")[-1]}'\n logger.info(f\"Path is {path}\")\n return path", "def test_get_ext(self):\r\n filename_str = 'http://www.example.com/path/video.mp4'\r\n output = get_ext(filename_str)\r\n self.assertEqual(output, 'mp4')", "def get_filename(extended_slug):\n user, project, build_id, job_id = split_extended_slug(extended_slug)\n\n if None in (user, project, build_id, job_id): # todo; remove this\n return\n\n filename_glob = os.path.join(\n test_data_dir,\n user, project,\n '{0}.{1}-*.txt'.format(build_id, job_id))\n filenames = glob.glob(filename_glob)\n if filenames:\n return filenames[0]\n else:\n return None", "def clean_filename(url):\n valid_chars = \"-_() %s%s\" % (string.ascii_letters, string.digits)\n nodot = url.replace('.','_').replace(':','_')\n filename = ''.join([c for c in nodot if c in valid_chars])\n return filename", "def bsw_getCurrentAssetMainFileName():\n projectShortName = ProjectNamingInputs().projectShortName\n # get asset UID from the kns_getAssetDetails function (second last return is assetUID).\n assetUID = bsw_getAssetDetails()[-2]\n if os.environ['BSW_PROJECT_TYPE'] == 'series':\n return projectShortName + '_' + assetUID.split('_')[1] + '_' + assetUID.split('_')[2] + '_' + \\\n assetUID.split('_')[-1] + '_' + assetUID.split('_')[-2] + '.ma'\n else:\n return projectShortName + '_' + assetUID.split('_')[1] + '_' + assetUID.split('_')[2] + '_' + \\\n assetUID.split('_')[-1] + '.ma'", "def get_music_url(self, song_id, bit_rate=320000):\n url = netease_song_download_url\n csrf = ''\n params = {'ids': [song_id], 'br': bit_rate, 'csrf_token': csrf}\n result = self.post_request(url, params)\n song_url = result['data'][0]['url']\n return song_url", "def get_filename(filepath):\n return os.path.basename(filepath)", "def get_ext(url):\n\n path = urlparse(url).path\n ext = splitext(path)[1]\n return ext", "def media_image_url(self) -> str:\n return self._device.movie.cover", "def _get_cap_filename(self):\n\n fnd = self._get_session_dir()\n fn = os.path.join(fnd, 'F%4.4d.tif' % self.dpar.cur_cap)\n\n return fn", "def basename(self):\n return 
self.name.basename", "def media_image_url(self):\n return self.coordinator.data.nowplaying[self.zone.SourceID].CurrSong.ArtworkURI", "def grab_file(url, filename):\n with RemoteZip(url) as zip:\n filenames = zip.namelist()\n for fname in filenames:\n zinfo = zip.getinfo(fname)\n if filename in zinfo.filename and not \".plist\" in zinfo.filename:\n filename = zinfo.filename.split(\"/\")[-1]\n print(\"[i] downloading %s\" % filename)\n extract_and_clean(zip, zinfo.filename, filename)\n return filename\n return filename", "def media_title(self):\n return self._current_item[\"name\"]", "def basename(path):\r\n return split(path)[1]", "def media_title(self) -> str | None:\n return self._output_name", "def get_song_url(self, song_id, bit_rate=320000):\n url = song_download_url\n csrf = ''\n params = {'ids': [song_id], 'br': bit_rate, 'csrf_token': csrf}\n result = self.post_request(url, params)\n song_url = result['data'][0]['url']\n return song_url", "def absolute_folder_name(self):\n return 'music_decompose/media/{0}'.format(self.media_folder_name)" ]
[ "0.7303174", "0.70052874", "0.68807364", "0.67521065", "0.67236215", "0.66535795", "0.661498", "0.6603966", "0.6586546", "0.6538387", "0.6509765", "0.64932483", "0.64535576", "0.6430052", "0.638914", "0.63559645", "0.6279012", "0.62202466", "0.6168988", "0.61595553", "0.6069832", "0.60655814", "0.60260266", "0.6017655", "0.6014971", "0.6010148", "0.6006867", "0.59884715", "0.5948225", "0.5939837", "0.5926801", "0.5893688", "0.5875327", "0.58547467", "0.5836101", "0.5809463", "0.57714385", "0.57584393", "0.5757971", "0.57507616", "0.5748383", "0.5726056", "0.5714649", "0.57136166", "0.57028437", "0.568663", "0.56687975", "0.56564885", "0.5655087", "0.5623619", "0.5607852", "0.5604676", "0.56002706", "0.5589146", "0.55582875", "0.5555533", "0.5552461", "0.5551364", "0.55301374", "0.55284476", "0.5528105", "0.5526173", "0.55154747", "0.5489801", "0.548587", "0.54853237", "0.5480636", "0.54716605", "0.5470534", "0.5465411", "0.54607016", "0.5450844", "0.54423654", "0.5439873", "0.5439036", "0.54372483", "0.54348373", "0.5420296", "0.54196674", "0.5409475", "0.54045975", "0.5397437", "0.53882766", "0.5388106", "0.53853756", "0.53822047", "0.5377327", "0.5366084", "0.53628165", "0.53619295", "0.5361565", "0.5360665", "0.5360134", "0.5356397", "0.53557146", "0.534211", "0.5332827", "0.5332746", "0.53325826", "0.5330797" ]
0.7981755
0
Takes a list of media URLs and downloads the media into temporary storage
def download_media_from_bandwidth(media_urls):
    """Download each Bandwidth media attachment into local temporary storage and return the local filenames."""
    downloaded_media_files = []
    for media_url in media_urls:
        media_id = get_media_id(media_url)
        filename = get_media_filename(media_url)
        with open(filename, "wb") as f:
            try:
                # Fetch the raw bytes through the Bandwidth messaging client and write them to the local file.
                downloaded_media = messaging_client.get_media(MESSAGING_ACCOUNT_ID, media_id)
                f.write(downloaded_media.body)
            except Exception as e:
                print(e)
        downloaded_media_files.append(filename)
    return downloaded_media_files
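The function above assumes two small URL helpers that are not shown in this record. The sketch below is a hypothetical, self-contained illustration of what they might look like; the helper names, the assumed URL layout, and the placeholder URL are assumptions for illustration only, not confirmed by the source.

# Hypothetical sketch of the helpers assumed by download_media_from_bandwidth.
# Assumption: Bandwidth media URLs end in .../media/<media-id>/<step>/<filename>.
def get_media_id(media_url):
    # Keep everything after the "media/" segment so nested IDs survive intact.
    return media_url.split("media/", 1)[1]

def get_media_filename(media_url):
    # Use the last path segment as the local temporary filename.
    return media_url.rsplit("/", 1)[1]

if __name__ == "__main__":
    url = "https://example.invalid/users/123/media/abc-123/0/photo.jpg"  # placeholder URL
    print(get_media_id(url))        # -> abc-123/0/photo.jpg
    print(get_media_filename(url))  # -> photo.jpg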
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def downloadLocal(url_list,path):\n print(\"You are downloading {} images\".format(parser_arguments().limit),end=\" \");print(\"of {} class.\".format(parser_arguments().classes))\n print(\"Please, be patient :)\")\n for i in range(len(url_list)):\n filename= url_list[i].split(\"/\")[-1] # name of the picture file\n r = requests.get(url_list[i], stream =True)\n print(filename)\n\n with open(filename,'wb') as f : # create the file locally in binary-write mode\n r = requests.get(url_list[i], stream =True)\n shutil.copyfileobj(r.raw, f) #write our image to the file\n shutil.move(filename,path)\n print('Done!')", "def _download_file(self, video_objects):\n downloaded_video = []\n path=\"media/\"\n for video_object in video_objects:\n if 'contentUrl' in video_object.keys() and video_object['contentUrl']!='':\n \n url = video_object['contentUrl']\n filename = url.split('/')[-1]\n r = requests.get(url, stream=True)\n \n with open(filename, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024): \n if chunk:\n f.write(chunk)\n\n path+=filename\n return path", "def downloader(urls, path):\n counter = 1\n for media_file in urls:\n # Create the file name\n file_name = \"meme\" + str(counter) + \".jpg\"\n file_location = path + \"/\" + file_name\n print(f\"Downloading {media_file} as {file_name}.\")\n # Overwrite files\n if os.path.exists(file_location):\n os.remove(file_location)\n print(f\"{file_name} will overwrite an existing file of the same name.\")\n wget.download(media_file, out=file_location)\n print(\"\\n\")\n counter += 1\n print(f\"{counter - 1} items were downloaded.\")\n return counter - 1", "def download(urls, dest_folder):\n pass", "def fetch_files_from_urls(urls, dir):\n makedir(dir)\n try:\n pool = []\n for url in urls:\n p = Process(target=download, args=(url, dir,))\n p.start()\n pool.append(p)\n for p in pool:\n p.join()\n except KeyboardInterrupt:\n print \"Shutdown requested...exiting\"\n # except Exception:\n # traceback.print_exc(file=sys.stdout)\n\n # print(\"removing temporary files from current directory\")\n map(os.remove, glob.glob(\"*.tmp\"))", "def download_songs(**kwargs):\n for url in kwargs[\"songs\"][\"urls\"]:\n log.debug(\"Downloading to %s\", url[\"save_path\"])\n reference_file = DOWNLOAD_LIST\n track_db = write_tracks(reference_file, kwargs[\"songs\"])\n os.rename(reference_file, kwargs[\"output_dir\"] + \"/\" + reference_file)\n reference_file = str(kwargs[\"output_dir\"]) + \"/\" + reference_file\n kwargs[\"reference_file\"] = reference_file\n kwargs[\"track_db\"] = track_db\n if kwargs[\"multi_core\"] > 1:\n multicore_find_and_download_songs(kwargs)\n else:\n find_and_download_songs(kwargs)\n os.remove(reference_file)", "def downloadMinio(url_list,list_d):\n print(\"You are downloading {} images\".format(parser_arguments().limit),end=\" \");print(\"of {} class.\".format(parser_arguments().classes))\n print(\"Please, be patient :)\")\n name = \"-\".join(parser_arguments().classes)\n name = name.lower()\n for i in range(len(url_list)):\n filename= url_list[i].split(\"/\")[-1] # name of the picture file\n r = requests.get(url_list[i], stream =True)\n\n\n if r.status_code == 200:\n r.raw.decode_content = True\n\n with open(filename,'wb') as f : # create the file locally in binary-write mode\n metadata = list_d[i]\n r = requests.get(url_list[i], stream =True)\n shutil.copyfileobj(r.raw, f) #write our image to the file\n path = os.getcwd()+'/'+filename # image path\n minioClient.fput_object(name,filename,path,'image/jpg',metadata)\n os.remove(filename)\n 
print(filename,'have been successfuly uploaded')\n print('Done!')", "def download_cdn_videos(filenames,sub_urls,handout_urls,video_urls, target_dir):\n \"\"\" using a simple file downloader \"\"\"\n for i, v in enumerate(video_urls):\n filename_prefix = str(i+1).zfill(2) + '-'\n #original_filename = v.rsplit('/', 1)[1]\n video_filename = filename_prefix + filenames[i] + '.mp4'\n sub_filename = filename_prefix + filenames[i] + '.srt'\n handout_filename = filename_prefix + filenames[i] + '.srt'\n video_path = os.path.join(target_dir, video_filename)\n sub_path = os.path.join(target_dir, sub_filename)\n handout_path = os.path.join(target_dir, handout_filename)\n #print('[debug] GET %s' % v)\n print('[download] Destination: %s' % video_path)\n v = quote(v,safe=\":/\")\n if len(v) != YOUTUBE_VIDEO_ID_LENGTH:\n req = Request(v) \n try:\n video = urlopen(v)\n fileSize = int(video.headers['content-length'])\n finish = False\n existSize = 0\n if os.path.exists(video_path):\n output = open(video_path,\"ab\")\n existSize = os.path.getsize(video_path)\n #If the file exists, then only download the remainder\n if existSize < fileSize:\n #print(\"[debug] bytes range is: %s-%s\" % (existSize,fileSize))\n req.headers[\"Range\"]= \"bytes=%s-%s\" % (existSize,fileSize)\n video = urlopen(req)\n else:\n finish = True\n else:\n output = open(video_path,\"wb\")\n if finish == False:\n file_size_dl = existSize\n block_sz = 262144\n while True:\n buffer = video.read(block_sz)\n if not buffer:\n break\n \n file_size_dl += len(buffer)\n output.write(buffer)\n status = r\"%10d [%3.2f%%]\" % (file_size_dl, file_size_dl * 100. / fileSize)\n status = status + chr(8)*(len(status)+1)\n sys.stdout.write(status)\n sys.stdout.flush()\n \n output.close()\n\n except URLError as e:\n print(\"[warning]error: %r when downloading %s\" % (e.reason,v) )\n\n else:\n download_youtube_video(v,video_path)\n \n if sub_urls[i] != \"\":\n #print('[debug] GET %s' % BASE_URL+sub_urls[i])\n if not os.path.exists(sub_path):\n subs_string = edx_get_subtitle(sub_urls[i], headers)\n if subs_string:\n print('[info] Writing edX subtitles: %s' % sub_path)\n open(os.path.join(os.getcwd(), sub_path),\n 'wb+').write(subs_string.encode('utf-8'))\n\n if handout_urls[i] != \"\":\n #print('[debug] GET %s' % BASE_URL+sub_urls[i])\n if not os.path.exists(handout_path):\n handout_content = urlopen(BASE_URL+handout_urls[i]).read()\n if handout_content:\n print('[info] Writing handout: %s' % handout_path)\n open(os.path.join(os.getcwd(), handout_path),\n 'wb+').write(handout_content)\n #srtfile = urlopen(BASE_URL+sub_urls[i])\n #output = open(srt_path,'wb')\n #output.write(srtfile.read())\n #output.close()", "def download_pics(pics_links):\n\n for link in range(len(pics_links)):\n r = requests.get(pics_links[link][0])\n with open(os.path.join(\"tmp\", f\"{link}.jpg\"), \"wb\") as dl:\n dl.write(r.content)", "def url_media(self, csvlinks=\"\", csvset=\"\", urldir=\"\", medialog_file=\"\",\n directory=\"\", ignore_twitter_link=True, mediatype=\"vi\",\n site_sources=[], name_scraping=\"\", video_timelimit=1000,\n image_timelimit=60):\n\n if csvlinks == \"\":\n csvlinks = CSVLINKS\n if csvset == \"\":\n csvset = CSVSETURL\n if medialog_file == \"\":\n medialog_file = MEDIALOG\n if directory == \"\":\n directory = self.directory\n\n if urldir == \"\" and name_scraping == \"\":\n urldir = URLDIR\n name_scraping = urldir.lower()\n elif name_scraping == \"\":\n name_scraping = urldir.lower()\n elif urldir == \"\":\n urldir = name_scraping\n\n if urldir[-1] != \"/\":\n 
urldir = urldir + \"/\"\n if name_scraping[-1] == \"/\":\n name_scraping = name_scraping[:-1]\n\n mediatype = str(mediatype).lower()\n if mediatype not in (\"v\", \"i\", \"vi\", \"iv\"):\n mediatype = \"vi\"\n\n root_dir = os.getcwd()\n\n if directory != \"\":\n os.chdir(directory)\n directory = os.getcwd()\n else:\n directory = root_dir\n\n setUrls = CSVUtils.csv_to_dict(csvset, 1, 0)\n\n if urldir[-1] == '/':\n urldir = urldir[:-1]\n OSUtils.createDir(urldir)\n\n seq = \"\"\n\n # get next sequence number\n if os.path.isfile(medialog_file):\n seq = JSONUtils.read_keyval_json(\"next_\"+name_scraping+\"Seq\",\n medialog_file)\n\n # if the parameter does not exist, get the seq from the\n if seq == \"\":\n seq = max([int(d) for d in os.listdir(urldir)] + [0]) + 1\n\n try:\n seqdir = os.path.realpath(urldir + \"/\" + str(seq))\n\n # implemented in order to give a feedback about progresss %\n total_row = sum(1 for row in CSVUtils.csvGenerator(csvlinks))\n row_count = 0\n\n # iterate through each link\n for line in CSVUtils.csvGenerator(csvlinks):\n row_count += 1\n\n if \"https://twitter.com\" in line[0] and ignore_twitter_link:\n continue\n\n url = self.__expandURL(line[0])\n\n if len(site_sources) > 0:\n if len([site for site in site_sources if site in url]\n ) == 0:\n continue\n\n if url not in setUrls.keys():\n\n print('\\x1b[6;30;42m' + \"Starting Scrapping for Link \"\n + str(url) + \" (\" + str(seq) + \")\" + '\\x1b[0m')\n\n os.mkdir(seqdir)\n os.chdir(seqdir)\n\n if \"v\" in mediatype:\n try:\n # in order to avoid stalls in lives\n signal.signal(signal.SIGALRM,\n OSUtils.handler_timeout)\n signal.alarm(video_timelimit)\n\n youtube_dl.YoutubeDL({}).download([url])\n except KeyboardInterrupt:\n raise\n except Exception as e:\n print(e)\n finally:\n signal.alarm(0)\n\n if \"i\" in mediatype:\n for im in self.__urlImageGenerator(url):\n try:\n signal.signal(signal.SIGALRM,\n OSUtils.handler_timeout)\n signal.alarm(image_timelimit)\n\n if \"base64,\" in im:\n continue\n\n lo = Text.lastocc(im, \"/\")+1\n\n if lo < len(im)-1:\n output = im[Text.lastocc(im, \"/\")+1:]\n else:\n output = im[\n Text.lastocc(im[:-1], \"/\")+1:-1]\n\n if output == \"\" or len(output) > 80:\n output = random.randint(1, 10000000000000)\n\n self.__request_download(link=im,\n output=str(output))\n except requests.exceptions.ConnectionError as e:\n print(e)\n continue\n except requests.exceptions.InvalidSchema as e:\n print(e)\n continue\n except Exception as e:\n print(e)\n finally:\n signal.alarm(0)\n\n os.chdir(directory)\n\n setUrls[url] = seq\n\n CSVUtils.write_line_b_csv(csvfile=csvset, line=[seq, url])\n\n print('\\x1b[6;30;42m' + \"Scrap Finished for Link \"\n + str(url) + \" (\"\n + str(round(row_count*100/total_row, 4)) + \"%)\"\n + '\\x1b[0m')\n\n seq += 1\n seqdir = os.path.realpath(urldir + \"/\" + str(seq))\n\n os.chdir(root_dir)\n\n except KeyboardInterrupt:\n print(\"Stopping...\")\n\n JSONUtils.add_keyval_json(\"next_\"+name_scraping+\"Seq\", seq,\n medialog_file)\n\n os.chdir(root_dir)\n\n shutil.rmtree(seqdir)\n except Exception as e:\n JSONUtils.add_keyval_json(\"next_\"+name_scraping+\"Seq\", seq,\n medialog_file)\n\n os.chdir(root_dir)\n\n shutil.rmtree(seqdir)\n print(e)\n raise", "def download_images(links):\n\n for link in links:\n print(\"Processing\", link)\n try:\n response = requests.get(link,\n timeout=METADATA_REQUEST_TIMEOUT, stream=True)\n except requests.exceptions.RequestException as e:\n print(e)\n sys.exit(1)\n\n artist_name = link.rsplit('/', 2)[1]\n image_name = link.rsplit('/', 
2)[2]\n image_name = artist_name + image_name\n\n file_location = ASSET_PATH.joinpath(image_name)\n\n with open(str(file_location), 'wb') as outfile:\n shutil.copyfileobj(response.raw, outfile)", "def download_photos(urls, folder=''):\n folder_path = os.path.join('photos', folder)\n if not os.path.exists(folder_path):\n os.mkdir(folder_path)\n for url in urls:\n image = requests.get(url)\n filename = os.path.join(folder_path, url.split('/')[-1])\n with open(filename, 'wb') as f:\n f.write(image.content)", "def download(urls: List[str], num_threads: int = 40) -> List[str]:\n\n num_files = len(urls)\n start = perf_counter()\n\n print(\"Starting download of %s files . . .\" % num_files)\n\n results = multiprocess(urls, Downloader, num_threads=num_threads)\n\n dur = perf_counter() - start\n print(\"Completed download of %s files after %.3f seconds.\" % (num_files, dur))\n\n return results", "def get_media_files(tweets, today, hour, output_folder):\n media_file = \"\"\n tweet_id = \"\"\n create_picture_folder(output_folder)\n\n for tweet in tweets:\n if tweet.get('delete') != None:\n continue\n if not tweet['retweeted'] and 'RT @' not in tweet['text'] and not tweet['in_reply_to_status_id']:\n media = tweet.get('entities').get('media', [])\n if len(media) > 0:\n # media_files.append(media[0]['media_url'])\n media_file += media[0]['media_url']\n # tweet_ids.append(tweet['id'])\n tweet_id += tweet['id_str']\n return media_file, tweet_id", "def download_urls(urls, path):\n count = 0\n if urls:\n for url in urls:\n try:\n res = requests.get(url, verify=False, stream=True)\n rawdata = res.raw.read()\n with open(os.path.join(path, 'img_' + str(count) + '.jpg'), 'wb') as f:\n f.write(rawdata)\n count += 1\n except Exception as e:\n print('Failed to write rawdata.')\n print(e)", "def download_list(urls, outdir=None, workdir=None, threads=3):\n pool = ThreadPool(threads)\n download_lambda = lambda x: download(x, outfile=outdir, workdir=workdir)\n pool.map(download_lambda, urls)", "def download(self, output_dir=None, chunk_size=1024):\n def download_content(content_link, output_dir):\n \"\"\"Download the content of a media and save it in a existing\n directory.\n\n Args:\n content_link (str):\n output_dir (str):\n Returns:\n dict: local version of the media object\n \"\"\"\n if content_link is None: return None\n res = requests.get(content_link, stream=True)\n try:\n res.raise_for_status()\n except requests.exceptions.HTTPError:\n return None\n img_name, img_format = parse_image_url(res.url)\n filepath = '{}/{}.{}'.format(output_dir, img_name, img_format)\n\n with open(filepath, mode='wb') as image_file:\n for chunk in res.iter_content(chunk_size=chunk_size):\n image_file.write(chunk)\n\n return abspath(filepath)\n\n output_dir = output_dir or getcwd()\n\n media_links = dict(\n image=[],\n video=[]\n )\n if self['media'] and self['media']['image']:\n downloaded_images = [\n download_content(item, output_dir) for item in self['media']['image']\n ]\n media_links['image'].extend(list(filter(None, downloaded_images)))\n if self['media'] and self['media']['video']:\n downloaded_videos = [\n {\n 'url': download_content(item['url'], output_dir),\n 'thumbnail': download_content(item['thumbnail'], output_dir)\n } for item in self['media']['video']\n ]\n media_links['video'].extend(\n filter(lambda x: x['url'] and x['thumbnail'], downloaded_videos)\n )\n\n return media_links", "def download_files(file_uris):\n\n if os.path.exists(LOG_FILE):\n log_file = open(LOG_FILE, \"rU+\")\n downloaded_podcasts = 
strip_newlines(log_file)\n else:\n log_file = open(LOG_FILE,\"w\")\n downloaded_podcasts = []\n\n for uri in file_uris:\n # if the current file URI is not found in the log, it is a new file, and\n # is thus downloaded\n if uri not in downloaded_podcasts:\n # extract filename from the URI \n uri_split = re.split(\"/\", uri)\n filename = uri_split[len(uri_split) - 1]\n \n # download the file\n if OUTPUT:\n print \"downloading \" + uri\n urllib.urlretrieve(uri, DEST_DIR + os.sep + filename)\n log_file.write(uri + os.linesep)\n\n log_file.close()", "def download_files(urls, folder): \n\n if not urls: \n return None\n if not folder: \n return None\n \n folder_path = Path(folder)\n if not folder_path.exists():\n os.makedirs(folder_path)", "async def save_url_images(images):\n for source, image in images:\n name = source.split('/')[-1]\n async with aiofiles.open(f'{OUTPUT_FOLDER}/{name}', 'wb') as f:\n await f.write(image)", "def download_files(self):", "def download_image(urls):\r\n image_paths = []\r\n\r\n base_url = \"https://classifieds.castanet.net\"\r\n image_directory = os.path.join('C:\\\\', 'users', 'ccholon', 'my documents', 'castanet images')\r\n\r\n for url in urls:\r\n listing_url = base_url + url\r\n image_page = requests.get(listing_url)\r\n image_soup = BeautifulSoup(image_page.text, 'html.parser')\r\n\r\n # find the URL for the listing image\r\n image_element = image_soup.find(name='div', class_='image_container')\r\n image_element = image_element.find(name='img')\r\n image_url = image_element.get('src')\r\n\r\n # download the image\r\n #image = requests.get(image_url, stream=True)\r\n\r\n # save to local directory\r\n #image_file = open(os.path.join(image_directory, os.path.basename(image_url)), 'wb')\r\n #for bytes in image.iter_content(100000):\r\n #image_file.write(bytes)\r\n #image_file.close()\r\n\r\n image_paths.append(os.path.join(image_directory, os.path.basename(image_url)))\r\n\r\n return image_paths", "def multi_download(self, url_list):\n workers = 4\n with ThreadPoolExecutor(workers) as ex:\n urls = [url_list[x] for x in range(len(url_list))]\n self.filenames = [str(y)+\".txt\" for y in range(len(url_list))]\n ex.map(self.download, urls, self.filenames)\n return self.filenames", "def download_remote_files(output_dir, files):\n logging.debug(f\"Try to download files: {files}\")\n\n # Create list of remote and local files\n base_url = \"https://storage.googleapis.com/\"\n urls = [base_url+file for file in files]\n local_files = [output_dir + file.split(\"/\")[-1] for file in files]\n\n\n async def get(session, url, local_f):\n if os.path.isfile(local_f):\n logging.info(\"Raw file {} exists locally\".format(local_f))\n pass\n else:\n # Download file\n async with session.get(url=url) as response:\n if response.status == 200:\n resp = await response.read()\n with open(local_f, \"wb\") as outfile:\n outfile.write(resp)\n\n\n async def main(urls, local_files):\n conn = aiohttp.TCPConnector(limit=30)\n timeout = aiohttp.ClientTimeout(total=None, connect=None, sock_connect=30, sock_read=10)\n async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:\n _ = await asyncio.gather(*[get(session, urls[f], local_files[f]) for f in range(len(urls))])\n\n asyncio.run(main(urls, local_files))\n return local_files", "def download_attachments(output_path, urls):\r\n locations = []\r\n for url in urls:\r\n path = urlparse(url).path\r\n #teardown path and rebuild to negate any errors with\r\n #os.path.join and leading /'s\r\n path = path.split('/')\r\n filename = 
path.pop(-1)\r\n localpath = ''\r\n for item in path:\r\n localpath = os.path.join(localpath, item)\r\n full_path = os.path.join(output_path, localpath)\r\n if not os.path.exists(full_path):\r\n os.makedirs(full_path)\r\n print('downloading {}'.format(filename))\r\n try:\r\n urlretrieve(url, os.path.join(full_path, filename))\r\n locations.append(os.path.join(localpath, filename))\r\n except URLError as e:\r\n error = (\"No file could be downloaded from {}; Error {}\"\r\n .format(url, e))\r\n logger.warning(error)\r\n except IOError as e: #Python 2.7 throws an IOError rather Than URLError\r\n # For japanese, the error might look kind of like this:\r\n # e = IOError( 'socket error', socket.error(111, u'\\u63a5\\u7d9a\\u3092\\u62d2\\u5426\\u3055\\u308c\\u307e\\u3057\\u305f') )\r\n # and not be suitable to use in \"{}\".format(e) , raising UnicodeDecodeError\r\n # (This is at least the case on my Fedora running Python 2.7.5 \r\n # (default, Feb 19 2014, 13:47:28) [GCC 4.8.2 20131212 (Red Hat 4.8.2-7)] on linux2\r\n try:\r\n error = (\"No file could be downloaded from {}; Error {}\"\r\n .format(url, e))\r\n except UnicodeDecodeError:\r\n # For lack of a better log message because we could not decode e, let's use repr(e)\r\n error = (\"No file could be downloaded from {}; Error {}\"\r\n .format(url, repr(e)))\r\n logger.warning(error)\r\n return locations", "def get_media(api, num_tweets=25, profile=\"@hakeemangulu\", admin=False):\n # Store the media urls in a list\n media_files = []\n\n # Create cursor object for the timeline\n if admin:\n # If the admin is using the application, return his timeline\n tl = tweepy.Cursor(api.home_timeline).items(num_tweets)\n else:\n # If the admin is not using the application, return the specified\n # user's timeline\n tl = tweepy.Cursor(api.user_timeline, screen_name=profile).items(num_tweets)\n\n # Iterate through the timeline and extract images\n for status in tl:\n # Get all media from a tweet\n media = status.entities.get('media', [])\n # Add non-empty media to the set\n for image in media:\n # Only add the image if it is a photo or GIF (as opposed to a\n # video)\n if image['type'] == 'photo' or image['type'] == 'animated_gif':\n media_files.append(image['media_url'])\n return media_files", "def download_feed_item(feed_item, base_directory):\n join_path = partial(os.path.join, base_directory)\n\n base_filename = base_filename_for_feed_item(feed_item)\n\n json_filename = join_path(\"{}.json\".format(base_filename))\n\n if os.path.exists(json_filename):\n # Stop here, we already have this video.\n return\n\n content = highest_quality_content(\n download_info_for_feed_item(feed_item)\n )\n\n video_content = (\n content[0]\n if isinstance(content, tuple) else\n content\n )\n\n assert video_content.media_type.has_video\n\n video_filename = join_path(\"{}.{}\".format(\n base_filename, video_content.media_type.file_type\n ))\n\n if os.path.exists(video_filename):\n # Delete the video file if it's there already.\n os.remove(video_filename)\n\n if isinstance(content, tuple):\n # Download video and audio at the same time.\n que = Queue()\n exception_queue = Queue()\n\n def download_in_queue():\n try:\n download_to_file(*que.get())\n except Exception as ex:\n exception_queue.put(ex)\n\n # TODO: It would be nice to be able to terminate the other\n # thread here.\n\n if isinstance(ex, (KeyboardInterrupt, SystemExit)):\n # Re-raise interrupts so cleanup code works.\n raise ex\n finally:\n que.task_done()\n\n temp_video_filename = tempfile.mkstemp(prefix= 
base_filename)[1]\n temp_audio_filename = tempfile.mkstemp(prefix= base_filename)[1]\n\n try:\n que.put((content[0].url, temp_video_filename))\n que.put((content[1].url, temp_audio_filename))\n\n for i in range(2):\n Thread(target= download_in_queue).start()\n\n que.join()\n\n if not exception_queue.empty():\n raise exception_queue.get()\n\n # Now use ffmpeg to join the audio and video content together.\n subprocess.check_call((\n \"ffmpeg\",\n \"-i\", temp_video_filename,\n \"-i\", temp_audio_filename,\n \"-c\", \"copy\", os.path.abspath(video_filename)\n ))\n finally:\n # Clean up temporary files.\n os.remove(temp_video_filename)\n os.remove(temp_audio_filename)\n else:\n # Download one audio-video file.\n download_to_file(video_content.url, video_filename)\n\n # Now write the JSOn file with the metadata.\n with open(json_filename, \"w\") as out_file:\n json.dump({\n \"version\": JSON_FORMAT_VERSION,\n \"content\": (\n [content[0].to_json(), content[1].to_json()]\n if isinstance(content, tuple) else\n [content.to_json()]\n ),\n \"feed_item\": feed_item.to_json(),\n }, out_file)\n\n return (video_filename, json_filename)", "def content_media_urls(*paths):\n from mezzanine.conf import settings\n media_url = settings.CONTENT_MEDIA_URL.strip(\"/\")\n return [\"/%s/%s\" % (media_url, path) for path in paths]", "def download_file():\n for lines in urls:\n try:\n req.urlretrieve(lines, '{0}/{1}'.format(folder_path, lines.split('/')[-1]))\n time.sleep(1)\n print ('File - {} - downloaded successfully'.format(lines.split('/')[-1]))\n except urllib.error.HTTPError:\n print('File is missing or not reachable')\n print('Download Complete & Successful!')", "def downloadDatasets(datasets: Iterable) -> Generator[tuple, None, None]:\n\n for ds in datasets:\n with urllib.request.urlopen(ds) as response:\n\n with tempfile.NamedTemporaryFile(delete=False) as tmp_file:\n shutil.copyfileobj(response, tmp_file)\n\n yield (response.url, tmp_file.name)", "def get_media(self, url, out_filename=None, raw_data=False):\n if not raw_data:\n if not out_filename:\n out_filename = os.path.join(settings.BW_MMS_DIRECTORY,\n url.split('/')[-1])\n\n if not os.path.isdir(os.path.dirname(out_filename)):\n raise ValueError('Invalid output directory: {} - '\n 'unable to download MMS'.\n format(os.path.dirname(out_filename)))\n\n if os.path.isfile(out_filename):\n logging.info('filename {}, already exists - will be '\n 'overwritten.....'.format(out_filename))\n\n try:\n resp = requests.get(url, auth=(self.token, self.secret))\n except requests.exceptions.RequestException as e:\n logging.info('Error while fetching media: {}'.format(e))\n return\n\n if resp.status_code == requests.codes.ok:\n try:\n if raw_data:\n return resp.content\n else:\n with open(out_filename, 'wb') as fd:\n fd.write(resp.content)\n\n return out_filename\n except Exception as e:\n logging.info('Error: {} while writing file: {}'.\n format(e, out_filename))\n return\n\n logging.info('Invalid URI or an error occured, response: {}, '\n 'response content: {}'.format(resp.status_code,\n resp.text))", "def download_image_urls(\n urls_filename: Union[Path, str],\n synsets: List[str],\n max_concurrent: int = 50,\n rewrite: bool = False\n) -> Dict[str, Optional[List[str]]]:\n print(\"Downloading image urls.\")\n synsets_to_urls = asyncio.run(_download_image_urls(urls_filename, synsets, max_concurrent, rewrite))\n return synsets_to_urls", "def upload_media_to_bandwidth(media_files):\n for filename in media_files:\n with open(filename, \"rb\") as f:\n file_content = 
f.read()\n try:\n ##Note: The filename is doubling as the media id##\n response = messaging_client.upload_media(MESSAGING_ACCOUNT_ID, filename, str(len(file_content)), body=file_content)\n except Exception as e:\n print(e)", "def download(query, destination='', max_items=None):\n destination = os.path.join(destination, query)\n eol_id = search(query)\n urls = []\n for idx, url in enumerate(get_images(eol_id)):\n filepath = os.path.join(destination, str(idx))\n data.download_image(url, filepath)\n print(idx)\n if max_items and idx >= max_items:\n break", "def download_tracks(client, tracks, num_tracks=sys.maxsize, downloadable=False,\n folders=False, custom_path='', id3_extras={}):\n\n filenames = []\n\n for i, track in enumerate(tracks):\n\n # \"Track\" and \"Resource\" objects are actually different,\n # even though they're the same.\n if isinstance(track, soundcloud.resource.Resource):\n\n try:\n t_track = {}\n t_track['downloadable'] = track.downloadable\n t_track['streamable'] = track.streamable\n t_track['title'] = track.title\n t_track['user'] = {'username': track.user['username']}\n t_track['release_year'] = track.release\n t_track['genre'] = track.genre\n t_track['artwork_url'] = track.artwork_url\n if track.downloadable:\n t_track['stream_url'] = track.download_url\n else:\n if downloadable:\n puts_safe(colored.red(\"Skipping\") + colored.white(\": \" + track.title))\n continue\n if hasattr(track, 'stream_url'):\n t_track['stream_url'] = track.stream_url\n\n track = t_track\n except Exception as e:\n puts_safe(colored.white(track.title) + colored.red(' is not downloadable.'))\n continue\n\n if i > num_tracks - 1:\n continue\n try:\n if not track.get('stream_url', False):\n puts_safe(colored.white(track['title']) + colored.red(' is not downloadable.'))\n continue\n else:\n track_artist = sanitize_filename(track['user']['username'])\n track_title = sanitize_filename(track['title'])\n track_filename = track_artist + ' - ' + track_title + '.mp3'\n\n if folders:\n track_artist_path = join(custom_path, track_artist)\n if not exists(track_artist_path):\n mkdir(track_artist_path)\n track_filename = join(track_artist_path, track_filename)\n else:\n track_filename = join(custom_path, track_filename)\n\n if exists(track_filename):\n puts_safe(colored.yellow(\"Track already downloaded: \") + colored.white(track_title))\n continue\n\n puts_safe(colored.green(\"Downloading\") + colored.white(\": \" + track['title']))\n\n if track.get('direct', False):\n location = track['stream_url']\n else:\n stream = client.get(track['stream_url'], allow_redirects=False, limit=200)\n if hasattr(stream, 'location'):\n location = stream.location\n else:\n location = stream.url\n\n filename = download_file(location, track_filename)\n tagged = tag_file(filename,\n artist=track['user']['username'],\n title=track['title'],\n year=track['release_year'],\n genre=track['genre'],\n album=id3_extras.get('album', None),\n artwork_url=track['artwork_url'])\n\n if not tagged:\n wav_filename = filename[:-3] + 'wav'\n os.rename(filename, wav_filename)\n filename = wav_filename\n\n filenames.append(filename)\n except Exception as e:\n puts_safe(colored.red(\"Problem downloading \") + colored.white(track['title']))\n puts_safe(str(e))\n\n return filenames", "def kegg_download_manager_synchronous(list_of_ids, wait=1):\n urls = ['http://rest.kegg.jp/get/%s' % '+'.join(chunk) for chunk in chunks(list(list_of_ids), 10)]\n num_urls = len(urls)\n print(f\"Total urls to download: {num_urls}. 
Progress will be shown below.\")\n results = []\n for url in tqdm(urls):\n results.append(download_synchronous(url))\n time.sleep(wait)\n\n return [raw_record for raw_records in results for raw_record in raw_records.split('///')[:-1]]", "def get_and_download(api, path, num_tweets=25, profile=\"@hakeemangulu\", admin=False):\n return downloader(get_media(api, num_tweets, profile, admin), path)", "async def _remove_local_media_from_disk(\n self, media_ids: List[str]\n ) -> Tuple[List[str], int]:\n removed_media = []\n for media_id in media_ids:\n logger.info(\"Deleting media with ID '%s'\", media_id)\n full_path = self.filepaths.local_media_filepath(media_id)\n try:\n os.remove(full_path)\n except OSError as e:\n logger.warning(\"Failed to remove file: %r: %s\", full_path, e)\n if e.errno == errno.ENOENT:\n pass\n else:\n continue\n\n thumbnail_dir = self.filepaths.local_media_thumbnail_dir(media_id)\n shutil.rmtree(thumbnail_dir, ignore_errors=True)\n\n await self.store.delete_remote_media(self.server_name, media_id)\n\n await self.store.delete_url_cache((media_id,))\n await self.store.delete_url_cache_media((media_id,))\n\n removed_media.append(media_id)\n\n return removed_media, len(removed_media)", "def _download_files(self, products, base_dir, cache=True):\n\n manifestArray = []\n for dataProduct in products:\n\n localPath = base_dir + \"/\" + dataProduct['obs_collection'] + \"/\" + dataProduct['obs_id']\n dataUrl = self._MAST_DOWNLOAD_URL + \"?uri=\" + dataProduct[\"dataURI\"]\n\n if not os.path.exists(localPath):\n os.makedirs(localPath)\n\n localPath += '/' + dataProduct['productFilename']\n\n status = \"COMPLETE\"\n msg = None\n url = None\n\n try:\n if self._boto3 is not None and fpl.has_path(dataProduct):\n try:\n self._download_from_cloud(dataProduct, localPath, cache)\n except Exception as ex:\n log.exception(\"Error pulling from S3 bucket: %s\" % ex)\n log.warn(\"Falling back to mast download...\")\n self._download_file(dataUrl, localPath, cache=cache, head_safe=True)\n else:\n self._download_file(dataUrl, localPath, cache=cache, head_safe=True)\n\n # check if file exists also this is where would perform md5,\n # and also check the filesize if the database reliably reported file sizes\n if not os.path.isfile(localPath):\n status = \"ERROR\"\n msg = \"File was not downloaded\"\n url = dataUrl\n\n except HTTPError as err:\n status = \"ERROR\"\n msg = \"HTTPError: {0}\".format(err)\n url = dataUrl\n\n manifestArray.append([localPath, status, msg, url])\n\n manifest = Table(rows=manifestArray, names=('Local Path', 'Status', 'Message', \"URL\"))\n\n return manifest", "def download_images(img_urls, dest_dir):\n # +++your code here+++\n (errcode, statusmsg) = check_create_dir(dest_dir)\n if errcode:\n print statusmsg\n sys.exit(errcode)\n else: print statusmsg\n # retrieve images and generate html code for files\n html_str = '<html>\\n<body>\\n' # opening html file tags\n i = 0\n for img in img_urls:\n img_filename = 'img' + str(i)\n full_filepath = os.path.join(dest_dir, img_filename) \n print 'Retrievieng ' + img + ' to ' + full_filepath + ' file..'\n urllib.urlretrieve(img, full_filepath)\n html_str += '<img src=\\\"' + img_filename + '\\\">'\n i += 1\n html_str += '\\n</html>\\n</body>' # closing html file tags\n # create html file\n html_filename = os.path.join(dest_dir, 'index.html')\n f = open(html_filename, 'w')\n f.write(html_str) \n f.close()\n print 'File ' + html_filename + ' was created.'", "def _download_images(self, image_urls: typing.List[str], save_dir: str) -> 
typing.List[str]:\n\n\t\timage_paths = []\n\n\t\tfor i, url in enumerate(image_urls):\n\t\t\timage = self.send_request_image(url)\n\n\t\t\timage_ext = url.split(\".\")[-1]\n\n\t\t\timage_dst_path = os.path.join(save_dir, f\"{i}.{image_ext}\")\n\n\t\t\tif image is not None:\n\t\t\t\twith open(image_dst_path, \"wb\") as fh:\n\n\t\t\t\t\t# Magic boolean which makes it work\n\t\t\t\t\timage.raw.decode_content = True\n\n\t\t\t\t\t# noinspection PyBroadException\n\n\t\t\t\t\t# Attempt to download the image from the URL\n\t\t\t\t\ttry:\n\t\t\t\t\t\tshutil.copyfileobj(image.raw, fh)\n\n\t\t\t\t\t# We should reduce the scope\n\t\t\t\t\texcept Exception:\n\t\t\t\t\t\tpass\n\n\t\t\t\t\t# We downloaded the image without any errors\n\t\t\t\t\telse:\n\t\t\t\t\t\timage_paths.append(image_dst_path)\n\n\t\treturn image_paths", "def load_captured_urls(url_list):\n if gs.local:\n result = load_captured_urls_local(url_list)\n else:\n result = load_captured_urls_aws(url_list)\n return result", "def download(self, account, code):\n\n url = Spider.BASE_URL + \"/p/%s/?taken-by=%s\" % (code, account)\n r = self.session.get(url)\n content_match = re.search(r\"<script.*?>\\s*?window._sharedData\\s*?=\\s*?({.*}).*?</script>\", r.text,\n re.MULTILINE)\n data = json.loads(content_match.group(1))\n media = data['entry_data']['PostPage'][0]['graphql']['shortcode_media']\n download_urls = []\n if media['__typename'] == 'GraphVideo': # video\n download_urls.append(media[\"video_url\"])\n if media['__typename'] == 'GraphImage': # image\n download_urls.append(media[\"display_url\"])\n if media['__typename'] == 'GraphSidecar': # slide\n nodes = media['edge_sidecar_to_children']['edges']\n for node in nodes:\n node = node['node']\n if node['is_video']:\n download_urls.append(node['video_url'])\n else:\n download_urls.append(node['display_url'])\n\n actual_download_dir = os.path.join(download_dir, account)\n if not os.path.isdir(actual_download_dir):\n os.mkdir(actual_download_dir)\n for url in download_urls:\n filename = os.path.join(actual_download_dir, url.split('/')[-1].split('?')[0])\n temp_name = filename + '.tmp'\n if os.path.isfile(filename):\n if self.spider.auto_stop:\n print('file', filename, \"already exists, exiting......\")\n sys.exit()\n print('file', filename, \"already exists, skipping\")\n else:\n print('downloading %s:' % filename)\n r = self.session.get(url, stream=True)\n content_length = int(r.headers['content-length'])\n curr = 0\n with open(temp_name, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n f.write(chunk)\n curr += 1024\n progress(curr, content_length)\n os.rename(temp_name, filename)\n self.spider.item_count += 1", "async def fetch_all(urls: List[str]) -> None:\n tasks = []\n async with ClientSession() as session:\n for url in urls:\n task = asyncio.ensure_future(fetch(url, session))\n tasks.append(task) # create list of tasks\n done = await asyncio.gather(*tasks)\n dp = pathlib.Path(\"data\")\n for url, res in done:\n fp = dp.joinpath(url[url.find(\"json\") + 5 :])\n with fp.open(\"w\") as out:\n out.write(res.decode(\"utf-8\"))", "def download_images(img_urls, dest_dir):\n if len(img_urls) > 0 :\n if not os.path.exists(dest_dir):\n os.mkdir(dest_dir)\n # save each images file name\n image_names = []\n # Iterate over each image url, downloading the image to a local file\n img_ctr = 0\n for url in img_urls :\n file_name = 'img' + str(img_ctr) + '.jpg'\n image_names.append(file_name)\n full_name = dest_dir + '/' + file_name\n print('Writing file: %s from %s' % (full_name, url) )\n # 
When calling the SSLContext constructor directly, CERT_NONE is the default.\n # Since it does not authenticate the other peer it can be insecure\n # Beyond the scope of this exercise (emoji holding my nose)\n unsecure_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)\n with urllib.request.urlopen(url, context=unsecure_context) as response, open(full_name, 'wb') as out_file:\n shutil.copyfileobj(response, out_file)\n img_ctr += 1\n return image_names", "def download(self, url: str, dest: PathLike, force: bool = False):", "def maybe_download(urls, path, filenames=None, extract=False):\n utils_io.maybe_create_dir(path)\n if not isinstance(urls, (list, tuple)):\n is_list = False\n urls = [urls]\n else:\n is_list = True\n if filenames is not None:\n if not isinstance(filenames, (list, tuple)):\n filenames = [filenames]\n if len(urls) != len(filenames):\n raise ValueError('`filenames` must have the same number of elements as `urls`.')\n result = []\n for i, url in enumerate(urls):\n if filenames is not None:\n filename = filenames[i]\n elif 'drive.google.com' in url:\n filename = _extract_google_drive_file_id(url)\n else:\n filename = url.split('/')[-1]\n if filename.endswith('?raw=true'):\n filename = filename[:-9]\n filepath = os.path.join(path, filename)\n result.append(filepath)\n if not os.path.exists(filepath):\n if 'drive.google.com' in url:\n filepath = _download_from_google_drive(url, filename, path)\n else:\n filepath = _download(url, filename, path)\n if extract:\n logging.info('Extract %s', filepath)\n if tarfile.is_tarfile(filepath):\n tarfile.open(filepath, 'r').extractall(path)\n elif zipfile.is_zipfile(filepath):\n with zipfile.ZipFile(filepath) as zfile:\n zfile.extractall(path)\n else:\n logging.info('Unknown compression type. Only .tar.gz.tar.bz2, .tar, and .zip are supported')\n if not is_list:\n return result[0]\n return result", "def download_pics(pic_urls, directory):\r\n print(\"downloading pictures...\")\r\n for url in pic_urls:\r\n name = url.split(\"/\")[-1]\r\n if len(name) >= 20:\r\n name = name[len(name)-20:]\r\n \r\n print('from:', url)\r\n pic_path = directory + name\r\n if not os.path.exists(pic_path):\r\n print(\"downloading ->\", pic_path)\r\n try:\r\n urllib.request.urlretrieve(url, pic_path)\r\n except ValueError:\r\n # 'http://' missing from link\r\n urllib.request.urlretrieve(\"http://\" + url, pic_path)\r\n except urllib.error.HTTPError:\r\n # access forbidden\r\n # ex: http://puu.sh/n2zPL/2491975ef3.jpg\r\n print(\"URL skipped due to HTTPError\", url)\r\n else:\r\n print(\"already downloaded ->\", pic_path)\r\n print(\"Downloads Finished\")", "def mediaGenerator(request):\n folder = 'content/' + request\n mediaPaths = glob(folder + '/*')\n return random.choice(mediaPaths)", "def download_images(img_urls, dest_dir):\n if not os.path.exists(dest_dir):\n # If the directory doesn't exist, create it\n os.mkdir(dest_dir)\n count = 0\n img_string = ''\n # Copies each file from the url provided to the directory provided\n for file in img_urls:\n new_filename = '{}/img{}.jpg'.format(dest_dir, count)\n print \"Retrieving {}\".format(file)\n urllib.urlretrieve(file, new_filename)\n img_string += \"<img src = 'img{}.jpg'>\".format(count)\n count += 1\n print \"Retrieved {} files\".format(count)\n # Creates an html file to display the completed image\n with open('{}/index.html'.format(dest_dir), 'w') as f:\n f.write(\n '<html>\\n<body>\\n{}\\n</body>\\n</html>'.format(img_string)\n )\n pass", "def download(self):\n\n # os.open *should* give a thread-safe way to exlusivly 
open files\n filepath = self.film\n try:\n # os.O_BINARY is only avilable and needed on windows\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY | os.O_BINARY\n except:\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY\n try:\n fd = os.open(filepath, flags)\n except:\n return\n\n try:\n response = self.session.get(self.filmurl, stream=True)\n if response.status_code == 200:\n for chunk in response.iter_content(1024):\n os.write(fd, chunk)\n except:\n # Remove partial img file if request or stream fails\n os.close(fd)\n os.remove(filepath)", "def download_videos(blink, save_dir=\"/media\"):\n blink.download_videos(save_dir, since=get_date())", "def download_files(service, file_list, out_path):\n total = len(file_list)\n for i, file_id in enumerate(file_list, 1):\n name = get_file(service, file_id)['title']\n print('Downloading {}... ({}/{}) [{}%]'.format(name, i, total,\n round(i / total * 100)))\n path = os.path.join(out_path, name)\n try:\n download_file(service, file_id, path)\n except errors.HttpError as error:\n os.remove(path) # Remove broken file\n print('Could not download file: {}'.format(error), file=sys.stderr)", "def download_urls(urls: List[str], to_dir: str, pool_size: int = 16) -> int: # pragma: no cover\n logger.debug(\"Download urls to %s: %s\", to_dir, urls)\n create_dir_if_not_exists(to_dir)\n with Pool(pool_size) as pool:\n pool.map(partial(download, to_dir=to_dir), urls)\n return len(urls)", "def download(\n urls,\n output_dir,\n *,\n existing=\"error\",\n jobs=6,\n develop_debug=False,\n authenticate=False, # Seems to work just fine for public stuff\n recursive=True,\n):\n urls = flattened([urls])\n if len(urls) > 1:\n raise NotImplementedError(\"multiple URLs not supported\")\n if not urls:\n # if no paths provided etc, we will download dandiset path\n # we are at, BUT since we are not git -- we do not even know\n # on which instance it exists! Thus ATM we would do nothing but crash\n raise NotImplementedError(\"No URLs were provided. Cannot download anything\")\n url = urls[0]\n girder_server_url, asset_type, asset_id = parse_dandi_url(url)\n\n # We could later try to \"dandi_authenticate\" if run into permission issues.\n # May be it could be not just boolean but the \"id\" to be used?\n client = girder.get_client(\n girder_server_url,\n authenticate=authenticate,\n progressbars=True, # TODO: redo all this\n )\n\n lgr.info(f\"Downloading {asset_type} with id {asset_id} from {girder_server_url}\")\n\n # there might be multiple asset_ids, e.g. if multiple files were selected etc,\n # so we will traverse all of them\n files = flatten(\n _get_asset_files(\n asset_id_, asset_type, output_dir, client, authenticate, existing, recursive\n )\n for asset_id_ in set(flattened([asset_id]))\n )\n\n Parallel(n_jobs=jobs, backend=\"threading\")(\n delayed(client.download_file)(\n file[\"id\"],\n op.join(output_dir, file[\"path\"]),\n existing=existing,\n attrs=file[\"attrs\"],\n # TODO: make it less \"fluid\" to not breed a bug where we stop verifying\n # for e.g. 
digests move\n digests={\n d: file.get(\"metadata\")[d]\n for d in metadata_digests\n if d in file.get(\"metadata\", {})\n },\n )\n for file in files\n )", "def download_engine(fcsd): #fcsd = first comic strip date\n\n url_list = get_comic_strip_url(fcsd)\n\n for url in url_list:\n session = requests.Session()\n response = session.get(url)\n download_url = get_image_comic_url(session, response)\n# download_dilbert(session, download_url)\n return download_url", "async def _download_image_urls(\n urls_filename: Union[Path, str],\n synsets: List[str],\n max_concurrent: int = 50,\n rewrite: bool = False\n) -> Dict[str, Optional[List[str]]]:\n if (not rewrite) and os.path.exists(urls_filename):\n with open(urls_filename, \"r\") as f:\n return json.load(f)\n raise NotImplementedError(\"The ImageNet site was updated and there is no longer access to lists of urls by synset.\")\n semaphore = asyncio.Semaphore(max_concurrent) # pylint: disable=unreachable\n synsets_to_urls = dict(await asyncio.gather(*[_download_urls_for_synset(synset, semaphore) for synset in synsets]))\n with open(urls_filename, \"w\") as f:\n json.dump(synsets_to_urls, f)\n print(len(synsets_to_urls))\n return synsets_to_urls", "def download_multiple(\n bucket_name: str,\n object_names: Iterable[str],\n file_paths: Iterable[str],\n show_progress: bool = constants.SHOW_PROGRESS_DEFAULT.value,\n):\n # pylint: disable=import-outside-toplevel\n from sotaque_brasileiro.utils import safe_getenv\n\n object_names = list(object_names)\n file_paths = list(file_paths)\n minio_client = Minio(\n safe_getenv(constants.MINIO_ENDPOINT.value),\n access_key=safe_getenv(constants.MINIO_ACCESS_KEY.value),\n secret_key=safe_getenv(constants.MINIO_SECRET_KEY.value),\n )\n if show_progress:\n if tqdm is None:\n raise ImportError(\n \"\"\"tqdm must be installed to show progress.\n Either install tqdm or run with show_progress=False\"\"\"\n )\n for object_name, file_path in tqdm(\n zip(object_names, file_paths),\n desc=\"Downloading audio files...\",\n total=len(object_names),\n ):\n minio_client.fget_object(bucket_name, object_name, file_path)\n tqdm.write(object_name)\n else:\n for object_name, file_path in zip(object_names, file_paths):\n minio_client.fget_object(bucket_name, object_name, file_path)", "def podcast_download(self):\r\n warnings.filterwarnings(\"ignore\", category=UnicodeWarning)\r\n now = datetime.datetime.now()\r\n\r\n for podcast_file in self.podcast_list:\r\n published, name, link, title = podcast_file\r\n if self.podcast_list != []:\r\n line_file = (published + ';' + title + ';' + name + ';' + link).encode(\"utf-8\") \r\n if line_file in open(self.download_log).read():\r\n pass\r\n else:\r\n title = unicodedata.normalize('NFKD', title).encode('ascii', 'ignore')\r\n download_folder = os.path.join('downloads', title)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n try:\r\n published = str(parser.parse(published))[:10]\r\n except IOError as error:\r\n print 'Error' + (error) + ': File - ' + str(title)\r\n download_folder = os.path.join(download_folder, published)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n namefile_unicode = link[link.rfind('/')+1:]\r\n namefile_str = unicodedata.normalize('NFKD', \r\n namefile_unicode).encode('ascii', 'ignore')\r\n namefile_str = namefile_str.decode('utf-8', 'ignore').encode(\"utf-8\")\r\n if '.mp3' in namefile_str:\r\n len_name = namefile_str.index('.mp3')\r\n elif '.MP3' in namefile_str:\r\n len_name = 
namefile_str.index('.MP3')\r\n namefile_str = namefile_str[:len_name + 4]\r\n fileoutput = os.path.join(download_folder, namefile_str)\r\n name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore')\r\n print str(published) + '; ' + name\r\n ## downlink\r\n download_file(link, fileoutput) \r\n ## tagging\r\n mp3_tagging(fileoutput, podcast_file)\r\n ## write log\r\n write_file(self.download_log, line_file)\r\n end = datetime.datetime.now()\r\n print '\\r' + 'Download Time = ' + str(end-now) + '\\r'\r\n return None", "def find_and_download_songs(kwargs):\n sponsorblock_postprocessor = []\n reference_file = kwargs[\"reference_file\"]\n files = {}\n with open(reference_file, \"r\", encoding=\"utf-8\") as file:\n for line in file:\n temp = line.split(\";\")\n name, artist, album, i = (\n temp[0],\n temp[1],\n temp[4],\n int(temp[-1].replace(\"\\n\", \"\")),\n )\n\n query = f\"{artist} - {name} Lyrics\".replace(\":\", \"\").replace('\"', \"\")\n print(f\"Initiating download for {query}.\")\n\n file_name = kwargs[\"file_name_f\"](\n name=name, artist=artist, track_num=kwargs[\"track_db\"][i].get(\"playlist_num\")\n )\n\n if kwargs[\"use_sponsorblock\"][0].lower() == \"y\":\n sponsorblock_postprocessor = [\n {\n \"key\": \"SponsorBlock\",\n \"categories\": [\"skip_non_music_sections\"],\n },\n {\n \"key\": \"ModifyChapters\",\n \"remove_sponsor_segments\": [\"music_offtopic\"],\n \"force_keyframes\": True,\n },\n ]\n save_path = kwargs[\"track_db\"][i][\"save_path\"]\n file_path = path.join(save_path, file_name)\n\n mp3file_path = f\"{file_path}.mp3\"\n\n if save_path not in files:\n path_files = set()\n files[save_path] = path_files\n else:\n path_files = files[save_path]\n\n path_files.add(f\"{file_name}.mp3\")\n\n if (\n kwargs[\"no_overwrites\"]\n and not kwargs[\"skip_mp3\"]\n and path.exists(mp3file_path)\n ):\n print(f\"File {mp3file_path} already exists, we do not overwrite it \")\n continue\n\n outtmpl = f\"{file_path}.%(ext)s\"\n ydl_opts = {\n \"proxy\": kwargs.get(\"proxy\"),\n \"default_search\": \"ytsearch\",\n \"format\": \"bestaudio/best\",\n \"outtmpl\": outtmpl,\n \"postprocessors\": sponsorblock_postprocessor,\n \"noplaylist\": True,\n \"no_color\": False,\n \"postprocessor_args\": [\n \"-metadata\",\n \"title=\" + name,\n \"-metadata\",\n \"artist=\" + artist,\n \"-metadata\",\n \"album=\" + album,\n ],\n }\n if not kwargs[\"skip_mp3\"]:\n mp3_postprocess_opts = {\n \"key\": \"FFmpegExtractAudio\",\n \"preferredcodec\": \"mp3\",\n \"preferredquality\": \"192\",\n }\n ydl_opts[\"postprocessors\"].append(mp3_postprocess_opts.copy())\n with yt_dlp.YoutubeDL(ydl_opts) as ydl:\n try:\n ydl.download([query])\n except Exception as e: # skipcq: PYL-W0703\n log.debug(e)\n print(f\"Failed to download {name}, make sure yt_dlp is up to date\")\n if not kwargs[\"skip_mp3\"]:\n set_tags(temp, mp3file_path, kwargs)\n if kwargs[\"remove_trailing_tracks\"] == \"y\":\n for save_path in files:\n for f in os.listdir(save_path):\n if f not in files[save_path]:\n print(f\"File {f} is not in the playlist anymore, we delete it\")\n os.remove(path.join(save_path, f))", "def _download_files():\n import urllib\n from os import makedirs\n from os.path import exists, join\n \n atom_list = ['Ruthenium', 'Rhenium', 'Rutherfordium', 'Radium', 'Rubidium',\n 'Radon', 'Rhodium', 'Beryllium', 'Barium', 'Bohrium', 'Bismuth',\n 'Berkelium', 'Bromine', 'Hydrogen', 'Phosphorus', 'Osmium', 'Mercury',\n 'Germanium', 'Gadolinium', 'Gallium', 'Ununbium', 'Praseodymium',\n 'Platinum', 'Plutonium', 'Carbon', 
'Lead', 'Protactinium', 'Palladium',\n 'Xenon', 'Polonium', 'Promethium', 'Hassium',\n 'Holmium', 'Hafnium', 'Molybdenum', 'Helium', 'Mendelevium', 'Magnesium',\n 'Potassium', 'Manganese', 'Oxygen', 'Meitnerium', 'Sulfur', 'Tungsten',\n 'Zinc', 'Europium', 'Einsteinium', 'Erbium', 'Nickel', 'Nobelium',\n 'Sodium', 'Niobium', 'Neodymium', 'Neon', 'Neptunium', 'Francium', 'Iron',\n 'Fermium', 'Boron', 'Fluorine', 'Strontium', 'Nitrogen', 'Krypton',\n 'Silicon', 'Tin', 'Samarium', 'Vanadium', 'Scandium', 'Antimony',\n 'Seaborgium', 'Selenium', 'Cobalt', 'Curium', 'Chlorine', 'Calcium',\n 'Californium', 'Cerium', 'Cadmium', 'Thulium', 'Caesium', 'Chromium',\n 'Copper', 'Lanthanum', 'Lithium', 'Thallium', 'Lutetium', 'Lawrencium',\n 'Thorium', 'Titanium', 'Tellurium', 'Terbium', 'Technetium', 'Tantalum',\n 'Ytterbium', 'Dubnium', 'Zirconium', 'Dysprosium', 'Iodine', 'Uranium',\n 'Yttrium', 'Actinium', 'Silver', 'Iridium', 'Americium', 'Aluminium',\n 'Arsenic', 'Argon', 'Gold', 'Astatine', 'Indium', 'Darmstadtium', 'Copernicium']\n\n if not exists(\"elements\"): makedirs(\"elements\")\n for name in atom_list: \n file = urllib.urlopen(\"http://www.webelements.com/{0}\".format(name.lower()))\n string = file.read()\n file.close()\n with open(join(\"elements\", name), \"w\") as out: out.write(string)\n file = urllib.urlopen(\"http://www.webelements.com/{0}/atoms.html\".format(name.lower()))\n string = file.read()\n file.close()\n with open(join(\"elements\", name + \"_atoms.html\"), \"w\") as out: out.write(string)\n file = urllib.urlopen( \"http://www.webelements.com/{0}/electronegativity.html\"\\\n .format(name.lower()))\n string = file.read()\n file.close()\n with open(join(\"elements\", name + \"_electronegativity.html\"), \"w\") as out: out.write(string)\n file = urllib.urlopen( \"http://www.webelements.com/{0}/atom_sizes.html\"\\\n .format(name.lower()))\n string = file.read()\n file.close()\n with open(join(\"elements\", name + \"_atom_sizes.html\"), \"w\") as out: out.write(string)\n file = urllib.urlopen( \"http://www.webelements.com/{0}/thermochemistry.html\"\\\n .format(name.lower()))\n string = file.read()\n file.close()\n with open(join(\"elements\", name + \"_thermochemistry.html\"), \"w\") as out: out.write(string)\n file = urllib.urlopen( \"http://www.webelements.com/{0}/physics.html\"\\\n .format(name.lower()))\n string = file.read()\n file.close()\n with open(join(\"elements\", name + \"_physics.html\"), \"w\") as out: out.write(string)", "def download_images(urlList):\n fileNumber = 1;\n fileName = \"\"\n\n # urlList[0] is just titles, so we start at 1\n for url in urlList[1:]:\n sys.stdout.write(\"\\rFile number %i of %i \" % (fileNumber+1, len(urlList)))\n\n sys.stdout.flush()\n\n try:\n fileName = str(fileNumber) + \".png\"\n # Download the file from `url` and save it locally under `fileName`:\n # I append png to the end of the file to \"make it\" png, but there's definitely a better way\n with urllib.request.urlopen(url) as response, open(fileName, 'wb') as out_file:\n shutil.copyfileobj(response, out_file)\n except urllib.error.HTTPError:\n sys.stdout.flush()\n print(\"\\r %s is not a downloadable image. 
Skipping to next url...\" % url)\n \n fileNumber += 1;\n\n sys.stdout.write(\"\\r\\nDone!\")\n sys.stdout.flush()\n sys.stdout.write(\"\\r\\n\")", "def download_models_and_data():\n\n for file in DATA_FILES:\n download_file(file[\"url\"], file[\"path\"])", "def get_downloadable_data(url_list):\n downloadable_data_list = []\n for url in url_list:\n soup = visit_homepage(url)\n for link in soup.find_all(class_='resource-url-analytics'):\n downloadable_data_list.append(link['href'])\n return downloadable_data_list", "def find_URLs(directory, options):\n\n files = os.listdir(directory)\n filtered_files = []\n files_for_download = []\n for item in files:\n if item.endswith(\".json\"):\n filtered_files.append(item)\n\n for item in filtered_files:\n file_path = os.path.join(directory, item)\n\n with open(file_path, \"r\") as json_file:\n payload = json.load(json_file)\n for message in payload:\n if (\"subtype\" in message\n and message.get(\"subtype\") == \"file_share\"):\n\n download_URL = message.get(\"file\").get(\"url_download\")\n\n if options.remote_name:\n download_filename = message.get(\"file\").get(\"id\")\n else:\n download_filename = message.get(\"file\").get(\"name\")\n if download_filename.startswith(\"-.\"):\n download_filename = download_filename.lstrip(\"-\")\n download_filename = \"{}{}\".format(\n message.get(\"file\").get(\"id\"),\n download_filename)\n\n files_for_download.append(\n (download_filename, download_URL))\n\n download_URLs(files_for_download, directory)", "def copy_media(items, dest):\n for file in items:\n filename = os.path.basename(file)\n copyfile(file, dest + '\\\\' + filename)", "def download_all_videos(self, dl_limit=10):\r\n counter = dl_limit\r\n self.video_link_title_keylist = self.video_link_title_dict.keys()\r\n music = []\r\n for title in self.video_link_title_keylist:\r\n try:\r\n title = title.encode('ascii')\r\n # print 'downloading title with counter: ', counter\r\n if not counter:\r\n return random.choice(music) #some margin for randomness, first result isnt always accurate, (gets slower...)\r\n print 'downloading title: ', title\r\n\r\n self.add_result(\"Dowloaded_Song\", title)\r\n\r\n path = self.download_video(self.video_link_title_dict[title], title)\r\n music.append(path)\r\n counter = counter - 1\r\n except:\r\n print \"illegal characters in youtube name\" + title + \"\\n trying next result\"", "def url_files_download(url, ext, outdir, check_exist=False, create_dir=False,\n remove_files=False, bar_opt='tqdm'):\n file_msg = fd.Program_Msg(__file__)\n ## Checking for file type\n # 'URL'\n if not isinstance(url, str):\n msg = '{0} `url` ({1}) is not a valid type. It must be a STRING!'\n msg = msg.format(file_msg, type(url))\n raise TypeError(msg)\n # File extension\n if not isinstance(ext, str):\n msg = '{0} `ext` ({1}) is not a valid type. It must be a STRING!'\n msg = msg.format(file_msg, type(ext))\n raise TypeError(msg)\n # Output directory\n if not isinstance(outdir, str):\n msg = '{0} `outdir` ({1}) is not a valid type. 
It must be a STRING!'\n msg = msg.format(file_msg, type(outdir))\n raise TypeError(msg)\n # `check_exist`\n if not (isinstance(check_exist, bool)):\n msg = '`check_exist` ({0}) must be of `boolean` type!'.format(\n type(check_exist))\n raise TypeError(msg)\n # `create_dir`\n if not (isinstance(create_dir, bool)):\n msg = '`create_dir` ({0}) must be of `boolean` type!'.format(\n type(create_dir))\n raise TypeError(msg)\n # `bar` - Type\n if not (isinstance(bar_opt, str)):\n msg = '`bar_opt` ({0}) must be of `boolean` type!'.format(\n type(bar_opt))\n raise TypeError(msg)\n # Progress bar - Value\n if not (bar_opt in ['tqdm', 'native']):\n msg = '{0} `bar_opt` ({1}) is not a valid option! Exiting'\n msg = msg.format(file_msg, bar_opt)\n raise LSSUtils_Error(msg)\n ##\n ## List of files in the URL\n files_arr = url_file_list(url, ext)\n # Creating directory\n if create_dir:\n cfutils.Path_Folder(outdir)\n # Check for its existence\n if check_exist:\n if not (os.path.exists(outdir)):\n msg = '`outdir` ({0}) was not found!'.format(\n outdir)\n raise FileNotFoundError(msg)\n ##\n ## Downloading files to output directory\n if len(files_arr) > 0:\n if (bar_opt == 'tqdm'):\n tqdm_desc = 'Downloading files: '\n for file_ii in tqdm(files_arr, desc=tqdm_desc):\n # Local file\n file_ii_local = os.path.join( outdir,\n os.path.basename(file_ii))\n # Checking if local file exists\n if os.path.exists(file_ii_local):\n if remove_files:\n os.remove(file_ii_local)\n wget_opt = True\n else:\n wget_opt = False\n else:\n wget_opt = True\n ##\n ## Only downloading if necessary\n if wget_opt:\n wget.download(file_ii, out=outdir, bar=None)\n elif (bar_opt == 'native'):\n for file_ii in files_arr:\n # Local file\n file_ii_local = os.path.join( outdir,\n os.path.basename(file_ii))\n # Checking if local file exists\n if os.path.exists(file_ii_local):\n if remove_files:\n os.remove(file_ii_local)\n wget_opt = True\n else:\n wget_opt = False\n else:\n wget_opt = True\n ##\n ## Only downloading if necessary\n if wget_opt:\n wget.download(file_ii, out=outdir)\n else:\n msg = '{0} Number of files is ZERO!'.format(file_msg)\n print(msg)", "def get_files(self):\n # self.folder= +str(int(time.time()))\n if not os.path.exists(self.folder):\n os.mkdir(self.folder)\n while len(self.url_queue): # If we have URLs to crawl - we crawl\n href = self.url_queue.popleft() # We grab a URL from the left of the list\n filename = href.rsplit('/', 1)[-1]\n print(\"Downloading %s to %s...\" % (href, filename))\n fullname = os.path.join(self.folder, filename)\n urlretrieve(href, fullname)\n self.xlfnames.append(filename)", "def convert_async(paths, args):\n files = []\n with Pool() as pool:\n st = time.perf_counter()\n print(f\"\\n[{colored('+','green')}] Extraction of audio started ...\")\n p = pool.starmap_async(extract, product(paths, [args.q]), callback=files.extend)\n \n p.wait()\n print(\n f\"[{colored('+','green')}] Completed extraction of {colored(len(paths),'yellow')} file(s) in {colored(time.perf_counter()-st,'yellow')} seconds\"\n )\n return files", "def download_images(image_urls):\n fetched = []\n count = 0\n for img_url in image_urls:\n if not db.is_image_in_db(img_url):\n filename = os.path.basename(img_url)\n if not os.path.exists(cfg.PHOTO_DIR + filename):\n referer_string = web.get_referrer_string(img_url) # to trick 4walled.org\n cmd = \"wget -t {retry_count} -T {timeout} {ref} {url} -O {save}\".format(url=img_url,\n save=os.path.join(cfg.PHOTO_DIR, filename),\n ref=referer_string,\n retry_count=cfg.WGET_RET,\n 
timeout=cfg.WGET_TIMEOUT)\n print cmd\n os.system(cmd)\n fetched.append(img_url)\n count += 1\n else:\n print(\"# {0} was already fetched once...\".format(img_url))\n\n print(\"# new imgage(s): {0}\".format(count))\n return fetched", "def downloadFilesForDate(googleServices, settings, outputDir, hpwrenSource, gapMinutes, verboseLogs):\n startTimeDT = hpwrenSource['startTimeDT']\n endTimeDT = hpwrenSource['endTimeDT']\n dateDirName = '{year}{month:02d}{date:02d}'.format(year=startTimeDT.year, month=startTimeDT.month, date=startTimeDT.day)\n hpwrenSource['dateDirName'] = dateDirName\n urlPartsDate = hpwrenSource['urlParts'][:] # copy URL\n urlPartsDate.append(dateDirName)\n hpwrenSource['urlPartsDate'] = urlPartsDate\n\n timeGapDelta = datetime.timedelta(seconds = 60*gapMinutes)\n imgTimes = None\n lastQNum = 0 # 0 never matches because Q numbers start with 1\n curTimeDT = startTimeDT\n downloaded_files = []\n prevTime = None\n while curTimeDT <= endTimeDT:\n qNum = 1 + int(curTimeDT.hour/3)\n urlPartsQ = urlPartsDate[:] # copy URL\n urlPartsQ.append('Q' + str(qNum))\n if qNum != lastQNum:\n # List times of files in Q dir and cache\n useHttp = True\n imgTimes = listTimesinQ(urlPartsQ, verboseLogs)\n if not imgTimes:\n if verboseLogs:\n logging.error('No images in Q dir %s', '/'.join(urlPartsQ))\n mp4Url = getMp4Url(urlPartsDate, qNum, verboseLogs)\n if not mp4Url:\n return downloaded_files\n if outputDir != outputDirCheckOnly:\n imgTimes = getGCSMp4(googleServices, settings, hpwrenSource, qNum)\n useHttp = False\n # logging.warning('imgTimes %d %s', len(imgTimes), imgTimes)\n lastQNum = qNum\n\n if outputDir == outputDirCheckOnly:\n downloaded_files.append(outputDirCheckOnly)\n else:\n desiredTime = int(curTimeDT.timestamp())\n closestEntry = min(imgTimes, key=lambda x: abs(x['time']-desiredTime))\n closestTime = closestEntry['time']\n downloaded = None\n if closestTime != prevTime: # skip if closest timestamp is still same as previous iteration\n prevTime = closestTime\n if useHttp:\n downloaded = downloadHttpFileAtTime(outputDir, urlPartsQ, hpwrenSource['cameraID'], closestTime, verboseLogs)\n else:\n downloaded = downloadGCSFileAtTime(outputDir, closestEntry)\n if downloaded and verboseLogs:\n logging.warning('Successful download for time %s', str(datetime.datetime.fromtimestamp(closestTime)))\n if downloaded:\n downloaded_files.append(downloaded)\n\n curTimeDT += timeGapDelta\n return downloaded_files", "def get_content():\n with open(\"url_list.json\", mode=\"r\", encoding=\"utf-8\") as f:\n urls = json.load(f)\n image_url = None\n to_remove = False\n while image_url is None:\n if to_remove:\n urls.remove(to_read)\n else:\n to_remove = True\n to_read = urls[random.randrange(0, len(urls))]\n print(to_read)\n r = requests.get(to_read[1]).json()\n if to_read[0] == \"Met\":\n image_url, image_name, image_artist = met(r)\n else:\n image_url, image_name, image_artist = tate(r)\n with open(\"url_list.json\", mode=\"w\", encoding=\"utf-8\") as f:\n json.dump(urls, f)\n return to_read[0], image_url, image_name, image_artist", "def download_imgs(img_urls, outfolder):\n \n print \"Downloading %d images from: \" %len(img_urls), url\n \n for image in img_urls:\n filename = image.split('/')[-1]\n outpath = os.path.join(outfolder, filename)\n img_url = urljoin(url, image)\n try:\n urlretrieve(image, outpath)\n print img_url, \"downloaded successfully.\"\n \n except IOError:\n print \"Failed to download file:\", img_url\n pass", "def list_media(storage, filter_list):\n results = []\n total = 0\n try:\n 
for media in storage.listdir('.')[1]:\n if not media.endswith('/') and media != \"\":\n location = storage.url(media).split('?')[0]\n total += 1\n if not filter_list or location in filter_list:\n results += [\n {'location': location,\n 'tags': MediaTag.objects.filter(\n location=location).values_list(\n 'tag', flat=True)\n }]\n except OSError:\n LOGGER.exception(\n \"Unable to list objects in %s.\", storage.__class__.__name__)\n except S3ResponseError:\n LOGGER.exception(\n \"Unable to list objects in %s bucket.\", storage.bucket_name)\n return {'count': total, 'results': results}", "def download_pictures(recent_seach_tweets):\n # Downloading pictures\n print('%s - Downloading %d tweets' % (datetime.datetime.now().strftime('%d/%m/%Y - %H:%M'), len(recent_seach_tweets)))\n for tw in recent_seach_tweets:\n img_url = tw['images'][0]\n filename = tw['text'][:tw['text'].index(\"#\")-1].lower().replace(' ','_')\n filename = \"./downloaded_pics/%s.jpg\" % filename\n urllib.request.urlretrieve(img_url, filename)", "def download_gaia(dest_path, files):\n for f in files:\n # Get URL and file name\n file_url, file_name = f\n print(file_name)\n file_path = os.path.join(path, file_name)\n # Download data (if not already)\n if (not os.path.exists(file_path) and not os.path.isfile(file_path)):\n print(\"Downloading {}...\".format(file_name))\n response = urllib.request.urlopen(file_url)\n data = response.read()\n tar_gz = open(file_path, 'wb')\n tar_gz.write(data)\n tar_gz.close()\n # Be nice\n sleep(1)", "def filter(self):\n for f in FileHelper.ALL_PATHS:\n media_obj = MediaObject(FileHelper.get_url(f), FileHelper.get_title(f), FileHelper.get_media_type(f), FileHelper.get_icon(f), FileHelper.get_duration(f), FileHelper.get_ctype(f))\n _id = media_obj.uuid\n if media_obj.media_type == \"image\":\n DB.IMAGES[_id] = media_obj\n elif media_obj.media_type == \"audio\":\n DB.MUSIC[_id] = media_obj\n elif media_obj.media_type == \"video\":\n DB.VIDEOS[_id] = media_obj\n else:\n print \"File '%s' doesn't play nice.\" % (f)", "def download_images(img_urls, dest_dir):\n # Creating the directory if the directory does not already exist\n if not os.path.exists(str(dest_dir)):\n os.mkdir(dest_dir)\n print ('Retrieving...')\n with open(str(dest_dir) + '/index.html', 'w') as f:\n f.write(\"<html>\\n<body>\\n\")\n for index, url in enumerate(img_urls):\n img_name = 'img' + str(index + 1)\n urllib.urlretrieve(\"https://code.google.com\" + url, filename=str(dest_dir) + '/'\n + img_name +'.jpg')\n print ('Downloaded ' + url[-10:] + \": \" + \\\n str(index + 1) + \" images downloaded\")\n\n f.write(\"<img src=\" + '\"' + img_name +\".jpg\" +'\">')\n f.write(\"\\n</html>\\n</body>\")\n print ('Download Complete!')\n pass", "def download_video(url, fn):\n start_time = time.time()\n\n # Sorry: This is terrible code, but I'm kind of throwing it\n # together as I discover more about it.\n print ' Downloading {0} to {1}'.format(url, fn)\n\n resp = requests.get(url)\n if resp.status_code != 200:\n print ' GAH! MY EYES! {0} kicked up {1}'.format(url, resp.status_code)\n return\n\n rss_url_m = re.search(r'\"(/rss/flash/\\d+)\"', resp.content)\n rss_url = 'http://blip.tv' + rss_url_m.group(0).strip('\"')\n resp = requests.get(rss_url)\n\n rss_content = resp.content\n\n for ending in POSSIBLE_ENDINGS:\n regex = r'\"http://blip.tv[^\"]+?' 
+ ending + '\"'\n\n download_m = re.search(regex, rss_content)\n if not download_m:\n print ' No {0} url found'.format(ending)\n continue\n\n download_url = download_m.group(0).strip('\"')\n print ' Attempting to download {0}'.format(download_url)\n\n try:\n resp = requests.get(download_url, stream=True)\n print ' Downloading {0}'.format(download_url)\n if resp.status_code == 200:\n total_length = int(resp.headers['content-length'])\n\n if os.path.exists(fn + ending) and file_size(fn + ending) == total_length:\n print ' Already downloaded.'\n return\n\n with open(fn + ending, 'w') as fp:\n total_downloaded = 0\n\n tic_chunk = total_downloaded\n tic = time.time()\n for chunk in resp.iter_content(chunk_size=1024):\n if chunk:\n fp.write(chunk)\n fp.flush()\n tic_chunk += len(chunk)\n total_downloaded += len(chunk)\n\n if time.time() - tic > 1:\n with TERM.location(x=0):\n line = ' {0} {1}kbps'.format(\n format_downloaded(total_downloaded, total_length),\n int(tic_chunk / (time.time() - tic) / 1000))\n sys.stdout.write(line + TERM.clear_eol)\n sys.stdout.flush()\n tic_chunk = 0\n tic = time.time()\n print ''\n\n print ' Done! {0} {1}mb {2}'.format(\n fn + ending,\n int(total_length / 1000000.0),\n format_duration(time.time() - start_time))\n return\n\n else:\n print ' HTTP{0}! GAH! SPUTTER!'.format(resp.status_code)\n\n except requests.exceptions.ConnectionError as exc:\n print ' CONNECTIONERROR! GAH! SPUTTER! {0}'.format(exc)\n\n print ' SO MANY FAILURES!'\n raise NoDownloadMeNoLikeyException()", "def download(self, destination: str = None) -> list:\n return [f.download(destination=destination) for f in self.files]", "def download_all(self):\r\n download_path = os.path.join(self.download_path, self.username)\r\n already_downloaded = []\r\n successful_downloads = []\r\n failed_downloads = []\r\n if not os.path.exists(download_path):\r\n os.makedirs(download_path)\r\n elif not os.path.isdir(download_path):\r\n raise NotADirectoryError(\"Download path is not a directory: \" + download_path)\r\n elif self.skip_downloaded:\r\n for item in os.listdir(download_path):\r\n file_path = str(os.path.join(download_path, item))\r\n if os.path.isfile(file_path):\r\n parsed_file = self._parse_file_name(os.path.basename(file_path))\r\n if parsed_file is not None:\r\n already_downloaded.append(parsed_file[\"id\"])\r\n for index, item in enumerate(self.videos):\r\n # Don't download it if the user has set that option, and the tiktok already exists on the disk\r\n if item[\"id\"] in already_downloaded:\r\n logger.info(\"Already downloaded video with id: \" + item[\"id\"])\r\n continue\r\n file_name = self._format_file_name(item[\"createTime\"], item[\"id\"])\r\n file_path = os.path.join(download_path, file_name)\r\n logger.info(\"Downloading video: \" + file_name + \" (\" + str(index + 1) + \"/\" + str(len(self.videos)) + \")\")\r\n video_url = self._format_video_url(item)\r\n success = self.download_video(file_path, video_url, item[\"createTime\"])\r\n if success:\r\n successful_downloads.append(video_url)\r\n else:\r\n failed_downloads.append(video_url)\r\n sleep_time = random.uniform(self.sleep_min, self.sleep_max)\r\n logger.info(\"Sleeping for: \" + str(sleep_time) + \" seconds\")\r\n sleep(sleep_time)\r\n logger.info(\"Processed all {} videos\".format(self.video_count))\r\n logger.debug(\"Fallback counter: \" + str(self.fallback_counter))\r\n logger.debug(\"YouTube-dl DownloadError counter: \" + str(self.fallback_counter))\r\n logger.debug(\"Other error counter: \" + str(self.other_error_counter))\r\n 
return {\"successful_downloads\": successful_downloads,\r\n \"failed_downloads\": failed_downloads,\r\n \"skipped_downloads\": already_downloaded}", "def get_url():\r\n songs = []\r\n with open(FILE_CONTAINING_URLS) as f:\r\n for line in f:\r\n if not line.startswith(\"#\") and is_web_url(line):\r\n songs.append(line)\r\n\r\n # pick a random song and store it in song variable\r\n song = random.choice(songs)\r\n\r\n url_attempts = []\r\n\r\n for x in range(RETRY_COUNT):\r\n response = requests.get(song)\r\n # check if URL is valid and also make sure video is available\r\n if response.ok and video_is_available(song):\r\n return song\r\n # store failed URL\r\n url_attempts.append(song)\r\n # choose new random song\r\n song = random.choice(songs)\r\n\r\n print(\"Could not access video URLs. Please check network connection\")\r\n print(\"Tried the following URLs before failing:\")\r\n print(\"\\n\".join(url_attempts))\r\n exit(1)", "def download_files(valid_links: list) -> list:\n print('Starting process...')\n print('')\n\n year_month_filepath = []\n\n for link_info in valid_links:\n\n # Get file extension\n extension = link_info[0].split('.')[-1]\n\n # Link to download\n link_to_download = link_info[0]\n\n # Get month\n month = link_info[1]\n\n # Get year\n year = link_info[2]\n\n # Create a standard filename to save\n file_name = f'{year}-{month}.{extension}'\n\n print(f'Downloading... {link_to_download} Saving... {file_name}')\n\n # Create a link to save into ./file directory\n link_to_save = f'./file/{file_name}'\n\n # Download file and save it\n wget.download(link_to_download, out=link_to_save)\n\n\n # Special treatment to zip and xlsx file\n if extension == 'zip':\n\n # Get right link to save (.csv) from zip function\n link_to_save = get_file_into_zip(link_to_save)\n\n elif extension == 'xlsx':\n # Get right link to save (.csv) from xlsx function\n link_to_save = excel2csv(link_to_save)\n\n # Include the tuple into a list\n year_month_filepath.append((year, month, link_to_save))\n\n print('Finishing process...')\n\n return year_month_filepath", "def _download_audio_files(self, records, target_path):\n\n for record in logger.progress(records):\n audio_folder = os.path.join(target_path, 'audio', record[2])\n audio_file = os.path.join(audio_folder, '{}.mp3'.format(record[0]))\n os.makedirs(audio_folder, exist_ok=True)\n\n download_url = 'https://audio.tatoeba.org/sentences/{}/{}.mp3'.format(record[2], record[0])\n download.download_file(download_url, audio_file)", "def run_download_flickr_video(queue, save_root, force_overwrite):\n\n while True:\n url = queue.get()\n try:\n save_path = download_flickr_video(url, save_root, force_overwrite)\n log('print', f'Saved video to {save_path}')\n except HTTPError as e:\n if e.code == 404:\n log('warn', f'HTTP error 404 returned for URL {url}')\n except FileExistsError as e:\n log('warn', f'File already exists for URL {url}, skipping')\n\n queue.task_done()", "def download_all(self):\r\n # Fetch website list\r\n self.fetch_website_list()\r\n\r\n for website in self.website_list:\r\n self.download(website['id'])", "def download_images(urls: List[str] = None):\n are_images = [is_url_image(url) for url in urls]\n if not are_images[: sum(are_images)]:\n raise NotImplementedError('Only images are supported')\n downloads = [requests.get(url) for url in urls]\n images = [load_image(io.BytesIO(download.content)) for download in downloads]\n return images", "def getMediaFiles(path):\n fileList = getMediaFileList(path)\n # dirList = getDirectoryList(path)\n\n # 
results = map(getMediaFiles, dirList)\n\n # for result in results:\n # fileList = fileList + result\n\n return fileList", "def download_urls(urls_filename, reverse=True, log_filename='youtube-playlist-download.log'):\n urls_file = open(urls_filename)\n url_lines = urls_file.read().splitlines();\n urls_file.close()\n if reverse:\n url_lines = reversed(url_lines)\n\n logfile = open(log_filename, 'w')\n logfile.write('\\n' + str(datetime.now()) + '\\n')\n logfile.flush()\n\n # use -f best to avoid merging and just get the best overall format (saves time)\n youtube_cmd_with_args = ['youtube-dl', '--ignore-errors', '--ignore-config', '--write-info-json', '--no-mtime', '-f best', '-o ' + get_full_filename()]\n\n try:\n for line in url_lines:\n url_id, title = line.split('\\t')[:2]\n print('Downloading video: \"' + title + '\" with id \"' + url_id + '\"')\n run(youtube_cmd_with_args + [YT_PREFIX + url_id])\n print('Done downloading url:', url_id)\n notify('Done downloading url:' + url_id)\n logfile.write('Downloaded\\t' + url_id + '\\t' + title + '\\n')\n logfile.flush()\n except KeyboardInterrupt as e:\n print(\"Exiting\")\n logfile.close()\n\n logfile.close()", "def download_images(img_urls, dest_dir, base_url=\"http://code.google.com\"):\n create_dir(dest_dir)\n img_tags = fetch_call(img_urls, dest_dir)\n create_html(dest_dir, img_tags)", "def download_artifacts(token, artifacts):\n zipfiles = []\n for a in artifacts:\n updated_at = datetime.fromisoformat(a[\"updated_at\"][:-1])\n datename = a[\"name\"]+updated_at.strftime(\"-%Y-%m-%d\")\n filename = datename + \".zip\"\n if os.path.exists(filename):\n zipfiles.append((a[\"name\"], datename, filename))\n print(f\"{filename} skipped, already downloaded\")\n continue\n\n print(f\"Fetching {filename}\")\n ok = run_curl(token, a[\"archive_download_url\"], filename)\n if not ok:\n continue\n\n zipfiles.append((a[\"name\"], datename, filename))\n\n return zipfiles", "def download_images(main_keyword, supplemented_keywords, download_dir): \n image_links = set()\n print('Process {0} Main keyword: {1}'.format(os.getpid(), main_keyword))\n\n # create a directory for a main keyword\n img_dir = download_dir + main_keyword + '/'\n if not os.path.exists(img_dir):\n os.makedirs(img_dir)\n\n for j in range(len(supplemented_keywords)):\n print('Process {0} supplemented keyword: {1}'.format(os.getpid(), supplemented_keywords[j]))\n search_query = quote(main_keyword + ' ' + supplemented_keywords[j])\n # url = 'https://www.google.com/search?q=' + search_query + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'\n url = 'https://www.google.com/search?q=' + search_query + '&source=lnms&tbm=isch'\n image_links = image_links.union(parse_page(url))\n print('Process {0} get {1} links so far'.format(os.getpid(), len(image_links)))\n time.sleep(2)\n print (\"Process {0} get totally {1} links\".format(os.getpid(), len(image_links)))\n\n print (\"Start downloading...\")\n count = 1\n for link in image_links:\n try:\n req = urllib.request.Request(link, headers = {\"User-Agent\": generate_user_agent()})\n response = urllib.request.urlopen(req)\n data = response.read()\n file_path = img_dir + '{0}.jpg'.format(count)\n with open(file_path,'wb') as wf:\n wf.write(data)\n print('Process {0} fininsh image {1}/{2}.jpg'.format(os.getpid(), main_keyword, count))\n count += 1\n except urllib.error.URLError as e:\n logging.error('URLError while downloading image {0}\\nreason:{1}'.format(link, e.reason))\n continue\n except 
urllib.error.HTTPError as e:\n logging.error('HTTPError while downloading image {0}\\nhttp code {1}, reason:{2}'.format(link, e.code, e.reason))\n continue\n except Exception as e:\n logging.error('Unexpeted error while downloading image {0}\\nerror type:{1}, args:{2}'.format(link, type(e), e.args))\n continue\n\n print(\"Finish downloading, total {0} errors\".format(len(image_links) - count))", "def download_songs(playlist_url):\n command_string = 'youtube-dl -x --audio-format wav --postprocessor-args \"-ar 44100 -ac 1\" --output \"Songs/%(' \\\n 'title)s_%(id)s.%(ext)s\" ' + \\\n playlist_url\n args = shlex.split(command_string)\n subprocess.call(args)", "def download(self, item, save_dir='./'):\r\n try:\r\n os.makedirs(save_dir)\r\n except OSError as e:\r\n if e.errno == errno.EEXIST and os.path.isdir(save_dir):\r\n # another thread beat us to creating this dir\r\n pass\r\n else:\r\n # target dir exists as a file, or a different error\r\n raise\r\n\r\n item['url'] = item[item['type'] + 's']['standard_resolution']['url'].split('?')[0]\r\n # remove dimensions to get largest image\r\n item['url'] = re.sub(r'/s\\d{3,}x\\d{3,}/', '/', item['url']) \r\n\r\n base_name = item['url'].split('/')[-1]\r\n file_path = os.path.join(save_dir, base_name)\r\n\r\n if not os.path.isfile(file_path):\r\n\r\n with open(file_path, 'wb') as file:\r\n try:\r\n bytes = requests.get(item['url']).content\r\n except requests.exceptions.ConnectionError:\r\n\t\t\t\t\tsleep(5)\r\n\t\t\t\t\tbytes = requests.get(item['url']).content\r\n\t\t\t\t\t\r\n file.write(bytes)\r\n\r\n file_time = int(item['created_time'])\r\n os.utime(file_path, (file_time, file_time))", "def url_to_file():\n urls = argToList(demisto.getArg('urls'))\n files = []\n for i in range(len(urls)):\n fileEntry = fileResult('url_' + str(i + 1), '[InternetShortcut]\\nURL=' + str(urls[i]))\n files.append(fileEntry)\n demisto.results(files)", "def downloader(thread_num):\n tid = 'Thread ' + numprefix.format(thread_num) + ': '\n for i in range(thread_num, len(self.titles), thread_count):\n title, link = self.titles[i], self.download_urls[i]\n name = vidprefix.format(i) + ' ' + title + '.mp4'\n tries = 0\n while (not os.path.exists(name) or os.path.getsize(name) == 0) \\\n and tries <= trycount:\n if os.path.exists(name): os.remove(name)\n self.log(tid + 'Calling wget for ' + name)\n subprocess.call(['wget', '--output-document=' + name, link])\n tries += 1\n if (not os.path.exists(name) or os.path.getsize(name) == 0):\n self.log(tid + 'wget failed for ' + name)\n else:\n self.log(tid + 'wget successfully downloaded ' + name)", "def collect_attachments(self, paths_or_urls: Iterable[str]) -> List[Tuple[str, str, str, bytes]]:\n attachments = []\n same_content = [] # type: List[bytes]\n for src in paths_or_urls:\n try:\n content = self.load_file(src)\n except ImageNotFound as err:\n self.log_error(err)\n self.conditionally_raise(err)\n continue\n content_hash = hashlib.md5(content).digest()\n if content_hash in same_content:\n continue\n same_content.append(content_hash)\n maintype, subtype = self._get_mime_type(src)\n filename = os.path.basename(src)\n attachments.append((maintype, subtype, filename, content))\n return attachments", "def download_file_list(self, limit=None, test_page='https://www.google.com'):\n # test csv file parsing\n if self.file_list is None:\n raise NoFileListProvided()\n\n # test Internet connection\n try:\n urllib.request.urlopen(test_page, timeout=2)\n except urllib.request.URLError:\n raise InternetConnectionIssue()\n except:\n raise 
UnknownError()\n\n # determine whether the number of file to be downloaded is capped for test purposes\n if limit is None:\n total_file_num = self.file_num\n else:\n total_file_num = limit\n print('Total number of files to be downloaded: ' + str(total_file_num))\n\n # perform downloading\n print(\"Downloading MODIS data...\")\n for row in tqdm(range(total_file_num)):\n download_dir = self.file_list['download_dir'].iloc[row]\n file_name = self.file_list['file_name'].iloc[row]\n online_url = self.file_list['online_url'].iloc[row]\n\n # create local sub-directories\n if not os.path.isdir(download_dir):\n os.makedirs(download_dir)\n\n # check local file existence\n # CAUTION: the existence of local files, even incomplete, will preemptively stop the downloading process\n if os.path.isfile(os.path.join(download_dir, file_name)):\n self.file_list.set_value(index=row, col='status', value=1)\n else:\n try:\n HTTPresponse = urllib.request.urlretrieve(online_url, os.path.join(download_dir, file_name))\n # check remote file existence\n if 'Content-Type: application/x-hdf' in HTTPresponse[1].__str__():\n self.file_list.set_value(index=row, col='status', value=1)\n elif 'Content-Type: text/html' in HTTPresponse[1].__str__():\n os.remove(os.path.join(download_dir, file_name))\n raise RemoteFileDoesntExist()\n else:\n os.remove(os.path.join(download_dir, file_name))\n raise UnknownError()\n except RemoteFileDoesntExist:\n self.file_list.set_value(index=row, col='status', value=0)\n except:\n os.remove(os.path.join(download_dir, file_name))\n self.file_list.set_value(index=row, col='status', value=0)\n raise UnknownError()", "def run(self):\n urls_to_download = self._get_links()\n results = ThreadPool(8).imap_unordered(self._download_url, urls_to_download)\n for path in results:\n print(path)" ]
[ "0.66829646", "0.6647512", "0.6639991", "0.6638432", "0.66363764", "0.6452788", "0.6394921", "0.63525635", "0.6312523", "0.6243412", "0.6240311", "0.61974305", "0.6190884", "0.61289537", "0.6114882", "0.6058402", "0.6052626", "0.6050897", "0.59977293", "0.5993598", "0.5966962", "0.59464955", "0.5936169", "0.59230244", "0.5921858", "0.5857853", "0.57877237", "0.57627517", "0.5756762", "0.5733441", "0.5729159", "0.5710364", "0.57095695", "0.5697013", "0.5695914", "0.5690145", "0.568715", "0.56822366", "0.56497765", "0.562564", "0.5621097", "0.5607192", "0.5606514", "0.5599113", "0.5589011", "0.5558104", "0.5552949", "0.554153", "0.55375457", "0.5536406", "0.5508423", "0.55061626", "0.5495773", "0.54882574", "0.54870045", "0.547586", "0.5473203", "0.54587495", "0.54534024", "0.5451175", "0.5450461", "0.54471004", "0.5443234", "0.54426706", "0.5437294", "0.5435203", "0.54257137", "0.54228395", "0.5418807", "0.54152244", "0.54133433", "0.5404608", "0.5402751", "0.5394815", "0.53890646", "0.5382705", "0.53726804", "0.5372537", "0.5358729", "0.53572786", "0.53549266", "0.5353505", "0.53514403", "0.5340699", "0.532161", "0.531804", "0.5314537", "0.5311331", "0.5305491", "0.53051627", "0.5301175", "0.5294185", "0.52920467", "0.52850294", "0.5283764", "0.5278744", "0.5275462", "0.52749044", "0.52550703", "0.5241545" ]
0.75554264
0
Takes a list of media files and uploads them to Bandwidth. The media file names are used as the media id
def upload_media_to_bandwidth(media_files): for filename in media_files: with open(filename, "rb") as f: file_content = f.read() try: ##Note: The filename is doubling as the media id## response = messaging_client.upload_media(MESSAGING_ACCOUNT_ID, filename, str(len(file_content)), body=file_content) except Exception as e: print(e)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def upload_files(self, files):\n\n for f in files:\n self.scp.put(f, recursive=True)", "def upload(self, folder_list, files):\n current_folder_id = self.top_folder_id\n for fname in folder_list:\n current_folder_id = self._fetch_or_create_folder(fname, current_folder_id)\n for file in files:\n self._upload_detail(file, current_folder_id)", "def copy_media(items, dest):\n for file in items:\n filename = os.path.basename(file)\n copyfile(file, dest + '\\\\' + filename)", "def download_media_from_bandwidth(media_urls):\n downloaded_media_files = []\n for media_url in media_urls:\n media_id = get_media_id(media_url)\n filename = get_media_filename(media_url)\n with open(filename, \"wb\") as f:\n try:\n downloaded_media = messaging_client.get_media(MESSAGING_ACCOUNT_ID, media_id)\n f.write(downloaded_media.body)\n except Exception as e:\n print(e)\n downloaded_media_files.append(filename)\n return downloaded_media_files", "def media_file_upload(request, manifest_id):\n manifest = get_object_or_404(Manifest, id=manifest_id)\n\n manifest_files = MediaFile.objects.filter(manifest=manifest)\n total_files_count = manifest_files.count()\n files_needing_upload = manifest_files.filter(file='')\n files_needing_upload_count = files_needing_upload.count()\n\n file_to_upload = files_needing_upload.first()\n\n # If no files left to upload, mark the manifest complete and move on\n if files_needing_upload_count < 1:\n Manifest.objects.filter(id=manifest.id).update(all_media_present=True)\n return HttpResponseRedirect(reverse('manifest-view', args=(manifest.id,)))\n\n form = MediaFileForm(request.POST or None, request.FILES or None, instance=file_to_upload)\n\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(reverse('file-upload', args=(manifest.id,))) # Refresh view\n\n return render(request, 'file_manager/file_upload.html', {\n 'form': form,\n 'upload_number': total_files_count - files_needing_upload_count + 1, # Which place in order of upload e.g. 
2 of 3\n 'total_files_count': manifest_files.count(),\n 'file_to_upload': file_to_upload,\n })", "def post_wave(cnct):\n files = []\n\n if request.mimetype == 'multipart/form-data':\n for _, file in request.files.items():\n files.append((file.filename, file))\n else:\n files.append(('%s.wav' % uuid4(), request.stream))\n\n response = []\n for (name, fp) in files:\n parser = WaveParser(fp)\n try:\n audio_file = db.AudioFile.FromWaveParser(name, parser)\n cnct.add(audio_file)\n except WaveException as err:\n raise HttpError(406, str(err)) from None\n except Exception as err:\n print(err)\n raise HttpError(500) from None\n\n response.append(audio_file.info)\n\n cnct.commit()\n return {'files': response}", "def move_media(items, dest):\n for file in items:\n filename = os.path.basename(file)\n os.rename(file, dest + '\\\\' + filename)", "def testMediaUpload(self):\n self._testUpload(DefaultStorage(), 'media')\n self._testUpload(StaticStorage(), 'static')", "def upload_all(all_file_names):\n with ThreadPool(processes=int(10)) as pool:\n return pool.map(upload_file, all_file_names)", "def upload_bulk_sms_file(batch_id, file_path):\n batch = Batch.objects.get(id=batch_id)\n batch.add_messages(read_messages_from_file(file_path))\n batch.status = Batch.PENDING\n batch.save()", "async def create_upload_files(background_tasks: BackgroundTasks, files: List[UploadFile] = File(...), db: Session = Depends(get_db)):\n background_tasks.add_task(process_wrist, files)\n return {\"status\": \"success\"}", "async def create_upload_files(files: List[UploadFile] = File(...)):\n\n if len(files) > 3:\n return {\" \": {\"mode\": \"File Limit Exceeded\"}}\n \n filename = \"_temp_files_one/myfilem.wav\"\n res_json = {}\n file_counter = 0\n for upload_file in files:\n \n with open(filename, \"wb\") as file_object:\n \n file_object.write(upload_file.file.read())\n \n res_json[upload_file.filename + str(file_counter)] = predict_many(filename)\n \n os.remove(filename)\n \n return res_json", "def add_files(self,count=None):\n message_buffer =[]\n if count is None:\n count = len(self.files)\n while count:\n count -= 1\n message_buffer.append((count,base64.b64encode(self.files.pop()),0)) # required to maintain compatibility with\n if len(message_buffer) > 9:\n self.queue.write_batch(message_buffer)\n message_buffer = []\n self.queue.write_batch(message_buffer)", "def upload(media, media_data, *, additional_owners=_ELIDE,\n media_category=_ELIDE):\n binding = {'media': media, 'media_data': media_data, 'additional_owners':\n additional_owners, 'media_category': media_category}\n url = 'https://upload.twitter.com/1.1/media/upload.json'\n return _TwitterRequest('POST',\n url,\n 'rest:media',\n 'post-media-upload',\n binding)", "def post_files(self, file_list):\n self.body = None # Disable general body to be sent\n f_list = map(lambda f: (f[0], (pycurl.FORM_FILE, f[1])), file_list)\n self.curl.setopt(pycurl.HTTPPOST, f_list)", "def process_meter_upload(self, configlist):\n switches = [str(t[0]) for t in self.get_switches()]\n for swconfig in configlist: # for each\n dpid = list(swconfig.keys())[0]\n\n if dpid not in switches:\n break\n\n for flow in swconfig[dpid]:\n flow['dpid'] = dpid\n flow['operation'] = 'add'\n result = self.process_meter_message(flow)\n return 'Meters added successfully!'", "def _add_files(self, index_key, media_key,\n new_list, fundamentals):\n _index=fundamentals.get(index_key, {})\n _media=fundamentals.get(media_key, {})\n for _file in new_list:\n _data=self._item_from_index(_file, 'data', _media)\n if 
not _data:\n self.log('Failed to write file %s due to no data'%_file)\n continue\n if self._item_from_index(_file, None, _index) is None:\n _origin=self._item_from_index(_file, 'origin', _media)\n if _origin=='ringers':\n _path=self.protocolclass.RT_PATH\n elif _origin=='sounds':\n _path=self.protocolclass.SND_PATH\n elif _origin=='images':\n _path=self.protocolclass.PIC_PATH\n else:\n selg.log('File %s has unknown origin, skip!'%_file)\n continue\n _file_name=_path+'/'+_file\n try:\n self.writefile(_file_name, _data)\n except:\n self.log('Failed to write file '+_file_name)\n if __debug__:\n raise", "def handle_inbound_media_mms(to, from_, media):\n downloaded_media_files = download_media_from_bandwidth(media)\n upload_media_to_bandwidth(downloaded_media_files)\n remove_files(downloaded_media_files)\n body = MessageRequest()\n body.application_id = MESSAGING_APPLICATION_ID\n body.to = [from_]\n body.mfrom = to\n body.text = \"Rebound!\"\n #Build the media URL by taking the media ids (that doubled as the file names) and appending them to\n #the bandwidth media base url\n body.media = [BANDWIDTH_MEDIA_BASE_ENDPOINT + media_file for media_file in downloaded_media_files]\n try:\n messaging_client.create_message(MESSAGING_ACCOUNT_ID, body)\n except Exception as e:\n print(e)\n return None", "def upload_file(log_filename_list, index):\n initlog(\"begin to upload files to server!!!!!!!\") \n for filename in log_filename_list:\n ftp_server = '10.10.3.25'\n ftp_port = '21'\n remotepath = '.'\n \n ftp = FTP() \n ftp.set_debuglevel(2)\n ftp.connect(ftp_server, ftp_port)\n ftp.login('', '')\n ftp.cwd(remotepath)\n bufsize = 1024\n \n try:\n file_handler = open(filename, 'rb') \n ftp.storbinary('STOR %s' % (str(index) + '_' + os.path.basename(filename)), file_handler, bufsize)\n ftp.set_debuglevel(0)\n except Exception, e:\n initlog('failed to upload files; %s' % str(e))\n else:\n file_handler.close()\n finally:\n ftp.quit()", "def filter(self):\n for f in FileHelper.ALL_PATHS:\n media_obj = MediaObject(FileHelper.get_url(f), FileHelper.get_title(f), FileHelper.get_media_type(f), FileHelper.get_icon(f), FileHelper.get_duration(f), FileHelper.get_ctype(f))\n _id = media_obj.uuid\n if media_obj.media_type == \"image\":\n DB.IMAGES[_id] = media_obj\n elif media_obj.media_type == \"audio\":\n DB.MUSIC[_id] = media_obj\n elif media_obj.media_type == \"video\":\n DB.VIDEOS[_id] = media_obj\n else:\n print \"File '%s' doesn't play nice.\" % (f)", "def _upload_samples(self, samples):\n # Iterate over the full set of provided samples, uploading them in chunks.\n for offset in range(0, len(samples), self.upload_chunk_size):\n chunk = samples[offset:offset + self.upload_chunk_size]\n self.api.upload_samples(offset, chunk)", "def submitFiles(self):\n formData =__new__(FormData)();\n \"\"\"\n Iteate over any file sent over appending the files\n to the form data.\n \"\"\"\n i=0\n console.log(self.vue.files)\n while i < self.vue.files.length:\n file = self.vue.files[i];\n formData.append('files[' + i + ']', file);\n i+=1\n \"\"\"\n Make the request to the POST /file-drag-drop URL\n \"\"\"\n formData.append(\"type\",\"upload\")\n __pragma__ ('jsiter') \n fetch('/json/plugins/',\n {\n \"method\":\"POST\",\n \"body\":formData,\n })\\\n .then(lambda res:res.json())\\\n .then(self.uploaded)\\\n .catch(lambda e:console.log('FAILURE!!',e));\n __pragma__ ('nojsiter')", "def _multipart_upload(self, credentials, src_file_path, artifact_file_path):\n try:\n headers = self._extract_headers_from_credentials(credentials.headers)\n 
# try to create the file\n self._retryable_adls_function(\n func=put_adls_file_creation,\n artifact_file_path=artifact_file_path,\n sas_url=credentials.signed_uri,\n headers=headers,\n )\n # next try to append the file\n futures = {}\n file_size = os.path.getsize(src_file_path)\n num_chunks = _compute_num_chunks(src_file_path, _MULTIPART_UPLOAD_CHUNK_SIZE)\n use_single_part_upload = num_chunks == 1\n for index in range(num_chunks):\n start_byte = index * _MULTIPART_UPLOAD_CHUNK_SIZE\n future = self.chunk_thread_pool.submit(\n self._retryable_adls_function,\n func=patch_adls_file_upload,\n artifact_file_path=artifact_file_path,\n sas_url=credentials.signed_uri,\n local_file=src_file_path,\n start_byte=start_byte,\n size=_MULTIPART_UPLOAD_CHUNK_SIZE,\n position=start_byte,\n headers=headers,\n is_single=use_single_part_upload,\n )\n futures[future] = index\n\n _, errors = _complete_futures(futures, src_file_path)\n if errors:\n raise MlflowException(\n f\"Failed to upload at least one part of {artifact_file_path}. Errors: {errors}\"\n )\n\n # finally try to flush the file\n if not use_single_part_upload:\n self._retryable_adls_function(\n func=patch_adls_flush,\n artifact_file_path=artifact_file_path,\n sas_url=credentials.signed_uri,\n position=file_size,\n headers=headers,\n )\n except Exception as err:\n raise MlflowException(err)", "def test_upload_dir_contents_multiple_files(self):\n self._test_upload_dir_contents(filenames=['file1', 'file2'])", "def upload(self, sources=None):\n\n # Backwards compatible with < v1.4\n if self.path is None:\n self.path = self.name\n\n if self.name is None:\n raise ValueError(\"Cannot upload without a file name\")\n\n if self.ftype is None:\n raise ValueError(\"Cannot upload without a file type\")\n\n data = {}\n sources = ThreatQSource.make_source_list(sources)\n if sources:\n data['sources'] = [src.to_dict() for src in sources if src]\n\n fname = os.path.basename(self.name)\n new_filename = \"%i-%s\" % (\n random.randint(1, 100000),\n fname.replace('.', ''))\n\n content = self.content\n if not content:\n inf = open(self.path, 'rb')\n content = inf.read()\n inf.close()\n\n res = self.tq.post(\n '/api/attachments/upload',\n data={\n 'resumableIdentifier': new_filename,\n 'resumableRelativePath': fname,\n 'resumableTotalChunks': 1,\n 'resumableFilename': fname,\n },\n files={\n 'file': ('blob', content, 'application/octet-stream')\n })\n\n data['name'] = fname\n if self.title:\n data['title'] = self.title\n data['type'] = self.ftype\n data['malware_locked'] = self.locked\n\n res = self.tq.post('/api/attachments', data=data)\n\n r = res.get('data')\n if not r or 'id' not in r:\n raise exceptions.UploadFailedError(res)\n\n for t in self.tags:\n res = self.tq.post('/api/attachments/%i/tags' % r['id'], data={'name': t})\n\n self.fid = r['id']\n return self", "def extract(request):\n try:\n files = request.FILES.getlist('myFile')\n msg_data = []\n fs = FileSystemStorage()\n for file in files:\n name = file.name.replace(\" \", \"_\")\n if os.path.exists(settings.MEDIA_ROOT + \"\\\\\" + name):\n os.remove(settings.MEDIA_ROOT + \"\\\\\" + name)\n fs.save(settings.MEDIA_ROOT + \"\\\\\" + name, file)\n msg = extract_msg.Message(settings.MEDIA_ROOT + \"\\\\\" + name)\n msg.save_attachments(customPath=settings.MEDIA_ROOT + \"\\\\\")\n attachments = []\n for i in range(0, len(msg.attachments)):\n attachments.append({\n \"filename\": msg.attachments[i].shortFilename,\n \"filepath\": \"/media/\" + msg.attachments[i].shortFilename\n })\n msg_data.append({\n # 
\"mainProperties\": msg.mainProperties,\n # \"header\": msg.header,\n \"attachments\": attachments,\n \"filename\": file.name,\n \"filepath\": \"/media/\" + name,\n \"from\": msg.sender,\n \"to\": msg.to,\n \"cc\": msg.cc,\n \"subject\": msg.subject,\n \"date\": msg.date,\n \"body\": msg.body,\n })\n msg.close()\n response = {\n \"response\": \"SUCCESS\",\n \"message\": \"File Uploaded!\",\n \"data\": msg_data\n }\n except:\n response = {\n \"response\": \"FAIL\",\n \"message\": \"Erorr in file uploading!\",\n \"data\": msg_data\n }\n return Response(response)", "def upload(request):\n ids = ((1, 1), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (7, 1), (8, 1), (9, 1),\n (10, 1), (11, 1), (12, 1), (13, 1), (14, 1), (15, 1), (16, 1), (17, 1),\n (18, 1), (19, 2), (20, 2), (21, 2), (22, 2), (23, 2), (24, 2), (25, 2),\n (26, 2), (27, 2), (28, 2), (29, 3), (30, 3), (31, 3), (32, 3), (33, 3),\n (34, 3), (35, 3), (36, 4), (37, 4), (38, 4), (39, 4), (40, 4), (41, 4),\n (42, 4), (43, 4), (44, 4), (45, 4), (46, 4), (47, 4), (48, 4), (49, 4),\n (50, 4), (51, 4), (52, 4), (53, 4), (54, 4), (55, 4), (56, 4), (57, 4),\n (58, 4), (59, 4), (60, 4), (61, 4), (62, 4), (63, 4), (64, 4), (81, 4),\n (97, 4), (98, 4), (65, 5), (66, 5), (67, 5), (68, 5), (69, 5), (70, 5),\n (71, 5), (72, 5), (73, 5), (74, 5), (75, 5), (76, 5), (77, 5), (78, 5),\n (79, 5), (80, 6), (81, 6), (82, 6), (83, 6), (84, 6), (85, 6), (86, 6),\n (87, 6), (88, 6), (89, 6), (90, 6), (91, 6), (92, 6), (93, 6), (94, 6),\n (95, 6), (96, 7), (97, 7), (98, 7), (99, 7), (100, 7), (101, 7));\n idx = 1\n products = Product.objects.all()\n for product in products:\n product.product_category.add(Category.objects.get(category_id=ids[idx][1]))\n idx += 1\n\n serializer = ProductSerializer(instance=products, context={'request': request})\n\n return Response(data=serializer.data)", "async def update_files_provided(conn):\n select_sql = \"\"\" select * from media_files where type = ?\"\"\"\n rows = select(conn, select_sql, (\"mapshot\",))\n for row in rows:\n print(texture_path + row[1])\n if any([os.path.isfile(pball_path + row[1] + x) for x in (\".png\", \".jpg\", \".tga\", \".pcx\", \".wal\")]):\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (1, row[0]))\n else:\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (0, row[0]))\n\n select_sql = \"\"\" select * from media_files where type = ?\"\"\"\n rows = select(conn, select_sql, (\"texture\",))\n for row in rows:\n print(texture_path + row[1])\n if any([os.path.isfile(texture_path + row[1] + x) for x in (\".png\", \".jpg\", \".tga\", \".pcx\", \".wal\")]):\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (1, row[0]))\n else:\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (0, row[0]))\n\n select_sql = \"\"\" select * from media_files where type = ?\"\"\"\n rows = select(conn, select_sql, (\"sky\",))\n for row in rows:\n print(texture_path + row[1])\n if any([os.path.isfile(env_path + row[1] + x) for x in (\".png\", \".jpg\", \".tga\", \".pcx\", \".wal\")]):\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (1, row[0]))\n else:\n select_sql = \"\"\"update media_files set provided=? 
where file_id=?\"\"\"\n select(conn, select_sql, (0, row[0]))\n\n select_sql = \"\"\" select * from media_files where type = ?\"\"\"\n rows = select(conn, select_sql, (\"requiredfile\",))\n for row in rows:\n if os.path.isfile(pball_path + row[1]):\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (1, row[0]))\n else:\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (0, row[0]))\n\n select_sql = \"\"\" select * from media_files where type = ?\"\"\"\n rows = select(conn, select_sql, (\"externalfile\",))\n for row in rows:\n if any([any([os.path.isfile(pball_path + row[1] + x) for x in (\".skm\", \".md2\")]),\n any([os.path.isfile(pball_path + row[1] + x) for x in\n (\".png\", \".jpg\", \".tga\", \".pcx\", \".wal\", \"\")]),\n os.path.isfile(pball_path + \"sound/\" + row[1])]):\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (1, row[0]))\n else:\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (0, row[0]))\n\n select_sql = \"\"\" select * from media_files where type = ?\"\"\"\n rows = select(conn, select_sql, (\"linkedfile\",))\n print(\"rows\", rows)\n for row in rows:\n if any([any([os.path.isfile(pball_path + row[1] + x) for x in (\".skp\", \"\")]), any(\n [os.path.isfile(pball_path + row[1].split(\".\")[0] + x) for x in\n (\".png\", \".jpg\", \".tga\", \".pcx\", \".wal\", \"\")])]):\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (1, row[0]))\n else:\n select_sql = \"\"\"update media_files set provided=? where file_id=?\"\"\"\n select(conn, select_sql, (0, row[0]))", "def _upload_dir_to_bucket(self, path, ext_path):\n for file in os.listdir(path):\n self._upload_to_bucket(path+'/'+file, ext_path+'/'+file)", "def add_media():\n input_csv = os.path.join(config['input_dir'], config['input_csv'])\n if os.path.exists(input_csv):\n with open(input_csv) as csvfile:\n csv_data = csv.DictReader(csvfile, delimiter=config['delimiter'])\n csv_column_headers = csv_data.fieldnames\n\n for row in csv_data:\n row = clean_csv_values(row)\n if not ping_node(config, row['node_id']):\n print(\"Node \" + row['node_id'] + \" not found or not \" +\n \"accessible, skipping adding media.\")\n continue\n\n file_path = os.path.join(config['input_dir'], row['file'])\n media_type = set_media_type(file_path, config)\n\n node_json_url = config['host'] + '/node/' + row['node_id'] + '?_format=json'\n node_uri = config['host'] + '/node/' + row['node_id']\n node_response = issue_request(config, 'GET', node_json_url)\n if node_response.status_code == 200:\n media_response_status_code = create_media(config, row['file'], node_uri)\n allowed_media_response_codes = [201, 204]\n if media_response_status_code in allowed_media_response_codes:\n print(media_type.title() + \" media for \" + row['file'] + \" created and added to \" + node_uri)\n logging.info(\"%s media for %s created and added to %s.\", media_type.title(), row['file'], node_uri)", "def _upload_to_gcs(self, files_to_upload):\n # Compose mime_type using file format passed as param\n mime_type = 'application/' + self.export_format['file_format']\n hook = GoogleCloudStorageHook(\n google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,\n delegate_to=self.delegate_to)\n for object, tmp_file_handle in files_to_upload.items():\n hook.upload(self.bucket, object, tmp_file_handle.name, 
mime_type)", "def handle_upload(f, attrs):\n\n # chunked = False\n dest_folder = os.path.join(app.config['UPLOAD_DIRECTORY'], attrs['qquuid'])\n dest = os.path.join(dest_folder, attrs['qqfilename'])\n save_upload(f, dest)", "def addmedia(device, paths):\n\n if isinstance(paths, str):\n paths = [paths]\n\n if not paths:\n return\n\n command = 'addmedia \"%s\" ' % (device.udid)\n\n # Now we need to add the paths\n quoted_paths = ['\"' + path + '\"' for path in paths]\n paths_arg = \" \".join(quoted_paths)\n command += paths_arg\n\n _run_command(command)", "async def audiofiles(self, ctx):\r\n files = '\"{0}\"'.format('\", \"'.join(self.audio_files))\r\n await ctx.send(\"```Available audio files :\\n{0}```\".format(files))", "def upload(server):\n for i in range(10):\n start_time = time.time()\n logging.debug('Start uploading: %d' % i)\n os.system(\"scp uploads/18UPLOAD %s:\" % server)\n end_time = time.time()\n logging.debug('End uploading: ')\n logging.debug('Time taken by uploader: %s' % (end_time - start_time))", "def add_mms_attachments(db, mms, backup_dir, thread_dir):\n qry = db.execute(\n \"SELECT _id, ct, unique_id, voice_note, width, height, quote \"\n \"FROM part WHERE mid=?\",\n (mms._id,),\n )\n for _id, ct, unique_id, voice_note, width, height, quote in qry:\n a = Attachment(\n contentType=ct,\n unique_id=unique_id,\n fileName=get_attachment_filename(\n _id, unique_id, backup_dir, thread_dir\n ),\n voiceNote=voice_note,\n width=width,\n height=height,\n quote=quote,\n )\n mms.attachments.append(a)", "def merge_waves(self):\n dirname = self.dirname\n name = self.get_name()\n videocluster = os.path.join(dirname, name)\n if sys.platform == 'win32':\n videocluster = dirname + '/' + name\n listwaves = os.listdir(videocluster)\n listwaves.sort()\n listw = [os.path.join(videocluster, fil) for fil in listwaves]\n #file_basename = os.path.join(dirname, name)\n if sys.platform == 'win32':\n listw = [videocluster + '/' + fil for fil in listwaves] \n # file_basename = dirname + '/' + name\n self.wave = os.path.join(dirname, name + \".wav\")\n if sys.platform == 'win32':\n self.wave = dirname + '/' + name + \".wav\"\n fm.merge_waves(listw, self.wave)", "def storage_file_upload_batch(cmd, client, destination, source, destination_path=None, pattern=None, dryrun=False,\n validate_content=False, content_settings=None, max_connections=1, metadata=None,\n progress_callback=None):\n\n from ..util import glob_files_locally, normalize_blob_file_path, guess_content_type\n from ..track2_util import make_file_url\n\n source_files = [c for c in glob_files_locally(source, pattern)]\n logger = get_logger(__name__)\n settings_class = cmd.get_models('_models#ContentSettings')\n\n if dryrun:\n logger.info('upload files to file share')\n logger.info(' account %s', client.account_name)\n logger.info(' share %s', destination)\n logger.info(' total %d', len(source_files))\n return [{'File': make_file_url(client, os.path.dirname(dst) or None, os.path.basename(dst)),\n 'Type': guess_content_type(src, content_settings, settings_class).content_type} for src, dst in\n source_files]\n\n # TODO: Performance improvement\n # 1. 
Upload files in parallel\n def _upload_action(src, dst):\n dst = normalize_blob_file_path(destination_path, dst)\n dir_name = os.path.dirname(dst)\n file_name = os.path.basename(dst)\n\n _make_directory_in_files_share(client, dir_name)\n\n logger.warning('uploading %s', src)\n\n storage_file_upload(client.get_file_client(dst), src, content_settings, metadata, validate_content,\n progress_callback, max_connections)\n\n return make_file_url(client, dir_name, file_name)\n\n return list(_upload_action(src, dst) for src, dst in source_files)", "def index_mock_files(self,mfiles):\n results = []\n for i in range(len(mfiles['file_name'])):\n print(\"Submitting {} to indexd at {}.\".format(mfiles['file_name'][i],mfiles['object_id'][i]))\n res = self.create_record(\n did=mfiles['object_id'][i],\n hashes={'md5':mfiles['md5sum'][i]},\n size=mfiles['file_size'][i],\n urls=mfiles['storage_urls'][i],\n file_name=mfiles['file_name'][i],\n acl=mfiles['acl'][i],\n authz=mfiles['authz'][i])\n results.append(res)\n return results", "def upload_files_s3(files, bucket):\n \n print('************************************')\n print('Uploading files to s3 bucket...')\n print('************************************')\n \n for i in range(len(files)):\n upload_file_s3(files[i], bucket)\n \n print('************************************')\n print('Upload complete')\n print('************************************')", "async def async_browse_media(self, media_content_type=None, media_content_id=None):\n return await media_source.async_browse_media(\n self.hass,\n media_content_id,\n content_filter=lambda item: item.media_content_type.startswith(\"audio/\"),\n )\n \n #TODO: combide the BrowseMedia Media Sources above with the BrowseMedia Directory below\n #if \"udisk\" in self._source_list: \n # if media_content_id not in (None, \"root\"):\n # raise BrowseError(\n # f\"Media not found: {media_content_type} / {media_content_id}\"\n # )\n\n # source_media_name = self._source_list.get(\"udisk\", \"USB Disk\")\n\n # if len(self._trackq) > 0:\n # radio = [\n # BrowseMedia(\n # title = preset,\n # media_class = MEDIA_CLASS_MUSIC,\n # media_content_id = index,\n # media_content_type = MEDIA_TYPE_MUSIC,\n # can_play = True,\n # can_expand = False,\n # )\n # for index, preset in enumerate(self._trackq, start=1)\n # ]\n\n # root = BrowseMedia(\n # title=self._name + \" \" + source_media_name,\n # media_class = MEDIA_CLASS_DIRECTORY,\n # media_content_id = \"root\",\n # media_content_type = \"listing\",\n # can_play = False,\n # can_expand = True,\n # children = radio,\n # )\n\n # else:\n # root = BrowseMedia(\n # title=self._name + \" \" + source_media_name,\n # media_class = MEDIA_CLASS_DIRECTORY,\n # media_content_id = \"root\",\n # media_content_type = \"listing\",\n # can_play = False,\n # can_expand = False,\n # )\n\n # return root", "def upload(self, asset, file):\n uploader = FrameioUploader(asset, file)\n uploader.upload()", "def upload_samples():\n # Retrieve a list of all files and paths within the target\n paths = Path(Config.target_dir).glob(Config.target_pattern)\n # Inform the user as to what we're doing\n logger.info(\"Assembling %s volume for submission\", Config.target_dir)\n # Loop through each identified file and upload it to the sandbox for analysis\n for path in paths:\n # Convert the path to a string\n filepath = str(path)\n # Grab the file name\n filename = os.path.basename(filepath)\n # Open the file for binary read, this will be our payload\n with open(filepath, 'rb') as upload_file:\n payload = 
upload_file.read()\n # Upload the file using the Sandbox\n response = Samples.upload_sample(file_name=filename, sample=payload)\n # Grab the SHA256 unique identifier for the file we just uploaded\n sha = response[\"body\"][\"resources\"][0][\"sha256\"]\n # Add this SHA256 to the volume payload element\n Analyzer.uploaded.append(sha)\n # Track the upload so we can remove the file when we're done\n Analyzer.files.append([filename, filepath, sha])\n # Inform the user of our progress\n logger.debug(\"Uploaded %s to %s\", filename, sha)", "def upload_file(request):\n\n if request.method == 'POST':\n form = StreamForm(request.POST)\n\n if form.is_valid() and request.FILES:\n stream_file = request.FILES['stream_file']\n fs = SaveStream()\n tmpfilename = fs.save(stream_file.name, stream_file)\n\n # Save streaming file info\n stream_name = form.cleaned_data['stream_name']\n stream_description = form.cleaned_data['stream_description']\n stream_quality = form.cleaned_data['stream_quality']\n stream_language = form.cleaned_data['stream_language']\n stream_category = form.cleaned_data['stream_category']\n\n cat = Category.objects.get(name=stream_category)\n\n stream = Stream(\n name=stream_name,\n description=stream_description,\n )\n\n stream.category = cat\n stream.save()\n\n # stream_quality is a list of QuerySet\n # We make the many-to-many relation\n stream.quality = stream_quality\n\n # Save streaming tmp file info\n StreamTmp(\n tmppath=tmpfilename,\n stream=stream,\n ).save() \n\n # Run encoding process in celery tasks with chord primitive\n chord(encode_stream.si(tmpfilename, qual.name) \n for qual in stream_quality)(post_encoding.si(tmpfilename))\n\n\n return HttpResponseRedirect('/admin/library/stream/encoding_process/')\n\n else:\n form = StreamForm()\n\n return render(request, 'upload.html', {'form': form})", "def upload_files(self, context, instance_ref, bless_files):\n raise Exception(\"Uploading files to the image service is not supported.\")", "def upload(all_files, session):\n remote_directory = unique_path('cli-import')\n log.info('uploading files to %s' % remote_directory)\n\n for filename in all_files:\n callback = _progress_callback\n log.info(\"Uploading %s\" % filename)\n session.uploadWrapper(filename, remote_directory, callback=callback)\n if callback:\n print('')\n return remote_directory", "def _upload_media(\n self,\n *,\n expected_mime_prefix: str | None = None,\n media_path: str,\n upload_type: str = \"link\",\n ):\n if media_path is None:\n file = Path(__file__).absolute()\n media_path = file.parent.parent.parent / \"images\" / \"PRAW logo.png\"\n else:\n file = Path(media_path)\n\n file_name = file.name.lower()\n file_extension = file_name.rpartition(\".\")[2]\n mime_type = {\n \"png\": \"image/png\",\n \"mov\": \"video/quicktime\",\n \"mp4\": \"video/mp4\",\n \"jpg\": \"image/jpeg\",\n \"jpeg\": \"image/jpeg\",\n \"gif\": \"image/gif\",\n }.get(\n file_extension, \"image/jpeg\"\n ) # default to JPEG\n if (\n expected_mime_prefix is not None\n and mime_type.partition(\"/\")[0] != expected_mime_prefix\n ):\n msg = f\"Expected a mimetype starting with {expected_mime_prefix!r} but got mimetype {mime_type!r} (from file extension {file_extension!r}).\"\n raise ClientException(msg)\n img_data = {\"filepath\": file_name, \"mimetype\": mime_type}\n\n url = API_PATH[\"media_asset\"]\n # until we learn otherwise, assume this request always succeeds\n upload_response = self._reddit.post(url, data=img_data)\n upload_lease = upload_response[\"args\"]\n upload_url = 
f\"https:{upload_lease['action']}\"\n upload_data = {item[\"name\"]: item[\"value\"] for item in upload_lease[\"fields\"]}\n\n response = self._read_and_post_media(file, upload_url, upload_data)\n if not response.ok:\n self._parse_xml_response(response)\n try:\n response.raise_for_status()\n except HTTPError as err:\n raise ServerError(response=err.response) from None\n\n websocket_url = upload_response[\"asset\"][\"websocket_url\"]\n\n if upload_type == \"link\":\n return f\"{upload_url}/{upload_data['key']}\", websocket_url\n return upload_response[\"asset\"][\"asset_id\"], websocket_url", "def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,\r\n files):\r\n\r\n def UploadFile(filename, file_id, content, is_binary, status, is_base):\r\n \"\"\"Uploads a file to the server.\"\"\"\r\n file_too_large = False\r\n if is_base:\r\n type = \"base\"\r\n else:\r\n type = \"current\"\r\n if len(content) > MAX_UPLOAD_SIZE:\r\n print (\"Not uploading the %s file for %s because it's too large.\" %\r\n (type, filename))\r\n file_too_large = True\r\n content = \"\"\r\n checksum = md5(content).hexdigest()\r\n if options.verbose > 0 and not file_too_large:\r\n print \"Uploading %s file for %s\" % (type, filename)\r\n url = \"/%d/upload_content/%d/%d\" % (int(issue), int(patchset), file_id)\r\n form_fields = [(\"filename\", filename),\r\n (\"status\", status),\r\n (\"checksum\", checksum),\r\n (\"is_binary\", str(is_binary)),\r\n (\"is_current\", str(not is_base)),\r\n ]\r\n if file_too_large:\r\n form_fields.append((\"file_too_large\", \"1\"))\r\n if options.email:\r\n form_fields.append((\"user\", options.email))\r\n ctype, body = EncodeMultipartFormData(form_fields,\r\n [(\"data\", filename, content)])\r\n response_body = rpc_server.Send(url, body,\r\n content_type=ctype)\r\n if not response_body.startswith(\"OK\"):\r\n StatusUpdate(\" --> %s\" % response_body)\r\n sys.exit(1)\r\n\r\n patches = dict()\r\n [patches.setdefault(v, k) for k, v in patch_list]\r\n for filename in patches.keys():\r\n base_content, new_content, is_binary, status = files[filename]\r\n file_id_str = patches.get(filename)\r\n if file_id_str.find(\"nobase\") != -1:\r\n base_content = None\r\n file_id_str = file_id_str[file_id_str.rfind(\"_\") + 1:]\r\n file_id = int(file_id_str)\r\n if base_content != None:\r\n UploadFile(filename, file_id, base_content, is_binary, status, True)\r\n if new_content != None:\r\n UploadFile(filename, file_id, new_content, is_binary, status, False)", "def upload_files(self, source_file_name, destination_blob_name):\n blob = self.bucket.blob(destination_blob_name)\n blob.upload_from_filename(source_file_name)\n\n print(\n \"File {} uploaded to {} in {} bucket.\".format(\n source_file_name, destination_blob_name, self.bucket\n )\n )", "def getMediaFiles(path):\n fileList = getMediaFileList(path)\n # dirList = getDirectoryList(path)\n\n # results = map(getMediaFiles, dirList)\n\n # for result in results:\n # fileList = fileList + result\n\n return fileList", "def _build_adhoc_medias(self, media_list, media_type):\n adhoc_medias = []\n media_id = 0\n for media in media_list:\n media_name = 'adhoc_media_' + media_type + '_' + self.viewport_name + '_' + str(media_id)\n adhoc_media = AdhocMedia()\n adhoc_media.id = media_name\n adhoc_media.url = media['path']\n adhoc_media.geometry.x = media['x_coord'] + self._get_viewport_offset()['x']\n adhoc_media.geometry.y = media['y_coord'] + self._get_viewport_offset()['y']\n adhoc_media.geometry.width = media['width']\n 
adhoc_media.geometry.height = media['height']\n adhoc_media.media_type = media_type\n # TODO figure out if media['on_finish'] or media['activity_config']['onFinish'] is better\n adhoc_media.on_finish = media['on_finish']\n adhoc_media.extra_args = media.get('activity_config', {}).get('args', '')\n adhoc_medias.append(adhoc_media)\n media_id += 1\n\n logger.info(\"Returning adhocmedias: %s for player: %s\" % (adhoc_medias, media_type))\n return adhoc_medias", "def upload_music(handler):\n user_id = handler.get_argument('user_id')\n music_path = handler.get_argument('path') #Having problems parsing this out\n sound_cloud_client = Petitions.instantiate_user(user_id)\n track = sound_cloud_client.post('/tracks', track={\n 'title': 'Testing Uploads',\n 'asset_data': open(music_path, 'rb')\n })\n\n return track.permalink_url # Improve messages. Change to Json", "def upload(jsonfiles):\n # clear S3 Bucket\n bucket = S3Bucket()\n bucket.clear()\n for jsonfile in jsonfiles:\n filename = os.path.basename(jsonfile)\n key = build_key(filename)\n logging.info(\"%s %s\", filename, key)\n # store json in S3 object\n bucket.store(key, jsonfile)", "def handle_upload(f, attrs):\n\n # chunked = False\n print 'UPLOAD DIRECTORY:', UPLOAD_DIRECTORY\n dest_folder = os.path.join(UPLOAD_DIRECTORY, attrs['qquuid'])\n dest = os.path.join(dest_folder, attrs['qqfilename'])\n save_upload(f, dest)", "def put_ids_to_queue(ids_list):\n LOGGER.debug('pushing %s ads to the queue', len(ids_list))\n for advert_id in ids_list:\n fetch_single_advert.delay(advert_id)", "def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,\n files):\n\n def UploadFile(filename, file_id, content, is_binary, status, is_base):\n \"\"\"Uploads a file to the server.\"\"\"\n file_too_large = False\n if is_base:\n type = \"base\"\n else:\n type = \"current\"\n if len(content) > MAX_UPLOAD_SIZE:\n print (\"Not uploading the %s file for %s because it's too large.\" %\n (type, filename))\n file_too_large = True\n content = \"\"\n checksum = md5(content).hexdigest()\n if options.verbose > 0 and not file_too_large:\n print \"Uploading %s file for %s\" % (type, filename)\n url = \"/%d/upload_content/%d/%d\" % (int(issue), int(patchset), file_id)\n form_fields = [(\"filename\", filename),\n (\"status\", status),\n (\"checksum\", checksum),\n (\"is_binary\", str(is_binary)),\n (\"is_current\", str(not is_base)),\n ]\n if file_too_large:\n form_fields.append((\"file_too_large\", \"1\"))\n if options.email:\n form_fields.append((\"user\", options.email))\n ctype, body = EncodeMultipartFormData(form_fields,\n [(\"data\", filename, content)])\n response_body = rpc_server.Send(url, body,\n content_type=ctype)\n if not response_body.startswith(\"OK\"):\n StatusUpdate(\" --> %s\" % response_body)\n sys.exit(1)\n\n patches = dict()\n [patches.setdefault(v, k) for k, v in patch_list]\n for filename in patches.keys():\n base_content, new_content, is_binary, status = files[filename]\n file_id_str = patches.get(filename)\n if file_id_str.find(\"nobase\") != -1:\n base_content = None\n file_id_str = file_id_str[file_id_str.rfind(\"_\") + 1:]\n file_id = int(file_id_str)\n if base_content != None:\n UploadFile(filename, file_id, base_content, is_binary, status, True)\n if new_content != None:\n UploadFile(filename, file_id, new_content, is_binary, status, False)", "def uploadGlobusData(self,upload_files=None,upload_folders=None):\n\t\ttdata = globus_sdk.TransferData(self.transfer_client,self.local_ep_id,\n\t\t\t\t\t\t\t\t\t 
self.transfer_client.endpoint_search(DATA_ENDPOINT_NAME)[0]['name'])\n\t\t\n\t\tif upload_files is not None:\n\t\t\tfor f in upload_files:\n\t\t\t\ttdata.add_item(os.path.abspath(f),os.path.basename(f),recursive=False)\t\n\t\telse:\n\t\t\tfor f in upload_folders:\n\t\t\t\ttdata.add_item(os.path.abspath(f),os.path.basename(f),recursive=True)\t\n\t\t\n\t\tself.transfer_result = self.transfer_client.submit_transfer(tdata)", "def ingest(self, files):\n for file in files:\n self.files.add(file)", "def _request_files(self, file_list, callback=None):\n id = self._network._send(_Messages.file_request, file_list, callback != None)\n self._save_callback(id, callback)", "def upload_groups(self, files: [str]):\n self.driver.get(\"https://reporting.smarterbalanced.org/admin-groups/import\")\n\n try:\n WebDriverWait(self.driver, 20).until(\n EC.presence_of_element_located((By.XPATH, '//*[@id=\"maincontent\"]/admin/div[1]/div[3]'))\n )\n except:\n print(\"Error\")\n\n self.driver.execute_script(\n f\"\"\"document.evaluate('//*[@id=\"maincontent\"]/admin/div[1]/div[3]', document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue.removeAttribute(\"hidden\")\"\"\")\n\n file_upload = self.driver.find_element_by_xpath('//*[@id=\"maincontent\"]/admin/div[1]/div[3]/input')\n [file_upload.send_keys(file) for file in files]", "def __count_media_files(self, CurrentDir):\n self.__counter_lock.acquire()\n self.media_counter = [0, 0]\n self.__counter_lock.release()\n if os.path.exists(CurrentDir):\n for i in os.listdir(CurrentDir):\n MediaType = self.thumb_filter(CurrentDir,i)\n if MediaType == TYPE_PHOTO:\n self.__counter_lock.acquire()\n self.media_counter[0] += 1\n self.__counter_lock.release()\n elif MediaType == TYPE_VIDEO:\n self.__counter_lock.acquire()\n self.media_counter[1] += 1 \n self.__counter_lock.release()", "def mediaGenerator(request):\n folder = 'content/' + request\n mediaPaths = glob(folder + '/*')\n return random.choice(mediaPaths)", "def upload_file(self, file_path, file_name, output_path):", "def process_group_upload(self, configlist):\n switches = [str(t[0]) for t in self.get_switches()]\n for swconfig in configlist: # for each\n dpid = list(swconfig.keys())[0]\n\n if dpid not in switches:\n break\n\n for flow in swconfig[dpid]:\n flow['dpid'] = dpid\n flow['operation'] = 'add'\n result = self.process_group_message(flow)\n print(result)\n return 'Groups added successfully!'", "def test_adding_media_to_channel(self):\n videos = [\n make_video(title='test title', media_id='1'),\n make_video(title='test title 2', media_id='2'),\n ]\n channels = [make_channel(title='test channel', media_ids=['1'], collection_id='3')]\n set_resources_and_sync(videos, channels)\n c = mpmodels.Channel.objects.filter(sms__id='3').first()\n self.assertIsNotNone(c)\n self.assertEqual(len(c.items.all()), 1)\n channels[0]['custom']['sms_collection_media_ids'] = 'collection_media_ids:1,2:'\n channels[0]['updated'] += 1\n set_resources_and_sync(videos, channels)\n self.assertEqual(len(c.items.all()), 2)\n # also check playlist\n playlist = mpmodels.Playlist.objects.filter(sms__id='3').first()\n self.assertEqual(len(playlist.media_items), 2)", "def uploadFiles(self, filenames):\n bucket = self._S3_USER_UPLOAD_BUCKET\n prefix = self._S3_USER_UPLOAD_DIR\n uuid_dir = uuid.uuid4()\n # TODO(aimee): This should upload to a user-namespaced directory\n for filename in filenames:\n basename = os.path.basename(filename)\n response = self._upload_s3(filename, bucket, f\"{prefix}/{uuid_dir}/{basename}\")\n return 
f\"Upload file subdirectory: {uuid_dir} (keep a record of this if you want to share these files with other users)\"", "def convert_to_web(base_path, files):\n for i, f in enumerate(files):\n imagetype = get_imagetype_from_filename(f)\n cmd = ('rsync '\n '{base_path}/qc/phantom/{imagetype}/{f} '\n '{base_path}/website/assets/{output}'.format(\n base_path=base_path, imagetype=imagetype, \n f=f, output=f[9:]))\n os.system(cmd)", "def copy_file(topic_list):\n print 'len topic list', len(topic_list)\n upload_dir = '/home/monkeys/yqnew/upload/data'\n os.makedirs(upload_dir)\n for topic in topic_list:\n upload_path = os.path.join(upload_dir, topic)\n if not os.path.isdir(upload_path):\n print \"准备创建文件夹\"\n os.makedirs(upload_path)\n else:\n print 'folder exists'\n file_path = os.path.join(JSON_DIR, topic)\n for paths, folders, files in os.walk(file_path):\n for i in files:\n upload_file = os.path.join(upload_path, i)\n if not os.path.isfile(upload_file):\n print '准备上传事件', topic, '的文件', upload_file\n shutil.copy2(os.path.join(JSON_DIR, topic, i), upload_file)\n else:\n print '本事件已在上传列表中'\n\n os.system('scp -r /home/monkeys/yqnew/upload/data [email protected]:/root/yqnew/static/')\n if os.path.exists(upload_dir):\n shutil.rmtree(upload_dir)\n\n print '上传完成'", "def upload_multiple():\n gene_list = request.json['gene_list']\n project_id = request.json['project_id']\n all_message = ''\n html = ''\n added_list = []\n button_list = ''\n\n for gene in sorted(gene_list):\n if gene == \"\" or gene in added_list:\n continue\n dct = create_panel_get_tx(gene, project_id)\n if dct[\"message\"] == \"added\":\n added_list.append(gene)\n else:\n all_message += dct[\"message\"]\n try:\n html += dct[\"html\"]\n except KeyError:\n pass\n try:\n button_list += dct[\"button_list\"]\n except KeyError:\n pass\n\n if len(added_list) > 0:\n added_message = render_template(\"added_list.html\", added_list=enumerate(added_list), length=len(added_list))\n all_message += added_message\n\n return jsonify({'message': all_message, 'html': html, 'button_list': button_list})", "def upload():\r\n\r\n if not os.path.isdir(TO_SEGMENT):\r\n os.mkdir(TO_SEGMENT)\r\n else:\r\n print(\"could not create upload directory: {}\".format(TO_SEGMENT))\r\n print(request.files.getlist(\"file\"))\r\n\r\n for upload in request.files.getlist(\"file\"):\r\n filename = upload.filename\r\n destination = \"/\".join([TO_SEGMENT, filename])\r\n upload.save(destination)\r\n\r\n return redirect(url_for('get_gallery'))", "def transcribe(self, paths2audio_files: List[str], batch_size: int = 4) -> List[str]:\n pass", "def push_backup(args: Arguments) -> None:\n\n files = get_files_from_previous_backup(args.site)\n bucket = get_bucket(args)\n\n for path in files:\n upload_file(\n path=path,\n site_name=args.site,\n bucket=bucket,\n bucket_directory=args.bucket_directory,\n )\n\n print(\"Done!\")", "def upload_blob(source_file_path, destination_blob_name):\n storage_client = storage.Client.from_service_account_json('gcp cred/ly-stt-1582263496331-f723d8a35a04.json')\n bucket = storage_client.get_bucket(bucket_name)\n bucket_has_audio = storage.Blob(bucket=bucket, name=destination_blob_name).exists(storage_client)\n if not bucket_has_audio:\n blob = bucket.blob(destination_blob_name)\n\n blob._chunk_size = 4194304\n\n blob.upload_from_filename(source_file_path)\n\n print(bucket_has_audio)\n print(\"done\")\n # print(bucket)\n # blob = bucket.blob(destination_blob_name)\n # print(blob)\n # blob.upload_from_filename(source_file_path)", "def upload_files(metadata, 
dir, rclone_path, remote_name):\n\n images = []\n today = datetime.datetime.today().strftime('%Y-%m-%d')\n\n # send images to the Drive\n for image in metadata:\n # prevent sending the same image twice (if two faces are detected)\n if image not in images:\n images.append(image)\n image_path = os.path.join(dir, image['image_name'])\n subprocess.run([rclone_path, 'copy', image_path, '{}:{}'.format(remote_name, today)])\n\n # upload metadata.json to the Drive\n subprocess.run([rclone_path, 'copy', METADATA_FILE, '{}:{}'.format(remote_name, today)])\n os.remove(METADATA_FILE)", "def upload():\n form = request.form\n\n # Create a unique \"session ID\" for this particular batch of uploads.\n upload_key = str(uuid4())\n\n # Is the upload using Ajax, or a direct POST by the form?\n is_ajax = False\n if form.get(\"__ajax\", None) == \"true\":\n is_ajax = True\n\n # Target folder for these uploads.\n target = app.config['UPLOAD_FOLDER'] + \"/{}\".format(upload_key)\n try:\n os.mkdir(target)\n except:\n if is_ajax:\n return ajax_response(False, \"Couldn't create upload directory: {}\".format(target))\n else:\n return \"Couldn't create upload directory: {}\".format(target)\n\n for image_upload in request.files.getlist(\"file\"):\n filename = secure_filename(image_upload.filename)\n destination = \"/\".join([target, filename])\n print(\"Accept incoming file:\", filename)\n print(\"Save it to:\", destination)\n image_upload.save(destination)\n upload_image.delay(destination)\n\n if is_ajax:\n return ajax_response(True, upload_key)\n else:\n return redirect(\"/\")", "def upload_release_files():\n version = get_release_version()\n target = sf_files + sourceforge_target_dir(version)\n\n print()\n print(\"Uploading release files...\")\n print(\" Source:\", release_path)\n print(\" Target: \" + target)\n print(\" Files: \" + ', '.join(glob.glob('*')))\n print()\n call_rsync(\n username,\n \"\",\n path.join(release_path, \"*\"),\n target\n )\n print()", "def uploadPackages(self, directory):\n files_to_upload_dict = {}\n files_to_upload_list = [ f for f in listdir(directory) if isfile(join(directory,f)) ]\n self.logger.debug(\"uploadPackages(\" + \"{})\".format(directory))\n #print \"Files to upload:\"\n for index in range(len(files_to_upload_list)):\n self.logger.info(files_to_upload_list[index])\n self.uploadFileToIM (directory, files_to_upload_list[index], files_to_upload_list[index])\n #file_tuple = {'files':{str(files_to_upload_list[index]), open(directory + files_to_upload_list[index], 'rb'), 'application/x-rpm'}} \n #file_tuple = {str(files_to_upload_list[index]), {open(directory + files_to_upload_list[index], 'rb'), 'application/x-rpm'}}\n #file_tuple = {'files': (str(files_to_upload_list[index]), open(directory + files_to_upload_list[index], 'rb'), 'application/x-rpm')}\n #file_tuple = (str(files_to_upload_list[index]), open(directory + files_to_upload_list[index], 'rb'))\n #file_tuple = {str(files_to_upload_list[index]), open(directory + files_to_upload_list[index], 'rb'), 'application/x-rpm'}\n #files_data_to_upload_list.append(file_tuple)\n #print \"Files to upload Dictionary:\"", "def store_files(files, tracking_id, time):\n errors = {}\n try:\n upload_path = os.path.join(app.config['DRS_UPLOADS'], '{0}'.format(tracking_id))\n if not os.path.isdir(upload_path):\n os.mkdir(upload_path)\n for file_name in files:\n file = files[file_name]\n file_path = os.path.join(upload_path, '{0}_{1}'.format(time, file.filename))\n file.save(file_path)\n if int(Utilities.convert_to_mb(os.path.getsize(file_path))) 
> 26:\n errors[file.filename] = ['size of file is greator than 26 MB which is not allowed']\n os.remove(file_path)\n break\n return errors\n except IOError:\n return IOError", "def add_media_file(self, input_file: str, output_file: str) -> None:\n if not os.path.exists(input_file):\n raise FFmpegNormalizeError(f\"file {input_file} does not exist\")\n\n ext = os.path.splitext(output_file)[1][1:]\n if (\n self.audio_codec is None or \"pcm\" in self.audio_codec\n ) and ext in PCM_INCOMPATIBLE_EXTS:\n raise FFmpegNormalizeError(\n f\"Output extension {ext} does not support PCM audio. \"\n \"Please choose a suitable audio codec with the -c:a option.\"\n )\n\n self.media_files.append(MediaFile(self, input_file, output_file))\n self.file_count += 1", "def upload_dataset(dataset_client: DatasetClient, dataset_id: str,\n dataset_list: list, max_workers: int = 4):\n dataset = dataset_client.get_dataset(dataset_id)\n\n def _f(dataset_item):\n source_data = [\n {\n 'data_uri': dataset_item.source_data[0].uri,\n 'data_type': dataset_item.source_data[0].type\n }\n ] # TODO: only one url to be uploaded\n attributes = dataset_item.attributes\n dataset.dataset_items.create(source_data=source_data, attributes=attributes)\n\n if max_workers > 1:\n with ThreadPoolExecutor(max_workers) as executor:\n results = list(tqdm(executor.map(_f, dataset_list), total=len(dataset_list)))\n return results\n return [_f(x) for x in tqdm(dataset_list)]", "def upload_single_file(request):\n message, success, title = \"\", 0, \"error\"\n is_data_ok = False\n\n if request.method == 'POST':\n data_in_post = [\"id_campaign\", \"field_name\"]\n # defined in utils.py\n is_data_in_post = check_all_data_available_in_post(\n data_in_post, request.POST)\n\n if is_data_in_post['success']:\n is_data_ok = True\n else:\n message = is_data_in_post['message']\n\n if is_data_ok:\n for filename, file in request.FILES.items():\n name = request.FILES[filename].name\n print(\"filename : \", name)\n\n # myfile = request.FILES['abm_company_list_file']\n myfile = request.FILES[filename]\n fs = FileSystemStorage()\n filename = fs.save(\"campaign/\" + myfile.name, myfile)\n print(filename)\n\n # get campaign id\n id_campaign = request.POST.get(\"id_campaign\")\n\n # django get campaign object from model\n campaign = Campaign.objects.filter(id=id_campaign).first()\n\n if campaign:\n # get specification record\n specification = Specification.objects.filter(campaign=campaign).first()\n if specification:\n # get field name to save\n field_name = request.POST.get(\"field_name\")\n\n # check object has property with field name\n if hasattr(specification, field_name):\n # nested_setattr(object, 'pet.name', 'Sparky')\n model_field_name = str(field_name) + \".name\"\n model_field_name = model_field_name.replace(\" \", \"\")\n print(model_field_name)\n\n # set nested attribute\n # ex. form.name\n nested_setattr(specification, model_field_name, filename)\n\n specification.save()\n print(nested_getattr(specification, model_field_name, 'default')) # will print string similar to filename\n\n success = 1\n title = 'success'\n message = \"specification updated successfully\"\n else:\n message += \"Error... 
Specification table has no field '\" + field_name + \"'\"\n\n else:\n message += \"Specification not exists with campaign: '\", str(campaign), \"'\"\n else:\n message += \"Campaign not exist with id : '\", id_campaign, \"'\"\n\n # uploaded_file_url = fs.url(filename)\n success = 1\n else:\n message = \"Please post data using post method\"\n\n jsonresponse = {\n \"success\": 1,\n \"title\": request.POST,\n \"message\": message,\n }\n return JsonResponse(jsonresponse, safe=False)", "def EnqueueAudioFile(audiofile):\n SoundsPath = os.path.join(AudioFilesPath, audiofile + \".mp3\")\n AudioPlaybackQueue2.append(SoundsPath)", "def convert_multiple(self, video_files, out, brate, _format):\n\n for video in video_files:\n self.to_audio(os.path.abspath(video),\n out, brate, _format)", "def upload_file(\n files: List[UploadFile] = File(...),\n # JSON serialized string\n meta: Optional[str] = Form(\"null\"), # type: ignore\n additional_params: Optional[str] = Form(\"null\"), # type: ignore\n fileconverter_params: FileConverterParams = Depends(FileConverterParams.as_form), # type: ignore\n preprocessor_params: PreprocessorParams = Depends(PreprocessorParams.as_form), # type: ignore\n keep_files: Optional[bool] = False,\n):\n if not indexing_pipeline:\n raise HTTPException(status_code=501, detail=\"Indexing Pipeline is not configured.\")\n\n file_paths: list = []\n file_metas: list = []\n\n meta_form = json.loads(meta) or {} # type: ignore\n if not isinstance(meta_form, dict):\n raise HTTPException(status_code=500, detail=f\"The meta field must be a dict or None, not {type(meta_form)}\")\n\n for file in files:\n try:\n file_path = Path(FILE_UPLOAD_PATH) / f\"{uuid.uuid4().hex}_{file.filename}\"\n with file_path.open(\"wb\") as buffer:\n shutil.copyfileobj(file.file, buffer)\n\n file_paths.append(file_path)\n meta_form[\"name\"] = file.filename\n file_metas.append(meta_form)\n finally:\n file.file.close()\n\n params = json.loads(additional_params) or {} # type: ignore\n\n # Find nodes names\n converters = indexing_pipeline.get_nodes_by_class(BaseConverter)\n preprocessors = indexing_pipeline.get_nodes_by_class(PreProcessor)\n\n for converter in converters:\n params[converter.name] = fileconverter_params.dict()\n for preprocessor in preprocessors:\n params[preprocessor.name] = preprocessor_params.dict()\n\n indexing_pipeline.run(file_paths=file_paths, meta=file_metas, params=params)\n\n # Clean up indexed files\n if not keep_files:\n for p in file_paths:\n p.unlink()", "def assets_push(ctx, metadata, dir, brizo, price, service_endpoint, timeout):\n try:\n files = [f for f in os.listdir(dir) if os.path.isfile(dir+'/'+f)]\n except NotADirectoryError:\n files = [dir]\n\n response = []\n metadata = json.load(open(metadata, 'r'))\n\n for f in files:\n metadata['base']['files'][0]['url'] = f\n response += [ctx.invoke(assets_publish,\n metadata=metadata,\n brizo=brizo,\n price=price,\n service_endpoint=service_endpoint,\n timeout=timeout)]", "def UploadFolderToGD(token_path, source_path, gd_folder): \n google_drive = ConnectGoogleDrive(token_path)\n file_cmd = spike.FileCMD()\n file_list = file_cmd.ListFiles(source_path)\n print('\\nUpload List:')\n print('\\n'.join(file_list))\n print('')\n \n item_list = google_drive.ListItems(gd_folder, None)\n folder_id = item_list[0]['id']\n \n for file_name in file_list:\n google_drive.UploadFile(source_path, folder_id, file_name)", "def upload(self, upload_request):\n raise NotImplementedError", "def push(self):\n\n self.start = time.time()\n self.log.info('Uploading {} 
files to database...'\n ''.format(len(self.filenames)))\n i = 0\n\n # Loop over a portion of files and upload them\n if self.n_files != -1:\n files = self.filenames[0:self.n_files]\n else:\n files = self.filenames\n\n for i, f in enumerate(files):\n\n # If were not debugging script allow exceptions and report them\n # later\n if not self.debug:\n try:\n self._push_one(f, **self.meta)\n\n except Exception as e:\n self.log.error('Error with {}'.format(f))\n self.log.error(e)\n self.errors.append((f, e))\n\n else:\n self._push_one(f, **self.meta)\n\n self.session.close()\n\n # Log the ending errors\n self.report(i + 1)", "def handle_request_upload(self, msg):\n\n\t\tdirect_response = not msg.arguments or msg.arguments[0] in ('', '/')\n\t\tresult = []\n\t\tfor file_obj in msg.options:\n\t\t\ttmpfilename, filename, name = file_obj['tmpfile'], file_obj['filename'], file_obj['name']\n\n\t\t\t# limit files to tmpdir\n\t\t\tif not os.path.realpath(tmpfilename).startswith(TEMPUPLOADDIR):\n\t\t\t\traise BadRequest('invalid file: invalid path')\n\n\t\t\t# check if file exists\n\t\t\tif not os.path.isfile(tmpfilename):\n\t\t\t\traise BadRequest('invalid file: file does not exists')\n\n\t\t\t# don't accept files bigger than umc/server/upload/max\n\t\t\tst = os.stat(tmpfilename)\n\t\t\tmax_size = int(ucr.get('umc/server/upload/max', 64)) * 1024\n\t\t\tif st.st_size > max_size:\n\t\t\t\tos.remove(tmpfilename)\n\t\t\t\traise BadRequest('filesize is too large, maximum allowed filesize is %d' % (max_size,))\n\n\t\t\tif direct_response:\n\t\t\t\twith open(tmpfilename) as buf:\n\t\t\t\t\tb64buf = base64.b64encode(buf.read())\n\t\t\t\tresult.append({'filename': filename, 'name': name, 'content': b64buf})\n\n\t\tif direct_response:\n\t\t\tself.finished(msg.id, result)\n\t\telse:\n\t\t\tself.handle_request_command(msg)", "def bulk_add_coupon(auth, file_name, coupon_id, base_url='https://api.cratejoy.com/v1/'):\n\n with open('{}'.format(file_name), 'r') as f:\n subscription_ids = [s.strip('\\n') for s in list(f.readlines())]\n\n payload = json.dumps({\n u'coupon_id': coupon_id,\n })\n\n counter = 0\n\n for sub in subscription_ids:\n counter += 1\n subscription_endpoint = '{}subscriptions/{}/coupons/'.format(\n base_url, sub)\n resp = requests.post(\n subscription_endpoint,\n data=payload,\n auth=auth\n )\n print('{}) Added coupon ID {} to {}, responded with status '\n 'code: {}'.format(counter, coupon_id, sub, resp.status_code))", "def upload():\n global FILE_NAME\n target = os.path.join(APP_ROOT, \"images\")\n print(target)\n\n if not os.path.isdir(target):\n os.mkdir(target)\n\n for file in request.files.getlist(\"file\"):\n print(file)\n filename = file.filename\n destination = \"/\".join([target, filename])\n FILE_NAME = destination\n file.save(destination)\n return render_template(\"complete.html\")", "def list_multipart_uploads(Bucket=None, Delimiter=None, EncodingType=None, KeyMarker=None, MaxUploads=None, Prefix=None, UploadIdMarker=None):\n pass", "def import_media(self, path):\n media_vertex = {}\n infile = configparser.ConfigParser()\n infile.read(path, encoding='utf-8')\n # Use the path name for error messages or assignments\n for field in infile.items(\"media\"):\n if (field[0].find(\"photo\") != -1 and\n len(field[0].split(\".\")) == 2):\n # Process a small set of photo credits for all the pandas\n # author = infile.get(\"media\", field[0] + \".author\")\n # if author in self.photo[\"credit\"].keys():\n # self.photo[\"credit\"][author] = self.photo[\"credit\"][author] + 1\n # else:\n # 
self.photo[\"credit\"][author] = 1\n # Track what the max number of panda photos an object has is\n # test_count = int(field[0].split(\".\")[1])\n # if test_count > self.photo[\"max\"]:\n # self.photo[\"max\"] = test_count\n # Accept the data and continue\n media_vertex[field[0]] = field[1]\n # TODO: track video info for apple counting as well\n else:\n # Accept the data and move along\n media_vertex[field[0]] = field[1]\n self.media.append(media_vertex)\n self.vertices.append(media_vertex)\n self.media_files.append(path)", "def _UploadWithProgressInternal(self, media, gcs_file_name):\n bucket, bucket_path = self._ParseBucketAndPath(gcs_file_name)\n request = self._service.objects().insert(bucket=bucket,\n media_body=media,\n name=bucket_path)\n if media._size == 0: # pylint: disable=protected-access\n return self._RunWithRetries(request.execute, self._CommonErrorMatcher)\n\n response = None\n logged_percent_done = 0\n while response is None:\n status, response = self._RunWithRetries(request.next_chunk,\n self._CommonErrorMatcher)\n if status:\n percent_done = int(status.progress() * 100)\n if percent_done - logged_percent_done >= 5:\n logging.info('Uploading to gs://%s/%s: %d%% complete.',\n bucket,\n bucket_path,\n int(status.progress() * 100))\n logged_percent_done = percent_done\n return response", "async def insert_requirements(conn, mapname):\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ? where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (f\"pics/mapshots/{mapname}\", \"mapshot\", 0, f\"pics/mapshots/{mapname}\"))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, f\"pics/mapshots/{mapname}\"))\n (reqs, sky, texs, exts, linkeds) = await get_required_files(mapname)\n if reqs:\n for req in reqs:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (req, \"requiredfile\", 0, req))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, req))\n if sky:\n for suffix in [\"bk\", \"dn\", \"ft\", \"lf\", \"rt\", \"up\"]:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (sky + suffix, \"sky\", 0, sky + suffix))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, sky + suffix))\n\n if texs:\n for req in texs:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (req, \"texture\", 0, req))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, req))\n if exts:\n for req in exts:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = 
select(conn, select_sql, (req, \"externalfile\", 0, req))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, req))\n if linkeds:\n for req in linkeds:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (req, \"linkedfile\", 0, req))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, req))", "def _upload_all(self, uploads):\n for upload in uploads:\n if isinstance(upload, dict):\n self._upload(upload)\n elif upload in uploads and isinstance(uploads[upload], dict):\n self._upload(uploads[upload])\n else:\n raise Exception('invalid upload object')", "def upload_all_parts(self):\n if not self.upload_id:\n raise RuntimeError(\"Attempting to use a multipart upload that has not been initiated.\")\n\n if self.file.name != \"<stdin>\":\n size_left = file_size = os.stat(self.file.name)[ST_SIZE]\n nr_parts = file_size / self.chunk_size + (file_size % self.chunk_size and 1)\n debug(\"MultiPart: Uploading %s in %d parts\" % (self.file.name, nr_parts))\n else:\n debug(\"MultiPart: Uploading from %s\" % (self.file.name))\n\n\tself.chunk_size = self.s3.config.multipart_chunk_size_mb * 1024 * 1024\n\n seq = 1\n\tif self.file.name != \"<stdin>\":\n while size_left > 0:\n offset = self.chunk_size * (seq - 1)\n current_chunk_size = min(file_size - offset, self.chunk_size)\n size_left -= current_chunk_size\n labels = {\n 'source' : unicodise(self.file.name),\n 'destination' : unicodise(self.uri.uri()),\n 'extra' : \"[part %d of %d, %s]\" % (seq, nr_parts, \"%d%sB\" % formatSize(current_chunk_size, human_readable = True))\n }\n try:\n self.upload_part(seq, offset, current_chunk_size, labels)\n except:\n error(u\"Upload of '%s' part %d failed. Aborting multipart upload.\" % (self.file.name, seq))\n self.abort_upload()\n raise\n seq += 1\n else:\n while True:\n buffer = self.file.read(self.chunk_size)\n offset = self.chunk_size * (seq - 1)\n current_chunk_size = len(buffer)\n labels = {\n 'source' : unicodise(self.file.name),\n 'destination' : unicodise(self.uri.uri()),\n 'extra' : \"[part %d, %s]\" % (seq, \"%d%sB\" % formatSize(current_chunk_size, human_readable = True))\n }\n if len(buffer) == 0: # EOF\n break\n try:\n self.upload_part(seq, offset, current_chunk_size, labels, buffer)\n except:\n error(u\"Upload of '%s' part %d failed. 
Aborting multipart upload.\" % (self.file.name, seq))\n self.abort_upload()\n raise\n seq += 1\n\n debug(\"MultiPart: Upload finished: %d parts\", seq - 1)", "def _upload_inline_media(\n self, inline_media: praw.models.InlineMedia\n ): # noqa: ANN001\n self._validate_inline_media(inline_media)\n inline_media.media_id = self._upload_media(\n media_path=inline_media.path, upload_type=\"selfpost\"\n )[0]\n return inline_media", "def _replace_files(self, index_key, media_key, new_list, fundamentals):\n _index=fundamentals.get(index_key, {})\n _media=fundamentals.get(media_key, {})\n for _file in new_list:\n _data=self._item_from_index(_file, 'data', _media)\n if not _data:\n self.log('Failed to write file %s due to no data'%_file)\n continue\n _file_name=self._item_from_index(_file, 'filename', _index)\n if _file_name:\n _stat=self.statfile(_file_name)\n if _stat and _stat['size']!=len(_data):\n try:\n self.writefile(_file_name, _data)\n except:\n self.log('Failed to write BREW file '+_file_name)\n if __debug__:\n raise", "def upload():\n # verify user\n email = flask.request.args[\"email\"]\n username = flask.request.args[\"username\"]\n\n file = flask.request.files[\"file\"]\n print(file.filename)\n file_bytestr = file.read()\n\n # query ms api\n emotion = ms_emotion_api(file_bytestr)\n print(emotion)\n if emotion is None:\n return flask.jsonify(error=\"MS API error, possibly no human face\")\n\n # save to mongodb\n saved = mongo.db.images.insert_one({\n \"filename\": file.filename,\n \"content\": file_bytestr,\n \"emotion\": emotion,\n \"date\": datetime.datetime.utcnow(),\n \"user_username\": username,\n \"user_email\": email,\n })\n # print(saved.inserted_id)\n # create user if needed\n mongo.db.users.update_one(filter={\n \"email\": email,\n }, update={\n \"$set\": {\"username\": username},\n # image_ids: list of foreign ids to images\n \"$push\": {\"image_ids\": saved.inserted_id},\n }, upsert=True)\n\n # client resend image_id when reporting music\n emotion[\"image_id\"] = str(saved.inserted_id)\n return flask.jsonify(emotion)" ]
[ "0.619183", "0.60925543", "0.6053126", "0.5934323", "0.59186196", "0.57231236", "0.5704533", "0.57031876", "0.57009745", "0.56800365", "0.5639666", "0.5630675", "0.56029576", "0.56010246", "0.55941606", "0.5494556", "0.54650325", "0.54639095", "0.5461023", "0.5386679", "0.53646606", "0.5339651", "0.53395617", "0.5324782", "0.5322305", "0.53209245", "0.53136164", "0.5310266", "0.5306981", "0.5303259", "0.52865374", "0.5282538", "0.5263117", "0.523738", "0.5234444", "0.5221241", "0.521464", "0.52099556", "0.5202597", "0.51910377", "0.5190917", "0.51878756", "0.5187129", "0.5175051", "0.5173582", "0.5170115", "0.5169157", "0.5168972", "0.51632893", "0.5162779", "0.5157919", "0.51574445", "0.514646", "0.51428944", "0.51349473", "0.51341844", "0.5131212", "0.51248956", "0.5107209", "0.5106054", "0.5103199", "0.51022243", "0.5095603", "0.50925004", "0.509071", "0.5079027", "0.50775445", "0.50648946", "0.5061595", "0.5043981", "0.5042767", "0.50411445", "0.50381386", "0.50374115", "0.50354004", "0.50315386", "0.5024256", "0.5020643", "0.50205123", "0.5018169", "0.501415", "0.50059634", "0.4989061", "0.4986093", "0.49858728", "0.49851236", "0.49791425", "0.49773598", "0.4966738", "0.49663866", "0.49636462", "0.49611083", "0.49581164", "0.4940051", "0.49330223", "0.49323925", "0.49321392", "0.49256244", "0.49132314", "0.49057135" ]
0.8371117
0
Removes all of the given files
def remove_files(files):\n for file_name in files:\n os.remove(file_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_files(self, files: Set[str]) -> None:\n for f in files:\n src = os.path.join(self.get_directory(), f)\n os.remove(src)", "def clean(files):\n\tfor file in files:\n\t\ttry:\n\t\t\tos.remove(file)\n\t\texcept Exception as e:\n\t\t\tprint(e)", "def remove_files(file_list):\n###############################################################################\n for fpath in file_list:\n if os.path.exists(fpath):\n os.remove(fpath)\n # End if\n # End for", "def clean_data():\n for clean_file in clean_files:\n file_list = [f for f in os.listdir(\".\") if f.endswith(clean_file)]\n for f in file_list:\n os.remove(f)", "def remove_files(files):\n for file in files:\n if os.path.exists(file):\n if file.startswith(\"./\") or file.startswith(\".\\\\\"):\n file = file[2:]\n if os.path.isdir(file):\n rmtree(file)\n else:\n os.unlink(file)", "def remover_files():\n directory = os.getcwd()\n for file_name in glob.glob((\"{}/tmp/*\").format(directory)):\n remove(file_name)", "def cleanup(*args, **kwargs):\n for file in args:\n if exists(file):\n remove(file)\n for file in kwargs:\n if exists(file):\n remove(file)", "def delete_files(src_files):\n for i, src_file in enumerate(src_files):\n sys.stdout.write(str(i + 1) + ': ' + src_file + '\\n')\n subprocess.call(['rm', src_file])", "def _remove_files(self):\n if hasattr(self, 'files'):\n for file in self.files:\n if os.path.exists(file):\n os.remove(file)\n\n self._remove_changes()\n self._remove_temporary_files()", "def clean():\n clean_files()", "def cleanFiles(a_file_list):\n for entry in a_file_list:\n cmd = 'sudo rm ' + entry\n os.system(cmd)", "def rm(*fns):\n for fn in fns:\n try:\n os.remove(fn)\n except FileNotFoundError:\n pass", "def _deleteFiles(self, fileList):\n import os\n import glob\n\n for ent in fileList:\n # for fil in glob.glob(os.path.join(self._outputDir_, ent)):\n for fil in glob.glob(ent):\n try:\n if os.path.exists(fil):\n os.remove(fil)\n except OSError as e:\n self._reporter.writeError(\"Failed to delete '\" + fil + \"' : \" + e.strerror)\n raise", "def delete_b_files(intermediate_files: List[File]) -> None:\n for f in intermediate_files:\n f.remove()", "def remove_frames(tmpdir, files):\n for fname in files: os.remove(os.path.join(tmpdir, fname))\n if not(tmpdir == None): os.rmdir(tmpdir)", "def clean_files(self):\n self.filenames.clear()", "def clean():\n for dirpath, dirnames, filenames in os.walk('.'):\n for filename in filenames:\n if filename.endswith('.pyc') or filename.endswith('.pyo'):\n full_pathname = os.path.join(dirpath, filename)\n click.echo('Removing {}'.format(full_pathname))\n os.remove(full_pathname)", "def remove(args):\n files = []\n for path in args.files:\n if os.path.isdir(path):\n ft = filetree(path)\n files.extend(ft.filelist())\n else:\n files.append(path)\n for path in files:\n relpath = os.path.normpath(os.path.relpath(path, args.base))\n if relpath in args.cache:\n del args.cache[args.cache.index(relpath)]\n if args.delete and os.path.exists(path):\n os.remove(path)\n args.update = True\n return", "def clean_filesystem(files=[]):\n remove_files(files + find_cache_files())", "def remove(self, filenames=None, missing_ok=False):\n if not filenames:\n filenames = self.filenames\n\n for f in filenames:\n try:\n self.path.joinpath(f).unlink()\n except FileNotFoundError as e:\n if missing_ok == False:\n raise", "def deleteAllFiles(self, flush=True): \n \n if flush: \n self.flush(False) \n \n for filePath in self.filePathDict.keys(): \n if self.outDir is None: \n fullPath = filePath \n else: \n 
fullPath = os.path.join(self.outDir,filePath) \n \n if os.path.exists(fullPath): \n os.remove(fullPath)", "def remove_files(filename=None):\n os.remove(filename)\n print(\"The file %s has been removed\" % filename)", "def clean_files_for(file):\n for f in [file, f\"{file}.json\", f\"{file}.lock\"]:\n if os.path.isfile(f):\n os.remove(f)", "def delete_files(pths):\n for f in pths:\n try:\n os.remove(f)\n except OSError:\n log.debug(\"Found and ignored Error when deleting file %s\" % f)\n pass\n log.debug(\"deleted %d files\" % len(pths))", "def teardown():\n for filename in files_to_delete:\n delete_file(filename)", "def cleanup(e):\n for f in e.files:\n try:\n if os.path.isfile(f):\n os.remove(f)\n except OSError:\n continue\n\n return", "def _cleanup_files(self):\n\n for root, dirs, files in os.walk(self.build_directory):\n dirs_to_delete = [\n Path(root).joinpath(x) for x in dirs if x == '__pycache__'\n ]\n files_to_delete = [\n Path(root).joinpath(x) for x in files if Path(x).suffix == '.pyc'\n ]\n for d in dirs_to_delete:\n logger.info('Deleting: %s', d)\n shutil.rmtree(d)\n for f in files_to_delete:\n logger.info('Deleting: %s', f)\n f.unlink()", "def _remove_tmpfiles():\n for f in tmpfiles:\n try:\n os.remove(f)\n except OSError:\n pass", "def remove_all():\n storage = FileStorage()\n objects = storage.all()\n objects = list(objects.values())\n\n for element in objects:\n storage.delete(element)\n objects = storage.all()", "def _delete_files(p4, files, repo_name=None):\n if repo_name:\n msgstr = _(\"Deleting {num_commits} commit objects for repo '{repo_name}'.\")\n else:\n msgstr = _(\"Deleting {num_commits} commit objects for all repos.\")\n total = 0\n bite_size = 1000\n while len(files):\n to_delete = files[:bite_size]\n files = files[bite_size:]\n result = p4.run(\"delete\", to_delete)\n count = sum([int('depotFile' in row and row['action'] == 'delete') for row in result])\n total += count\n if count:\n for d in to_delete:\n if os.path.isfile(d):\n os.remove(d)\n result = p4.run(\"submit\", \"-d\", msgstr.format(num_commits=count, repo_name=repo_name))\n return total", "def cleanup_files(self):\n\n self.backup_files()\n self.delete_files()", "def clean():\n for f in [f for f in os.listdir() if f.endswith(\".part\")]:\n os.remove(f)", "def __rm(files):\n # Open master file and read in lines\n master = open('master.tex', 'r')\n lines = master.readlines()\n master.close()\n\n # Remove lines from master.tex if they exist\n for file in files:\n # Ask if the user really wants to remove preamble.tex\n if 'preamble.tex' in file:\n if not milc.questions.yesno(f'Are you sure you want to remove {emph(file)}?'):\n continue\n if not os.path.isfile(file):\n cli.log.error(f'I can\\'t find {emph(file)}.')\n else:\n try:\n lines.remove('\\\\input{'\n f'{file}'\n '}\\n')\n cli.log.info(f'Removed {emph(file)} from {emph(\"master.tex\")}')\n\n # Delete files if --delete flag is on\n if cli.config.rm.delete:\n cli.log.info(f'Deleting {emph(file)}')\n os.remove(file)\n except:\n cli.log.error(f'I can\\'t find {emph(file)} in the {emph(\"master.tex\")} file.')\n\n # Write new master.tex\n master = open('master.tex', 'w')\n master.writelines(lines)\n master.close()", "def remove_old_files(filelist):\n\n for filename in filelist:\n if path.exists(filename):\n try:\n remove(filename)\n print \"%s deleted\" % filename \n except Exception: #TODO Exception spesifik.\n stderr.write(\"%s cannot remove. 
Please check your priviledge\\n\"\n % filename)\n exit(1)", "def remove_intermediate_files(dir_):\n file_list = glob.glob(f'{dir_}/*temp*')\n [os.remove(f) for f in file_list]", "def deleteIntermediateFiles(self):\n uniq_files = set(self.files_to_delete)\n print (\"Deleting %d intermediate files\" % len(uniq_files))\n for fn in uniq_files:\n # don't delete log files\n if not fn.endswith(\".log\"):\n os.remove(fn)", "def remove(self, directory, files):\n if not files:\n return\n\n fs = self.app.filestorage\n for file in files:\n path = '{}/{}'.format(directory, file)\n if fs.exists(path) and not fs.isdir(path):\n fs.remove(path)", "def clean(self):\n files = ['CHG', 'CHGCAR', 'POSCAR', 'INCAR', 'CONTCAR',\n 'DOSCAR', 'EIGENVAL', 'IBZKPT', 'KPOINTS', 'OSZICAR',\n 'OUTCAR', 'PCDAT', 'POTCAR', 'vasprun.xml',\n 'WAVECAR', 'XDATCAR', 'PROCAR', 'ase-sort.dat',\n 'LOCPOT', 'AECCAR0', 'AECCAR1', 'AECCAR2',\n 'WAVECAR.GTO', 'vasp.out', 'vasp.err']\n for f in files:\n try:\n os.remove(f)\n except OSError:\n pass", "def __del__(self):\n for filename in self.files:\n unlink(filename)", "def remove_unused_files(self):\n\n response_list = self.client.api_call(\n f'files.list?'\n f'count=1000&'\n )\n assert response_list['ok']\n\n for file in [\n f for f in response_list['files']\n if not f['channels'] and not f['groups'] and not f['ims']\n ]:\n response_delete = self.client.api_call(\n f'files.delete?'\n f'file={file[\"id\"]}'\n )\n assert response_delete['ok']", "def clean_files(ftype, remove=False):\n import os\n files = os.listdir()\n found_files = [f for f in files if ftype in f]\n if remove:\n for ff in found_files:\n os.remove(ff)\n print(\"Removed {}\".format(ff))\n else:\n return found_files", "def remove_module_files(self, base_dir):\n for file, pkg in self.files:\n package_path = os.path.join(base_dir, 'gocat', pkg)\n # Check if entire package is to be deleted\n if file == '*':\n self._unstage_folder(package_path)\n else:\n file_path = os.path.join(package_path, file)\n if os.path.exists(file_path):\n os.remove(file_path)", "def cleanup_intermediate_files():\n\n dirs = (DIR_PAGE, DIR_SRGB, DIR_VTI, DIR_TIFF, DIR_BACK, DIR_TEXT)\n map(lambda dir: shutil.rmtree(os.path.join(cwd, dir)) , dirs)", "def clean_retrosheet_files(self):\n # Get zipped and unzipped folder names\n zippedFileFolder = Filepath.get_retrosheet_folder(folder='zipped')\n unzippedFileFolder = Filepath.get_retrosheet_folder(folder='unzipped')\n\n # Clean out all files in both folders\n for folder in (zippedFileFolder, unzippedFileFolder):\n os.chdir(folder)\n for file in os.listdir(os.getcwd()): \n if os.path.isdir(file): \n shutil.rmtree(file)\n else: \n os.remove(file)", "def delete_files(file_prototype):\n from os import remove\n folder = getFolder()\n if folder != '/':\n for f in list_files(folder, file_prototype):\n print('Deleting: '+f)\n remove(f)\n\n return()", "def clean(allimages, alldirs):\n\n for img in allimages:\n # Delete HTML files\n htmlfn = join(opts.root, img._dir._path, img._pagefn)\n if exists(htmlfn):\n if opts.verbose:\n print \"Deleting\", htmlfn\n try:\n os.unlink(htmlfn)\n except:\n print >> sys.stderr, \"Error: deleting\", htmlfn\n\n # Delete thumbnails\n if img._thumbfn:\n thumbfn = join(opts.root, img._thumbfn)\n if exists(thumbfn):\n if opts.verbose:\n print \"Deleting\", thumbfn\n try:\n os.unlink(thumbfn)\n img._thumbfn = None\n except:\n print >> sys.stderr, \"Error: deleting\", thumbfn\n\n for d in alldirs:\n files = dircache.listdir(join(opts.root, d._path))\n\n # Delete HTML files in 
directories\n for f in files:\n fn = join(opts.root, d._path, f)\n if f in [ dirindex_fn, allindex_fn, allcidx_fn,\n sortindex_fn, css_fn ] or \\\n f.startswith('trackindex-'):\n if opts.verbose:\n print \"Deleting\", fn\n try:\n os.unlink(fn)\n pass\n except:\n print >> sys.stderr, \"Error: deleting\", fn\n\n if f == index_fn and islink(fn):\n os.unlink(fn)", "def _del_files(self, index_key, _del_list, fundamentals):\n _index=fundamentals.get(index_key, {})\n for _file in _del_list:\n _file_name=self._item_from_index(_file, 'filename', _index)\n if _file_name:\n try:\n self.rmfile(_file_name)\n except Exception, e:\n self.log('Failed to delete file %s: %s'%(_file_name, str(e)))", "def space_cleaning():\n for file in os.listdir(\".\"):\n if file.endswith(\".png\"):\n os.remove(file)", "def remove_all_state_files():\n for state_file in Path(CONFIG_DIR).glob('**/state.json'):\n state_file.unlink()", "def remove_extra_files(self):\n\n for f in self._extra_files:\n if os.path.isfile(f):\n os.remove(f)", "def remove_matching_files(\n removal_ids: Set[str],\n directory: str,\n ):\n for file_name in os.listdir(directory):\n file_id, _ = os.path.splitext(file_name)\n if file_id in removal_ids:\n os.remove(os.path.join(directory, file_name))", "def cleanup(self):\n files = self.nlst()\n latest = self.latest_filename\n for filename in files:\n if filename != latest:\n result = self.delete(filename)\n logger.info(f\"Deleted old export from FTP: {result}\")", "def decide_files_to_delete(files: list) -> Set:\n files_to_keep = decide_files_to_keep(files)\n file_set = set(files)\n # using set theory: files_to_delete = files - files_to_keep\n return file_set.difference(files_to_keep)", "def clean():\n folders = ['utils_dfn/temp', 'utils_dfn/img', 'utils_dfn/mask', 'utils_dfn/output']\n for folder in folders:\n for item in os.listdir(folder):\n item_path = os.path.join(folder, item)\n if os.path.isdir(item_path):\n shutil.rmtree(item_path)\n elif os.path.isfile(item_path):\n os.remove(item_path)", "def delFilesFromDirs(self, files, dirs):\n\n\t\tself.__delActualDirs(dirs)\n\t\tTagging.delTagsFromElements(self, dirs, files)", "def recursive_rm(*patterns):\n for root, dirs, files in os.walk(u'.'):\n root = os.path.normpath(root)\n if root.startswith('.git/'):\n continue\n for file in files:\n for pattern in patterns:\n if fnmatch.fnmatch(file, pattern):\n safe_remove(os.path.join(root, file))\n for dir in dirs:\n for pattern in patterns:\n if fnmatch.fnmatch(dir, pattern):\n safe_rmtree(os.path.join(root, dir))", "def remove(self,filelist):\n\n self.ws.execute('svn remove %s' % (' '.join(filelist)))", "def remove_cruft_files(cls, files):\n valid_files = []\n for changes_file in files:\n if cls.is_changes(changes_file):\n LOG.debug(\"Checking: {c}\".format(c=changes_file))\n try:\n with mini_buildd.misc.open_utf8(changes_file) as cf:\n for fd in debian.deb822.Changes(cf).get(\"Files\", []):\n valid_files.append(fd[\"name\"])\n LOG.debug(\"Valid: {c}\".format(c=fd[\"name\"]))\n\n valid_files.append(os.path.basename(changes_file))\n except BaseException as e:\n mini_buildd.config.log_exception(LOG, \"Invalid changes file: {f}\".format(f=changes_file), e, logging.WARNING)\n\n for f in files:\n if os.path.basename(f) not in valid_files:\n # Be sure to never ever fail, just because cruft removal fails (instead log accordingly)\n try:\n if os.path.isdir(f):\n shutil.rmtree(f)\n else:\n os.remove(f)\n LOG.warning(\"Cruft file (not in any changes file) removed: {f}\".format(f=f))\n except BaseException as e:\n 
mini_buildd.config.log_exception(LOG, \"Can't remove cruft from incoming: {f}\".format(f=f), e, logging.CRITICAL)", "def clear_old_files(self):\n self.logger.logMsg(\"Clearing Old Files.....\")\n try:\n for files in os.listdir(self.download_path):\n path = os.path.join(self.download_path, files)\n os.remove(path)\n for files in os.listdir(self.outpath):\n path = os.path.join(self.outpath, files)\n os.remove(path)\n except Exception as e:\n self.logger.logError(\"Error Creating Old Files {}.....\".format(str(e)))\n raise Exception('Error in Clearing Old Files')\n\n self.logger.logMsg(\"Done Clearing Old Files.....\")", "def clean_temp_storage_dir(self, filenames):\n for fn in filenames:\n try:\n pathlib.Path(pathlib.PurePath(self.temp_storage_dir, fn)).unlink()\n except FileNotFoundError:\n pass", "def clean_dir(dir):\n files = glob.glob(dir)\n for file in files:\n os.remove(file)", "def clear_temp(remove_all=True):\n tf_list = []\n\n if remove_all:\n temp_dir = _get_temp_dir(False)\n temp_dir += (\n os.path.sep if os.path.sep not in temp_dir[len(temp_dir) - 1] else \"\"\n )\n tf_list = glob.glob(\"{0}TESS_*\".format(temp_dir))\n else:\n global _tempfiles\n\n tf_list = list(_tempfiles)\n _tempfiles.clear()\n\n for tf in tf_list:\n if os.path.isfile(tf):\n _remove_file(tf)", "def clear_files_paths(self):\n del self.__files_paths[:]", "def delete_all(self, prog:progress=None): \n\t\tself.__output_status('Delete all files')\n\t\tif (self.__check_terminated()):\n\t\t\treturn;\t\n\t\tdelete_dir(self.root)\n\t\ttime.sleep(0.3)", "def purge_htmlfiles(args, posts):\n htmlist = list_of_htmlfiles(args, posts)\n html_to_remove = list()\n for fullname in glob.glob(os.path.join(args.root, '*.htm*')):\n if fullname not in htmlist:\n html_to_remove.append(fullname)\n\n if len(html_to_remove) > args.thumbnails.threshold_htmlfiles:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(f'{len(html_to_remove)} html files to remove. Continue [y|n]? 
').lower()\n if inpt == 'n':\n return\n\n for name in html_to_remove:\n print('Removing html files', name)\n os.remove(name)", "def clean_all_folder():\n LOGGER.warning('removal of old files has been temporarily disabled')\n # paths_to_clean = CFG.remove_files\n # if paths_to_clean: # pylint: disable=using-constant-test\n # for remove_config in paths_to_clean: # pylint: disable=not-an-iterable\n # name = tuple(remove_config.keys())[0]\n # LOGGER.info(f'processing: {name}')\n # remove_config = remove_config[name]\n # if 'folder' not in remove_config.keys():\n # LOGGER.error(f'missing \"folder\" in {name}')\n # return\n # if 'age' not in remove_config.keys():\n # LOGGER.error(f'missing \"age\" in {name}')\n # return\n # if not os.path.exists(remove_config['folder']):\n # LOGGER.error(f'path does not exist: {remove_config[\"folder\"]}')\n # return\n # _remove_old_files_from_folder(**remove_config)\n # else:\n # LOGGER.debug('no folder to clean')", "def clear_debug_files(root_path_):\n\n ext_file = [\n \".sdf\",\n \".VC.db\",\n \".idb\",\n \".exp\",\n \".aps\",\n \".pdb\",\n \".obj\",\n \".res\",\n \".log\",\n \".tlog\",\n \".manifest\",\n \".lastbuildstate\",\n \".pch\",\n \".ipch\",\n \".cache\",\n \".ilk\",\n \".ipdb\",\n \".iobj\",\n \".aps\",\n ]\n\n ext_dir = [\n \"ipch\",\n\n ]\n if os.path.exists(root_path_):\n for root, dirs, files in os.walk(root_path_, topdown=True):\n for file in files:\n filename = os.path.join(root, file)\n delete_file(filename, ext_file)\n \n for dir in dirs:\n dir_path = os.path.join(root, dir)\n if dir.lower() in ext_dir:\n print(dir_path);\n shutil.rmtree(dir_path)\n\n for a_dir in ext_dir:\n path = os.path.join(root_path_, a_dir)\n if os.path.exists(path):\n shutil.rmtree(path)", "def _clean_files(self):\n if self.delfiles & 1:\n ProcUtils.remove(self.okm)\n if self.delfiles & 2:\n ProcUtils.remove(self.hkm)\n if self.delfiles & 4:\n ProcUtils.remove(self.qkm)\n if self.delfiles & 8:\n ProcUtils.remove(self.obc)\n\n if self.log is False:\n ProcUtils.remove(self.pcf_file)\n base = os.path.basename(self.okm)\n ProcUtils.remove(os.path.join(self.dirs['run'],\n '.'.join(['LogReport', base])))\n ProcUtils.remove(os.path.join(self.dirs['run'],\n '.'.join(['LogStatus', base])))\n ProcUtils.remove(os.path.join(self.dirs['run'],\n '.'.join(['LogUser', base])))", "def delete_leftovers(self):\n for each_file, artist in self.past_songs_db_data:\n if os.path.isfile(each_file): \n os.remove(each_file)\n print \"Deleted \" + each_file\n\n for each_file in os.listdir(\".\"):\n if each_file.endswith(\".jpg\"):\n os.remove(each_file)", "def clean(self):\n clean_list = [\n position\n for position in os.listdir()\n if os.path.isfile(position) and not position.startswith(\".\")\n ]\n self.move_files(clean_list)", "def clear(self, exclude=None):\n exclude = exclude or []\n for root, dirs, files in os.walk(self.config.output_dir):\n for f in files:\n if f not in exclude:\n os.unlink(os.path.join(root, f))\n for d in dirs:\n if d not in exclude:\n shutil.rmtree(os.path.join(root, d))", "def RemoveFilesWildcards(file_wildcard, root=os.curdir):\n for item in LocateFiles(file_wildcard, root):\n try:\n os.remove(item)\n except OSError, e:\n if e.errno != errno.ENOENT:\n raise", "def _clean_up(paths):\n print('Cleaning up')\n # Iterate over the given paths, unlinking them\n for path in paths:\n if os.path.exists(path):\n print('Removing %s' % path)\n os.unlink(path)\n else:\n print('%s Not found. Skipped.' 
% path)", "def _clean_files(self, in_subdirectory=False):\n files = self._file_explorer.ls()\n if not in_subdirectory:\n LOG.info(f\"Cleaning {len(files)} file(s) on the device\")\n for file_ in files:\n try:\n self._file_explorer.rm(file_)\n except Exception as e:\n # Try to explore subdirectory\n LOG.info(f\"Attempting to clean directory {file_}\")\n self._file_explorer.cd(file_)\n self._clean_files(in_subdirectory=True)\n if in_subdirectory:\n self._file_explorer.cd('..')\n else:\n LOG.info(\"Done cleaning FS\")", "def delFiles(self):\r\n \r\n logStr = \"{0:s}.{1:s}: \".format(self.__class__.__name__, sys._getframe().f_code.co_name)\r\n logger.debug(\"{0:s}{1:s}\".format(logStr,'Start.')) \r\n \r\n try: \r\n if os.path.exists(self.h5File): \r\n os.remove(self.h5File) \r\n logger.debug(\"{0:s} File {1:s} deleted.\".format(logStr,self.h5File)) \r\n except XmError:\r\n raise \r\n except Exception as e:\r\n logStrFinal=\"{:s}Exception: Line: {:d}: {!s:s}: {:s}\".format(logStr,sys.exc_info()[-1].tb_lineno,type(e),str(e))\r\n logger.error(logStrFinal) \r\n raise XmError(logStrFinal) \r\n finally:\r\n logger.debug(\"{0:s}{1:s}\".format(logStr,'_Done.'))", "def _remove_all_manifest_files(self):\n manifest_files = Path(self.manifest_dir).glob(\"**/elyra-component-manifest-*.json\")\n for file in manifest_files:\n os.remove(str(file))", "def _rm_glob(self, path):\r\n for path in glob.glob(path):\r\n shutil.rmtree(path)", "def cleanup(self):\r\n for f in [i for d in self.data.values() for i in d[\"filenames\"]]:\r\n try:\r\n os.unlink(f)\r\n except Exception: pass\r\n self.Destroy()", "def cleanup() -> None:\n\n for fname in glob(os.path.join(tdir, 'alexandria.*')):\n if os.path.splitext(fname)[1] not in {'.c', '.h'}:\n os.unlink(fname)", "async def delete_files(client, bucket, files: Set):\n result = await client.delete_objects(Bucket=bucket, Delete={\n 'Objects': [{'Key': file} for file in files]\n })\n print(result)", "def clean(self) -> None:\n # remove all *.py and *.pyi files in the folder\n for wc in [\"*.py\", \"*.pyi\", \"modules.json\"]:\n for f in (self.package_path).rglob(wc):\n f.unlink()", "def clean(self):\n if self.verbosity:\n self.header(\"Cleaning data files\")\n\n tsv_list = os.listdir(self.tsv_dir)\n\n if self.resume_mode:\n # get finished clean command logs of last update\n prev_cleaned = [\n x.file_name + '.TSV'\n for x in self.log_record.called.filter(\n command='cleancalaccessrawfile',\n finish_datetime__isnull=False\n )\n ]\n self.log(\"{} files already cleaned.\".format(len(prev_cleaned)))\n # remove these from tsv_list\n tsv_list = [x for x in tsv_list if x not in prev_cleaned]\n\n # Loop through all the files in the source directory\n if self.verbosity:\n tsv_list = progress.bar(tsv_list)\n for name in tsv_list:\n call_command(\n \"cleancalaccessrawfile\",\n name,\n verbosity=self.verbosity,\n keep_files=self.keep_files,\n )", "def clear_log_files(log_files):\n for log_file in log_files:\n try:\n open(log_file, 'w', 0).close()\n except IOError:\n pass", "def purge():\n all_hashes = read_all()\n used_hashes = read_used()\n\n for kind, hashes in used_hashes.items():\n to_remove = all_hashes[kind].difference(hashes)\n if kind == 'evs':\n delete_from_directory_by_hashes(EV_DIRECTORY, to_remove)\n elif kind == 'cache':\n delete_from_directory_by_hashes(CACHE_DIRECTORY, to_remove)\n elif kind == 'seeds':\n delete_from_directory_by_hashes(SEED_DIRECTORY, to_remove)\n\n reset_used()", "def delete_files_for_package(self, package):\n files = self.find_files_for_package(package, 
absolute_path=True)\n if not files:\n return\n path = os.path.dirname(files[0])\n for file in files:\n if os.path.exists(file):\n log.debug(\"Removing file '%s'\" % (file))\n os.unlink(file)\n if os.path.isdir(path) and os.listdir(path) == []:\n log.debug(\"Remove empty package repository '%s'\" % (path))\n os.rmdir(path)", "def _rm(folder):\n import os\n import shutil\n for the_file in os.listdir(folder):\n file_path = os.path.join(folder, the_file)\n try:\n if os.path.isfile(file_path):\n os.unlink(file_path)\n elif os.path.isdir(file_path):\n shutil.rmtree(file_path)\n except Exception as e:\n print(e)\n return", "def clean():\n possible_outputs = (\n '{}.html'.format(CONFIG['FULL_PROJECT_NAME']),\n '{}.epub'.format(CONFIG['FULL_PROJECT_NAME']),\n '{}.pdf'.format(CONFIG['FULL_PROJECT_NAME']),\n '{}.docx'.format(CONFIG['FULL_PROJECT_NAME']),\n '{}.odt'.format(CONFIG['FULL_PROJECT_NAME']),\n )\n\n for filename in possible_outputs:\n if os.path.exists(filename):\n os.remove(filename)\n print(\"Removed {}\".format(filename))", "def removeRedundantFiles(workdir, outputfiles=[]):\n\n logger.info(\"Removing redundant files prior to log creation\")\n\n workdir = os.path.abspath(workdir)\n\n dir_list = [\"AtlasProduction*\",\n \"AtlasPoint1\",\n \"AtlasTier0\",\n \"buildJob*\",\n \"CDRelease*\",\n \"csc*.log\",\n \"DBRelease*\",\n \"EvgenJobOptions\",\n \"external\",\n \"fort.*\",\n \"geant4\",\n \"geomDB\",\n \"geomDB_sqlite\",\n \"home\",\n \"o..pacman..o\",\n \"pacman-*\",\n \"python\",\n \"runAthena*\",\n \"share\",\n \"sources.*\",\n \"sqlite*\",\n \"sw\",\n \"tcf_*\",\n \"triggerDB\",\n \"trusted.caches\",\n \"workdir\",\n \"*.data*\",\n \"*.events\",\n \"*.py\",\n \"*.pyc\",\n \"*.root*\",\n \"JEM\",\n \"tmp*\",\n \"*.tmp\",\n \"*.TMP\",\n \"MC11JobOptions\",\n \"scratch\",\n \"jobState-*-test.pickle\",\n \"*.writing\",\n \"pwg*\",\n \"pwhg*\",\n \"*PROC*\",\n \"madevent\",\n \"HPC\",\n \"objectstore*.json\",\n \"saga\",\n \"radical\",\n \"ckpt*\"]\n\n # remove core and pool.root files from AthenaMP sub directories\n try:\n cleanupAthenaMP(workdir, outputfiles)\n except Exception, e:\n print(\"Failed to execute cleanupAthenaMP(): %s\" % (e))\n\n # explicitly remove any soft linked archives (.a files) since they will be dereferenced by the tar command (--dereference option)\n matches = []\n import fnmatch\n for root, dirnames, filenames in os.walk(workdir):\n for filename in fnmatch.filter(filenames, '*.a'):\n matches.append(os.path.join(root, filename))\n for root, dirnames, filenames in os.walk(os.path.dirname(workdir)):\n for filename in fnmatch.filter(filenames, 'EventService_premerge_*.tar'):\n matches.append(os.path.join(root, filename))\n if matches != []:\n for f in matches:\n remove(f)\n # else:\n # print(\"Found no archive files\")\n\n # note: these should be partitial file/dir names, not containing any wildcards\n exceptions_list = [\"runargs\", \"runwrapper\", \"jobReport\", \"log.\"]\n\n to_delete = []\n for _dir in dir_list:\n files = glob(os.path.join(workdir, _dir))\n exclude = []\n\n if files:\n for exc in exceptions_list:\n for f in files:\n if exc in f:\n exclude.append(os.path.abspath(f))\n\n _files = []\n for f in files:\n if not f in exclude:\n _files.append(os.path.abspath(f))\n to_delete += _files\n\n exclude_files = []\n for of in outputfiles:\n exclude_files.append(os.path.join(workdir, of))\n for f in to_delete:\n if not f in exclude_files:\n remove(f)\n\n # run a second pass to clean up any broken links\n broken = []\n for root, dirs, files in 
os.walk(workdir):\n for filename in files:\n path = os.path.join(root, filename)\n if os.path.islink(path):\n target_path = os.readlink(path)\n # Resolve relative symlinks\n if not os.path.isabs(target_path):\n target_path = os.path.join(os.path.dirname(path), target_path)\n if not os.path.exists(target_path):\n broken.append(path)\n else:\n # If it's not a symlink we're not interested.\n continue\n\n if broken:\n for p in broken:\n remove(p)\n\n return 0", "def remove_files(file, filename_excel):\n os.remove(file)\n print(\"Remove the data input sent by the user\")\n os.remove(velocity_input + \"/\" + filename_excel + \"_v_input.txt\")\n print(\"Remove the input file for velocity calculations\")\n os.remove(velocity_output + \"/\" + filename_excel + \"_vx.output\")\n print(\"Remove the vx output file\")\n os.remove(velocity_output + \"/\" + filename_excel + \"_vy.output\")\n print(\"Remove the vy output file\")\n os.remove(velocity_output + \"/\" + filename_excel + \"_vz.output\")\n print(\"Remove the vy output file\")\n os.remove(data_output + \"/\" + filename_excel + \"_results.xlsx\")\n print(\"Remove the data output sent to the user's email.\")", "def cleanUpTemporaryFiles(options):\n os.system(\"rm \"+options.output_directory_per_run+\"/*.abundance\")\n os.system(\"rm \"+options.output_directory_per_run+\"/*.phasing_score\")\n os.system(\"rm \"+options.output_directory_per_run+\"/*regionsOfInterest*\")\n os.system(\"mv \"+options.output_directory_per_run+\"/* \"+options.output_directory_per_run+\"/../\")\n os.system(\"rm -rf \"+options.output_directory_per_run)", "def clean(self):\n print(\"Cleaning outputs in %s\" % self.args.output)\n files = glob.glob(self.args.output + \"*.pkl\")\n for f in files:\n if os.path.exists(f):\n os.remove(f)", "def remove_all(self, *items):\n for item in items:\n self.remove(item)", "def Remove(args):\n parser = argparse.ArgumentParser(usage='rm [Options] PATHS...',\n description=Remove.__doc__)\n parser.add_argument(\n '-R', '-r', '--recursive', dest='recursive', action='store_true',\n default=False,\n help='remove directories recursively.')\n parser.add_argument(\n '-v', '--verbose', dest='verbose', action='store_true',\n default=False,\n help='verbose output.')\n parser.add_argument(\n '-f', '--force', dest='force', action='store_true',\n default=False,\n help='force, do not error it files does not exist.')\n parser.add_argument('files', nargs='+')\n options = parser.parse_args(args)\n\n try:\n for pattern in options.files:\n dst_files = glob.glob(pattern)\n if not dst_files:\n # Ignore non existing files when using force\n if options.force:\n continue\n raise OSError('rm: no such file or directory: ' + pattern)\n\n for dst in dst_files:\n if options.verbose:\n print('rm ' + dst)\n\n if os.path.isfile(dst) or os.path.islink(dst):\n for _ in range(5):\n try:\n # Check every time, since it may have been deleted after the\n # previous failed attempt.\n if os.path.isfile(dst) or os.path.islink(dst):\n os.remove(dst)\n break\n except OSError as error:\n print('Failed remove with %s, retrying' % error)\n time.sleep(5)\n else:\n print('Gave up.')\n raise OSError('rm: ' + str(error))\n\n if options.recursive:\n for _ in range(5):\n try:\n if os.path.isdir(dst):\n if sys.platform == 'win32':\n # shutil.rmtree doesn't handle junctions properly. 
Let's just\n # shell out to rd for this.\n subprocess.check_call([\n 'rd', '/s', '/q', os.path.normpath(dst)], shell=True)\n else:\n shutil.rmtree(dst)\n break\n except OSError as error:\n print('Failed rmtree with %s, retrying' % error)\n time.sleep(5)\n else:\n print('Gave up.')\n raise OSError('rm: ' + str(error))\n\n except OSError as error:\n print(error)\n\n return 0", "def destroyer(): # ;-)\n\n def find_files_to_remove(pyfile):\n for filename in (\"%sc\" % pyfile, \"%so\" % pyfile):\n if exists(filename):\n yield filename\n\n counter = 0\n try:\n while True:\n pyfile = (yield)\n for filename in find_files_to_remove(pyfile):\n try:\n log.debug('removing %s', filename)\n remove(filename)\n counter += 1\n except (IOError, OSError), e:\n log.error('cannot remove %s', filename)\n log.debug(e)\n except GeneratorExit:\n log.info(\"removed files: %s\", counter)", "def cleanup(options=None):\n if options is None:\n for f in glob.glob(\"*.grmpy.*\"):\n os.remove(f)\n elif options == 'regression':\n for f in glob.glob(\"*.grmpy.*\"):\n if f.startswith('regression'):\n pass\n else:\n os.remove(f)", "def clear_base_files(self):\r\n compilelock.get_lock()\r\n try:\r\n for base_dir in ('cuda_ndarray', 'cutils_ext', 'lazylinker_ext',\r\n 'scan_perform'):\r\n to_delete = os.path.join(self.dirname, base_dir + '.delete.me')\r\n if os.path.isdir(to_delete):\r\n try:\r\n shutil.rmtree(to_delete)\r\n _logger.debug('Deleted: %s', to_delete)\r\n except Exception:\r\n _logger.warning('Could not delete %s', to_delete)\r\n continue\r\n to_rename = os.path.join(self.dirname, base_dir)\r\n if os.path.isdir(to_rename):\r\n try:\r\n shutil.move(to_rename, to_delete)\r\n except Exception:\r\n _logger.warning('Could not move %s to %s',\r\n to_rename, to_delete)\r\n finally:\r\n compilelock.release_lock()", "def _delete_tcs(self, testcases):\n\n delete_q = []\n\n # Find all the metadata files associated with all the testcases\n for testcase in testcases:\n metadata_files = nh.get_metadata_files(testcase)\n delete_q += metadata_files.values()\n\n # Write the placeholder file to indicate that this file is deleted\n placeholder_f \\\n = nh.get_metadata_files(testcase, deleted=True)['deleted']\n with open(placeholder_f, 'w') as obj:\n obj.write('Deleted at epoch=%d' % int(time.time()))\n\n remove_files(delete_q, self.verbose, warn=True, force=True)", "def deleteCollectionOfFiles(folderPath):\n\tfor i in os.listdir(folderPath):\n\t\tdeleteDocumentFromPhone(\"/sdcard/\" + i)", "def clear(self):\n\n Console.info(\"Cleaning sprite files...\")\n Console.indent()\n \n for dirPath, dirNames, fileNames in os.walk(self.base):\n for fileName in fileNames:\n if fileName.startswith(\"jasysprite\"):\n filePath = os.path.join(dirPath, fileName)\n Console.debug(\"Removing file: %s\", filePath)\n os.remove(filePath)\n \n Console.outdent()", "def __removeFiles(self, pathToRemove, verbose=False):\n if os.path.isdir(pathToRemove):\n if not verbose:\n shutil.rmtree(pathToRemove)\n else:\n for root, dirs, files in os.walk(pathToRemove, topdown=False):\n for name in files:\n fileToRemove = os.path.join(root, name)\n print('Removing file ', fileToRemove)\n os.remove(fileToRemove)\n for name in dirs:\n dirToRemove = os.path.join(root, name)\n print('Removing directory ', dirToRemove)\n os.rmdir(dirToRemove)\n print('Removing directory ', pathToRemove)\n os.rmdir(pathToRemove)\n else:\n print('Removing File ', pathToRemove)\n os.remove(pathToRemove)" ]
[ "0.8326417", "0.8259835", "0.7764254", "0.7697654", "0.76045775", "0.76031864", "0.7492463", "0.7479637", "0.7377181", "0.73686516", "0.7330737", "0.7330138", "0.7317665", "0.7217544", "0.72154135", "0.720359", "0.719617", "0.71945643", "0.71943825", "0.71901155", "0.71693027", "0.71502703", "0.7121444", "0.7086343", "0.7078291", "0.7040704", "0.69513613", "0.69470316", "0.69141686", "0.69122446", "0.6891735", "0.6888107", "0.6873515", "0.68383795", "0.68356556", "0.68178886", "0.6816855", "0.68094236", "0.67970395", "0.67889374", "0.6788344", "0.67383826", "0.67311305", "0.6702672", "0.66978484", "0.6664216", "0.6629067", "0.6617947", "0.6612745", "0.6607959", "0.6607009", "0.6603155", "0.6601155", "0.65874815", "0.6580982", "0.6569555", "0.65645593", "0.6564404", "0.6564286", "0.6561136", "0.65578634", "0.65516365", "0.65318626", "0.6523461", "0.65218997", "0.65197057", "0.6503572", "0.6502293", "0.6501962", "0.6477292", "0.6473582", "0.6461018", "0.645872", "0.64513314", "0.64464486", "0.6431292", "0.6426991", "0.6423427", "0.64221174", "0.6418999", "0.6404872", "0.6388108", "0.6374063", "0.63728034", "0.6367924", "0.63657326", "0.6354715", "0.63435173", "0.6342076", "0.63370913", "0.6332652", "0.6329884", "0.63202584", "0.6319319", "0.6301307", "0.62963605", "0.6294932", "0.629388", "0.62894714", "0.6287344" ]
0.8486456
0
Takes information from a Bandwidth inbound message callback that includes media and responds with a text message containing the same media sent through Bandwidth's media resource.
def handle_inbound_media_mms(to, from_, media): downloaded_media_files = download_media_from_bandwidth(media) upload_media_to_bandwidth(downloaded_media_files) remove_files(downloaded_media_files) body = MessageRequest() body.application_id = MESSAGING_APPLICATION_ID body.to = [from_] body.mfrom = to body.text = "Rebound!" #Build the media URL by taking the media ids (that doubled as the file names) and appending them to #the bandwidth media base url body.media = [BANDWIDTH_MEDIA_BASE_ENDPOINT + media_file for media_file in downloaded_media_files] try: messaging_client.create_message(MESSAGING_ACCOUNT_ID, body) except Exception as e: print(e) return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_inbound_message():\n data = json.loads(request.data)\n\n if data[0][\"type\"] == \"message-received\":\n if \"call me\" in data[0][\"message\"][\"text\"]:\n handle_inbound_sms_call_me(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"])\n elif \"media\" in data[0][\"message\"]:\n handle_inbound_media_mms(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"], data[0][\"message\"][\"media\"])\n else:\n handle_inbound_sms(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"])\n else:\n print(data)\n return \"\"", "def sms_reply():\n # Fetch the message\n media_msg = request.form.get('NumMedia')\n msg = request.form.get('Body').lower()\n resp = MessagingResponse()\n responded = False\n if '1' in media_msg:\n pic_url = request.form.get('MediaUrl0') # URL of the person's media\n # pprint(pic_url) # so you can see the URL that the picture generated \n resp.message(\"We have recieved your request for image analysis! Please wait for our response\")\n resp.message(pic_url)\n url = \"https://techclan-twitter.herokuapp.com/reverse_image?URL=\"\n url=url+pic_url\n resp.message('The image has been succesfully uploaded to our server!The Url of the image is :')\n response=requests.get(url)\n parsed=json.loads(response.text)\n s1=\"\"\n count=0\n for each in parsed:\n s1=s1+each+\"\\n ................\\n\"\n if count>5:\n break\n count=count+1\n resp.message('The reverse image analysis of image reports are:')\n resp.message(s1)\n time.sleep(1)\n u='http://18.205.87.224/api/text?id='\n u=u+pic_url\n response=requests.get(u)\n parsed=json.loads(response.text)\n resp.message(parsed)\n responded==True\n elif '5' in msg:\n r = requests.get('https://coronavirus-19-api.herokuapp.com/countries/india')\n if r.status_code == 200:\n data = r.json()\n text = f'_Covid-19 Cases in India_ \\n..........................\\nConfirmed Cases : *{data[\"cases\"]}* \\n................\\nToday Cases : *{data[\"todayCases\"]}* \\n..............\\nDeaths : *{data[\"deaths\"]}* \\n..................................\\nRecovered : *{data[\"recovered\"]}* \\n\\n..................\\nTotal Tested : *{data[\"totalTests\"]}* \\n\\n Type 0 to return to main menu'\n else:\n text = 'I could not retrieve the results at this time, sorry.'\n resp.message(text)\n responded = True \n \n elif '1' in msg:\n \n resp.message(\"wait we will fetch your results soon!!\")\n url = \"http://18.234.107.157:5000/api/text?id=\"\n ms=str(msg)\n #a,b=ms.split(' ',1)\n url=url+ms\n response=requests.get(url)\n parsed=json.loads(response.text)\n agree=0\n disagree=0\n discuss=0\n ctr=0\n for each in parsed:\n if ctr>100:\n break\n ctr=ctr+1\n answ=each.get('Score',\"error\")\n if answ == \"agree\":\n agree=agree+1\n elif answ == \"disagree\":\n disagree=disagree+1\n if(agree>disagree):\n resp.message(\"This is *REAL* according to our sources !! Our results are based on following sources..we cannot be 100% Sure.\")\n else:\n resp.message(\"This is *FAKE* according to our sources !! 
Our results are based on following sources..we cannot be 100% Sure.\")\n count=0\n s1=\"\"\n for each in parsed:\n s1=s1+each['link']+\"*Title :*\" +each['title']+\"\\n ................\\n\"\n if count>5:\n break\n count=count+1\n resp.message(s1)\n responded==True\n #reporting\n elif '3' in msg:\n # resp.message(\"We have reported your content to our police database!!\")\n ms=str(msg)\n a,b=ms.split(' ',1)\n url='https://spreadsheetupdate1.herokuapp.com/spreed?id='\n url=url+ms\n r=requests.get(url)\n resp.message(\"We have reported your content to our police database!!\")\n responded==True\n\n\n\n \n #for news\n\n elif msg=='news' or msg=='4':\n \n url=\"\"\"https://newsapi.org/v2/top-headlines?sources=bbc-news,cnn,cnbc,abc-news,google-news-uk,independent&apiKey=3ff5909978da49b68997fd2a1e21fae8\"\"\"\n r = requests.get(url)\n #resp.message(\"stay\") \n if r.status_code == 200:\n resp.message(\"stay here with us! We are fetching news for you \")\n data = r.json()\n articles = data['articles'][:5]\n result = \"\"\n ctr=0 \n for article in articles:\n # if ctr>10:\n # break\n # ctr=ctr+1\n title = article['title']\n url = article['url']\n if 'Z' in article['publishedAt']:\n published_at = datetime.datetime.strptime(article['publishedAt'][:19], \"%Y-%m-%dT%H:%M:%S\")\n else:\n published_at = datetime.datetime.strptime(article['publishedAt'], \"%Y-%m-%dT%H:%M:%S%z\")\n \n result += \"\"\"*{}*\nRead more: {}\n_Published at {:02}/{:02}/{:02} {:02}:{:02}:{:02} UTC_\n\"\"\".format(\n title,\n url, \n published_at.day, \n published_at.month, \n published_at.year, \n published_at.hour, \n published_at.minute, \n published_at.second\n )+\"\\n ..................\\n\"\n\n else:\n result = 'I cannot fetch news at this time. Sorry!'\n\n resp.message(result)\n responded = True\t\n else:\n phone_no = request.form.get('From')\n reply = fetch_reply(msg, phone_no)\n\n resp = MessagingResponse()\n resp.message(reply)\n responded = True\n \n\n \t\n\n return str(resp)", "def process_response(self, req, resp, resource, req_succeeded):\n if req.method == \"POST\":\n log.info((thisFilename, inspect.currentframe().f_code.co_name, \"media\", str(resp.media)))", "def handle_media( environ ):\n # TODO: implement me\n return 200, [], _html.format(\n title = 'MEDIA',\n head = '',\n body = 'MEDIA'\n )", "def ret_message(incoming_msg):\n # Create a object to create a reply.\n response = Response()\n\n # Set the text of the reply.\n response.text = \"Here's a fun little meme.\"\n\n # Craft a URL for a file to attach to message\n u = \"https://sayingimages.com/wp-content/uploads/\"\n u = u + \"aaaaaalll-righty-then-alrighty-meme.jpg\"\n response.files = u\n return response", "def receive_message(self, message, data):\n\n self.logger.debug('Plex media receive function called.')\n if data[MESSAGE_TYPE] == TYPE_MEDIA_STATUS:\n self.logger.debug('(PlexController) MESSAGE RECEIVED: ' + data)\n return True\n\n return False", "def sms_reply():\n # Start our TwiML response\n # if body.lower()==\"good\":\n message=\"Hi I'm IRIS, an Immediately Responsive Intelligent System\\nHow are you feeling today?\"\n user=request.form['Body']\n\n # message=\"Hi \"+ name+ \"\"\n # user=request.form['Body']\n\n if user==\"good\":\n message=\"Glad to hear it! I hope you continue to feel this way! Celebrate this feeling and hold onto what happened ot make you feel this way so that you can repeat it in the future!\"\n\n if user==\"sad\":\n message=\"I’m sorry to hear that. 
Here are some things I do to make me feel better: take a walk outside, listen to uplifting music, call or message a loved one, or watch or read something positive to take my mind off of what I’m feeling.\"\n\n if user==\"nervous\":\n message=\"It’s going to be ok! This feeling will not last forever.\"\n if user==\"lonely\":\n message=\"I’m here for you, and know that you are loved, supported, and important. The world would not be the same without you! For a loving quote respond\"\n\n if user==\"angry\":\n message=\"“Let me help you turn your anger into something positive. Here are some ways to burn off energy productively: take a long walk, remove yourself from the situation, paint of draw, listen to loud music, or take a break from what you are doing.\"\n\n if user==\"tired\":\n message=\"I understand what you are feeling well. I recommend taking a break to do an activity you enjoy, taking a nap, getting a coffee, doing 20 jumping jacks, listening to a pump-up playlist, or standing up to stretch for a bit.\"\n\n if user==\"average\":\n message=\"There are many things to look forward to!\"\n resp = MessagingResponse()\n\t # Add a message\n \n resp.message(message)\n\t # Add a picture message\n\t #msg.media(\"https://farm8.staticflickr.com/7090/6941316406_80b4d6d50e_z_d.jpg\")\n\n return str(resp)", "def incoming_sms():\n # Get the message body\n body = request.values.get('Body', None)\n\n # Start our TwiML response\n resp = MessagingResponse()\n\n # Determine the appropriate response/action for incoming message\n replyText = getReply(body)\n\n resp.message(replyText)\n\n return str(resp)", "def mms_reply():\n print('New MMS')\n client_number = request.form[\"From\"]\n MessageSid = request.form[\"MessageSid\"]\n video_url = request.form[\"MediaUrl0\"]\n MediaSid = video_url.split('/')[-1]\n media_content_type = request.form[\"MediaContentType0\"]\n\n file_path = os.path.join(app.config['INPUT_VIDEOS_PATH'], str(MessageSid))\n video_name = str(MessageSid) + '.mp4'\n\n # Download the video\n video_response = requests.get(video_url, stream=True)\n video_response.raise_for_status() # Throw an error for bad status codes\n with open(\"{}.mp4\".format(file_path), 'wb') as handle:\n for block in video_response.iter_content(1024):\n handle.write(block)\n\n # Call Scenescoop async \n async_scenescoop.apply_async(args=[file_path, video_name, client_number, MessageSid, MediaSid])\n \n # Send an empty response message\n resp = MessagingResponse()\n #resp.message(\"Got it, now wait...\")\n\n return str(resp)", "def process_message(self, msg, src):", "def handle_recording():\n\n recording_url = request.values.get(\"RecordingUrl\", None)\n\n resp = VoiceResponse()\n resp.say(\"Listen to your recorded message.\")\n resp.play(recording_url)\n resp.say(\"Goodbye.\")\n return str(resp)", "def process(self):\n received_message = SubscribeMessage(*self.message.value)\n allow, msg = customize.authorize_subscription(received_message.topic, self.connection)\n if allow:\n subscription_id = tornwamp_topic.topics.add_subscriber(\n received_message.topic,\n self.connection,\n )\n answer = SubscribedMessage(\n request_id=received_message.request_id,\n subscription_id=subscription_id\n )\n self.broadcast_messages = customize.get_subscribe_broadcast_messages(received_message, subscription_id, self.connection.id)\n else:\n answer = ErrorMessage(\n request_id=received_message.request_id,\n request_code=received_message.code,\n uri=\"tornwamp.subscribe.unauthorized\"\n )\n answer.error(msg)\n self.answer_message = answer", 
"def incoming_sms():\n txt = request.form['Body']\n\n # remove leading and trailing white space and make lowercase\n txt = txt.strip()\n txt = txt.lower()\n\n # handle random searches differently than breed searches\n if txt == 'random' or txt == 'dog':\n url = get_dogs.get_random_dog()\n else:\n url = get_dogs.request_breed(txt)\n \n resp = MessagingResponse()\n if url:\n resp.message(url)\n else:\n resp.message(\"Sorry! We couldn't find a dog matching that query. Please try \\\n a more general search term.\")\n return str(resp)", "def on_watch_message(self, bus, msg):\n msg_struct = msg.get_structure()\n if msg_struct:\n if msg_struct.get_name() == 'GstMessageTag':\n codec_name = ((msg_struct[\"taglist\"].nth_tag_name(0)))\n codec_value = msg_struct[\"taglist\"].get_string(codec_name)\n info_name = codec_name\n c_result, info_value = codec_value\n if c_result:\n self.info_handler(info_name, info_value)\n if codec_name == \"video-codec\":\n self.info_handler(codec_name, info_value)\n r_result, width, height = self.get_resolution()\n if r_result:\n info_name = \"resolution\"\n info_value = \"[{}x{}]\".format(width, height)\n self.info_handler(info_name, info_value)\n bus.remove_signal_watch()", "def handle_message(self, message):", "def handle(self, message):", "def onMessage(self, payload, isBinary):", "def callback(ch, method, properties, body):\r\n body = json.loads(body)\r\n print(f\"[x] Task in the queue {body}\")\r\n # Creating instance of AudioRecorder\r\n recorder = AudioRecorder(body)\r\n driver = recorder.prepare_browser(body['settings'])\r\n recorder.run(driver, body)", "def _respond_message(self, msg):\n self.set_status(200)\n self.set_header(\"Content-Type\", \"application/x-mplane+json\")\n self.write(mplane.model.unparse_json(msg))\n self.finish()", "def upload_media_to_bandwidth(media_files):\n for filename in media_files:\n with open(filename, \"rb\") as f:\n file_content = f.read()\n try:\n ##Note: The filename is doubling as the media id##\n response = messaging_client.upload_media(MESSAGING_ACCOUNT_ID, filename, str(len(file_content)), body=file_content)\n except Exception as e:\n print(e)", "def handle_recording():\n \n recording_url = request.values.get(\"RecordingUrl\", None)\n \n resp = twilio.twiml.Response()\n resp.say(\"Thanks for howling... 
take a listen to what you howled.\")\n resp.play(recording_url)\n resp.say(\"Goodbye.\")\n return str(resp)", "def received_message(self, m):\n self.receiver.handle_message(m)", "def receive_message(self, message):", "def callback_message(self, message):\n pass", "def callback_message(self, message):\n pass", "def application_message(self, bus, msg):\n\t\tmsgtype = msg.structure.get_name()\n\t\tif msgtype == 'partial_result':\n\t\t\tself.partial_result(msg.structure['hyp'], msg.structure['uttid'])\n\t\telif msgtype == 'result':\n\t\t\tself.final_result(msg.structure['hyp'], msg.structure['uttid'])\n\t\t\tself.pipeline.set_state(gst.STATE_PAUSED)", "def receive(self, data):\n try:\n xml = ElementTree.XML(data)\n except:\n raise StandardError(\"API request malformed\")\n\n mms = mobile.models.IncomingMMS.objects.create(\n id=xml.find('id').text,\n country=xml.find('country').text,\n sender=xml.find('senderNumber').text,\n recipient=xml.find('targetNumber').text,\n subject=xml.find('mms/subject').text,\n source=data\n )\n\n for item in xml.findall('mms/item'):\n if item.find('base64').text == 'true':\n data = b64decode(item.find('content').text)\n else:\n data = item.find('content').text\n\n mms_file = mobile.models.MMSFile(\n mms=mms\n )\n\n # Extract content type from MIME data\n matches = re.search('([^;]*/[^;]*);', item.find('mimeType').text)\n if matches:\n mms_file.content_type = matches.group(1)\n\n # Save file\n mms_file.file.save(\n name=item.find('name').text,\n content=ContentFile(data)\n )\n\n mms_file.save()\n\n return mms", "def message_callback(self, message):\n pass", "def process(self, message: Message, **kwargs: Any) -> None:", "def incoming_sms():\n # Get the message the user sent our Twilio number\n body = request.values.get('Body', None)\n\n # Start our TwiML response\n resp = MessagingResponse()\n\n # Determine the right reply for this message\n if body[0:7] == 'Newhigh':\n num = body[7:]\n result = ''\n with open('currentData.txt') as f:\n for i in range(int(num)):\n result += str(i + 1)\n result += '. 
'\n result += f.readline()\n # resp.message(result.stdout.decode('utf-8'))\n resp.message(result)\n return str(resp)\n \n if body[0:6] == 'Newlow':\n num = body[6:]\n result = subprocess.run(['tail','-n',num,'currentData.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body[0:4] == 'High':\n num = body[4:]\n num = '-' + num\n result = subprocess.run(['head',str(num),'AllBuildings.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body[0:3] == 'Low':\n num = body[3:]\n result = subprocess.run(['tail','-n',num,'AllBuildings.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body == 'Hourly':\n result = subprocess.run(['cat','ByHour.txt'], stdout=subprocess.PIPE)\n elif body == 'Weekday':\n result = subprocess.run(['cat','DaysOfWeek.txt'], stdout=subprocess.PIPE)\n # resp.message(fortune)\n elif body == '10minute':\n result = subprocess.run(['cat','data/PerTenMinutes.txt'], stdout=subprocess.PIPE)\n else:\n resp.message(\"June 2018 - Feb 2019 Totals\\n\\nCommands:\\n(# is any number between 1 and 50)\\nNewhigh# - Highest Past 10 Minutes\\nNewlow# - Lowest Past 10 Minutes\\n\\nBelow are cumulative annual figs:\\nHigh# - Highest of Year\\nLow# - Lowest of Year\\n\\nCampuswide Figures:\\n10minute - Ten Minute Intervals\\nHourly - 1 Hour Intervals\\nWeekday - By Day of the Week\\n\")\n return str(resp)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)", "def process_sms():\n phone_number = request.values.get('From', None)\n sms_message = request.values.get('Body', None)\n resp = twilio.twiml.Response()\n regina_answer = ask_regina(phone_number, sms_message, \"sms\")['text']\n resp.message(regina_answer)\n return str(resp)", "def on_part(self, raw_msg, source, **kwargs):", "def handle_message(self, data):\r\n print data\r\n\r\n #video stream starts\r\n if data[:13] == 'video_stream:':\r\n #port will be between 3000 to 7000\r\n port = int(data[13:17])\r\n self.partnum = int(data[18:])\r\n\r\n #creates a video file in cache\r\n if not exists(CASHE + movie_name + '\\\\'):\r\n makedirs(CASHE + movie_name + '\\\\')\r\n\r\n self.receive = Receiver(port, self.partnum, CASHE + movie_name + '\\\\')\r\n self.receive.start()\r\n\r\n #upload stream approved\r\n elif data[:16] == 'upload_approved:':\r\n port = int(data[16:])\r\n self.uploader = Uploader(port, upload_path)\r\n self.uploader.start()\r\n self.upload_num = 1\r\n\r\n elif data[:6] == 'parts:':\r\n if data[6:].isdigit():\r\n self.partnum = int(data[6:])\r\n\r\n elif data[:8] == 'invalid:':\r\n self.uploader = None\r\n if data[8:] == 'hash':\r\n self.upload_num = 2\r\n else:\r\n self.upload_num = 3\r\n\r\n print 'invalid upload'\r\n\r\n elif data == 'vid_not_found':\r\n self.partnum = -1\r\n print 'could not watch vid'\r\n\r\n elif data[:8] == 'results:':\r\n results = data[8:].split(':<!>:')\r\n self.res_list = [['Movie Name', 'views', 'grade']]\r\n for i in results:\r\n datas = i.split(':!:')\r\n self.res_list.append(datas)\r\n self.print_results = True\r\n '''if results == ['']:\r\n self.print_results = False\r\n else:\r\n self.print_results = True'''", "def on_media_state(self):\n try:\n if self.call.info().media_state == pj.MediaState.ACTIVE:\n if self.cfg['VoipIO']['debug']:\n self.system_logger.debug(\"CallCallback::on_media_state : Media is now active\")\n else:\n if self.cfg['VoipIO']['debug']:\n self.system_logger.debug(\"CallCallback::on_media_state : Media is inactive\")\n 
except:\n self.voipio.close_event.set()\n self.cfg['Logging']['system_logger'].exception('Uncaught exception in the CallCallback class.')\n raise", "def handle_inbound_sms_call_me(to, from_):\n handle_call_me(to, from_)", "def receive_message(self, _message, data: dict):\n if data[MESSAGE_TYPE] == TYPE_MEDIA_STATUS:\n self.logger.debug(\"(PlexController) MESSAGE RECEIVED: %r.\", data)\n return True\n\n return False", "def callback(parsed_msg, msg_object):\n assert msg_object.stream_id == stream_id\n assert parsed_msg in msg", "def _r_process_message(self, result, protocol):\n if isinstance(result.original_message, SubscribeMessage):\n self._r_subscribe_to_event(result.original_message.service_name,\n result.original_message.event_name,\n protocol)\n \n return result.response", "def incoming_sms():\n number = request.values.get('From', None)\n body = request.values.get('Body', None)\n print(body)\n # Start our TwiML response\n resp = MessagingResponse()\n\n body = body.lower()\n body = body.strip()\n body_arr = body.split()\n class_name = \"\"\n name = \"\"\n if len(body_arr) == 4:\n first_name = body_arr[0]\n last_name = body_arr[1]\n name = first_name + \" \" + last_name\n class_name = body_arr[2] + body_arr[3]\n elif len(body_arr) == 6:\n first_name = body_arr[0]\n last_name = body_arr[1]\n name = first_name + \" \" + last_name\n class_name = body_arr[2] + body_arr[3] + body_arr[4] + body_arr[5]\n else:\n resp.message(\"Invalid: Enter your name, class, and session# separated by spaces as shown\\n(eg: Avi Patel grade1 session1, Ravi Rao PreK session1, Mira Singh kg session2, etc.):\")\n return str(resp)\n\n if classes.find_one({'class':class_name}):\n forward_message(class_name, number, name)\n resp.message(\"Your teachers have been notified\")\n\n else:\n resp.message(\"Invalid: Enter your name, class, and session# separated by spaces as shown\\n(eg: Avi Patel grade1 session1, Ravi Rao PreK session1, Mira Singh kg session2, etc.):\")\n\n return str(resp)", "def delete_media():\n return Response(\"{}\", status=200, mimetype='application/json')\n message_status = request.form[\"MessageStatus\"]\n\n if (message_status == 'delivered'):\n MessageSid = request.form[\"MessageSid\"]\n MediaSid = request.form[\"MediaUrl0\"].split('/')[-1]\n delete_media_file(MessageSid, MediaSid)", "def callback_message(self, message):\n return \"hi bro\"", "def handle(self):\n if len(self.banner) <= 75:\n self.respond(\"220 %s\" %str(self.banner))\n else:\n self.push('220-%s\\r\\n' %str(self.banner))\n self.respond('220 ')", "def handle_recording():\n \n count = increment_count()\n\n recording_url = request.form.get(\"RecordingUrl\", None)\n call_sid = request.form.get('CallSid', None)\n\n print \"handle-recording. 
url: \" + str( recording_url )\n\n from_number = phones[call_sid]\n print \"from_number: \" + str( from_number )\n \n if from_number:\n sample_id = from_number\n else:\n sample_id = call_sid\n\n filename = sample_id + \".mp3\"\n\n rec_file = \"static/\" + filename;\n print \"rec file: \" + str( rec_file )\n\n if recording_url:\n urllib.urlretrieve( recording_url, rec_file )\n samples[call_sid] = url_for('static', filename=filename)\n\n resp = twilio.twiml.Response()\n resp.say(\"Thanks for shouting.\")\n # resp.play(recording_url)\n\n push_to_pusher(\"twilio\", str(from_number), str(sample_id), str(samples[call_sid]) )\n\n resp.say(\"Check the app for your shout.\")\n\n resp.say(\"Goodbye...\")\n\n return str(resp)", "def output_raw_message(text):\n database.messages_output_queue.put(text)", "def handle_outbound_message(self, message):\n key = self.r_key(self.redis_outbound_queue)\n self.r_server.rpush(key, message.to_json())", "def PlayCodedMessage(codedmessage):\n signal2Play=CODE.GetSignal(codedmessage)\n LIGHT.SendLighthouseSignal(signal2Play,signal2Play,signal2Play)", "def show_media(self, media):\n msg = media_to_chromecast_command(media, type=TYPE_DETAILS, requestid=self._inc_request())\n\n def cb():\n self._send_cmd(msg, inc_session_id=True, inc=False)\n\n self.launch(cb)", "def processMessage(self, *args, **kwargs):\r\n pass", "def _handle_message(self, bus, message):\n if message.type == Gst.MessageType.EOS:\n logger.info(\"End-Of-Stream reached.\\n\")\n # file finished playing\n self.pipeline.set_state(Gst.State.NULL)\n #self.playing = False\n # if self.finished_callback:\n # self.finished_callback()\n \n elif message.type == Gst.MessageType.ERROR:\n # error\n self.pipeline.set_state(Gst.State.NULL)\n err, debug_info = message.parse_error()\n logger.error(f\"Error received from element {message.src.get_name()}: {err.message}\\n\")\n logger.error(f\"Debugging information: {debug_info if debug_info else 'none'}\\n\")\n #self.playing = False \n elif message.type == Gst.MessageType.STATE_CHANGED:\n # We are only interested in state-changed messages from the pipeline\n if message.src == self.pipeline:\n old_state, new_state, pending_state = message.parse_state_changed()\n logger.info(f\"Pipeline state changed from {Gst.Element.state_get_name(old_state)} to {Gst.Element.state_get_name(new_state)}:\\n\")", "def text_message(update: Update, _: CallbackContext) -> None:\n update.message.reply_text(\n f\"Thank you for sending: {update.message.text},\\n\" +\n f\"but I am waiting only for images...\")", "def handle_message(self, data, channel):\n pass", "def _handle_custom_msg(self, content, buffers):\n self._msg_callbacks(self, content, buffers)", "def handleMessage(msg):", "def show_media(self, media=None, **kwargs):\n msg = media_to_chromecast_command(\n media, type=TYPE_DETAILS, requestId=self._inc_request(), **kwargs\n )\n\n def callback(): # pylint: disable=missing-docstring\n self._send_cmd(msg, inc_session_id=True, inc=False)\n\n self.launch(callback)", "def _r_handle_message_contents(self, msg, protocol):\n if isinstance(msg, ResponseMessage):\n d = self._waiting_messages.pop(msg.response_to, None)\n if d is not None:\n d.callback(msg)\n elif isinstance(msg, ServerMotdMessage):\n print(\"Connected: %s\" % msg.motd)\n self._r_successful_connection()\n elif isinstance(msg, EventMessage):\n callback = self._event_callbacks.get((msg.service_name, msg.event_name))\n if callback is not None:\n threads.deferToThread(callback, *msg.pargs, **msg.kwargs)", "def api_sendessage():\n message = 
request.args.get('message')\n send_lyrics(message)\n # stash_lyrics(message)\n return jsonify(status=\"success\")", "def handle(self) -> None:\r\n\r\n if self.data.get(\"message-id\") != None:\r\n if self.data[\"status\"] == \"error\":\r\n print(self.data[\"error\"])\r\n return\r\n else:\r\n requestData = self.obs.pendingResponses.pop(self.data[\"message-id\"])\r\n request = requestData[\"request-type\"]\r\n #Requests as of version 4.8.0\r\n\r\n #General\r\n if request == \"GetVersion\":\r\n pass\r\n\r\n elif request == \"GetAuthRequired\":\r\n if self.data[\"authRequired\"]:\r\n secret_string: str = self.obs.password + self.data[\"salt\"]\r\n secret_hash: sha256 = sha256(secret_string.encode(\"utf-8\"))\r\n secret: bytes = b64encode(secret_hash.digest())\r\n\r\n response_string: str = secret.decode(\"utf-8\") + self.data[\"challenge\"]\r\n response_hash: sha256 = sha256(response_string.encode(\"utf-8\"))\r\n response: bytes = b64encode(response_hash.digest())\r\n\r\n self.obs.requests.append({\r\n \"type\": \"Authenticate\",\r\n \"auth\": response.decode(\"utf-8\")})\r\n\r\n else:\r\n self.obs.requests.append({\"type\": \"GetSceneList\"})\r\n\r\n elif request == \"Authenticate\":\r\n self.obs.requests.append({\"type\": \"GetSceneList\"})\r\n\r\n elif request == \"SetHeartbeat\":\r\n #To be removed in 5.0.0\r\n pass\r\n\r\n elif request == \"SetFilenameFormatting\":\r\n pass\r\n\r\n elif request == \"GetFilenameFormatting\":\r\n pass\r\n\r\n elif request == \"GetStats\":\r\n pass\r\n\r\n elif request == \"BroadcastCustomMessage\":\r\n pass\r\n\r\n elif request == \"GetVideoInfo\":\r\n pass\r\n\r\n elif request == \"OpenProjector\":\r\n pass\r\n\r\n elif request == \"TriggerHotkeyByName\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"TriggerHotkeyBySequence\":\r\n #Unreleased\r\n pass\r\n\r\n #Media Control\r\n elif request == \"PlayPauseMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"RestartMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StopMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"NextMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"PreviousMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaDuration\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaTime\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"SetMediaTime\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"ScrubMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaState\":\r\n #Unreleased\r\n pass\r\n\r\n #Sources\r\n\r\n elif request == \"GetMediaSourcesList\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetSourcesList\":\r\n pass\r\n\r\n elif request == \"GetSourceTypesList\":\r\n pass\r\n\r\n elif request == \"GetVolume\":\r\n pass\r\n\r\n elif request == \"SetVolume\":\r\n pass\r\n\r\n elif request == \"GetMute\":\r\n pass\r\n\r\n elif request == \"SetMute\":\r\n pass\r\n\r\n elif request == \"ToggleMute\":\r\n pass\r\n\r\n elif request == \"GetAudioActive\":\r\n pass\r\n\r\n elif request == \"SetSourceName\":\r\n pass\r\n\r\n elif request == \"SetSyncOffset\":\r\n pass\r\n\r\n elif request == \"GetSyncOffset\":\r\n pass\r\n\r\n elif request == \"GetSourceSettings\":\r\n pass\r\n\r\n elif request == \"SetSourceSettings\":\r\n pass\r\n\r\n elif request == \"GetTextGDIPlusProperties\":\r\n pass\r\n\r\n elif request == \"SetTextGDIPlusProperties\":\r\n pass\r\n\r\n elif request == \"GetTextFreetype2Properties\":\r\n pass\r\n\r\n elif request == \"SetTextFreetype2Properties\":\r\n pass\r\n\r\n elif request == 
\"GetBrowserSourceProperties\":\r\n pass\r\n\r\n elif request == \"SetBrowserSourceProperties\":\r\n pass\r\n\r\n elif request == \"GetSpecialSources\":\r\n pass\r\n\r\n elif request == \"GetSourceFilters\":\r\n source = self.obs.getSource(requestData[\"sourceName\"])\r\n if source != None:\r\n for _filter in self.data[\"filters\"]:\r\n source.addFilter(_filter) #type: ignore\r\n\r\n elif request == \"GetSourceFilterInfo\":\r\n pass\r\n\r\n elif request == \"AddFilterToSource\":\r\n pass\r\n\r\n elif request == \"RemoveFilterFromSource\":\r\n pass\r\n\r\n elif request == \"ReorderSourceFilter\":\r\n pass\r\n\r\n elif request == \"MoveSourceFilter\":\r\n pass\r\n\r\n elif request == \"SetSourceFilterSettings\":\r\n pass\r\n\r\n elif request == \"SetSourceFilterVisibility\":\r\n pass\r\n \r\n elif request == \"GetAudioMonitorType\":\r\n pass\r\n\r\n elif request == \"SetAudioMonitorType\":\r\n pass\r\n\r\n elif request == \"TakeSourceScreenshot\":\r\n pass\r\n\r\n #Outpute\r\n elif request == \"ListOutputs\":\r\n pass\r\n\r\n elif request == \"GetOutputInfo\":\r\n pass\r\n\r\n elif request == \"StartOutput\":\r\n pass\r\n\r\n elif request == \"StopOutput\":\r\n pass\r\n\r\n #Profiles\r\n elif request == \"SetCurrentProfile\":\r\n pass\r\n\r\n elif request == \"GetCurrentProfile\":\r\n pass\r\n\r\n elif request == \"ListProfiles\":\r\n pass\r\n\r\n #Recording\r\n elif request == \"GetRecordingStatus\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StartStopRecording\":\r\n pass\r\n\r\n elif request == \"StartRecording\":\r\n pass\r\n\r\n elif request == \"StopRecording\":\r\n pass\r\n\r\n elif request == \"PauseRecording\":\r\n pass\r\n\r\n elif request == \"ResumeRecording\":\r\n pass\r\n\r\n elif request == \"SetRecordingFolder\":\r\n pass\r\n\r\n elif request == \"GetRecordingFolder\":\r\n pass\r\n\r\n #Replay Buffer\r\n elif request == \"GetReplayBufferStatus\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StartStopReplayBuffer\":\r\n pass\r\n\r\n elif request == \"StartReplayBuffer\":\r\n pass\r\n\r\n elif request == \"StopReplayBuffer\":\r\n pass\r\n\r\n elif request == \"SaveReplayBuffer\":\r\n pass\r\n\r\n #Scene Collections\r\n elif request == \"SetCurrentSceneCollection\":\r\n pass\r\n\r\n elif request == \"GetCurrentSceneCollection\":\r\n pass\r\n\r\n elif request == \"ListSceneCollections\":\r\n pass\r\n\r\n #Scene Items\r\n elif request == \"GetSceneItemList\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetSceneItemProperties\":\r\n pass\r\n\r\n elif request == \"SetSceneItemProperties\":\r\n pass\r\n\r\n elif request == \"ResetSceneItem\":\r\n pass\r\n\r\n elif request == \"SetSceneItemRender\":\r\n pass\r\n\r\n elif request == \"SetSceneItemPosition\":\r\n pass\r\n\r\n elif request == \"SetSceneItemTransform\":\r\n pass\r\n\r\n elif request == \"SetSceneItemCrop\":\r\n pass\r\n\r\n elif request == \"DeleteSceneItem\":\r\n pass\r\n\r\n elif request == \"AddSceneItem\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"DuplicateSceneItem\":\r\n pass\r\n\r\n #Scenes\r\n elif request == \"SetCurrentScene\":\r\n pass\r\n\r\n elif request == \"GetCurrentScene\":\r\n self.obs.setCurrentScene(self.data[\"name\"])\r\n\r\n elif request == \"GetSceneList\":\r\n for scene in self.data[\"scenes\"]:\r\n self.obs.addScene(scene)\r\n self.obs.setCurrentScene(self.data[\"current-scene\"])\r\n\r\n elif request == \"CreateScene\":\r\n pass\r\n\r\n elif request == \"ReorderSceneItems\":\r\n pass\r\n\r\n elif request == \"SetSceneTransitionOverride\":\r\n pass\r\n\r\n elif 
request == \"RemoveSceneTransitionOverride\":\r\n pass\r\n\r\n elif request == \"GetSceneTransitionOverride\":\r\n pass\r\n\r\n #Streaming\r\n elif request == \"GetStreamingStatus\":\r\n pass\r\n\r\n elif request == \"StartStopStreaming\":\r\n pass\r\n\r\n elif request == \"StartStreaming\":\r\n pass\r\n\r\n elif request == \"StopStreaming\":\r\n pass\r\n\r\n elif request == \"SetStreamSettings\":\r\n pass\r\n\r\n elif request == \"GetStreamSettings\":\r\n pass\r\n\r\n elif request == \"SaveStreamSettings\":\r\n pass\r\n\r\n elif request == \"SendCaptions\":\r\n pass\r\n\r\n #Studio Mode\r\n elif request == \"GetStudioModeStatus\":\r\n pass\r\n\r\n elif request == \"GetPreviewScene\":\r\n pass\r\n\r\n elif request == \"SetPreviewScene\":\r\n pass\r\n\r\n elif request == \"TransitionToProgram\":\r\n pass\r\n\r\n elif request == \"EnableStudioMode\":\r\n pass\r\n\r\n elif request == \"DisableStudioMode\":\r\n pass\r\n\r\n elif request == \"ToggleStudioMode\":\r\n pass\r\n\r\n #Transitions\r\n elif request == \"GetTransitionList\":\r\n pass\r\n\r\n elif request == \"GetCurrentTransition\":\r\n pass\r\n\r\n elif request == \"SetCurrentTransition\":\r\n pass\r\n\r\n elif request == \"SetTransitionDuration\":\r\n pass\r\n\r\n elif request == \"GetTransitionDuration\":\r\n pass\r\n\r\n elif request == \"GetTransitionPosition\":\r\n pass\r\n\r\n else:\r\n print(f\"Unhandled response of type {request} and data {self.data}.\")\r\n\r\n \r\n\r\n else:\r\n event: str = self.data[\"update-type\"]\r\n #Events as of 4.8.0\r\n\r\n #Scenes\r\n if event == \"SwitchScenes\":\r\n self.obs.setCurrentScene(self.data[\"scene-name\"])\r\n\r\n elif event == \"ScenesChanged\":\r\n #self.obs.purgeScenes()\r\n pass\r\n\r\n elif event == \"SceneCollectionChanged\":\r\n pass\r\n\r\n elif event == \"SceneCollectionListChanged\":\r\n pass\r\n\r\n #Transitions\r\n elif event == \"SwitchTransition\":\r\n pass\r\n\r\n elif event == \"TransitionListChanged\":\r\n pass\r\n\r\n elif event == \"TransitionDurationChanged\":\r\n pass\r\n\r\n elif event == \"TransitionBegin\":\r\n pass\r\n\r\n elif event == \"TransitionEnd\":\r\n pass\r\n\r\n elif event == \"TransitionVideoEnd\":\r\n pass\r\n\r\n #Profiles\r\n elif event == \"ProfileChanged\":\r\n pass\r\n\r\n elif event == \"ProfileListChanged\":\r\n pass\r\n\r\n #Streaming\r\n elif event == \"StreamStarting\":\r\n pass\r\n\r\n elif event == \"StreamStarted\":\r\n pass\r\n\r\n elif event == \"StreamStopping\":\r\n pass\r\n\r\n elif event == \"StreamStopped\":\r\n pass\r\n\r\n elif event == \"StreamStatus\":\r\n pass\r\n\r\n #Recording\r\n elif event == \"RecordingStarting\":\r\n pass\r\n\r\n elif event == \"RecordingStarted\":\r\n pass\r\n\r\n elif event == \"RecordingStopping\":\r\n pass\r\n\r\n elif event == \"RecordingStopped\":\r\n pass\r\n\r\n elif event == \"RecordingPaused\":\r\n pass\r\n\r\n elif event == \"RecordingResumed\":\r\n pass\r\n\r\n #Replay Buffer\r\n elif event == \"ReplayStarting\":\r\n pass\r\n\r\n elif event == \"ReplayStarted\":\r\n pass\r\n\r\n elif event == \"ReplayStopping\":\r\n pass\r\n\r\n elif event == \"ReplayStopped\":\r\n pass\r\n\r\n #Other\r\n elif event == \"Exiting\":\r\n pass\r\n\r\n #General\r\n elif event == \"Heartbeat\":\r\n pass\r\n\r\n elif event == \"BroadcastCustomMessage\":\r\n pass\r\n\r\n #Sources\r\n elif event == \"SourceCreated\":\r\n pass\r\n\r\n elif event == \"SourceDestroyed\":\r\n pass\r\n\r\n elif event == \"SourceVolumeChanged\":\r\n pass\r\n\r\n elif event == \"SourceMuteStateChanged\":\r\n pass\r\n\r\n elif event == 
\"SourceAudioDeactivated\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"SourceAudioActivated\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"SourceAudioSyncOffsetChanged\":\r\n pass\r\n\r\n elif event == \"SourceAudioMixersChanged\":\r\n pass\r\n\r\n elif event == \"SourceRenamed\":\r\n pass\r\n\r\n elif event == \"SourceFilterAdded\":\r\n pass\r\n\r\n elif event == \"SourceFilterRemoved\":\r\n pass\r\n\r\n elif event == \"SourceFilterVisibilityChanged\":\r\n source = self.obs.getSource(self.data[\"sourceName\"])\r\n if source != None:\r\n _filter = source.getFilter(self.data[\"filterName\"]) #type: ignore\r\n if _filter != None:\r\n _filter.setVisible(self.data[\"filterEnabled\"]) #type: ignore\r\n\r\n elif event == \"SourceFiltersReordered\":\r\n pass\r\n\r\n #Media\r\n elif event == \"MediaPlaying\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaPaused\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaRestarted\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaStopped\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaNext\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaPrevious\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaStarted\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaEnded\":\r\n #Unreleased\r\n pass\r\n\r\n #Scene Items\r\n elif event == \"SceneItemOrderChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemAdded\":\r\n pass\r\n\r\n elif event == \"SceneItemRemoved\":\r\n pass\r\n\r\n elif event == \"SceneItemVisibilityChanged\":\r\n scene = self.obs.getScene(self.data[\"scene-name\"])\r\n if scene != None:\r\n source = scene.getSource(self.data[\"item-name\"]) #type: ignore\r\n if source != None:\r\n source.setVisible(self.data[\"item-visible\"]) #type: ignore\r\n \r\n\r\n elif event == \"SceneItemLockChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemTransformChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemSelected\":\r\n pass\r\n\r\n elif event == \"SceneItemDeselected\":\r\n pass\r\n\r\n #Studio Mode\r\n elif event == \"PreviewSceneChanged\":\r\n pass\r\n\r\n elif event == \"StudioModeSwitched\":\r\n pass\r\n\r\n #Unhandled Events\r\n else:\r\n print(\"Unhandled event with data: \" + str(self.data))", "def messages_media(request):\n\n msgs = get_messages_media()\n msgs_list = pagination(request, msgs, MESSAGES_PER_PAGE)\n\n dic = {\n 'chatmessages': msgs_list,\n 'PAG_TITLE': 'Messages with media data'\n }\n\n return render_to_response('whatsapp/chat.html', dic, context_instance=RequestContext(request))", "def handle_mic_get_status(event):\n data = {'muted': loop.is_muted()}\n message = event.response(data)\n message.context = {'client_name': 'mycroft_listener',\n 'source': 'audio',\n 'destination': [\"skills\"]}\n bus.emit(message)", "def handle_incoming_message(obj, reply_channel):\n if int(obj[message_type_key]) == 0:\n try:\n sub_obj = create_subscriber_object(reply_channel, obj)\n subscribers[reply_channel.name] = sub_obj\n except ApiException as exc:\n send_save_to_channel(reply_channel, str(exc))\n\n elif int(obj[message_type_key]) == 1:\n disconnect_subscriber(reply_channel)\n\n print(\"incoming_msg_handled\")", "def handle_message(self, validated_message: dict):\n self.logger.debug(f'Sensor received message {validated_message}')\n if (validated_message['messageType'] !=\n model.MessageTypes.Control.value):\n self.logger.debug(\n 'Sensor ignoring because messageType was not control'\n )\n return\n if validated_message['messageBody']['target'] != self.component_id:\n self.logger.debug(\n 
'Sensor ignoring because not targeted at me'\n )\n return\n\n subtype = validated_message['messageSubtype']\n try:\n self.logger.debug(f'Dispatching message with subtype {subtype}')\n self.message_handler_table[subtype](validated_message)\n except KeyError:\n self.logger.warning(f'No handler for with subtype {subtype}')\n pass", "def _process_message(self, obj):\n pass", "def handle(self, message):\n print(\"You received a message:\")\n print(message)\n # Overwrite this function to do something with the message!", "def receive(self, message):", "def testPhotoBurstRateResponse(self):\n message = (mavutil.mavlink.GOPRO_COMMAND_PHOTO_BURST_RATE, mavutil.mavlink.GOPRO_REQUEST_SUCCESS)\n self.mgr.set_response_callback('vehicle','name', message)\n self.mgr.processMsgQueue.assert_called_with()", "def MessageHandlerMethod(**kwargs):\n data: dict = kwargs['data']\n bus: AbstractPikaBus = kwargs['bus']\n payload: dict = kwargs['payload']\n print(payload)\n if payload['reply']:\n payload['reply'] = False\n bus.Reply(payload=payload)", "def application_message(self, bus, msg):\n\t\tmsgtype = msg.structure.get_name()\n\t\tif msgtype == 'partial_result':\n\t\t\tself.partial_result(msg.structure['hyp'], msg.structure['uttid'])\n\t\telif msgtype == 'result':\n\t\t\tself.final_result(msg.structure['hyp'], msg.structure['uttid'])", "def _respond(self, message):\n try:\n if self.callback:\n self.callback(message)\n except Exception as e:\n LOG.error(e)\n self.service.respond(message)", "def message_recording(client):\n client.register_and_login('foo', 'default')\n client.add_message('test message 1')\n client.add_message('<test message 2>')\n rv = client.get('/')\n assert 'test message 1' in rv.data\n assert '&lt;test message 2&gt;' in rv.data", "def _on_inbound_message(self, message):\n if message.channel.startswith(\"actuators/commands/\"):\n actuation = self.inbound_message_deserializer.deserialize_actuator_command(message)\n if actuation.command == ActuatorCommandType.ACTUATOR_COMMAND_TYPE_SET:\n self.actuation_handler.handle_actuation(actuation.reference, actuation.value)\n\n state, value = self.actuator_status_provider.get_actuator_status(actuation.reference)\n actuator_status = ActuatorStatus.ActuatorStatus(actuation.reference, state, value)\n\n outbound_message = self.outbound_message_factory.make_from_actuator_status(actuator_status)\n if not self.connectivity_service.publish(outbound_message):\n self.outbound_message_queue.put(outbound_message)\n elif actuation.command == ActuatorCommandType.ACTUATOR_COMMAND_TYPE_STATUS:\n state, value = self.actuator_status_provider.get_actuator_status(actuation.reference)\n\n actuator_status = ActuatorStatus.ActuatorStatus(actuation.reference, state, value)\n\n outbound_message = self.outbound_message_factory.make_from_actuator_status(actuator_status)\n if not self.connectivity_service.publish(outbound_message):\n self.outbound_message_queue.put(outbound_message)\n elif actuation.command == ActuatorCommandType.ACTUATOR_COMMAND_TYPE_UNKNOWN:\n print(\"Received unsupported actuation command\")\n\n else:\n print(\"Received unsupported message: \\n\" +\n message.channel + \"\\n\" + message.payload)", "def message_dispatch():\n if not from_twilio(request):\n abort(403)\n resp = twilio.twiml.Response()\n if not session.get(\"pwd\"):\n session['pwd'] = '__META__ROOT__'\n body = request.values.get(\"Body\")\n number = request.values.get(\"From\")\n message = cmd_handler.handle(number,session,body)\n session.modified = True\n resp.message(message)\n # We are probably 
going to modify the session on every command.\n return str(resp)", "def _stream_data_callback(self, input_data, frame_count, time_info, status):\n logger.debug(\"Mic status: %s - frames: %s - time: %s\", status, frame_count, time_info)\n # run async in correct thread (ignore future?):\n asyncio.run_coroutine_threadsafe(self.chunk_queue.put(input_data), self.event_loop)\n return (input_data, pyaudio.paContinue)", "def callback_botmessage(self, message):\n pass", "def callback_botmessage(self, message):\n pass", "def callback_botmessage(self, message):\n pass", "def onMessageBegin(self, isBinary):", "def handle(message, outputQueue, runtime):\n if message[\"action\"] == \"load-slides\":\n pieName = message[\"src\"]\n jsonToSend = querySlidesFor(pieName, runtime[\"server\"])\n message = Message(\"WPHandler\", pieName, \"slideShow\", \"load-slides\", jsonToSend)\n print \"Wp sending message\"\n outputQueue.put(message)", "def handle_inbound_sms(to, from_):\n body = MessageRequest()\n body.application_id = MESSAGING_APPLICATION_ID\n body.to = [from_]\n body.mfrom = to\n body.text = \"The current date-time is: \" + str(time.time() * 1000) + \" milliseconds since the epoch\"\n try:\n messaging_client.create_message(MESSAGING_ACCOUNT_ID, body)\n except Exception as e:\n print(e)\n return None", "def on_bot_message():\n handle_bot_message(request.get_json())\n return \"ok\"", "def on_open(ws):\n data = {\n \"action\": \"start\",\n # this means we get to send it straight raw sampling\n \"content-type\": \"audio/l16;rate=%d\" % RATE,\n \"interim_results\": True,\n \"profanity_filter\": False\n }\n\n # Send the initial control message which sets expectations for the\n # binary stream that follows:\n ws.send(json.dumps(data).encode('utf8'))\n # Spin off a dedicated thread where we are going to read and\n # stream out audio.\n threading.Thread(target=read_audio, args=[ws]).start()", "def application_message(self, bus, msg):\n msgtype = msg.structure.get_name()\n if msgtype == 'partial_result':\n self.partial_result(msg.structure['hyp'], msg.structure['uttid'])\n if msgtype == 'result':\n self.final_result(msg.structure['hyp'], msg.structure['uttid'])", "def application_message(self, bus, msg):\n msgtype = msg.structure.get_name()\n if msgtype == 'partial_result':\n self.partial_result(msg.structure['hyp'], msg.structure['uttid'])\n if msgtype == 'result':\n self.final_result(msg.structure['hyp'], msg.structure['uttid'])", "def process_incoming(self, msg, status):\n return msg[0]", "def do_something(incoming_msg):\n return \"i did what you said - {}\".format(incoming_msg.text)", "def sendMessage(self, payload, isBinary):", "def on_sync_message(self, _, msg):\n message_name = msg.get_structure().get_name()\n print(message_name)\n if message_name == \"prepare-window-handle\":\n win_id = self.window_id\n assert win_id\n imagesink = msg.src\n imagesink.set_window_handle(win_id)", "def callback(ch, method, properties, body):\n print(f\" [x] Received {str(body)} kW.\")\n\n try:\n timestamp = properties.timestamp\n current_time = datetime.utcfromtimestamp(timestamp).replace(\n tzinfo=timezone.utc\n )\n except AttributeError:\n # If we don't get a timestamp from the broker, add a timestamp here.\n current_time = datetime.now().replace(tzinfo=timezone.utc)\n\n pv_photovoltaic = generate_pv_output(current_time)\n\n report_item = PVMeterReportItem(\n timestamp=current_time.isoformat(),\n pv_meter=int(body),\n pv_photovoltaic=pv_photovoltaic,\n )\n generate_report(report_item)\n\n 
ch.basic_ack(delivery_tag=method.delivery_tag)", "def run(self):\n #use subprocess for your bindings when develop a new functionality\n fulldate = datetime.now().strftime(\"%A, %d. %B %Y %I:%M%p\")\n\n hours = datetime.now().strftime(\"%I\")\n minutes = datetime.now().strftime(\"%I\")\n\n if self.req_from == 'jabber':\n response = {'request': self.request\n ,'text' : fulldate\n ,'jmsg' : fulldate\n ,'continue' : 0\n ,'type':'response' }\n\n if self.req_from == 'julius':\n response = {'request': self.request\n ,'say': \"IT'S, %d O'CLOCK AND %d MINUTES\" % ( int(hours), int(minutes))\n ,'text' : fulldate\n ,'continue' : 0\n ,'type' : 'response' }\n\n return response\n #import subprocess\n #s = subprocess.Popen(['ffmpeg', '-i', speech, flac ] , stderr=subprocess.STDOUT, stdout=subprocess.PIPE).communicate()[0]", "def MessageStream(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "async def async_browse_media(self, media_content_type=None, media_content_id=None):\n return await build_item_response(\n self._zone_id,\n self.coordinator,\n media_content_type,\n media_content_id,\n )", "async def run_offer(pc, signaling):\n await signaling.connect()\n\n channel = pc.createDataChannel(\"chat\")\n stream = ImageStream()\n\n @channel.on(\"open\")\n def on_open():\n print(\"on open called\")\n stream.step()\n img_str = cv2.imencode('.jpg', stream.emit())[1].tostring()\n channel.send(img_str)\n\n @channel.on(\"message\")\n def on_message(message):\n x, y = float(message.split(',')[0]), float(message.split(',')[1])\n print(\"received ball coordinates from client: \", (x, y))\n print(\"error between the estimated and original \", stream.calculate_error(x, y))\n stream.step()\n img_str = cv2.imencode('.jpg', stream.emit())[1].tostring()\n channel.send(img_str)\n\n # send offer\n await pc.setLocalDescription(await pc.createOffer())\n await signaling.send(pc.localDescription)\n await consume_signaling(pc, signaling)", "def handle(msg):\n # Get text or data from the message\n text = msg.get(\"text\", None)\n data = msg.get(\"data\", None)\n\n if data is not None:\n # This is a message from a custom keyboard\n chat_id = msg[\"message\"][\"chat\"][\"id\"]\n content_type = \"data\"\n elif text is not None:\n # This is a text message from the user\n chat_id = msg[\"chat\"][\"id\"]\n content_type = \"text\"\n else:\n # This is a message we don't know how to handle\n content_type = \"unknown\"\n \n if content_type == \"text\":\n message = msg[\"text\"]\n logging.info(\"Received from chat_id={}: {}\".format(chat_id, message))\n\n if message == \"/start\":\n # Check against the server to see\n # if the user is new or not\n # TODO\n payload = {'chat_id':chat_id}\n r = requests.post(host_addr+'/register', json=payload)\n response = json.loads(r.content)\n if response['exists']:\n message = \"Welcome back!\"\n else:\n message = \"Welcome!\"\n bot.sendMessage(chat_id, message)\n\n \n elif message == \"/rate\":\n # Ask the server to return a random\n # movie, and ask the user to rate the movie\n # You should send the user the following information:\n # 1. Name of the movie\n # 2. 
A link to the movie on IMDB\n # TODO\n\n # Create a custom keyboard to let user enter rating\n payload = {'chat_id':chat_id}\n r = requests.post(host_addr+'/get_unrated_movie', json=payload)\n response = json.loads(r.content)\n movieid = response['id']\n movieinfo = '%s: %s' % (response['title'], response['url'])\n bot.sendMessage(chat_id, movieinfo)\n my_inline_keyboard = [[\n InlineKeyboardButton(text='1', callback_data=str(movieid)+' rate_movie_1'),\n InlineKeyboardButton(text='2', callback_data=str(movieid)+' rate_movie_2'),\n InlineKeyboardButton(text='3', callback_data=str(movieid)+' rate_movie_3'),\n InlineKeyboardButton(text='4', callback_data=str(movieid)+' rate_movie_4'),\n InlineKeyboardButton(text='5', callback_data=str(movieid)+' rate_movie_5')\n ]]\n keyboard = InlineKeyboardMarkup(inline_keyboard=my_inline_keyboard )\n bot.sendMessage(chat_id, \"How do you rate this movie?\", reply_markup=keyboard)\n\n \n elif message == \"/recommend\":\n # Ask the server to generate a list of\n # recommended movies to the user\n payload = {'chat_id':chat_id, 'top_n':3}\n r = requests.post(host_addr+'/recommend', json=payload)\n response = json.loads(r.content)\n # print(response)\n if response['movies']==[]:\n message = 'You have not rated enough movies, we cannot generate recommendation for you.'\n bot.sendMessage(chat_id, message)\n else:\n bot.sendMessage(chat_id, \"My recommendations:\")\n for item in response['movies']:\n movieinfo = '%s: %s' % (item['title'], item['url'])\n bot.sendMessage(chat_id, movieinfo)\n\n\n else:\n # Some command that we don't understand\n bot.sendMessage(chat_id, \"I don't understand your command.\")\n\n elif content_type == \"data\":\n # This is data returned by the custom keyboard\n # Extract the movie ID and the rating from the data\n # and then send this to the server\n # TODO\n # print(data)\n info = str.split(data)\n movieid = int(info[0])\n rate = info[1][-1]\n logging.info(\"Received rating: {}\".format(rate))\n bot.sendMessage(chat_id, \"Your rating is received!\")\n # logging.info('Movie id = %d' % movieid)\n payload = {'chat_id':chat_id, 'movie_id': movieid, 'rating': rate}\n r = requests.post(host_addr+'/rate_movie', json=payload)\n response = json.loads(r.content)\n logging.info('Update status: '+response['status'])", "def test_dispatch_raw(self):\n msg_helper = MessageHelper()\n worker_helper = WorkerHelper()\n broker = self.setup_broker(worker_helper)\n self.assertEqual(broker.get_messages('vumi', 'fooconn.foo'), [])\n msg = msg_helper.make_inbound('message')\n yield worker_helper.dispatch_raw('fooconn.foo', msg)\n self.assertEqual(broker.get_messages('vumi', 'fooconn.foo'), [msg])", "def receive_message(self, context, message):\r\n pass", "def onMessageFrame(self, payload):", "def receive_message(self, message):\r\n return", "def handle(self, message):\n for callback in self.callbacks:\n callback(message['data'])", "def handle(self, message):\n if not message['successful']:\n raise BayeuxError(\n 'Unsuccessful subscribe response: {}'\n .format(message.get('error')))", "def on_message(self, data):\n req = json.loads(data)\n self.serve(req)", "def on_message(self, data):\n req = json.loads(data)\n self.serve(req)" ]
[ "0.6259964", "0.6026629", "0.586778", "0.5793742", "0.5789032", "0.5778347", "0.56937456", "0.56709975", "0.5563751", "0.55476165", "0.5544929", "0.5535647", "0.55325127", "0.55144805", "0.55017763", "0.5446944", "0.5444091", "0.5428671", "0.5415789", "0.54080504", "0.5401289", "0.5378489", "0.53578794", "0.53400666", "0.53400666", "0.53297096", "0.5293248", "0.52578837", "0.52457666", "0.5244298", "0.52409095", "0.52340466", "0.5232544", "0.5226772", "0.5210952", "0.52094823", "0.5202055", "0.5170692", "0.51567626", "0.5153672", "0.5145031", "0.51421857", "0.5141309", "0.5138775", "0.5137272", "0.51344043", "0.51311064", "0.5130271", "0.51252776", "0.51241916", "0.5121951", "0.51040757", "0.5100243", "0.5098003", "0.5095325", "0.508604", "0.5081793", "0.5079599", "0.50715077", "0.50694203", "0.5065917", "0.506316", "0.5061348", "0.5059429", "0.5057921", "0.5057839", "0.50576353", "0.505468", "0.50507414", "0.50498646", "0.50483274", "0.50483215", "0.50465095", "0.50465095", "0.50465095", "0.5039084", "0.5029272", "0.50222284", "0.50187397", "0.50133836", "0.50117475", "0.50117475", "0.50065106", "0.50025547", "0.4989222", "0.49852112", "0.49764416", "0.49680236", "0.49609596", "0.49599195", "0.49575084", "0.49548724", "0.49370757", "0.4935407", "0.4930243", "0.49294975", "0.49280363", "0.4925753", "0.4919119", "0.4919119" ]
0.68463576
0
Take information from a Bandwidth inbound message callback and respond with a text message containing the current date and time
def handle_inbound_sms(to, from_): body = MessageRequest() body.application_id = MESSAGING_APPLICATION_ID body.to = [from_] body.mfrom = to body.text = "The current date-time is: " + str(time.time() * 1000) + " milliseconds since the epoch" try: messaging_client.create_message(MESSAGING_ACCOUNT_ID, body) except Exception as e: print(e) return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(connection, info, conf) :\r\n connection.rawsend(\"NOTICE %s :\u0001TIME %s\u0001\\n\" % (info[\"sender\"], time.strftime(\"%b %d %Y, %H:%M:%S %Z\")))", "def bitfinex2_on_message(caller, msg):\n msg = json.loads(msg)\n if caller.subbed_count == 7:\n if msg[1] == \"te\":\n chnl = msg[0]\n body = msg[2]\n df = pd.DataFrame.from_records(\n data=[{\n \"tid\": int(body[0]),\n \"price\": float(body[3]),\n \"volume\": float(body[2]),\n \"datetime\": pd.to_datetime(body[1], unit='ms')\n }],\n index=\"datetime\"\n )\n # print (df)\n df.index = df.index.tz_localize(\"GMT0\")\n caller.write(chnl, df)\n\n return chnl, df\n\n if type(msg) is dict and \"event\" in msg and msg[\"event\"] == \"subscribed\":\n caller.config[\"channel_symbol\"][msg[\"chanId\"]] = \"bitfinex2\" + \":\" + bdic[msg[\"symbol\"]]\n caller.subbed_count += 1\n return\n\n\n chnl = msg[0]\n body = msg[2]\n df = pd.DataFrame.from_records(\n data=[{\n \"tid\": int(body[0]),\n \"price\": float(body[3]),\n \"volume\": float(body[2]),\n \"datetime\": pd.to_datetime(body[1], unit='ms')\n }],\n index=\"datetime\"\n )\n df.index = df.index.tz_convert(\"GMT0\")\n caller.write(chnl, df)\n\n return chnl, df", "def run(self):\n #use subprocess for your bindings when develop a new functionality\n fulldate = datetime.now().strftime(\"%A, %d. %B %Y %I:%M%p\")\n\n hours = datetime.now().strftime(\"%I\")\n minutes = datetime.now().strftime(\"%I\")\n\n if self.req_from == 'jabber':\n response = {'request': self.request\n ,'text' : fulldate\n ,'jmsg' : fulldate\n ,'continue' : 0\n ,'type':'response' }\n\n if self.req_from == 'julius':\n response = {'request': self.request\n ,'say': \"IT'S, %d O'CLOCK AND %d MINUTES\" % ( int(hours), int(minutes))\n ,'text' : fulldate\n ,'continue' : 0\n ,'type' : 'response' }\n\n return response\n #import subprocess\n #s = subprocess.Popen(['ffmpeg', '-i', speech, flac ] , stderr=subprocess.STDOUT, stdout=subprocess.PIPE).communicate()[0]", "def lastMessageReceived():", "def incoming_sms():\n # Get the message the user sent our Twilio number\n body = request.values.get('Body', None)\n\n # Start our TwiML response\n resp = MessagingResponse()\n\n # Determine the right reply for this message\n if body[0:7] == 'Newhigh':\n num = body[7:]\n result = ''\n with open('currentData.txt') as f:\n for i in range(int(num)):\n result += str(i + 1)\n result += '. 
'\n result += f.readline()\n # resp.message(result.stdout.decode('utf-8'))\n resp.message(result)\n return str(resp)\n \n if body[0:6] == 'Newlow':\n num = body[6:]\n result = subprocess.run(['tail','-n',num,'currentData.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body[0:4] == 'High':\n num = body[4:]\n num = '-' + num\n result = subprocess.run(['head',str(num),'AllBuildings.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body[0:3] == 'Low':\n num = body[3:]\n result = subprocess.run(['tail','-n',num,'AllBuildings.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body == 'Hourly':\n result = subprocess.run(['cat','ByHour.txt'], stdout=subprocess.PIPE)\n elif body == 'Weekday':\n result = subprocess.run(['cat','DaysOfWeek.txt'], stdout=subprocess.PIPE)\n # resp.message(fortune)\n elif body == '10minute':\n result = subprocess.run(['cat','data/PerTenMinutes.txt'], stdout=subprocess.PIPE)\n else:\n resp.message(\"June 2018 - Feb 2019 Totals\\n\\nCommands:\\n(# is any number between 1 and 50)\\nNewhigh# - Highest Past 10 Minutes\\nNewlow# - Lowest Past 10 Minutes\\n\\nBelow are cumulative annual figs:\\nHigh# - Highest of Year\\nLow# - Lowest of Year\\n\\nCampuswide Figures:\\n10minute - Ten Minute Intervals\\nHourly - 1 Hour Intervals\\nWeekday - By Day of the Week\\n\")\n return str(resp)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)", "def onMessageBegin(self, isBinary):", "def callback(ch, method, properties, body):\n print(f\" [x] Received {str(body)} kW.\")\n\n try:\n timestamp = properties.timestamp\n current_time = datetime.utcfromtimestamp(timestamp).replace(\n tzinfo=timezone.utc\n )\n except AttributeError:\n # If we don't get a timestamp from the broker, add a timestamp here.\n current_time = datetime.now().replace(tzinfo=timezone.utc)\n\n pv_photovoltaic = generate_pv_output(current_time)\n\n report_item = PVMeterReportItem(\n timestamp=current_time.isoformat(),\n pv_meter=int(body),\n pv_photovoltaic=pv_photovoltaic,\n )\n generate_report(report_item)\n\n ch.basic_ack(delivery_tag=method.delivery_tag)", "def incoming(request):\n message = TropoIncomingMessage(\n sender=request.REQUEST.get('sender'),\n text=str(request.REQUEST.get('text')),\n received=datetime.now())\n \n return process_message(message)", "def handle_inbound_irc_msg(self, name, message):\n try:\n relevant = self.get_relevant(message)\n except Exception:\n self.print_traceback()\n return\n from datetime import datetime\n now = datetime.now(self.tz)\n if self.debug:\n from .ootil import OrderedPrettyPrinter as OrdPP\n pretty = OrdPP().pformat(dict(relevant, time=now.isoformat()))\n self.logger.debug(f\"{name}(msg)\\n{pretty}\")\n relevant[\"time\"] = now\n try:\n self.route_verdict(name, relevant)\n except Exception:\n self.print_traceback()", "def current_time(incoming_msg):\n # Extract the message content, without the command \"/time\"\n timezone = bot.extract_message(\"/time\", incoming_msg.text).strip()\n\n # Craft REST API URL to retrieve current time\n # Using API from http://worldclockapi.com\n u = \"http://worldclockapi.com/api/json/{timezone}/now\".format(timezone=timezone)\n r = requests.get(u).json()\n\n # If an invalid timezone is provided, the serviceResponse will include\n # error message\n if r[\"serviceResponse\"]:\n return \"Error: \" + r[\"serviceResponse\"]\n\n # Format of returned data is \"YYYY-MM-DDTHH:MM<OFFSET>\"\n 
# Example \"2018-11-11T22:09-05:00\"\n returned_data = r[\"currentDateTime\"].split(\"T\")\n cur_date = returned_data[0]\n cur_time = returned_data[1][:5]\n timezone_name = r[\"timeZoneName\"]\n\n # Craft a reply string.\n reply = \"In {TZ} it is currently {TIME} on {DATE}.\".format(\n TZ=timezone_name, TIME=cur_time, DATE=cur_date\n )\n return reply", "def get_message():\n # Only run xray in the AWS Lambda environment\n if runs_on_aws_lambda():\n xray_subsegment = xray_recorder.current_subsegment()\n xray_subsegment.put_annotation(\"key\", \"value\")\n # Sample metadata\n # subsegment.put_metadata(\"operation\", \"metadata\", \"python object/json\")\n xray_recorder.end_subsegment()", "def ServerSyncReceived(self,message):", "def text(message):\n room = session.get('room')\n emit('timerupdate', {'msg': message}, room=room)", "def process_med_info_response(self, sender, message, response):\n\t\tnow = datetime.datetime.now()\n\t\tmessage.datetime_responded = now\n\t\tmessage.save()\n\t\traise Exception(\"Not yet implemented\")", "def callback_message(self, message):\n pass", "def callback_message(self, message):\n pass", "def onMessage(self, payload, isBinary):", "def handle_inbound_message():\n data = json.loads(request.data)\n\n if data[0][\"type\"] == \"message-received\":\n if \"call me\" in data[0][\"message\"][\"text\"]:\n handle_inbound_sms_call_me(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"])\n elif \"media\" in data[0][\"message\"]:\n handle_inbound_media_mms(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"], data[0][\"message\"][\"media\"])\n else:\n handle_inbound_sms(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"])\n else:\n print(data)\n return \"\"", "def callback_message(self, message):\n return \"hi bro\"", "def on_message(data):\n pass", "def callback(self, data):\n\n self.connection = pika.BlockingConnection(self.params)\n self.channel = self.connection.channel()\n\n # The fanout exchange broadcasts all the messages it receives to all the queues it knows.\n # That is what we need for our logger.\n # Tony changed to 'topic' to work with Kuilin's group\n self.channel.exchange_declare(exchange=self.logName,\n exchange_type='topic',\n auto_delete=True)\n\n #TONY WAS HERE\n #CONVERT THE DATA BEFORE SENDING\n #this extracts the data to a tuple\n data_tuple = struct.unpack(\"<hddhdddddddddddd\", data)\n #convert tuple to string and remove the parentheses on the ends\n data_to_send = str(data_tuple).strip(\"()\")\n\n # Publish the data to the exchange\n self.channel.basic_publish(exchange=self.logName,\n routing_key=self.RoutingKey,\n body=data_to_send) #used to be body=data (from Pilot)\n\n #tony was here\n #print(\"Sending: %r via %r and %r\" % (data,self.logName,self.RoutingKey))\n\n self.connection.close()", "def receive_message(self, message):", "def out(msg):\n now = datetime.datetime.now()\n #print (now.__str__() + ' - ' + msg)", "def on_t10_message():\n handle_t10_message(request.get_json())\n return \"ok\"", "def message_dispatch():\n if not from_twilio(request):\n abort(403)\n resp = twilio.twiml.Response()\n if not session.get(\"pwd\"):\n session['pwd'] = '__META__ROOT__'\n body = request.values.get(\"Body\")\n number = request.values.get(\"From\")\n message = cmd_handler.handle(number,session,body)\n session.modified = True\n resp.message(message)\n # We are probably going to modify the session on every command.\n return str(resp)", "def process_chatter(self, msg):\n # note, nothing in here is ROS specific, it's just 
python code that\n # runs when new info appears\n\n print msg.data # print the recieved message\n\n self.msgs_recieved += 1 # increase msg count\n self.msgs_recieved %= 500 # mod 500 so we don't get enormous numbers\n self.msg = \"%d messages recieved\" % self.msgs_recieved # set message", "def message_callback(self, message):\n pass", "def do_something(incoming_msg):\n return \"i did what you said - {}\".format(incoming_msg.text)", "def incoming_sms():\n # Get the message body\n body = request.values.get('Body', None)\n\n # Start our TwiML response\n resp = MessagingResponse()\n\n # Determine the appropriate response/action for incoming message\n replyText = getReply(body)\n\n resp.message(replyText)\n\n return str(resp)", "def handle(self):\n global latest_status\n data = self.request[0]\n socket = self.request[1]\n logging.info(\"Received {} bytes from {}\".format(len(data), self.client_address[0]))\n jss = interface.joystick_status_pb2.JoystickStatus()\n jss.ParseFromString(data)\n sent = jss.sent.ToDatetime()\n if not latest_status:\n latest_status = jss\n else:\n if latest_status.sent.ToDatetime() < sent:\n latest_status = jss\n else:\n logging.warning(\"Discarded stray package.\")\n ack = interface.joystick_status_pb2.JoystickAck()\n ack.sent.CopyFrom(jss.sent)\n ack.received.GetCurrentTime()\n response = ack.SerializeToString()\n socket.sendto(response, self.client_address)", "def nxlog_callback(ch, method, properties, body):\n\n def nx_formatter(event: dict) -> Dict:\n \"\"\"\n Форматирование nx'овой обвязки\n connector_id и dt - обязательные поля в событии\n :param event:\n :return:\n \"\"\"\n\n def cast(message: dict):\n \"\"\"приведение типов\"\"\"\n for k, v in message.items():\n if isinstance(v, datetime):\n message[k] = message[k].isoformat()\n if isinstance(v, decimal.Decimal):\n message[k] = int(message[k])\n try:\n message[k] = int(message[k])\n except (ValueError, TypeError):\n pass\n if k in ['username']:\n message[k] = str(message[k])\n return message\n\n nx_attributes = get_nx_attributes(event['connector_id'])\n f_message = NxlogMessage(**nx_attributes)\n f_message.hostname = socket.gethostname()\n event_time = parse(event['dt'])\n f_message.event_time = event_time\n f_message.detection_time = event_time\n f_message.raw = event\n f_message.md5 = md5_from_raw(event)\n return cast(f_message.to_dict())\n\n rmq_message = orjson.loads(body)\n logger.debug(\"Received message from queue: %s\", rmq_message)\n metric_notify_counter(app_module=rmq_message['connector_id'], metric_name=\"stream-of-events\")\n\n # if event is already exists in redis, there's no need in sending to nxlog\n rmq_message_id = f\"{rmq_message['connector_id']}_{rmq_message['id']}_{md5_from_raw(rmq_message)}\"\n\n if env.redis.exists(rmq_message_id):\n ch.basic_ack(delivery_tag=method.delivery_tag)\n logger.debug(f\"{rmq_message['id']} already exist\")\n return\n\n nx_message = nx_formatter(rmq_message)\n logger.debug(\"Try to send event to NXLog [%s] %s\", nx_message['raw']['connector_id'], nx_message['raw'])\n\n if not env.nxlog_client:\n env.nxlog_client = NXLogClient(**env.nxlog_config['nx_collector'])\n if env.nxlog_client.send_event(nx_message):\n ch.basic_ack(delivery_tag=method.delivery_tag)\n metric_notify_counter(app_module=rmq_message['connector_id'],\n metric_name=f\"sent_messages_{nx_message['DevType']}\")\n\n # put into redis after successful sending\n env.redis.set(rmq_message_id, body, ex=1209600) # срок хранения данных в базе 14 дней\n 
metric_notify_counter(app_module=rmq_message['connector_id'], metric_name=\"received-events\")\n\n return", "def on_message(ws, message):\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n message_dict = message_to_dict(message)\n print('[' + st + '] Event in channel: ' + message_dict['channel'] +\n '. Created by user: ' + message_dict['user'] + '. Event Type: ' +\n str(message_dict['type']) + '.')\n handle_response(message_dict)", "async def the_callback(data):\n print(f\"analog callback data: {data[1]} \", end='\\r')", "def incoming_sms():\n number = request.values.get('From', None)\n body = request.values.get('Body', None)\n print(body)\n # Start our TwiML response\n resp = MessagingResponse()\n\n body = body.lower()\n body = body.strip()\n body_arr = body.split()\n class_name = \"\"\n name = \"\"\n if len(body_arr) == 4:\n first_name = body_arr[0]\n last_name = body_arr[1]\n name = first_name + \" \" + last_name\n class_name = body_arr[2] + body_arr[3]\n elif len(body_arr) == 6:\n first_name = body_arr[0]\n last_name = body_arr[1]\n name = first_name + \" \" + last_name\n class_name = body_arr[2] + body_arr[3] + body_arr[4] + body_arr[5]\n else:\n resp.message(\"Invalid: Enter your name, class, and session# separated by spaces as shown\\n(eg: Avi Patel grade1 session1, Ravi Rao PreK session1, Mira Singh kg session2, etc.):\")\n return str(resp)\n\n if classes.find_one({'class':class_name}):\n forward_message(class_name, number, name)\n resp.message(\"Your teachers have been notified\")\n\n else:\n resp.message(\"Invalid: Enter your name, class, and session# separated by spaces as shown\\n(eg: Avi Patel grade1 session1, Ravi Rao PreK session1, Mira Singh kg session2, etc.):\")\n\n return str(resp)", "def handle_message(self, message):", "async def subscribe(self, payload):\n\n time = payload['inline_params']\n\n if not time:\n await self.__call__(payload)\n return\n\n result = self.sdk.scheduler.find(payload['chat'])\n if result and result['hour'] == time:\n await self.sdk.send_text_to_chat(\n payload[\"chat\"],\n \"Вы уже подписаны на ежедневный дайджест в {}:00\".format(time)\n )\n else:\n payload['command'] = 'today'\n self.sdk.scheduler.remove(payload['chat'])\n self.sdk.scheduler.add(\n CommandStatistics(self.sdk).stats,\n chat_id=str(payload['chat']),\n hour=time,\n args=[payload]\n )\n await self.sdk.send_text_to_chat(\n payload[\"chat\"],\n \"Вы успешно подписались на ежедневный дайджест в {}:00\".format(time)\n )", "def coinbasepro_on_message(caller, msg):\n msg = json.loads(msg)\n # if msg['type'] == 'match':\n if msg['type'][2] == 't':\n chnl = msg[\"product_id\"]\n df = pd.DataFrame.from_records(\n data=[{\n \"tid\": int(msg[\"trade_id\"]),\n \"price\": float(msg[\"price\"]),\n \"volume\": float(msg['size']) if msg['side'] == 'buy' else -float(msg['size']),\n \"datetime\": pd.to_datetime(msg[\"time\"])\n }],\n index=\"datetime\"\n )\n df.index = df.index.tz_convert(\"GMT0\")\n caller.write(chnl, df)\n\n return chnl, df", "def handleReturnTime(rtt):\n pass", "def callback(ch, method, properties, body):\n logging.info(\" [x] salary scheduling %r\" % (body, ))\n send_task('skype_messaging.notify_devs', ['Schedule Salary Update : %r' % body])\n data = json.loads(body)\n subcon_id = data['subcon_id']\n eta = datetime.strptime(data['scheduled_date'], '%Y-%m-%d %H:%M:%S')\n tz = timezone(data['timezone'])\n eta = tz.localize(eta)\n ph_tz = timezone('Asia/Manila')\n eta = eta.astimezone(ph_tz)\n logging.info(' [x] sending task %s @ %s' % 
(subcon_id, eta))\n send_task('ScheduleActivation.StaffSalaryUpdate', args=[subcon_id,], eta=eta)", "def message_handler(region, message):\n old_state = message[\"OldStateValue\"]\n new_state = message[\"NewStateValue\"]\n text = \"{}: {} -> {}\".format(message[\"AlarmName\"], old_state, new_state)\n link = \"https://console.aws.amazon.com/cloudwatch/home?region={}#s=Alarms&alarm={}\".format(\n region, message[\"AlarmName\"]\n )\n values = {\n \"attachments\": [\n {\n \"fallback\": text,\n \"pretext\": text,\n \"title\": message[\"AlarmDescription\"],\n \"title_link\": link,\n \"text\": message[\"NewStateReason\"],\n \"color\": \"#36a64f\" if new_state == \"OK\" else \"#d00000\",\n \"fields\": [\n {\n \"title\": \"Region\",\n \"value\": message[\"Region\"],\n },\n {\n \"title\": \"State Change\",\n \"value\": \"{} -> {}\".format(old_state, new_state)\n },\n {\n \"title\": \"Metric Name\",\n \"value\": message[\"Trigger\"][\"MetricName\"],\n },\n {\n \"title\": \"Namespace\",\n \"value\": message[\"Trigger\"][\"Namespace\"],\n },\n ],\n },\n ],\n \"username\": \"AWS Alarm - {}\".format(new_state),\n \"icon_emoji\": \":white_check_mark:\" if new_state == \"OK\" else \":no_entry_sign:\",\n }\n\n url = os.environ[\"SLACK_INCOMING_WEBHOOK\"]\n req = urllib.request.Request(url)\n req.add_header('Content-Type', 'application/json')\n content = json.dumps(values, ensure_ascii=False).encode(\"utf-8\")\n res = urllib.request.urlopen(req, data=content).read()\n logger.info(res)", "def handle(self, message):", "def request_realtime_info(self):\n self.socket_datastream.sendto(b\"!r\", self.ip_port_arduino_datastream)\n self.socket_datastream.sendto(b\"!s\", self.ip_port_arduino_datastream)", "def notification_handler(sender, data):\n print(\"{0}: {1}\".format(sender, data))", "def receive(self, message):", "def handleMessage(msg):", "def on_session_heartbeat(self, event):\n params = {}\n answer_seconds_since_epoch = float(event['Caller-Channel-Answered-Time'])/1000000\n # using UTC here .. make sure FS is using UTC also\n params['AnsweredTime'] = str(answer_seconds_since_epoch)\n heartbeat_seconds_since_epoch = float(event['Event-Date-Timestamp'])/1000000\n # using UTC here .. 
make sure FS is using UTC also\n params['HeartbeatTime'] = str(heartbeat_seconds_since_epoch)\n params['ElapsedTime'] = str(heartbeat_seconds_since_epoch - answer_seconds_since_epoch)\n called_num = event['variable_plivo_destination_number']\n if not called_num or called_num == '_undef_':\n called_num = event['Caller-Destination-Number'] or ''\n called_num = called_num.lstrip('+')\n params['To'] = called_num\n params['From'] = event['Caller-Caller-ID-Number'].lstrip('+')\n params['CallUUID'] = event['Unique-ID']\n params['Direction'] = event['Call-Direction']\n forwarded_from = get_substring(':', '@',\n event['variable_sip_h_Diversion'])\n if forwarded_from:\n params['ForwardedFrom'] = forwarded_from.lstrip('+')\n if event['Channel-State'] == 'CS_EXECUTE':\n params['CallStatus'] = 'in-progress'\n # RequestUUID through which this call was initiated if outbound\n request_uuid = event['variable_plivo_request_uuid']\n if request_uuid:\n params['RequestUUID'] = request_uuid\n accountsid = event['variable_plivo_accountsid']\n if accountsid:\n params['AccountSID'] = accountsid\n\n self.log.debug(\"Got Session Heartbeat from Freeswitch: %s\" % params)\n\n if self.get_server().call_heartbeat_url:\n self.log.debug(\"Sending heartbeat to callback: %s\" % self.get_server().call_heartbeat_url)\n spawn_raw(self.send_to_url, self.get_server().call_heartbeat_url, params)", "def message_recording(client):\n client.register_and_login('foo', 'default')\n client.add_message('test message 1')\n client.add_message('<test message 2>')\n rv = client.get('/')\n assert 'test message 1' in rv.data\n assert '&lt;test message 2&gt;' in rv.data", "def on_message(client1, userdata, message):\n print(\"message received \" ,str(message.payload.decode(\"utf-8\")))", "def sendTime(self):\n timestamp = datetime.datetime.now().strftime(\"%A, %d. 
%B %Y %I:%M%p\")\n self.send(timestamp)", "def onMessageFrame(self, payload):", "def twilio(request):\n\n # Log the SMS request\n timestamp = datetime.now()\n sms = models.Sms.objects.create(request.From, request.To, request.Body, datetime.now)\n sms.save()\n\n\n\n # Parse the SMS\n try: msgtype, beat, message = parse_sms(sms.body)\n except SMSParseError as e:\n # Handle error condition\n return render_to_response('twilio_responses/parseerror.xml', {'reason': e.reason})\n\n\n if msgtype == REGISTER:\n user = User(beat=beat, cell = request.From)\n user.save()\n return render_to_response('twilio_response/registrationreceived.xml')\n\n if msgtype == INCIDENT:\n # Log an incident\n\n # Is the user registered?\n try: user = models.User.objects.get(cell = sms.sender)\n except: \n return render_to_response('twilio_responses/parseerror.xml', {'reason': 'First register by texting register <beat number>'})\n\n \n\n if not beat:\n # Assume the user's in his home beat, unless he specified one\n beat = user.beat\n\n incident = Incident(reportedby = user, beat = beat, msg = message)\n incident.save()\n\n if beat.user == user:\n # If the admin submitted the incident, just notify the beat\n notify_beat(incident)\n return render_to_response('twilio_responses/beatnotified.xml')\n else:\n notify_admin(incident)\n return render_to_response('twilio_responses/adminnotified.xml')\n\n elif msgtype == NOTIFY:\n # The message should be an incident ID\n beat = user.beat\n \n try: \n incident = Incident.objects.get(id = int(msg))\n except:\n return render_to_response('twilio_responses/parseerror.xml', {'reason': 'Invalid incident: %s' % msg})\n\n if not beat.user == user:\n return render_to_response('twilio_responses/parseerror.xml', {'reason': 'You\\'re not the beat administrator'})\n\n return render_to_response('twilio_responses/beatnotified.xml')", "def incoming():\n create_table()\n viber_request = viber.parse_request(request.get_data())\n # Defining type of the request and replying to it\n if isinstance(viber_request, ViberMessageRequest):\n # Passing any message from user to message handler in handlers.py\n user_message_handler(viber, viber_request)\n elif isinstance(viber_request, ViberSubscribedRequest):\n viber.send_messages(viber_request.user.id, [\n TextMessage(text=\"Спасибо за подписку!\")\n ])\n elif isinstance(viber_request, ViberFailedRequest):\n logger.warn(\"client failed receiving message. 
failure: {0}\"\n .format(viber_request))\n elif isinstance(viber_request, ViberConversationStartedRequest):\n # First touch, sending to user keyboard with phone sharing button\n keyboard = kb.SHARE_PHONE_KEYBOARD\n viber.send_messages(viber_request.user.id, [\n TextMessage(\n text=txt.GREETING,\n keyboard=keyboard,\n min_api_version=3)\n ]\n )\n return Response(status=200)", "def broker_null(self, data):\n\n print(\"Heartbeat\")\n #TODO: Reset heartbeat timer or something like that", "def on_begin(self, args, kwargs):\n self.last_msg = datetime.datetime.utcnow()", "def _on_message(self, client, userdata, msg):\n # print 'receiving message'\n epoch_time = self._get_epoch_time()\n time_string = time.strftime(\"%a, %d %b %Y %H:%M:%S +0000\", time.gmtime())\n if not self.file.closed:\n self.file.write(str(epoch_time) + ',' + time_string + \",\" + msg.topic + \",\" + str(msg.payload) + '\\n')", "def post_call(self, date='2014-02-12', time='22:20:33', rnumber='0674767730', rname='Romain', snumber='0617382221', sname='Lolo'):\n xmldata1 = \"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\"?>\" + \\\n \"<s:Envelope xmlns:s=\\\"http://schemas.xmlsoap.org/soap/envelope/\\\" s:encodingStyle=\\\"http://schemas.xmlsoap.org/soap/encoding/\\\">\" + \\\n \"<s:Body>\" + \\\n \"<u:AddMessage xmlns:u=\\\"urn:samsung.com:service:MessageBoxService:1\\\">\" + \\\n \"<MessageType>text/xml</MessageType>\" + \\\n \"<MessageID>call</MessageID>\" + \\\n \"<Message>\" + \\\n \"&lt;Category&gt;Incoming Call&lt;/Category&gt;\" + \\\n \"&lt;DisplayType&gt;Maximum&lt;/DisplayType&gt;\" + \\\n \"&lt;CallTime&gt;\" + \\\n \"&lt;Date&gt;\"\n xmldata2 = \"&lt;/Date&gt;\" + \\\n \"&lt;Time&gt;\"\n xmldata3 = \"&lt;/Time&gt;\" + \\\n \"&lt;/CallTime&gt;\" + \\\n \"&lt;Callee&gt;\" + \\\n \"&lt;Number&gt;\"\n xmldata4 = \"&lt;/Number&gt;\" + \\\n \"&lt;Name&gt;\"\n xmldata5 = \"&lt;/Name&gt;\" + \\\n \"&lt;/Callee&gt;\" + \\\n \"&lt;Caller&gt;\" + \\\n \"&lt;Number&gt;\"\n xmldata6 = \"&lt;/Number&gt;\" + \\\n \"&lt;Name&gt;\"\n xmldata7 = \"&lt;/Name&gt;\" + \\\n \"&lt;/Caller&gt;\" + \\\n \"</Message>\" + \\\n \"</u:AddMessage>\" + \\\n \"</s:Body>\" + \\\n \"</s:Envelope>\"\n\n #Create Header for Message\n header = \"POST /PMR/control/MessageBoxService HTTP/1.0\\r\\n\" + \\\n \"Content-Type: text/xml; charset=\\\"utf-8\\\"\\r\\n\" + \\\n \"Host: \" + self.host + \"\\r\\n\" + \\\n \"Content-Length: \" + str(len(xmldata1) + len(date) + \\\n len(xmldata2) + len(time) + \\\n len(xmldata3) + len(rnumber) + \\\n len(xmldata4) + len(rname) + \\\n len(xmldata5) + len(snumber) + \\\n len(xmldata6) + len(sname) + \\\n len(xmldata7)) + \"\\r\\n\" + \\\n \"SOAPACTION: urn:samsung.com:service:MessageBoxService:1#AddMessage\\r\\n\" + \\\n \"Connection: close\\r\\n\\r\\n\"\n #Create socket\n full_soap_request = header + \\\n xmldata1 + date + \\\n xmldata2 + time + \\\n xmldata3 + rnumber + \\\n xmldata4 + rname + \\\n xmldata5 + snumber +\\\n xmldata6 + sname +\\\n xmldata7\n msg_port = 52235;\n\n try:\n # Open Socket\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.connect((self.host, msg_port))\n sock.send(full_soap_request.encode('utf-8'))\n read = sock.recv(1024)\n # print(\"\\n\\n Reader \\n\\n\" + read)\n sock.close()\n except socket.error, e:\n raise TVError(e[1], 'post_call')\n finally:\n sock.close()\n sock = None", "async def on_call(message, client):\n pass", "def incoming(self,message):\n #Convert to Dictionary, Whatever the input is\n if isinstance(message, str):\n message = json.loads(message)\n elif 
isinstance(message, bytes):\n message = self.deserialize(message)\n\n op = message.get(\"op\")\n if op == \"publish\":\n message[\"msg\"] = self.decompress(message[\"topic\"],message.get(\"msg\"))\n message[\"topic\"] = self.remap_topic(message[\"topic\"]) \n elif op == \"advertise\":\n message[\"topic\"] = self.remap_topic(message[\"topic\"])\n elif op == \"advertise_service\" or op == \"service_response\":\n message[\"service\"] = self.remap_service(message[\"service\"])\n\n\n message = json.dumps(message)\n #--------\n #replace JSON Null values in float32 types with infinity datatype (changed according to the error for LaserScan values)\n message = message.replace(\"null\", \"Infinity\")\n #--------\n self._protocol.incoming(message)", "def date(*args):\n current_date = datetime.now().isoformat(' ').split('.')[0]\n send.system_message(current_date)", "def listen_to_message(**payload):\n\n data = payload['data']\n\n try:\n message = data['text']\n user = data['user']\n message_id = data['client_msg_id']\n time = data['event_ts']\n channel = data['channel']\n process_data({'user': user, 'message': message, 'message_id': message_id, 'channel': channel, 'time': time})\n except KeyError:\n pass\n except Exception as e:\n logging.error(e)\n return None", "def on_message(mqttc,obj,msg):\n gateways = []\n output['measurement'] = 'LoStick'\n output['time'] = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')\n try:\n x = json.loads(msg.payload.decode('utf-8'))\n if \"up\" in msg._topic.decode(\"utf-8\"):\n airtime = x[\"metadata\"][\"airtime\"]\n gateways = x[\"metadata\"][\"gateways\"]\n fields[\"airtimeUL\"] = airtime\n output['fields'] = fields\n elif \"down\" in msg._topic.decode(\"utf-8\"):\n airtime = x[\"config\"][\"airtime\"]\n fields[\"airtimeDL\"] = airtime\n if len(gateways) > 1:\n rssi = 0\n for gw in gateways:\n rssi += gw[\"rssi\"]\n rssi /= len(gateways)\n fields[\"gw_rssi\"] = rssi\n print(\"outpout: \", output)\n output['tags'] = tags\n output['fields'] = fields\n client.write_points([output])\n sys.stdout.flush()\n except Exception as e:\n print(e)\n pass", "def ws_info(\n hass: HomeAssistant,\n connection: websocket_api.ActiveConnection,\n msg: dict,\n) -> None:\n connection.send_result(msg[\"id\"], hass.data[DOMAIN])", "def run(self):\n alogger.info(\"Recieved message from %s, Message: (%d) %s\" % (self.client.getaddress(), self.action_type, self.message))\n \n #Try to call th function associated with this message type.\n #format = \"handle_<type>\" (eg: handle_100)\n fn = globals().get(\"handle_\" + str(self.action_type))\n if fn and callable(fn):\n fn(self.message, self.address, self.client)\n else:\n alogger.info(\"Received unknown message from %d, type: %d\" % (self.client.getaddress(), self.action_type))", "def sms_reply():\n # Start our TwiML response\n # if body.lower()==\"good\":\n message=\"Hi I'm IRIS, an Immediately Responsive Intelligent System\\nHow are you feeling today?\"\n user=request.form['Body']\n\n # message=\"Hi \"+ name+ \"\"\n # user=request.form['Body']\n\n if user==\"good\":\n message=\"Glad to hear it! I hope you continue to feel this way! Celebrate this feeling and hold onto what happened ot make you feel this way so that you can repeat it in the future!\"\n\n if user==\"sad\":\n message=\"I’m sorry to hear that. 
Here are some things I do to make me feel better: take a walk outside, listen to uplifting music, call or message a loved one, or watch or read something positive to take my mind off of what I’m feeling.\"\n\n if user==\"nervous\":\n message=\"It’s going to be ok! This feeling will not last forever.\"\n if user==\"lonely\":\n message=\"I’m here for you, and know that you are loved, supported, and important. The world would not be the same without you! For a loving quote respond\"\n\n if user==\"angry\":\n message=\"“Let me help you turn your anger into something positive. Here are some ways to burn off energy productively: take a long walk, remove yourself from the situation, paint of draw, listen to loud music, or take a break from what you are doing.\"\n\n if user==\"tired\":\n message=\"I understand what you are feeling well. I recommend taking a break to do an activity you enjoy, taking a nap, getting a coffee, doing 20 jumping jacks, listening to a pump-up playlist, or standing up to stretch for a bit.\"\n\n if user==\"average\":\n message=\"There are many things to look forward to!\"\n resp = MessagingResponse()\n\t # Add a message\n \n resp.message(message)\n\t # Add a picture message\n\t #msg.media(\"https://farm8.staticflickr.com/7090/6941316406_80b4d6d50e_z_d.jpg\")\n\n return str(resp)", "def test_time_request_message(self):\n expected_topic = self.factory.common_topic + WAPMF.TIME\n expected_payload = None\n expected_message = Message(expected_topic, expected_payload)\n\n serialized_message = self.factory.make_time_request()\n\n self.assertEqual(expected_message, serialized_message)", "def listen():\n if request.method == 'GET':\n print request\n return verify_webhook(request)\n\n if request.method == 'POST':\n payload = request.json\n event = payload['entry'][0]['messaging']\n for x in event:\n if is_user_message(x):\n text = x['message']['text']\n sender_id = x['sender']['id']\n respond(sender_id, text)\n\n return \"ok\"", "def ceilometer_callback(self, ch, method, properties, body):\n payload = json.loads(body)\n try:\n message_body = json.loads(payload['oslo.message'])\n samples = message_body['args']['data']\n #print \"--------------------------------------------------\"\n self.pool.spawn_n(self.zabbix_sender.consume_samples,samples)\n except Exception,e:\n log.warn(str(e))", "def output_message_eval(info_dict):\n time_dict = {'time' : str(datetime.now().strftime(\"%H:%M:%S\"))}\n result_dict = dict(time_dict, **info_dict)\n database.results_output_queue.put(result_dict)", "def handle(self, message):\n print(\"You received a message:\")\n print(message)\n # Overwrite this function to do something with the message!", "def on_message(self, ws, message):\n message = json.loads(message)\n if message['type'] == 'error':\n self.on_error(None, message['message'])\n elif message['type'] == 'subscriptions':\n print(\"Subscribed to {}\".format(', '.join([ channel['name'] for channel in message['channels'] ])))\n else:\n if ((message['type']=='ticker' and message['product_id'] in self._ticker) or \n (message['type'] in [\"snapshot\", \"l2update\"] and message['product_id'] in self._level2) or \n (message['type'] in [\"received\",\"open\",\"done\",\"match\",\"change\",\"activate\"] )):\n self.messages.append(message)\n elif message['type']=='heartbeat':\n self.updated_time = time.time()", "def getinfo(timestamp):\n datetime, message = timestamp.split(']')\n\n date, time = datetime.split()\n date = date.strip('[')\n hour, minute = time.split(':')\n\n message = message.split()\n extra = 
message[1] # either 'asleep', 'up', or '#XXX'\n\n return date, int(hour), int(minute), extra", "def process_incoming(self, msg, status):\n return msg[0]", "def _route_message(self, msg):\n # check xml formatting\n try:\n xmldoc = minidom.parseString(msg)\n except xml.parsers.expat.ExpatError:\n _LOGGER.warning(\"ISY Received Malformed XML:\\n%s\", msg)\n return\n _LOGGER.log(LOG_VERBOSE, \"ISY Update Received:\\n%s\", msg)\n\n # A wild stream id appears!\n if f\"{ATTR_STREAM_ID}=\" in msg and ATTR_STREAM_ID not in self.data:\n self.update_received(xmldoc)\n\n # direct the event message\n cntrl = value_from_xml(xmldoc, ATTR_CONTROL)\n if not cntrl:\n return\n if cntrl == \"_0\": # ISY HEARTBEAT\n if self._loaded is None:\n self._loaded = ES_INITIALIZING\n self.isy.connection_events.notify(ES_INITIALIZING)\n elif self._loaded == ES_INITIALIZING:\n self._loaded = ES_LOADED\n self.isy.connection_events.notify(ES_LOADED)\n self._lasthb = now()\n self._hbwait = int(value_from_xml(xmldoc, ATTR_ACTION))\n _LOGGER.debug(\"ISY HEARTBEAT: %s\", self._lasthb.isoformat())\n elif cntrl == PROP_STATUS: # NODE UPDATE\n self.isy.nodes.update_received(xmldoc)\n elif cntrl[0] != \"_\": # NODE CONTROL EVENT\n self.isy.nodes.control_message_received(xmldoc)\n elif cntrl == \"_1\": # Trigger Update\n if f\"<{ATTR_VAR}\" in msg: # VARIABLE\n self.isy.variables.update_received(xmldoc)\n elif f\"<{ATTR_ID}>\" in msg: # PROGRAM\n self.isy.programs.update_received(xmldoc)\n elif f\"<{TAG_NODE}>\" in msg and \"[\" in msg: # Node Server Update\n pass # This is most likely a duplicate node update.\n elif f\"<{ATTR_ACTION}>\" in msg:\n action = value_from_xml(xmldoc, ATTR_ACTION)\n if action == ACTION_KEY:\n self.data[ACTION_KEY] = value_from_xml(xmldoc, TAG_EVENT_INFO)\n return\n if action == ACTION_KEY_CHANGED:\n self._program_key = value_from_xml(xmldoc, TAG_NODE)\n # Need to reload programs\n asyncio.run_coroutine_threadsafe(\n self.isy.programs.update(), self.isy.loop\n )\n elif cntrl == \"_3\": # Node Changed/Updated\n self.isy.nodes.node_changed_received(xmldoc)", "def handle_inbound_sms_call_me(to, from_):\n handle_call_me(to, from_)", "def on_t_message(self, message: IncomingMessage) -> None:\n with message.process():\n log.debug(f\"received [x] {message.routing_key}:{message.body}\")", "def msg(self):\n ended = time.time()\n started_wait = datetime.datetime.fromtimestamp(self.started).strftime(\n \"%Y-%m-%d %H:%M:%S\"\n )\n raised_date = datetime.datetime.fromtimestamp(ended).strftime(\n \"%Y-%m-%d %H:%M:%S\"\n )\n duration = ended - self.started\n return \"Info[started at {}, raised at {} after {}s]\".format(\n started_wait, raised_date, round(duration, 2)\n )", "def getSmsRecivedDate(self,timestamp):\n #convert millisec to sec\n timestamp = timestamp/1000\n date = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%b-%d %H:%M %p')\n return date", "def onMessageFrameData(self, payload):", "def slackMessage(binState):\n log = logging.getLogger('iob')\n\n if binState:\n location = \"Out\"\n else:\n location = \"In\"\n \n url = \"https://hooks.slack.com/services/{}\"\n \n payload = {\"text\": \"Bin is: {}\".format(location)}\n\n headers = {\"Content-Type\": \"application/json\"}\n\n response = requests.request(\n \"POST\",\n url,\n data=json.dumps(payload),\n headers=headers\n )\n\n log.debug(response.text)\n return", "async def log_time(self, event):\n sender = await event.get_sender()\n user = utils.get_display_name(sender)\n\n message = event.message\n\n time = 
message.date.astimezone(self.__to_zone).time().hour\n\n logging.debug(\"Got the following message: \\\"\" + event.raw_text + \"\\\" at time \" + str(time))\n\n self.__contact_times.labels(user).observe(time)", "def get_heartbeat_message(self):\n return self.messages[\"heartbeat\"].get()", "def ws_now():\n now = datetime.datetime.now()\n\n # business counter\n if int(now.timestamp()) % 2 == 0:\n PROM_METRICS['counter']['ws_srv_is_now_even'].labels(even='yes').inc()\n else:\n PROM_METRICS['counter']['ws_srv_is_now_even'].labels(even='no').inc()\n\n return make_response(jsonify({\n 'epoch': now.timestamp(),\n 'ctime': now.ctime(),\n 'date': str(now)\n }), 200)", "def on_bus_message(self, bus, message):\n pass", "def on_watch_message(self, bus, msg):\n msg_struct = msg.get_structure()\n if msg_struct:\n if msg_struct.get_name() == 'GstMessageTag':\n codec_name = ((msg_struct[\"taglist\"].nth_tag_name(0)))\n codec_value = msg_struct[\"taglist\"].get_string(codec_name)\n info_name = codec_name\n c_result, info_value = codec_value\n if c_result:\n self.info_handler(info_name, info_value)\n if codec_name == \"video-codec\":\n self.info_handler(codec_name, info_value)\n r_result, width, height = self.get_resolution()\n if r_result:\n info_name = \"resolution\"\n info_value = \"[{}x{}]\".format(width, height)\n self.info_handler(info_name, info_value)\n bus.remove_signal_watch()", "def callback_botmessage(self, message):\n pass", "def callback_botmessage(self, message):\n pass", "def callback_botmessage(self, message):\n pass", "def incomingMsg(self, ip):\n #print(\"[ACTM] Receiving data for IP\", ip)\n if ip in self.ip:\n idx = self.ip.index(ip)\n sID = idx + 1 #karena index mulai dari 0\n self.actChanged.emit(sID, True) #lalu notifikasi dashboard\n self.timer[idx].start() #lalu jalankan timernya", "def api_getmessage():\n message = receive_lyrics()\n if message != \"\":\n return jsonify(ready=True, msg=message)\n else:\n return jsonify(ready=False)", "def horde_message(self, message):", "def on_bot_message():\n handle_bot_message(request.get_json())\n return \"ok\"", "def message(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'message')\r\n return http.Request('GET', url), parsers.parse_json", "def receive_message():\n\n msg_data = flask.request.get_json(silent=True, force=True)\n\n name = msg_data['name']\n picture_url = msg_data['picture_url']\n text = msg_data['text']\n time_sent = msg_data['time_sent']\n settings_key = ndb.Key(settings.Settings, msg_data['settings'])\n\n logging.info(\"Recording entry {name: %s, text: %s, time_sent: %d}\",\n name, text, time_sent)\n\n new_sm = stored_message.StoredMessage(name=name,\n picture_url=picture_url,\n response_triggered=False,\n text=text,\n time_sent=datetime.fromtimestamp(\n time_sent),\n settings=settings_key)\n new_sm.put()\n\n return SUCCESS", "def execute(self):\n return LOGGER.info(f\"{datetime.datetime.now()} - Sending notification in Slack\")", "def process_refill_response(self, sender, message, response):\n\t\tnow = datetime.datetime.now()\n\t\tmessage.datetime_responded = now\n\t\tmessage.save()\n\n\t\t# Switch on type of response\n\t\tif self.is_yes(response):\n\t\t\t# TODO(mgaba): Implement questions about weekly, monthly prescriptions. 
What's the right day?\n\t\t\t# Send out a medication ack message\n\t\t\t# Update state\n\t\t\tfeedbacks = message.feedbacks.all()\n\t\t\tfor feedback in feedbacks:\n\t\t\t\tfeedback.completed = True\n\t\t\t\tfeedback.datetime_responded = now\n\t\t\t\tfeedback.save()\n\n\t\t\tnotifications = message.notifications.all()\n\t\t\tfor notification in notifications:\n\t\t\t\tnotification.active = False\n\t\t\t\tnotification.save()\n\n\t\t\t# Calculate the time of the next earliest notification to put in the message that gets sent back\n\t\t\tearliest_notification = None\n\t\t\tnow = datetime.datetime.now()\n\t\t\tfor feedback in feedbacks:\n\t\t\t\tfeedback.prescription.filled = True\n\t\t\t\tfeedback.prescription.save()\n\t\t\t\tmed_notifications = Notification.objects.filter(prescription=feedback.prescription, _type=Notification.MEDICATION)\n\t\t\t\tfor med_notification in med_notifications:\n\t\t\t\t\tif med_notification.send_datetime < now:\n\t\t\t\t\t\tmed_notification.update_to_next_send_time()\n\t\t\t\t\tif earliest_notification == None or earliest_notification.send_datetime > med_notification.send_datetime:\n\t\t\t\t\t\tearliest_notification = med_notification\n\n\t\t\t# Convert the time of the next earliest notification to a string for the template\n\t\t\thour = earliest_notification.send_datetime.hour\n\t\t\tminute = earliest_notification.send_datetime.minute\n\t\t\tif hour == 0:\n\t\t\t\thour = 12\n\t\t\t\tampm = 'am'\n\t\t\telif hour == 12:\n\t\t\t\thour = 12\n\t\t\t\tampm = 'pm'\n\t\t\telif hour > 12:\n\t\t\t\thour = hour - 12\n\t\t\t\tampm = 'pm'\n\t\t\telse:\n\t\t\t\tampm = 'am'\n\t\t\tif earliest_notification.send_datetime.date() == now.date():\n\t\t\t\tday = \"today\"\n\t\t\telif earliest_notification.send_datetime.date() == now.date() + datetime.timedelta(days=1):\n\t\t\t\tday = \"tomorrow\"\n\t\t\telif earliest_notification.send_datetime.date() < now.date() + datetime.timedelta(days=7):\n\t\t\t\tweekdays = {'0':'Monday',\n\t\t\t\t '1':'Tuesday',\n\t\t\t\t '2':'Wednesday',\n\t\t\t\t '3':'Thursday',\n\t\t\t\t '4':'Friday',\n\t\t\t\t '5':'Saturday',\n\t\t\t\t '6':'Sunday'}\n\t\t\t\tday = \"on \" + weekdays[str(earliest_notification.send_datetime.weekday())]\n\n\t\t\t# Create new message\n\t\t\tcontext = {'hour':hour,\n\t\t\t 'minute':minute,\n\t\t\t 'ampm':ampm,\n\t\t\t 'day':day}\n\t\t\ttemplate = 'messages/refill_ack_message.txt'\n\t\t\tcontent = render_to_string(template, context)\n\t\t\tMessage.objects.create(to=sender, _type=Message.STATIC_ONE_OFF, previous_message=message, content=content)\n\t\t\treturn HttpResponse(content=content, content_type='text/plain')\n\n\t\telif self.is_no(response):\n\t\t\t# Send out a medication questionnaire message\n\t\t\t# Update state\n\t\t\tfeedbacks = message.feedbacks.all()\n\t\t\tfor feedback in feedbacks:\n\t\t\t\tfeedback.completed = False\n\t\t\t\tfeedback.datetime_responded = now\n\t\t\t\tfeedback.save()\n\n\t\t\t# Create a questionnaire message\n\t\t\ttemplate = 'messages/refill_questionnaire_message.txt'\n\t\t\tcontext = {'response_dict': iter(sorted(Message.REFILL_QUESTIONNAIRE_RESPONSE_DICTIONARY.items()))}\n\t\t\tcontent = render_to_string(template, context)\n\n\t\t\t# Create new message\n\t\t\tnew_m = Message.objects.create(to=sender, _type=Message.REFILL_QUESTIONNAIRE, previous_message=message,\n\t\t\t content=content)\n\t\t\tfor feedback in feedbacks:\n\t\t\t\tnew_m.feedbacks.add(feedback)\n\t\t\treturn HttpResponse(content=content, content_type='text/plain')\n\n\t\telif self.is_med_info(response):\n\t\t\t# Send out a med info 
message\n\t\t\t# TODO:Implement med info for real\n\t\t\tmessage.datetime_responded = None\n\t\t\tmessage.save()\n\t\t\tcontent = \"Medication information is a work in progress.\\n\\n\"+\\\n\t\t\t\t\t \"Did you pick up your meds?\\n\"+\\\n\t\t\t\t\t \"y - yes\\n\"+\\\n\t\t\t\t\t \"n - no\"\n\t\t\treturn HttpResponse(content=content, content_type='text/plain')\n\t\t\tpass\n\t\t# Unknown response\n\t\telse:\n\t\t\tmessage.datetime_responded = None\n\t\t\tmessage.save()\n\t\t\ttemplate = 'messages/unknown_response.txt'\n\t\t\tcontent = render_to_string(template)\n\t\t\tnew_m = Message.objects.create(to=sender, _type=Message.STATIC_ONE_OFF, content=content)\n\t\t\treturn HttpResponse(content=content, content_type='text/plain')\n\t\traise Exception(\"Not yet implemented\")", "def heartbeat():\n return jsonify(int(time.time()))", "def test_handle_weather_message_calls_current(self):\n pass", "def on_message(client, userdata, message): \n print(\"Topic: \" + message.topic + \" Message: \" + message.payload.decode('utf-8'))", "def message(**payload):\n web_client = payload[\"web_client\"]\n\n # Getting information from the response\n data = payload[\"data\"]\n channel_id = data.get(\"channel\")\n text = data.get(\"text\")\n subtype = data.get(\"subtype\")\n ts = data['ts']\n user = data.get('username') if not data.get('user') else data.get('user')\n # Creating a Converstion object\n message = Message(ts, user, text)\n\n # Appending the converstion attributes to the logs\n conversation.append(message.toDict())\n\n if subtype == 'bot_message': return\n\n do_respond(web_client, channel_id, text)", "def on_message(client, userdata, message):\n print(f'{message.topic} {message.payload.decode(\"utf-8\")}') # Print message topic and payload" ]
[ "0.6424744", "0.63330656", "0.6156469", "0.5986524", "0.5985888", "0.5976923", "0.59701186", "0.59519804", "0.58698654", "0.58497447", "0.5847021", "0.5816966", "0.57789177", "0.5738842", "0.5738044", "0.5738044", "0.5737132", "0.57114446", "0.5695387", "0.5639442", "0.563712", "0.5631486", "0.56197506", "0.5608703", "0.5602231", "0.55596375", "0.5557861", "0.55521065", "0.55473703", "0.5540704", "0.5526712", "0.5511139", "0.54938525", "0.54926205", "0.54864335", "0.54782236", "0.547373", "0.54736525", "0.5463332", "0.54536813", "0.54520106", "0.5451536", "0.54512453", "0.54273796", "0.5425959", "0.542107", "0.5418249", "0.5415036", "0.5410926", "0.5407889", "0.53937185", "0.5389983", "0.53807616", "0.5377782", "0.5368608", "0.5343849", "0.5335818", "0.53197443", "0.53166676", "0.53073674", "0.5305309", "0.5297348", "0.5296493", "0.52817374", "0.52773505", "0.5271347", "0.5265534", "0.52654046", "0.5254998", "0.5251395", "0.5243953", "0.52401704", "0.52383333", "0.52377254", "0.52360624", "0.5233784", "0.5232862", "0.523265", "0.5228656", "0.52234614", "0.5222453", "0.52125084", "0.52072495", "0.52029085", "0.5199655", "0.5199655", "0.5199655", "0.51979244", "0.51916724", "0.5190331", "0.51853615", "0.5182735", "0.5180239", "0.51800394", "0.5173609", "0.5172954", "0.5172515", "0.51725036", "0.5169135", "0.51658535" ]
0.5533975
30
Takes information from a Bandwidth inbound message callback and initiates a call
def handle_inbound_sms_call_me(to, from_): handle_call_me(to, from_)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def on_call(message, client):\n pass", "def _call(self, msg, cb, *args):\r\n if not self._status:\r\n raise InterfaceDisabledError('A disabled interface should not be '\r\n 'called.')\r\n\r\n if not callable(cb):\r\n raise TypeError('Callback has to be callable.')\r\n\r\n uid = uuid4().hex\r\n deferred = Deferred()\r\n deferred.addCallback(cb, *args)\r\n self._responses[uid] = deferred\r\n\r\n self._conn.sendMessage(self._iTag, self._clsName, msg, uid)", "def ProcessCallback(self, interface, info):\n pass", "def ProcessCallback(self, interface, info):\n pass", "def whenReadReady(self, channel, call):", "def subscribe(receiver):", "def subscribe(receiver):", "def subscribe(receiver):", "def onMessageBegin(self, isBinary):", "def call(self, procedure: str, *args: aiowamp.WAMPType,\n kwargs: aiowamp.WAMPDict = None,\n receive_progress: bool = None,\n call_timeout: float = None,\n cancel_mode: aiowamp.CancelMode = None,\n disclose_me: bool = None,\n resource_key: str = None,\n options: aiowamp.WAMPDict = None) -> aiowamp.CallABC:\n ...", "def call(self, msg, cb=None):\r\n self._call(msg, cb or self._cb)", "def on_incoming_call(self, call):\n\n try:\n current_time = time.time()\n remote_uri = hash_remote_uri(self.cfg, call.info().remote_uri)\n\n if not self.cfg['VoipIO']['reject_calls']:\n if self.voipio.black_list[get_user_from_uri(remote_uri)] < current_time:\n # answer the call\n self.voipio.call = call\n self.voipio.on_incoming_call(remote_uri)\n\n if self.cfg['VoipIO']['debug']:\n self.cfg['Logging']['system_logger'].debug(\"AccountCallback::on_incoming_call - Incoming call from %s\" % remote_uri)\n\n call_cb = CallCallback(self.cfg, call, self.voipio)\n call.set_callback(call_cb)\n\n call.answer()\n else:\n # rejected the call since the caller is blacklisted\n if self.cfg['VoipIO']['debug']:\n self.cfg['Logging']['system_logger'].debug(\"AccountCallback::on_incoming_call - Rejected call from blacklisted remote URI %s \" % remote_uri)\n wait_hours = (self.voipio.black_list[get_user_from_uri(remote_uri)] - current_time) / (60 * 60)\n self.cfg['Logging']['system_logger'].debug(\"AccountCallback::on_incoming_call - Must wait for %d hours\" % wait_hours)\n # respond by \"Busy here\"\n call.answer(486)\n\n self.voipio.on_rejected_call_from_blacklisted_uri(remote_uri)\n else:\n # reject the call since all calls must be rejected\n if self.cfg['VoipIO']['debug']:\n self.cfg['Logging']['system_logger'].debug(\"AccountCallback::on_incoming_call - Rejected call from %s\" % remote_uri)\n\n # respond by \"Busy here\"\n call.answer(486)\n # respond by \"Decline\"\n #call.answer(603)\n\n self.voipio.on_rejected_call(remote_uri)\n except:\n self.voipio.close_event.set()\n self.cfg['Logging']['system_logger'].exception('Uncaught exception in the AccountCallback class.')\n raise", "def on_call_update(self, event):\n # if plivo_app != 'true', check b leg Dial callback\n plivo_app_flag = event['variable_plivo_app'] == 'true'\n if not plivo_app_flag:\n # request Dial callbackUrl if needed\n aleg_uuid = event['Bridged-To']\n if not aleg_uuid:\n return\n bleg_uuid = event['Unique-ID']\n if not bleg_uuid:\n return\n disposition = event['variable_endpoint_disposition']\n if disposition != 'ANSWER':\n return\n ck_url = event['variable_plivo_dial_callback_url']\n if not ck_url:\n return\n ck_method = event['variable_plivo_dial_callback_method']\n if not ck_method:\n return\n params = {'DialBLegUUID': bleg_uuid,\n 'DialALegUUID': aleg_uuid,\n 'DialBLegStatus': 'answer',\n 'CallUUID': aleg_uuid\n }\n # add 
extra params\n extra_params = self.get_extra_fs_vars(event)\n if extra_params:\n params.update(extra_params)\n spawn_raw(self.send_to_url, ck_url, params, ck_method)\n return", "async def asterisk_init(request):\n\n try:\n phone = request.rel_url.query[\"phone\"]\n except KeyError:\n phone = None\n LOGGER.error(f\"No 'phone' parameter passed on: '{request.rel_url}'\")\n raise web.HTTPClientError(\n reason=ASTERISK_CALL_ERROR, body=None, text=None, content_type=None\n )\n try:\n message = request.rel_url.query[\"message\"]\n except KeyError:\n message = None\n LOGGER.error(f\"No 'message' parameter passed on: '{request.rel_url}'\")\n raise web.HTTPClientError(\n reason=ASTERISK_CALL_ERROR, body=None, text=None, content_type=None\n )\n\n # Prepare the URL to 'call' the Asterisk ARI\n asterisk_query_string = (\n f\"endpoint={ASTERISK_CHAN_TYPE}/{phone}&extension={ASTERISK_EXTENSION}\"\n + f\"&context={ASTERISK_CONTEXT}&callerId={ASTERISK_CALLERID}\"\n )\n asterisk_call_init = (\n f\"{ASTERISK_URL}/{ASTERISK_ARI_CHANNELS}?{asterisk_query_string}\"\n )\n # Place a call on the Asterisk system using HTTP Basic Auth on the PBX\n headers = await gen_headers(await gen_auth_string())\n\n try:\n session = ClientSession(timeout=CLIENT_TIMEOUT_TOTAL)\n call_resp = await session.post(\n url=asterisk_call_init, data=None, headers=headers\n )\n await session.close()\n if call_resp.status == 200:\n response_data = await call_resp.json()\n asterisk_chan = response_data[\"id\"]\n session = ClientSession(timeout=CLIENT_TIMEOUT_TOTAL)\n await session.post(\n url=CALL_REGISTER_URL\n + f\"/{CALL_REGISTER_APP_ROUTE_REGISTER_CALL}\"\n + f\"?phone={phone}&message={message}&asterisk_chan={asterisk_chan}\",\n data=None,\n headers=headers,\n )\n await session.close()\n else:\n LOGGER.error(\n f\"Asterisk server '{ASTERISK_URL}' response: {call_resp.status}. Unable to initialize the call.\"\n )\n\n except client_exceptions.ClientConnectorError as e:\n LOGGER.error(f\"Unable to connect to the Asterisk system: '{e}'\")\n raise web.HTTPClientError(\n reason=str(e), body=None, text=None, content_type=None\n )\n\n return web.json_response({\"status\": call_resp.status})", "def onMessage(self, payload, isBinary):", "def main(msg: func.ServiceBusMessage):\r\n\r\n # Extract the method into a dictionary\r\n msg_dict = json.loads(msg.get_body().decode(\"utf-8\"))\r\n\r\n logging.info(f\"Python ServiceBus queue trigger processed message: {msg_dict}\")\r\n\r\n # Enable a connection with the IoT Hub. 
The connectionstring for the IoT Hub\r\n # is preloaded in the Azure Functions configurations.\r\n connectino_string_iothub = os.getenv(\"connectionStringIotHub\")\r\n registry_manager = IoTHubRegistryManager(connectino_string_iothub)\r\n\r\n # Settings for the method that the IoT Device should run upon receiving the message.\r\n callback_method = \"start_fan\"\r\n callback_payload = {}\r\n device_method = CloudToDeviceMethod(\r\n method_name=callback_method, payload=callback_payload\r\n )\r\n\r\n # Sending the actual cloud-to-device message and invoke a function on the IoT device.\r\n device_id = msg_dict[\"IoTHub\"][\"ConnectionDeviceId\"]\r\n response = registry_manager.invoke_device_method(device_id, device_method)\r\n\r\n print(\"\")\r\n print(\"Device Method called\")\r\n print(\"Device Method name : {0}\".format(callback_method))\r\n print(\"Device Method payload : {0}\".format(callback_payload))\r\n print(\"\")\r\n print(\"Response status : {0}\".format(response.status))\r\n print(\"Response payload : {0}\".format(response.payload))", "def subscribe(receiver, catchup):", "def test_incoming_k(self):\n m_interface = Mock()\n m_interface.callback.return_value = True\n m_interface.read.return_value = ''\n upb = UPB(m_interface)\n upb.onCommand(address=(22,255), callback=m_interface.callback)\n m_interface.read.return_value = \"PU07141610FF3090\\x0DPU07151610FF308F\\x0D\"\n# time.sleep(4000)\n time.sleep(2)\n m_interface.callback.assert_called_with(address=(22,255), command='status', source=upb)\n m_interface.read.return_value = ''", "def call(self, callee: \"SIPPhoneTemplate\") -> None:", "def _initiate(self, call):\n if not self.gsm_call:\n raise Exception(\"No connectivity\")\n number = str(call.number)\n logger.info(\"initiate call to %s\", number)\n call_id = yield WaitDBus(self.gsm_call.Initiate, number, \"voice\")\n call_id = int(call_id)\n logger.info(\"call id : %d\", call_id)\n self.lines[call_id] = call\n # TODO: mabe not good idea to store this in the call itself,\n # beside, it makes pylint upset.\n call.__id = call_id", "def callback_message(self, message):\n pass", "def callback_message(self, message):\n pass", "def callback(ch, method, properties, body):\r\n body = json.loads(body)\r\n print(f\"[x] Task in the queue {body}\")\r\n # Creating instance of AudioRecorder\r\n recorder = AudioRecorder(body)\r\n driver = recorder.prepare_browser(body['settings'])\r\n recorder.run(driver, body)", "def incoming(self,message):\n #Convert to Dictionary, Whatever the input is\n if isinstance(message, str):\n message = json.loads(message)\n elif isinstance(message, bytes):\n message = self.deserialize(message)\n\n op = message.get(\"op\")\n if op == \"publish\":\n message[\"msg\"] = self.decompress(message[\"topic\"],message.get(\"msg\"))\n message[\"topic\"] = self.remap_topic(message[\"topic\"]) \n elif op == \"advertise\":\n message[\"topic\"] = self.remap_topic(message[\"topic\"])\n elif op == \"advertise_service\" or op == \"service_response\":\n message[\"service\"] = self.remap_service(message[\"service\"])\n\n\n message = json.dumps(message)\n #--------\n #replace JSON Null values in float32 types with infinity datatype (changed according to the error for LaserScan values)\n message = message.replace(\"null\", \"Infinity\")\n #--------\n self._protocol.incoming(message)", "def call(self, addr, *args, **kwargs):\n prepare_cb = kwargs.pop('prepare_cb', self.jitter.func_prepare_stdcall)\n super(self.__class__, self).call(prepare_cb, addr, *args)", "def call(self, addr, *args, 
**kwargs):\n prepare_cb = kwargs.pop('prepare_cb', self.jitter.func_prepare_stdcall)\n super(self.__class__, self).call(prepare_cb, addr, *args)", "def polling_call(self) -> global___Snippet.ClientCall:", "def place_call(self, number):\n call_params = urllib.urlencode({\n 'outgoingNumber' : number,\n 'forwardingNumber' : self.forwarding_number,\n 'subscriberNumber' : 'undefined',\n 'remember' : '0',\n 'phoneType' : self.phone_type,\n '_rnr_se': self.key\n })\n\n # Send the text, display status message \n self.response = self.opener.open(self.call_url, call_params).read()", "def ceilometer_callback(self, ch, method, properties, body):\n payload = json.loads(body)\n try:\n message_body = json.loads(payload['oslo.message'])\n samples = message_body['args']['data']\n #print \"--------------------------------------------------\"\n self.pool.spawn_n(self.zabbix_sender.consume_samples,samples)\n except Exception,e:\n log.warn(str(e))", "def handle_call(self):\n call_socket, address = self.call_socket.accept()\n print(\"connected call socket: {}\".format(call_socket))\n # gets name of user making the call:\n caller_name = self.receive_mes(call_socket)\n # gets from calling client user they want to call:\n receiver_name = self.receive_mes(call_socket)\n # gets receivers socket from dictionary\n if receiver_name not in self.client_dict:\n print(\"boi bye\")\n sys.exit(EXIT)\n receiver_sock = self.client_dict[receiver_name]\n mes = \"{} is calling you\".format(caller_name)\n self.send_mes(mes.encode(), receiver_sock)\n answer = self.receive_mes(receiver_sock)\n print(\"answer from {}: {}\".format(receiver_name, answer))\n if answer == \"Y\":\n self.send_mes(\"call\".encode(), call_socket)\n self.start_call()\n else:\n self.send_mes(\"no call\".encode(), call_socket)", "def on_incoming_call(self, remote_uri):\n if self.cfg['VoipIO']['debug']:\n self.cfg['Logging']['system_logger'].debug(\"VoipIO::on_incoming_call - from %s\" % remote_uri)\n\n # send a message that there is a new incoming call\n self.commands.send(Command('incoming_call(remote_uri=\"%s\")' % get_user_from_uri(remote_uri), 'VoipIO', 'HUB'))", "def incoming(self, msg):\n hdr = msg.header\n\n # Signals:\n if hdr.message_type is MessageType.signal:\n key = (hdr.fields.get(HeaderFields.path, None),\n hdr.fields.get(HeaderFields.interface, None),\n hdr.fields.get(HeaderFields.member, None)\n )\n cb = self.signal_callbacks.get(key, None)\n if cb is not None:\n cb(msg.body)\n return\n\n # Method returns & errors\n reply_serial = hdr.fields.get(HeaderFields.reply_serial, -1)\n reply_handle = self.awaiting_reply.pop(reply_serial, None)\n if reply_handle is not None:\n if hdr.message_type is MessageType.method_return:\n reply_handle.set_result(msg.body)\n return\n elif hdr.message_type is MessageType.error:\n reply_handle.set_exception(DBusErrorResponse(msg))\n return\n\n if self.on_unhandled:\n self.on_unhandled(msg)", "def callback(self, data):\n\n self.connection = pika.BlockingConnection(self.params)\n self.channel = self.connection.channel()\n\n # The fanout exchange broadcasts all the messages it receives to all the queues it knows.\n # That is what we need for our logger.\n # Tony changed to 'topic' to work with Kuilin's group\n self.channel.exchange_declare(exchange=self.logName,\n exchange_type='topic',\n auto_delete=True)\n\n #TONY WAS HERE\n #CONVERT THE DATA BEFORE SENDING\n #this extracts the data to a tuple\n data_tuple = struct.unpack(\"<hddhdddddddddddd\", data)\n #convert tuple to string and remove the parentheses on the ends\n 
data_to_send = str(data_tuple).strip(\"()\")\n\n # Publish the data to the exchange\n self.channel.basic_publish(exchange=self.logName,\n routing_key=self.RoutingKey,\n body=data_to_send) #used to be body=data (from Pilot)\n\n #tony was here\n #print(\"Sending: %r via %r and %r\" % (data,self.logName,self.RoutingKey))\n\n self.connection.close()", "def callback_connect(self):\n pass", "def callback_connect(self):\n pass", "def callback_connect(self):\n pass", "def Start(self):\n self.CallClient(standard.ReadBuffer, next_state=\"WrongProcess\")", "def stream_call(self):\n pass", "def callback(ch, method, properties, body):\n requestParams = json.loads(body.decode('utf-8'))\n # print(\"inside the callback\")\n arg1 = int(requestParams[0])\n arg2 = int(requestParams[1])\n result = whaleClassifier.test(arg1, arg2)\n # what this does it publish the RESULT to the exchange (as producers of content \n # cannot send stuff directly to queues, they send to exchanges and then exchanges \n # send to queues. Note Exchange='' is default exchange which then sends to the\n # queue that is listed on the ROUTING_KEY argument.)\n ch.basic_publish(exchange='', \n routing_key=results_queue, \n body=json.dumps(result),\n properties=pika.BasicProperties(\n delivery_mode = 2, # make message persistent\n ))\n # ch.basic_ack(delivery_tag=method.delivery_tag) #need this line so that we don't resend this same message again the next time\n # we start up this script. Which eventually clogs up memory", "def handle(self, message):\n for callback in self.callbacks:\n callback(message['data'])", "def run(config, logging, inq, subscribe_callback, unsubscribe_callback):", "def _invoke_callback(self, path: str, message: Message) -> None:\n if self._callback is None:\n return\n\n props = self._devices[path]\n\n # Get all the information wanted to pack in the advertisement data\n _local_name = props.get(\"Name\")\n _manufacturer_data = {\n k: bytes(v) for k, v in props.get(\"ManufacturerData\", {}).items()\n }\n _service_data = {k: bytes(v) for k, v in props.get(\"ServiceData\", {}).items()}\n _service_uuids = props.get(\"UUIDs\", [])\n\n # Pack the advertisement data\n advertisement_data = AdvertisementData(\n local_name=_local_name,\n manufacturer_data=_manufacturer_data,\n service_data=_service_data,\n service_uuids=_service_uuids,\n platform_data=(props, message),\n )\n\n device = BLEDevice(\n props[\"Address\"],\n props[\"Alias\"],\n {\"path\": path, \"props\": props},\n props.get(\"RSSI\", 0),\n uuids=_service_uuids,\n manufacturer_data=_manufacturer_data,\n )\n\n self._callback(device, advertisement_data)", "def handle_inbound_message():\n data = json.loads(request.data)\n\n if data[0][\"type\"] == \"message-received\":\n if \"call me\" in data[0][\"message\"][\"text\"]:\n handle_inbound_sms_call_me(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"])\n elif \"media\" in data[0][\"message\"]:\n handle_inbound_media_mms(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"], data[0][\"message\"][\"media\"])\n else:\n handle_inbound_sms(data[0][\"message\"][\"to\"][0], data[0][\"message\"][\"from\"])\n else:\n print(data)\n return \"\"", "def __call__(self, waveforms, telid, selected_gain_channel):", "def invoke(self, msg, req):\n if msg.name == 'forward':\n init = Initializer.create_init()\n try:\n init.timer()\n return\n except Exception, e:\n print 'Error', e.message\n else:\n raise schema.AvroException('unexpected message:', msg.getname())", "def run(self):\n\n def callback(ch, method, properties, 
body):\n json_body = json.loads(body)\n self.buffer.append(Fvalue.fromdict(json_body))\n\n sleep(5) # We introduce a slight delay to let the RabbitMQ container to accept connections\n connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.mq_host,port=self.mq_port))\n channel = connection.channel()\n channel.exchange_declare(exchange=self.mq_host + '_exchange', exchange_type='direct')\n result = channel.queue_declare(exclusive=True)\n queue_name = result.method.queue\n channel.queue_bind(exchange=self.mq_host + '_exchange',\n queue=queue_name,\n routing_key=self.routing_key)\n channel.basic_consume(callback,queue=queue_name,no_ack=True)\n channel.start_consuming()", "def __call__(self, args, kwargs):\n callback = self._callback_ref()\n if callback is not None:\n callback(*args, **kwargs)", "def on_channel_bridge(self, event):\n # if plivo_app != 'true', check b leg Dial callback\n # request Dial callbackUrl if needed\n aleg_uuid = event['Bridge-A-Unique-ID']\n if not aleg_uuid:\n return\n bleg_uuid = event['Bridge-B-Unique-ID']\n if not bleg_uuid:\n return\n disposition = event['variable_endpoint_disposition']\n if disposition != 'ANSWER':\n return\n app_vars = event['variable_current_application_data']\n if not 'plivo_dial_callback_url' in app_vars:\n return\n ck_url = app_vars.split('plivo_dial_callback_url=')[1].split(',')[0]\n if not 'plivo_dial_callback_method' in app_vars:\n return\n ck_method = app_vars.split('plivo_dial_callback_method=')[1].split(',')[0]\n params = {'DialBLegUUID': bleg_uuid,\n 'DialALegUUID': aleg_uuid,\n 'DialBLegStatus': 'answer',\n 'CallUUID': aleg_uuid\n }\n spawn_raw(self.send_to_url, ck_url, params, ck_method)\n return", "def message_callback(self, message):\n pass", "def test_dispatch_inbound(self):\n msg_helper = MessageHelper()\n worker_helper = WorkerHelper()\n broker = self.setup_broker(worker_helper)\n self.assertEqual(broker.get_messages('vumi', 'fooconn.inbound'), [])\n msg = msg_helper.make_inbound('message')\n yield worker_helper.dispatch_inbound(msg, 'fooconn')\n self.assertEqual(broker.get_messages('vumi', 'fooconn.inbound'), [msg])", "def call(self) -> global___Snippet.ClientCall:", "def call(self) -> global___Snippet.ClientCall:", "def callback(self, message):\n kwargs = self.callback_kwargs\n\n if kwargs['return_msg']:\n self._callback_return_full_message(message)\n return\n\n # unpack\n try:\n alert_dict, metadata_dict = self._unpack(message)\n except Exception as e:\n self._log_and_print(f\"Error unpacking message: {e}\", severity=\"DEBUG\")\n message.nack() # nack so message does not leave subscription\n return\n\n # run user filter\n if kwargs[\"user_filter\"] is not None:\n try:\n alert_dict = kwargs[\"user_filter\"](alert_dict, **kwargs)\n except Exception as e:\n self._log_and_print(f\"Error running user_filter: {e}\", severity=\"DEBUG\")\n message.nack()\n return\n\n # run user callback\n if kwargs[\"user_callback\"] is not None:\n # get args for user_callback\n args = [] # requires args are ordered properly here & in user_callback\n if kwargs.get(\"send_alert_bytes\", False):\n args.append(message.data)\n if kwargs.get(\"send_metadata\", False):\n args.append(metadata_dict)\n try:\n # execute user callback\n success = kwargs[\"user_callback\"](alert_dict, *args, **kwargs) # bool\n\n except Exception as e:\n success = False\n msg = f\"Error running user_callback: {e}\"\n else:\n if not success:\n msg = \"user_callback reported it was unsuccessful.\"\n finally:\n if not success:\n self._log_and_print(msg, 
severity=\"DEBUG\")\n message.nack()\n return\n\n if alert_dict is not None:\n # save so stream_alerts can return it, in case the user wants it (broker)\n self.save_alert(alert_dict)\n\n # communicate with the main thread\n self.queue.put(1) # 1 alert successfully processed\n # block until main thread acknowledges so we don't ack msgs that get lost\n if kwargs['max_results'] is not None:\n self.queue.join() # single background thread => one-in-one-out\n\n else:\n self._log_and_print(\"alert_dict is None\")\n\n message.ack()", "def make_phone_call(self):\n client = Client(account_sid, auth_token)\n\n call = client.calls.create(\n url='http://demo.twilio.com/docs/classic.mp3',\n to=self.emergency_number,\n from_='+16505499680'\n )\n\n print(call.sid)", "def pollster_callback(self, _active_socket, readable, writable):\n \n # assume we are readable, because we are only registered for read\n assert readable\n message = self._receive_message() \n\n # if we get None, that means the socket would have blocked\n # go back and wait for more\n if message is None:\n return\n\n # we handle our own message traffic (i.e. resilient client handshakes\n # and signoffs).\n # otherwise, feed message into the receive queue to be handled\n # elsewhere\n if message.control[\"message-type\"] in self._dispatch_table:\n self._dispatch_table[message.control[\"message-type\"]](\n message.control, message.body\n )\n self._send_ack(\n message.control[\"message-type\"],\n message.ident, \n message.control[\"message-id\"])\n elif not \"client-tag\" in message.control:\n self._log.error(\"receive: invalid message '%s'\" % (\n message.control, \n ))\n else:\n if message.control[\"client-tag\"] in self._active_clients:\n self._receive_queue.append((message.control, message.body, ))\n self._send_ack(\n message.control[\"message-type\"],\n message.ident, \n message.control[\"message-id\"]\n )\n else:\n self._log.error(\n \"receive: No active client %s message discarded\" % (\n message.control[\"client-tag\"]\n )\n )", "def on_bus_message(self, bus, message):\n pass", "async def the_callback(data):\n print(f\"analog callback data: {data[1]} \", end='\\r')", "async def run(self):\n\n self.connection = await aio_pika.connect(self.mq_connection_str, loop=asyncio.get_event_loop())\n self.channel = await self.connection.channel()\n\n # connect to exchanger market data\n # market data send with routing key format: message_type.data_type.exchange.pair[.time_frame]\n # message_type == update | starting, data_type == ticker | candles | depth,\n # exchange, pair, time_frame - sending by listing_info\n binding_mask = '*.*.*.#'\n topic_logs_exchange = await self.channel.declare_exchange(self.exchanger, aio_pika.ExchangeType.TOPIC)\n queue_topic = await self.channel.declare_queue('', auto_delete=True)\n await queue_topic.bind(topic_logs_exchange, routing_key=binding_mask)\n\n # listener queue for listing information\n queue_for_listing = await self.channel.declare_queue('', auto_delete=True)\n await queue_for_listing.bind(topic_logs_exchange, routing_key=self.name_queue_for_listing)\n\n # listener queue for error\n queue_for_error = await self.channel.declare_queue('', auto_delete=True)\n await queue_for_error.bind(topic_logs_exchange, routing_key=self.name_queue_for_error)\n\n def callback_crypto_currency_market_data(message):\n \"\"\"Callback for consume market data\"\"\"\n body = json.loads(message.body.decode('utf-8'))\n \n # routing_key have view: message_type.data_type.exchange.pair[.time_frame]\n # message_type == update | starting, 
data_type == ticker | candles | depth,\n # exchange, pair, time_frame - sending by listing_info\n # mask: *.*.*.#\n message_type = message.routing_key.split('.')[0]\n data_id = '.'.join(message.routing_key.split('.')[1:])\n\n if message_type == 'update':\n for observer in self.subscribers.get(data_id):\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=message.routing_key,\n data=body\n )\n ))\n elif message_type == 'starting':\n # if exist waiters, send data and move waiters in subscribers\n if not self.waiters_first_msg.get(data_id):\n return\n\n new_subscribers = []\n while self.waiters_first_msg[data_id]:\n observer = self.waiters_first_msg[data_id].pop()\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=message.routing_key,\n data=body\n )\n ))\n new_subscribers.append(observer)\n\n # if not subscribers on this data_id, init new dict-value, else append to exist array\n subscribers = self.subscribers.get(data_id, None)\n if not subscribers and new_subscribers:\n self.subscribers[data_id] = new_subscribers\n asyncio.get_event_loop().create_task(self._send_message_for_subscribe(data_id))\n else:\n for new_subscriber in new_subscribers:\n if new_subscriber not in self.subscribers[data_id]:\n self.subscribers[data_id].append(new_subscriber)\n\n def callback_crypto_currency_listing(message):\n \"\"\"Callback for consume information about access pairs, exchanges and timeframes\"\"\"\n body = json.loads(message.body.decode('utf-8'))\n data_id = TYPE_LISTING\n\n if not self.waiters_first_msg.get(data_id):\n return\n\n while self.waiters_first_msg[data_id]:\n observer = self.waiters_first_msg[data_id].pop()\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=data_id,\n data=body\n )\n ))\n\n def callback_crypto_currency_error(message):\n \"\"\"Callback for consume error queue\"\"\"\n logger.error(message.body.decode('utf-8'))\n\n body = json.loads(message.body.decode('utf-8'))\n\n # validation\n error_place = body.get('error_place')\n message = 'Sorry! Error on server'\n if not message or not error_place:\n return\n\n # send information to ws, that wait or subscribe on error_place\n waiters = self.waiters_first_msg.get(error_place, ())\n for observer in waiters:\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=error_place,\n error=message\n )\n ))\n\n subscribers = self.subscribers.get(error_place, ())\n for observer in subscribers:\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=error_place,\n data=message\n )\n ))\n\n await queue_topic.consume(callback_crypto_currency_market_data)\n await queue_for_listing.consume(callback_crypto_currency_listing)\n await queue_for_error.consume(callback_crypto_currency_error)", "def call(self, body):\n try:\n json_data = json.loads(body.decode('utf-8'))\n except (json.decoder.JSONDecodeError, UnicodeDecodeError) as err:\n raise Exception(\"Unable to deserialize message body (%s), \"\n \"rejecting: %r\" % (err, body))\n try:\n meta_type = json_data.get(\"meta\", {}).get(\"type\")\n event = getattr(eiffellib.events, meta_type)(json_data.get(\"meta\", {}).get(\"version\"))\n except (AttributeError, TypeError) as err:\n raise Exception(\"Malformed message. 
Rejecting: %r\" % json_data)\n try:\n event.rebuild(json_data)\n except Exception as err:\n raise Exception(\"Unable to deserialize message (%s): %r\" % (err, json_data))\n try:\n ack = self._call_subscribers(meta_type, event)\n self._call_followers(event)\n except: # noqa, pylint:disable=bare-except\n _LOG.error(\"Caught exception while processing subscriber \"\n \"callbacks, some callbacks may not have been called: %s\",\n traceback.format_exc())\n ack = False\n return ack, True # Requeue only if ack is False.", "def call(self, message: Message) -> None:\n self.fn(message)", "def _incoming_read(self, client, data, error):\n\n if error is not None:\n client.close()\n del self._incoming[client]\n return\n\n incoming = self._incoming[client]\n incoming.unpacker.feed(data)\n for req_id, message in incoming.unpacker:\n self._call_handler(\n partial(self._queue_response,\n client, req_id),\n self._call_interface.queue_call,\n message,\n )", "def callevent_handler(data):\n return CallEventHandler(data)", "def __init__(self, client):\n self.client = client\n self.call_params = {\n }", "def __call__(self, ia):\n ia.handler = True\n ia.inbound_handler = True\n\n # Circuits properties\n ia.names = self.names\n ia.priority = self.kwargs.get(\"priority\", 0)\n ia.channel = \"{0}.{1}\".format(constants.INBOUND_MSG_DEST_PREFIX, self.names[0])\n ia.override = self.kwargs.get(\"override\", False)\n ia.event = True\n\n @wraps(ia)\n def inbound_app_decorator(itself, event, *args, **kwargs):\n \"\"\"\n The decorated method\n\n :param itself: The method to decorate\n :type itself: resilient_circuits.ResilientComponent\n :param event: The Event with the StompFrame and the Message read off the Message Destination\n :type event: resilient_circuits.action_message.InboundMessage\n \"\"\"\n\n def _invoke_inbound_app(evt, **kwds):\n \"\"\"\n The code to call when a method with the decorator `@inbound_app(<inbound_destination_api_name>)`\n is invoked.\n\n Returns result_list when method with the decorator `@inbound_app(<inbound_destination_api_name>)` is\n finished processing.\n\n A method that has this handler should yield a str when done\n - E.g:\n `yield \"Processing Complete!\"`\n\n :param evt: The Event with the StompFrame and the Message read off the Message Destination\n :type ia: resilient_circuits.action_message.FunctionMessage\n \"\"\"\n result_list = []\n LOG.debug(\"Running _invoke_inbound_app in Thread: %s\", threading.currentThread().name)\n\n # Invoke the actual Function\n ia_results = ia(itself, evt.message, evt.message.get(\"action\", \"Unknown\"))\n\n for r in ia_results:\n LOG.debug(r)\n result_list.append(r)\n\n return result_list\n\n invoke_inbound_app = task(_invoke_inbound_app, event)\n ia_result = yield itself.call(invoke_inbound_app, \"functionworker\")\n yield ia_result.value\n\n return inbound_app_decorator", "def handle(self):\n self.ip = self.client_address[0]\n self.port = self.client_address[1]\n self.connection = self.request\n\n state.addConnection(self.connection)\n\n # Loop that listens for messages from the client\n while True:\n received_string = self.connection.recv(4096)\n\n if len(received_string) == 0:\n continue\n\n # TODO: Add handling of received payload from client\n\n # Convert payload from JSON to object\n payloadToData = json.loads(received_string)\n\n # determine what request is being made\n request_handler = RequestHandler(payloadToData,\n state,\n self.connection)\n\n # execute and generate response (JSON formatted)\n jsonResponse = request_handler.callHandler()\n\n 
if not jsonResponse == 'BROADCAST':\n # send response\n self.connection.send(bytes(jsonResponse, \"ascii\"))", "def listen(limb, hd):\n def callback(data):\n playback(limb, hd, str(data.data))\n\n rospy.Subscriber(\"handshake/play\", std_msgs.msg.String, callback)\n rospy.loginfo('listening...')\n rospy.spin()", "def __call__(self, *args, **kwargs):\n return self.method(self.receiver, *args, **kwargs)", "def _initReceived(self, msg):\r\n if len(msg) != 32:\r\n log.msg('Protocol Error: iInit message has invalid format.')\r\n self.transport.loseConnection()\r\n return\r\n\r\n d = self._endpoint.processInit(self, msg[:16], msg[16:])\r\n d.addCallbacks(self._initSuccessful, self._initFailed)", "def subscribe(receiver, updateInterval=10):", "def call(self, *args, **kwargs):", "def receive(channel):\n\n def callback(ch, method, properties, body):\n\n event = json.loads(body)\n event_info = event['event_info']\n event_type = event['type']\n success = True\n logger.info(f\"Received event {event}\")\n\n try:\n # Events coming from account microservice\n\n if event_type == USER_CREATED_EVENT:\n\n add_and_publish_event(\n GlobalPreferencesCreatedEvent(event['uuid'], event_info['id'], dict(\n vehicles=['bus', 'subway', 'train', 'tram', 'car', 'walking', 'bike', 'taxi',\n 'enjoy', 'mobike'],\n personal_vehicles=[])),\n PREFERENCES_CREATED)\n\n elif event_type == USER_DELETED_EVENT:\n\n add_and_publish_event(GlobalPreferencesDeletedEvent(event['uuid'], event_info['id']), PREFERENCES_DELETED)\n\n # Events generated in this microservice\n\n elif event_type == PREFERENCES_CREATED_EVENT:\n add_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == PREFERENCES_MODIFIED_EVENT:\n modify_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == PREFERENCES_DELETED_EVENT:\n delete_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == CALENDAR_CREATED_EVENT:\n add_calendar(Calendar(**event_info))\n\n elif event_type == CALENDAR_MODIFIED_EVENT:\n modify_calendar(Calendar(**event_info))\n\n elif event_type == CALENDAR_DELETED_EVENT:\n delete_calendar(Calendar(**event_info))\n\n except SQLAlchemyError as e:\n\n # to deal with at least once delivery of rabbitmq and the create methods which are not idempotent\n if (event_type == USER_CREATED_EVENT or event_type == PREFERENCES_CREATED_EVENT or event_type == CALENDAR_CREATED_EVENT) \\\n and method.redelivered and isinstance(e, IntegrityError):\n logger.info(f'Not processed redelivered event {event}')\n\n else:\n logger.info(f\"Couldn't process event {event}\")\n success = False\n\n finally:\n if success: # ack only if the event has been processed\n ch.basic_ack(delivery_tag=method.delivery_tag)\n logger.info(f\"Processed and acked event {event}\")\n\n # channel.basic_qos(prefetch_count=1)\n channel.basic_consume(callback,\n queue=CALENDAR_QUEUE)\n\n logger.info(\"Started listening to events\")\n channel.start_consuming()", "def begin_handling(self, butterfly: Butterfly):\n res = self.net.handle(butterfly)\n return self._event_loop.create_task(res)", "def subscribe(self, callback):\n self.channel.basic_consume(callback, queue=self.queue_name)\n self.channel.start_consuming()", "def __init__(self, *args):\r\n \r\n self.bl = None\r\n self.buddy = None\r\n self.connection = None\r\n \r\n #\r\n # incoming\r\n #\r\n #__init__(self, bl, connection, command, encoded)\r\n if type(args[0]) == BuddyList:\r\n self.bl = args[0]\r\n self.connection = args[1]\r\n if self.connection:\r\n self.buddy = self.connection.buddy\r\n 
self.command = args[2]\r\n \r\n # decode from line format to raw binary\r\n # and then let the message parse it \r\n self.blob = decodeLF(args[3])\r\n self.parse()\r\n \r\n # the incoming message is now properly initialized and somebody\r\n # could now call its execute() method to trigger its action\r\n return\r\n \r\n \r\n #\r\n # outgoing\r\n #\r\n #__init__(self, connection, blob)\r\n #__init__(self, buddy, blob)\r\n if type(args[0]) in [InConnection, OutConnection, Buddy]:\r\n if type(args[0]) in [InConnection, OutConnection]:\r\n self.connection = args[0]\r\n if self.connection.buddy:\r\n self.buddy = self.connection.buddy\r\n \r\n elif type(args[0]) == Buddy:\r\n self.buddy = args[0]\r\n self.connection = self.buddy.conn_out\r\n \r\n if len(args) > 1:\r\n blob = args[1]\r\n if type(blob) in [list, tuple]:\r\n self.blob = \" \".join(str(part) for part in blob)\r\n else:\r\n self.blob = str(blob)\r\n else:\r\n self.blob = \"\"\r\n \r\n self.command = type(self).__name__[12:]", "def onMessageFrame(self, payload):", "def test_broadcast_call(self, TwilioRestClient_mock):\n from sosbeacon.event.message import broadcast_call\n broadcast_call('84973796065')\n self.assertEqual(1, TwilioRestClient_mock.call_count)", "def callback(parsed_msg, msg_object):\n assert msg_object.stream_id == stream_id\n assert parsed_msg in msg", "def handle(self, message):", "def on_message(data):\n pass", "def attach_message_bus(self):\n print(\"Connecting to Mycroft message bus\")\n self.client = MessageBusClient()\n print(\"Calling client.run_in_thread()\")\n try:\n self.client.run_in_thread()\n except Exception as e:\n print(\"ERROR: run_in_thread() failed - is Mycroft running?\")\n sys.exit(1)", "def receive_message(self, message):", "def serve(self, rq):\n # Call callback by key directly from socket\n request = rq['request']\n\n if request in self.callbacks :\n self.callbacks[request](rq)\n else :\n print \"unrecognised request\"", "def serve(self, rq):\n # Call callback by key directly from socket\n request = rq['request']\n\n if request in self.callbacks :\n self.callbacks[request](rq)\n else :\n print \"unrecognised request\"", "def call(self, method, name, params=None, payload=None, **kwds):", "def run(self):\n self.initialize()\n\n # run the start callback\n tools.run_callback(\"start\", {'request': self._request})\n\n data = self._request.getData()\n pyhttp = self._request.getHttp()\n config = self._request.getConfiguration()\n\n # allow anyone else to handle the request at this point\n handled = tools.run_callback(\"handle\", \n {'request': self._request},\n mappingfunc=lambda x,y:x,\n donefunc=lambda x:x)\n\n if not handled == 1:\n blosxom_handler(self._request)\n\n # do end callback\n tools.run_callback(\"end\", {'request': self._request})", "def _handle_one_message(self):\n\n type, data = self.cxn.recv_message()\n\n if type.startswith(\"call\"):\n if len(data) != 3:\n message = (type, data)\n raise MessageError.invalid(message, \"incorrect number of args\")\n flags = {\n \"want_response\": type == \"call\",\n }\n call = Call(data[0], data[1], data[2], flags, self.client)\n self._handle_call(call)\n return False\n\n raise MessageError.bad_type(type)", "def on_open(ws):\n data = {\n \"action\": \"start\",\n # this means we get to send it straight raw sampling\n \"content-type\": \"audio/l16;rate=%d\" % RATE,\n \"interim_results\": True,\n \"profanity_filter\": False\n }\n\n # Send the initial control message which sets expectations for the\n # binary stream that follows:\n 
ws.send(json.dumps(data).encode('utf8'))\n # Spin off a dedicated thread where we are going to read and\n # stream out audio.\n threading.Thread(target=read_audio, args=[ws]).start()", "def event_in_cb(self, msg):\n self.event = msg.data", "async def run_offer(pc, signaling):\n await signaling.connect()\n\n channel = pc.createDataChannel(\"chat\")\n stream = ImageStream()\n\n @channel.on(\"open\")\n def on_open():\n print(\"on open called\")\n stream.step()\n img_str = cv2.imencode('.jpg', stream.emit())[1].tostring()\n channel.send(img_str)\n\n @channel.on(\"message\")\n def on_message(message):\n x, y = float(message.split(',')[0]), float(message.split(',')[1])\n print(\"received ball coordinates from client: \", (x, y))\n print(\"error between the estimated and original \", stream.calculate_error(x, y))\n stream.step()\n img_str = cv2.imencode('.jpg', stream.emit())[1].tostring()\n channel.send(img_str)\n\n # send offer\n await pc.setLocalDescription(await pc.createOffer())\n await signaling.send(pc.localDescription)\n await consume_signaling(pc, signaling)", "def events_and_callbacks_naoqi():\n\n # ALMemory acts as the hub for the distribution of event notifications.\n # Source: https://developer.softbankrobotics.com/nao6/naoqi-developer-guide/naoqi-apis/naoqi-core/almemory\n # Example: https://developer.softbankrobotics.com/nao6/naoqi-developer-guide/other-tutorials/python-sdk-tutorials/python-sdk-examples/vision/face\n\n class MyModule(naoqi.ALModule):\n \"\"\"Mandatory docstring\"\"\"\n\n def myCallback(self, key, value, message):\n \"\"\"Mandatory docstring\"\"\"\n\n print(\"Callback: key={}, value={}, message={}\".format(key, value, message))\n\n # Create a broker\n # TODO(TK): why?\n naoqi.ALBroker(\"pythonBroker\", IP_ME, PORT_ME, IP_ROBOT, PORT_ROBOT)\n\n # Create an instance of our callback handling module, and add it to global scope:\n global myModule # needs to be in global scope\n myModule = MyModule(\"myModule\")\n\n # [naoqi] Subscribe to events:\n proxy_memory = naoqi.ALProxy(\"ALMemory\", IP_ROBOT, PORT_ROBOT)\n print \"FaceDetected events before={}\".format(proxy_memory.getEventHistory(\"FaceDetected\"))\n proxy_memory.subscribeToEvent(\"FaceDetected\", \"myModule\", \"myCallback\")\n\n # qi framework\n def mycallback(key, value):\n print(\"qi callback: key={}, value={}\".format(key, value))\n sess = proxy_memory.session()\n mem = sess.service(\"ALMemory\")\n sub = mem.subscriber(\"FaceDetected\")\n sub.signal.connect(functools.partial(mycallback, \"FaceDetected\"))\n\n # Raise an event:\n proxy_memory.raiseEvent(\"FaceDetected\", str(datetime.datetime.now()))\n proxy_memory.raiseEvent(\"AnotherEvent\", str(datetime.datetime.now()))\n print \"FaceDetected events after={}\".format(proxy_memory.getEventHistory(\"FaceDetected\"))\n time.sleep(0.1) # give it some time to process", "async def __call__(self, payload):\n self.sdk.log(\"/metrika_subscribe handler fired with payload {}\".format(payload))\n\n buttons = []\n time = 19\n\n for rows in range(0,3):\n row = []\n\n for cols in range(0,2):\n row.append({\n 'text': '{}:00'.format(str(time % 24).zfill(2)),\n 'callback_data': 'subscribe|{}'.format(time % 24)\n })\n time += 1\n\n buttons.append(row)\n\n buttons.append([\n {\n 'text': 'Отписаться',\n 'callback_data': 'unsubscribe'\n }\n ])\n\n await self.sdk.send_inline_keyboard_to_chat(payload['chat'], 'Выберете время', buttons)", "async def call_rpc(self, rpc_message: RpcMessage, options: dict, bus_client: \"BusClient\"):\n raise NotImplementedError()", "def 
handle_msg(msg):\n if comm._msg_callback:\n comm._msg_callback(msg)", "def scan_callback(self, scan_msg):\n self.scan = scan_msg", "def call(self, addr, *args, **kwargs):\n prepare_cb = kwargs.pop('prepare_cb', self.jitter.func_prepare_systemv)\n super(self.__class__, self).call(prepare_cb, addr, *args)", "def call(self, addr, *args, **kwargs):\n prepare_cb = kwargs.pop('prepare_cb', self.jitter.func_prepare_systemv)\n super(self.__class__, self).call(prepare_cb, addr, *args)", "def call(self, addr, *args, **kwargs):\n prepare_cb = kwargs.pop('prepare_cb', self.jitter.func_prepare_systemv)\n super(self.__class__, self).call(prepare_cb, addr, *args)", "def call(self, addr, *args, **kwargs):\n prepare_cb = kwargs.pop('prepare_cb', self.jitter.func_prepare_systemv)\n super(self.__class__, self).call(prepare_cb, addr, *args)", "def call(self, addr, *args, **kwargs):\n prepare_cb = kwargs.pop('prepare_cb', self.jitter.func_prepare_systemv)\n super(self.__class__, self).call(prepare_cb, addr, *args)", "def call(self, addr, *args, **kwargs):\n prepare_cb = kwargs.pop('prepare_cb', self.jitter.func_prepare_systemv)\n super(self.__class__, self).call(prepare_cb, addr, *args)" ]
[ "0.63667405", "0.60182863", "0.59576505", "0.59576505", "0.59211445", "0.589763", "0.589763", "0.589763", "0.5851596", "0.5813852", "0.5791376", "0.57506865", "0.57401466", "0.5735596", "0.5733125", "0.57320815", "0.571448", "0.5710372", "0.570059", "0.5682062", "0.5653709", "0.5653709", "0.5653049", "0.56425714", "0.5633873", "0.5633873", "0.5620766", "0.5611832", "0.5583558", "0.55589837", "0.5535881", "0.5513726", "0.5513554", "0.5499407", "0.5499407", "0.5499407", "0.549764", "0.5471786", "0.54628867", "0.5457652", "0.5449686", "0.5428686", "0.5424845", "0.5407906", "0.53973526", "0.53939605", "0.53927606", "0.53568465", "0.53471977", "0.534352", "0.5331956", "0.5331956", "0.53176767", "0.53173584", "0.5310254", "0.52974826", "0.5289053", "0.5277672", "0.52745587", "0.5270927", "0.5262806", "0.5256688", "0.52552307", "0.5254691", "0.52515656", "0.5247331", "0.5247203", "0.52436453", "0.5242359", "0.52397174", "0.52383256", "0.52296203", "0.52212304", "0.521866", "0.5214515", "0.5214055", "0.5207387", "0.52064145", "0.5205412", "0.51962996", "0.519537", "0.5194261", "0.5194261", "0.5192901", "0.5186839", "0.5183223", "0.5181837", "0.5178578", "0.51701164", "0.51687175", "0.5167971", "0.5167006", "0.5165493", "0.51650935", "0.5163067", "0.5163067", "0.5163067", "0.5163067", "0.5163067", "0.5163067" ]
0.62250584
1
A method for showing how to handle Bandwidth messaging callbacks. For inbound SMS that contains the phrase "call me", a phone call is made and the user is asked to forward the call to another number For inbound SMS that doesn't contain the phrase "call me", the response is a SMS with the date and time. For inbound MMS with a media attachment, the response is the same media attachment sent through Bandwidth's media resource. For all other events, the callback is logged to console
def handle_inbound_message(): data = json.loads(request.data) if data[0]["type"] == "message-received": if "call me" in data[0]["message"]["text"]: handle_inbound_sms_call_me(data[0]["message"]["to"][0], data[0]["message"]["from"]) elif "media" in data[0]["message"]: handle_inbound_media_mms(data[0]["message"]["to"][0], data[0]["message"]["from"], data[0]["message"]["media"]) else: handle_inbound_sms(data[0]["message"]["to"][0], data[0]["message"]["from"]) else: print(data) return ""
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_inbound_sms_call_me(to, from_):\n handle_call_me(to, from_)", "def handle_inbound_media_mms(to, from_, media):\n downloaded_media_files = download_media_from_bandwidth(media)\n upload_media_to_bandwidth(downloaded_media_files)\n remove_files(downloaded_media_files)\n body = MessageRequest()\n body.application_id = MESSAGING_APPLICATION_ID\n body.to = [from_]\n body.mfrom = to\n body.text = \"Rebound!\"\n #Build the media URL by taking the media ids (that doubled as the file names) and appending them to\n #the bandwidth media base url\n body.media = [BANDWIDTH_MEDIA_BASE_ENDPOINT + media_file for media_file in downloaded_media_files]\n try:\n messaging_client.create_message(MESSAGING_ACCOUNT_ID, body)\n except Exception as e:\n print(e)\n return None", "def incoming_sms():\n txt = request.form['Body']\n\n # remove leading and trailing white space and make lowercase\n txt = txt.strip()\n txt = txt.lower()\n\n # handle random searches differently than breed searches\n if txt == 'random' or txt == 'dog':\n url = get_dogs.get_random_dog()\n else:\n url = get_dogs.request_breed(txt)\n \n resp = MessagingResponse()\n if url:\n resp.message(url)\n else:\n resp.message(\"Sorry! We couldn't find a dog matching that query. Please try \\\n a more general search term.\")\n return str(resp)", "def handle_inbound_sms(to, from_):\n body = MessageRequest()\n body.application_id = MESSAGING_APPLICATION_ID\n body.to = [from_]\n body.mfrom = to\n body.text = \"The current date-time is: \" + str(time.time() * 1000) + \" milliseconds since the epoch\"\n try:\n messaging_client.create_message(MESSAGING_ACCOUNT_ID, body)\n except Exception as e:\n print(e)\n return None", "def incoming_sms():\n number = request.values.get('From', None)\n body = request.values.get('Body', None)\n print(body)\n # Start our TwiML response\n resp = MessagingResponse()\n\n body = body.lower()\n body = body.strip()\n body_arr = body.split()\n class_name = \"\"\n name = \"\"\n if len(body_arr) == 4:\n first_name = body_arr[0]\n last_name = body_arr[1]\n name = first_name + \" \" + last_name\n class_name = body_arr[2] + body_arr[3]\n elif len(body_arr) == 6:\n first_name = body_arr[0]\n last_name = body_arr[1]\n name = first_name + \" \" + last_name\n class_name = body_arr[2] + body_arr[3] + body_arr[4] + body_arr[5]\n else:\n resp.message(\"Invalid: Enter your name, class, and session# separated by spaces as shown\\n(eg: Avi Patel grade1 session1, Ravi Rao PreK session1, Mira Singh kg session2, etc.):\")\n return str(resp)\n\n if classes.find_one({'class':class_name}):\n forward_message(class_name, number, name)\n resp.message(\"Your teachers have been notified\")\n\n else:\n resp.message(\"Invalid: Enter your name, class, and session# separated by spaces as shown\\n(eg: Avi Patel grade1 session1, Ravi Rao PreK session1, Mira Singh kg session2, etc.):\")\n\n return str(resp)", "def callback_message(self, message):\n pass", "def callback_message(self, message):\n pass", "def onMessage(self, payload, isBinary):", "def incoming_sms():\n # Get the message body\n body = request.values.get('Body', None)\n\n # Start our TwiML response\n resp = MessagingResponse()\n\n # Determine the appropriate response/action for incoming message\n replyText = getReply(body)\n\n resp.message(replyText)\n\n return str(resp)", "def sms_reply():\n # Start our TwiML response\n # if body.lower()==\"good\":\n message=\"Hi I'm IRIS, an Immediately Responsive Intelligent System\\nHow are you feeling today?\"\n user=request.form['Body']\n\n # 
message=\"Hi \"+ name+ \"\"\n # user=request.form['Body']\n\n if user==\"good\":\n message=\"Glad to hear it! I hope you continue to feel this way! Celebrate this feeling and hold onto what happened ot make you feel this way so that you can repeat it in the future!\"\n\n if user==\"sad\":\n message=\"I’m sorry to hear that. Here are some things I do to make me feel better: take a walk outside, listen to uplifting music, call or message a loved one, or watch or read something positive to take my mind off of what I’m feeling.\"\n\n if user==\"nervous\":\n message=\"It’s going to be ok! This feeling will not last forever.\"\n if user==\"lonely\":\n message=\"I’m here for you, and know that you are loved, supported, and important. The world would not be the same without you! For a loving quote respond\"\n\n if user==\"angry\":\n message=\"“Let me help you turn your anger into something positive. Here are some ways to burn off energy productively: take a long walk, remove yourself from the situation, paint of draw, listen to loud music, or take a break from what you are doing.\"\n\n if user==\"tired\":\n message=\"I understand what you are feeling well. I recommend taking a break to do an activity you enjoy, taking a nap, getting a coffee, doing 20 jumping jacks, listening to a pump-up playlist, or standing up to stretch for a bit.\"\n\n if user==\"average\":\n message=\"There are many things to look forward to!\"\n resp = MessagingResponse()\n\t # Add a message\n \n resp.message(message)\n\t # Add a picture message\n\t #msg.media(\"https://farm8.staticflickr.com/7090/6941316406_80b4d6d50e_z_d.jpg\")\n\n return str(resp)", "def message_callback(self, message):\n pass", "def bitfinex2_on_message(caller, msg):\n msg = json.loads(msg)\n if caller.subbed_count == 7:\n if msg[1] == \"te\":\n chnl = msg[0]\n body = msg[2]\n df = pd.DataFrame.from_records(\n data=[{\n \"tid\": int(body[0]),\n \"price\": float(body[3]),\n \"volume\": float(body[2]),\n \"datetime\": pd.to_datetime(body[1], unit='ms')\n }],\n index=\"datetime\"\n )\n # print (df)\n df.index = df.index.tz_localize(\"GMT0\")\n caller.write(chnl, df)\n\n return chnl, df\n\n if type(msg) is dict and \"event\" in msg and msg[\"event\"] == \"subscribed\":\n caller.config[\"channel_symbol\"][msg[\"chanId\"]] = \"bitfinex2\" + \":\" + bdic[msg[\"symbol\"]]\n caller.subbed_count += 1\n return\n\n\n chnl = msg[0]\n body = msg[2]\n df = pd.DataFrame.from_records(\n data=[{\n \"tid\": int(body[0]),\n \"price\": float(body[3]),\n \"volume\": float(body[2]),\n \"datetime\": pd.to_datetime(body[1], unit='ms')\n }],\n index=\"datetime\"\n )\n df.index = df.index.tz_convert(\"GMT0\")\n caller.write(chnl, df)\n\n return chnl, df", "async def on_call(message, client):\n pass", "def incoming_sms():\n # Get the message the user sent our Twilio number\n body = request.values.get('Body', None)\n\n # Start our TwiML response\n resp = MessagingResponse()\n\n # Determine the right reply for this message\n if body[0:7] == 'Newhigh':\n num = body[7:]\n result = ''\n with open('currentData.txt') as f:\n for i in range(int(num)):\n result += str(i + 1)\n result += '. 
'\n result += f.readline()\n # resp.message(result.stdout.decode('utf-8'))\n resp.message(result)\n return str(resp)\n \n if body[0:6] == 'Newlow':\n num = body[6:]\n result = subprocess.run(['tail','-n',num,'currentData.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body[0:4] == 'High':\n num = body[4:]\n num = '-' + num\n result = subprocess.run(['head',str(num),'AllBuildings.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body[0:3] == 'Low':\n num = body[3:]\n result = subprocess.run(['tail','-n',num,'AllBuildings.txt'], stdout=subprocess.PIPE)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)\n if body == 'Hourly':\n result = subprocess.run(['cat','ByHour.txt'], stdout=subprocess.PIPE)\n elif body == 'Weekday':\n result = subprocess.run(['cat','DaysOfWeek.txt'], stdout=subprocess.PIPE)\n # resp.message(fortune)\n elif body == '10minute':\n result = subprocess.run(['cat','data/PerTenMinutes.txt'], stdout=subprocess.PIPE)\n else:\n resp.message(\"June 2018 - Feb 2019 Totals\\n\\nCommands:\\n(# is any number between 1 and 50)\\nNewhigh# - Highest Past 10 Minutes\\nNewlow# - Lowest Past 10 Minutes\\n\\nBelow are cumulative annual figs:\\nHigh# - Highest of Year\\nLow# - Lowest of Year\\n\\nCampuswide Figures:\\n10minute - Ten Minute Intervals\\nHourly - 1 Hour Intervals\\nWeekday - By Day of the Week\\n\")\n return str(resp)\n resp.message(result.stdout.decode('utf-8'))\n return str(resp)", "def handle(self, message):\n for callback in self.callbacks:\n callback(message['data'])", "def processMessage(self, *args, **kwargs):\r\n pass", "def on_message(data):\n pass", "def callback_message(self, message):\n return \"hi bro\"", "def on_bus_message(self, bus, message):\n pass", "def process(self, message: Message, **kwargs: Any) -> None:", "def on_data(self, raw_data):\n data = json.loads(raw_data)\n\n if \"in_reply_to_status_id\" in data:\n status = Status.parse(None, data)\n return self.on_status(status)\n if \"delete\" in data:\n delete = data[\"delete\"][\"status\"]\n return self.on_delete(delete[\"id\"], delete[\"user_id\"])\n if \"disconnect\" in data:\n return self.on_disconnect_message(data[\"disconnect\"])\n if \"limit\" in data:\n return self.on_limit(data[\"limit\"][\"track\"])\n if \"scrub_geo\" in data:\n return self.on_scrub_geo(data[\"scrub_geo\"])\n if \"status_withheld\" in data:\n return self.on_status_withheld(data[\"status_withheld\"])\n if \"user_withheld\" in data:\n return self.on_user_withheld(data[\"user_withheld\"])\n if \"warning\" in data:\n return self.on_warning(data[\"warning\"])\n\n log.error(\"Received unknown message type: %s\", raw_data)", "def _handle_custom_msg(self, content, buffers):\n self._msg_callbacks(self, content, buffers)", "def sms_reply():\n # Fetch the message\n media_msg = request.form.get('NumMedia')\n msg = request.form.get('Body').lower()\n resp = MessagingResponse()\n responded = False\n if '1' in media_msg:\n pic_url = request.form.get('MediaUrl0') # URL of the person's media\n # pprint(pic_url) # so you can see the URL that the picture generated \n resp.message(\"We have recieved your request for image analysis! 
Please wait for our response\")\n resp.message(pic_url)\n url = \"https://techclan-twitter.herokuapp.com/reverse_image?URL=\"\n url=url+pic_url\n resp.message('The image has been succesfully uploaded to our server!The Url of the image is :')\n response=requests.get(url)\n parsed=json.loads(response.text)\n s1=\"\"\n count=0\n for each in parsed:\n s1=s1+each+\"\\n ................\\n\"\n if count>5:\n break\n count=count+1\n resp.message('The reverse image analysis of image reports are:')\n resp.message(s1)\n time.sleep(1)\n u='http://18.205.87.224/api/text?id='\n u=u+pic_url\n response=requests.get(u)\n parsed=json.loads(response.text)\n resp.message(parsed)\n responded==True\n elif '5' in msg:\n r = requests.get('https://coronavirus-19-api.herokuapp.com/countries/india')\n if r.status_code == 200:\n data = r.json()\n text = f'_Covid-19 Cases in India_ \\n..........................\\nConfirmed Cases : *{data[\"cases\"]}* \\n................\\nToday Cases : *{data[\"todayCases\"]}* \\n..............\\nDeaths : *{data[\"deaths\"]}* \\n..................................\\nRecovered : *{data[\"recovered\"]}* \\n\\n..................\\nTotal Tested : *{data[\"totalTests\"]}* \\n\\n Type 0 to return to main menu'\n else:\n text = 'I could not retrieve the results at this time, sorry.'\n resp.message(text)\n responded = True \n \n elif '1' in msg:\n \n resp.message(\"wait we will fetch your results soon!!\")\n url = \"http://18.234.107.157:5000/api/text?id=\"\n ms=str(msg)\n #a,b=ms.split(' ',1)\n url=url+ms\n response=requests.get(url)\n parsed=json.loads(response.text)\n agree=0\n disagree=0\n discuss=0\n ctr=0\n for each in parsed:\n if ctr>100:\n break\n ctr=ctr+1\n answ=each.get('Score',\"error\")\n if answ == \"agree\":\n agree=agree+1\n elif answ == \"disagree\":\n disagree=disagree+1\n if(agree>disagree):\n resp.message(\"This is *REAL* according to our sources !! Our results are based on following sources..we cannot be 100% Sure.\")\n else:\n resp.message(\"This is *FAKE* according to our sources !! Our results are based on following sources..we cannot be 100% Sure.\")\n count=0\n s1=\"\"\n for each in parsed:\n s1=s1+each['link']+\"*Title :*\" +each['title']+\"\\n ................\\n\"\n if count>5:\n break\n count=count+1\n resp.message(s1)\n responded==True\n #reporting\n elif '3' in msg:\n # resp.message(\"We have reported your content to our police database!!\")\n ms=str(msg)\n a,b=ms.split(' ',1)\n url='https://spreadsheetupdate1.herokuapp.com/spreed?id='\n url=url+ms\n r=requests.get(url)\n resp.message(\"We have reported your content to our police database!!\")\n responded==True\n\n\n\n \n #for news\n\n elif msg=='news' or msg=='4':\n \n url=\"\"\"https://newsapi.org/v2/top-headlines?sources=bbc-news,cnn,cnbc,abc-news,google-news-uk,independent&apiKey=3ff5909978da49b68997fd2a1e21fae8\"\"\"\n r = requests.get(url)\n #resp.message(\"stay\") \n if r.status_code == 200:\n resp.message(\"stay here with us! 
We are fetching news for you \")\n data = r.json()\n articles = data['articles'][:5]\n result = \"\"\n ctr=0 \n for article in articles:\n # if ctr>10:\n # break\n # ctr=ctr+1\n title = article['title']\n url = article['url']\n if 'Z' in article['publishedAt']:\n published_at = datetime.datetime.strptime(article['publishedAt'][:19], \"%Y-%m-%dT%H:%M:%S\")\n else:\n published_at = datetime.datetime.strptime(article['publishedAt'], \"%Y-%m-%dT%H:%M:%S%z\")\n \n result += \"\"\"*{}*\nRead more: {}\n_Published at {:02}/{:02}/{:02} {:02}:{:02}:{:02} UTC_\n\"\"\".format(\n title,\n url, \n published_at.day, \n published_at.month, \n published_at.year, \n published_at.hour, \n published_at.minute, \n published_at.second\n )+\"\\n ..................\\n\"\n\n else:\n result = 'I cannot fetch news at this time. Sorry!'\n\n resp.message(result)\n responded = True\t\n else:\n phone_no = request.form.get('From')\n reply = fetch_reply(msg, phone_no)\n\n resp = MessagingResponse()\n resp.message(reply)\n responded = True\n \n\n \t\n\n return str(resp)", "def onMessageBegin(self, isBinary):", "def listen_for_any_message(self, msg, match):\n question=\"{}\".format(msg)\n return self.cbmodel.get_response(question)", "def sms_notification(request, source):\n\n # call provider factory based on slug\n source = source.lower()\n try:\n provider = get_service_provider(slug=source)\n except Exception, e:\n log.critical(e)\n raise Http404()\n\n # do a simple IP check\n ip = request.META['REMOTE_ADDR']\n\n if not provider.is_ip_allowed(ip):\n log.warn(\"Illegal call from %s\" % ip)\n raise Http404()\n\n log.info(\"Got request notification from %s\" % source)\n\n # extract message data\n try:\n msisdn, text, number = provider.get_primal_data(request.GET)\n log.debug(\"%s %s %s\" % (msisdn, text, number))\n except Exception, e:\n return HttpResponse(provider.handle_notification_error(e, request))\n\n log.debug(\"%s Request input: msisdn:%s, text:%s, number:%s\" % \\\n (source, msisdn, text, number))\n\n # collect purchase data, send success signal and say thanks to your\n # notification service\n la = provider.get_large_account(la_number = number, text = text)\n provider.dispatch_purchase(la = la, msisdn = msisdn, text = text)\n return HttpResponse(provider.NOTIFICATION_REPLY)", "def handle_message(self, message):", "def incoming(self, msg):\n hdr = msg.header\n\n # Signals:\n if hdr.message_type is MessageType.signal:\n key = (hdr.fields.get(HeaderFields.path, None),\n hdr.fields.get(HeaderFields.interface, None),\n hdr.fields.get(HeaderFields.member, None)\n )\n cb = self.signal_callbacks.get(key, None)\n if cb is not None:\n cb(msg.body)\n return\n\n # Method returns & errors\n reply_serial = hdr.fields.get(HeaderFields.reply_serial, -1)\n reply_handle = self.awaiting_reply.pop(reply_serial, None)\n if reply_handle is not None:\n if hdr.message_type is MessageType.method_return:\n reply_handle.set_result(msg.body)\n return\n elif hdr.message_type is MessageType.error:\n reply_handle.set_exception(DBusErrorResponse(msg))\n return\n\n if self.on_unhandled:\n self.on_unhandled(msg)", "def handle(self, message):", "def handleMessage(msg):", "def handle_send_messages():\n items = {k: v for k, v in subscribers.items() if v}\n for key in items:\n subscriber_obj = items[key]\n sim_id = get_sim_id(subscriber_obj)\n if sim_id and type(sim_id) is int:\n frame_messenger(subscriber_obj)\n elif sim_id and sim_id == \"live\":\n live_messenger(subscriber_obj)", "def process_sms():\n phone_number = request.values.get('From', 
None)\n sms_message = request.values.get('Body', None)\n resp = twilio.twiml.Response()\n regina_answer = ask_regina(phone_number, sms_message, \"sms\")['text']\n resp.message(regina_answer)\n return str(resp)", "def handle_incoming_message(obj, reply_channel):\n if int(obj[message_type_key]) == 0:\n try:\n sub_obj = create_subscriber_object(reply_channel, obj)\n subscribers[reply_channel.name] = sub_obj\n except ApiException as exc:\n send_save_to_channel(reply_channel, str(exc))\n\n elif int(obj[message_type_key]) == 1:\n disconnect_subscriber(reply_channel)\n\n print(\"incoming_msg_handled\")", "def on_media_state(self):\n try:\n if self.call.info().media_state == pj.MediaState.ACTIVE:\n if self.cfg['VoipIO']['debug']:\n self.system_logger.debug(\"CallCallback::on_media_state : Media is now active\")\n else:\n if self.cfg['VoipIO']['debug']:\n self.system_logger.debug(\"CallCallback::on_media_state : Media is inactive\")\n except:\n self.voipio.close_event.set()\n self.cfg['Logging']['system_logger'].exception('Uncaught exception in the CallCallback class.')\n raise", "def MessageHandlerMethod(**kwargs):\n data: dict = kwargs['data']\n bus: AbstractPikaBus = kwargs['bus']\n payload: dict = kwargs['payload']\n print(payload)\n if payload['reply']:\n payload['reply'] = False\n bus.Reply(payload=payload)", "def message_dispatch():\n if not from_twilio(request):\n abort(403)\n resp = twilio.twiml.Response()\n if not session.get(\"pwd\"):\n session['pwd'] = '__META__ROOT__'\n body = request.values.get(\"Body\")\n number = request.values.get(\"From\")\n message = cmd_handler.handle(number,session,body)\n session.modified = True\n resp.message(message)\n # We are probably going to modify the session on every command.\n return str(resp)", "def listen():\n if request.method == 'GET':\n print request\n return verify_webhook(request)\n\n if request.method == 'POST':\n payload = request.json\n event = payload['entry'][0]['messaging']\n for x in event:\n if is_user_message(x):\n text = x['message']['text']\n sender_id = x['sender']['id']\n respond(sender_id, text)\n\n return \"ok\"", "def on_bus_message(self, channel, method_frame, header_frame, body):\n\n try:\n # there are two messages that get broadcast that we really\n # don\"t care about. 
They have to do with feed synchronization\n # and other internal book-keeping\n if method_frame.routing_key in self.capture_events:\n event = {\n \"content_type\": header_frame.content_type,\n \"routing_key\": method_frame.routing_key,\n \"body\": body\n }\n self.logger.debug(\"Received Message: %s - %s\" % (header_frame.content_type, method_frame.routing_key))\n self.processor_pool.apply_async(process_event, (self.event_processor, event))\n\n else:\n self.logger.debug(\"Unknown message info: %s\" % method_frame.routing_key)\n\n except:\n self.logger.exception(\"Error processing bus message\")", "def handle_messages(self):\n\n #Get the time at which the code started running\n current_time = datetime.datetime.now()\n\n #get all messages between now and the time where a message was last received\n messages = self.client.messages.list(\n date_sent_before = datetime.datetime.now()+ datetime.timedelta(hours = TIMEDIFFERENCE),\n date_sent_after = self.last_message_timing + datetime.timedelta(hours = TIMEDIFFERENCE)\n )\n\n #Iterate through all the new messages\n for record in messages:\n #If it is not from the Twilio Client\n if record.from_ != 'whatsapp:+14155238886':\n #Then update the timing of the last message to the current time\n self.last_message_timing = current_time\n #If the message sent is the '?' that seeks to get the number\n #of people in the queue\n if record.body == '?':\n #Get the data about people from firebase\n people_data = self.firebase.get_data('people_count')\n #Get the number of people queueing\n no_of_people = people_data['people_count']\n #Create a message from the API to tell the person\n #asking the number of people in the queue\n message = self.client.messages.create(\n body='The number of the people in the queue is {}'.format(no_of_people),\n from_='whatsapp:{sender_number}'.format(**self.config),\n to=record.from_\n )", "def handle_message(self, validated_message: dict):\n self.logger.debug(f'Sensor received message {validated_message}')\n if (validated_message['messageType'] !=\n model.MessageTypes.Control.value):\n self.logger.debug(\n 'Sensor ignoring because messageType was not control'\n )\n return\n if validated_message['messageBody']['target'] != self.component_id:\n self.logger.debug(\n 'Sensor ignoring because not targeted at me'\n )\n return\n\n subtype = validated_message['messageSubtype']\n try:\n self.logger.debug(f'Dispatching message with subtype {subtype}')\n self.message_handler_table[subtype](validated_message)\n except KeyError:\n self.logger.warning(f'No handler for with subtype {subtype}')\n pass", "def callback(parsed_msg, msg_object):\n assert msg_object.stream_id == stream_id\n assert parsed_msg in msg", "def callback_botmessage(self, message):\n pass", "def callback_botmessage(self, message):\n pass", "def callback_botmessage(self, message):\n pass", "def test_broadcast_sms(self, TwilioRestClient_mock):\n from google.appengine.ext import ndb\n from sosbeacon.event.message import Message\n from sosbeacon.event.message import broadcast_sms\n\n message_key = Mock(spec=ndb.Key)\n message_key.kind.return_value = \"Message\"\n message = Message(key=message_key, message_type='em', message={'email': '', 'sms': 'Test sms'})\n message_key.get.return_value = message\n\n broadcast_sms('84973796065', message, 'http://google.com', 'longly', 'longly')\n self.assertEqual(1, TwilioRestClient_mock.call_count)", "def incoming(phone_number, text, backend_api, timestamp=None, \n domain_scope=None, backend_message_id=None, delay=True,\n 
backend_attributes=None):\n # Log message in message log\n phone_number = clean_phone_number(phone_number)\n msg = SMSLog(\n phone_number = phone_number,\n direction = INCOMING,\n date = timestamp or datetime.utcnow(),\n text = text,\n domain_scope = domain_scope,\n backend_api = backend_api,\n backend_message_id = backend_message_id,\n )\n if backend_attributes:\n for k, v in backend_attributes.items():\n setattr(msg, k, v)\n if settings.SMS_QUEUE_ENABLED:\n msg.processed = False\n msg.datetime_to_process = datetime.utcnow()\n msg.queued_timestamp = msg.datetime_to_process\n msg.save()\n enqueue_directly(msg)\n else:\n msg.processed = True\n msg.save()\n process_incoming(msg, delay=delay)\n return msg", "def twilio(request):\n\n # Log the SMS request\n timestamp = datetime.now()\n sms = models.Sms.objects.create(request.From, request.To, request.Body, datetime.now)\n sms.save()\n\n\n\n # Parse the SMS\n try: msgtype, beat, message = parse_sms(sms.body)\n except SMSParseError as e:\n # Handle error condition\n return render_to_response('twilio_responses/parseerror.xml', {'reason': e.reason})\n\n\n if msgtype == REGISTER:\n user = User(beat=beat, cell = request.From)\n user.save()\n return render_to_response('twilio_response/registrationreceived.xml')\n\n if msgtype == INCIDENT:\n # Log an incident\n\n # Is the user registered?\n try: user = models.User.objects.get(cell = sms.sender)\n except: \n return render_to_response('twilio_responses/parseerror.xml', {'reason': 'First register by texting register <beat number>'})\n\n \n\n if not beat:\n # Assume the user's in his home beat, unless he specified one\n beat = user.beat\n\n incident = Incident(reportedby = user, beat = beat, msg = message)\n incident.save()\n\n if beat.user == user:\n # If the admin submitted the incident, just notify the beat\n notify_beat(incident)\n return render_to_response('twilio_responses/beatnotified.xml')\n else:\n notify_admin(incident)\n return render_to_response('twilio_responses/adminnotified.xml')\n\n elif msgtype == NOTIFY:\n # The message should be an incident ID\n beat = user.beat\n \n try: \n incident = Incident.objects.get(id = int(msg))\n except:\n return render_to_response('twilio_responses/parseerror.xml', {'reason': 'Invalid incident: %s' % msg})\n\n if not beat.user == user:\n return render_to_response('twilio_responses/parseerror.xml', {'reason': 'You\\'re not the beat administrator'})\n\n return render_to_response('twilio_responses/beatnotified.xml')", "def application_message(self, bus, msg):\n msgtype = msg.structure.get_name()\n if msgtype == 'partial_result':\n self.partial_result(msg.structure['hyp'], msg.structure['uttid'])\n if msgtype == 'result':\n self.final_result(msg.structure['hyp'], msg.structure['uttid'])", "def application_message(self, bus, msg):\n msgtype = msg.structure.get_name()\n if msgtype == 'partial_result':\n self.partial_result(msg.structure['hyp'], msg.structure['uttid'])\n if msgtype == 'result':\n self.final_result(msg.structure['hyp'], msg.structure['uttid'])", "def received_message(self, m):\n self.receiver.handle_message(m)", "def receive(channel):\n\n def callback(ch, method, properties, body):\n\n event = json.loads(body)\n event_info = event['event_info']\n event_type = event['type']\n success = True\n logger.info(f\"Received event {event}\")\n\n try:\n # Events coming from account microservice\n\n if event_type == USER_CREATED_EVENT:\n\n add_and_publish_event(\n GlobalPreferencesCreatedEvent(event['uuid'], event_info['id'], dict(\n vehicles=['bus', 'subway', 
'train', 'tram', 'car', 'walking', 'bike', 'taxi',\n 'enjoy', 'mobike'],\n personal_vehicles=[])),\n PREFERENCES_CREATED)\n\n elif event_type == USER_DELETED_EVENT:\n\n add_and_publish_event(GlobalPreferencesDeletedEvent(event['uuid'], event_info['id']), PREFERENCES_DELETED)\n\n # Events generated in this microservice\n\n elif event_type == PREFERENCES_CREATED_EVENT:\n add_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == PREFERENCES_MODIFIED_EVENT:\n modify_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == PREFERENCES_DELETED_EVENT:\n delete_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == CALENDAR_CREATED_EVENT:\n add_calendar(Calendar(**event_info))\n\n elif event_type == CALENDAR_MODIFIED_EVENT:\n modify_calendar(Calendar(**event_info))\n\n elif event_type == CALENDAR_DELETED_EVENT:\n delete_calendar(Calendar(**event_info))\n\n except SQLAlchemyError as e:\n\n # to deal with at least once delivery of rabbitmq and the create methods which are not idempotent\n if (event_type == USER_CREATED_EVENT or event_type == PREFERENCES_CREATED_EVENT or event_type == CALENDAR_CREATED_EVENT) \\\n and method.redelivered and isinstance(e, IntegrityError):\n logger.info(f'Not processed redelivered event {event}')\n\n else:\n logger.info(f\"Couldn't process event {event}\")\n success = False\n\n finally:\n if success: # ack only if the event has been processed\n ch.basic_ack(delivery_tag=method.delivery_tag)\n logger.info(f\"Processed and acked event {event}\")\n\n # channel.basic_qos(prefetch_count=1)\n channel.basic_consume(callback,\n queue=CALENDAR_QUEUE)\n\n logger.info(\"Started listening to events\")\n channel.start_consuming()", "def receive_message(self, message):", "def application_message(self, bus, msg):\n\t\tmsgtype = msg.structure.get_name()\n\t\tif msgtype == 'partial_result':\n\t\t\tself.partial_result(msg.structure['hyp'], msg.structure['uttid'])\n\t\telif msgtype == 'result':\n\t\t\tself.final_result(msg.structure['hyp'], msg.structure['uttid'])", "def onMessageFrame(self, payload):", "def callback(ch, method, properties, body):\r\n body = json.loads(body)\r\n print(f\"[x] Task in the queue {body}\")\r\n # Creating instance of AudioRecorder\r\n recorder = AudioRecorder(body)\r\n driver = recorder.prepare_browser(body['settings'])\r\n recorder.run(driver, body)", "def handle_message(event):\n intention = parse_intention(event.message.text)\n if intention == config.QUERY_INTENTION:\n handle_query_weather_message(event)\n elif intention == config.SUBSCRIBE_INTENTION:\n handle_subscribe_message(event)\n else:\n handle_unknown_message(event)", "def on_watch_message(self, bus, msg):\n msg_struct = msg.get_structure()\n if msg_struct:\n if msg_struct.get_name() == 'GstMessageTag':\n codec_name = ((msg_struct[\"taglist\"].nth_tag_name(0)))\n codec_value = msg_struct[\"taglist\"].get_string(codec_name)\n info_name = codec_name\n c_result, info_value = codec_value\n if c_result:\n self.info_handler(info_name, info_value)\n if codec_name == \"video-codec\":\n self.info_handler(codec_name, info_value)\n r_result, width, height = self.get_resolution()\n if r_result:\n info_name = \"resolution\"\n info_value = \"[{}x{}]\".format(width, height)\n self.info_handler(info_name, info_value)\n bus.remove_signal_watch()", "def _on_message(self, raw_msg):\n strmsg = raw_msg.decode()\n msg = json.loads(strmsg)\n\n print(msg)\n\n if self._handlers.get(msg['msgid']):\n for handler in self._handlers[msg['msgid']]:\n handler.handle(msg)", 
"def callback(self, data):\n\n self.connection = pika.BlockingConnection(self.params)\n self.channel = self.connection.channel()\n\n # The fanout exchange broadcasts all the messages it receives to all the queues it knows.\n # That is what we need for our logger.\n # Tony changed to 'topic' to work with Kuilin's group\n self.channel.exchange_declare(exchange=self.logName,\n exchange_type='topic',\n auto_delete=True)\n\n #TONY WAS HERE\n #CONVERT THE DATA BEFORE SENDING\n #this extracts the data to a tuple\n data_tuple = struct.unpack(\"<hddhdddddddddddd\", data)\n #convert tuple to string and remove the parentheses on the ends\n data_to_send = str(data_tuple).strip(\"()\")\n\n # Publish the data to the exchange\n self.channel.basic_publish(exchange=self.logName,\n routing_key=self.RoutingKey,\n body=data_to_send) #used to be body=data (from Pilot)\n\n #tony was here\n #print(\"Sending: %r via %r and %r\" % (data,self.logName,self.RoutingKey))\n\n self.connection.close()", "def process_messages(self):\n pass", "def test_get_sms_message(self):\n pass", "def handle_message(self, mxmsg):\n if self._handler is None:\n raise NotImplementedError()\n\n self.notify_started()\n response = self._handler(mxmsg)\n if response == ():\n self.no_response()\n elif isinstance(response, str):\n self.send_message(message=response, type=MessageTypes.PING)\n elif isinstance(response, dict):\n self.send_message(**response)\n else:\n raise ValueError(\"Unsupported handler return type %r\" %\n type(response))", "def on_message(self, ws, message):\n message = json.loads(message)\n if message['type'] == 'error':\n self.on_error(None, message['message'])\n elif message['type'] == 'subscriptions':\n print(\"Subscribed to {}\".format(', '.join([ channel['name'] for channel in message['channels'] ])))\n else:\n if ((message['type']=='ticker' and message['product_id'] in self._ticker) or \n (message['type'] in [\"snapshot\", \"l2update\"] and message['product_id'] in self._level2) or \n (message['type'] in [\"received\",\"open\",\"done\",\"match\",\"change\",\"activate\"] )):\n self.messages.append(message)\n elif message['type']=='heartbeat':\n self.updated_time = time.time()", "def onMessageFrameData(self, payload):", "def _handle_message(self, msg):\n self.event('message', msg)", "def on_message(self, msg) -> None:\n\n decoded_msg = json.loads(msg)\n message_type = decoded_msg[\"type\"]\n\n if message_type == MSG_SUBCRIPTIONS:\n\n product_ids = decoded_msg[\"channels\"]\n logging.debug(\"Subscriptions: {}\".format(product_ids))\n\n elif message_type == MSG_SNAPSHOT:\n\n product_id = decoded_msg[\"product_id\"]\n self._snapshot(decoded_msg)\n\n # Old best bid and ask doesn't exist yet, this will always set a new bbo\n self.set_if_new_bbo(product_id)\n\n elif message_type == MSG_L2UPDATE:\n\n product_id = decoded_msg[\"product_id\"]\n self.update(decoded_msg)\n\n self.set_if_new_bbo(product_id)\n\n self.event_count += 1", "def _r_handle_message_contents(self, msg, protocol):\n if isinstance(msg, ResponseMessage):\n d = self._waiting_messages.pop(msg.response_to, None)\n if d is not None:\n d.callback(msg)\n elif isinstance(msg, ServerMotdMessage):\n print(\"Connected: %s\" % msg.motd)\n self._r_successful_connection()\n elif isinstance(msg, EventMessage):\n callback = self._event_callbacks.get((msg.service_name, msg.event_name))\n if callback is not None:\n threads.deferToThread(callback, *msg.pargs, **msg.kwargs)", "def run(self):\n alogger.info(\"Recieved message from %s, Message: (%d) %s\" % 
(self.client.getaddress(), self.action_type, self.message))\n \n #Try to call th function associated with this message type.\n #format = \"handle_<type>\" (eg: handle_100)\n fn = globals().get(\"handle_\" + str(self.action_type))\n if fn and callable(fn):\n fn(self.message, self.address, self.client)\n else:\n alogger.info(\"Received unknown message from %d, type: %d\" % (self.client.getaddress(), self.action_type))", "def handle_msg(msg):\n if comm._msg_callback:\n comm._msg_callback(msg)", "def listen_to_message(**payload):\n\n data = payload['data']\n\n try:\n message = data['text']\n user = data['user']\n message_id = data['client_msg_id']\n time = data['event_ts']\n channel = data['channel']\n process_data({'user': user, 'message': message, 'message_id': message_id, 'channel': channel, 'time': time})\n except KeyError:\n pass\n except Exception as e:\n logging.error(e)\n return None", "def inbound_handler():\n req = request.json\n # Take the time to clear out any past reminders\n try:\n virtual_tn = req['to']\n assert len(virtual_tn) <= 18\n sms_from = req['from']\n assert len(sms_from) <= 18\n req['body']\n except (TypeError, KeyError, AssertionError) as e:\n msg = (\"Malformed inbound message: {}\".format(req))\n log.error({\"message\": msg, \"status\": \"failed\", \"exc\": str(e)})\n return Response('There was an issue parsing your request.', status=400)\n else:\n Reminder.clean_expired()\n try:\n appt = Reminder.query.filter_by(\n contact_num=sms_from).one()\n except NoResultFound:\n msg = \"No existing un-responded reminder for contact {}.\".format(\n sms_from)\n log.info({\"message\": msg})\n return Response(status=200)\n else:\n message = req['body'].upper()\n if 'YES' in message:\n appt.will_attend = True\n confirm = True\n elif 'NO' in message:\n appt.will_attend = False\n confirm = False\n else:\n confirm = None\n db_session.add(appt)\n try:\n send_reply.apply_async((appt.id,), {'confirm': confirm})\n except ConnectionError as e:\n log.critical({\"message\": \"unable to connect to redis\",\n \"exc\": type(e)})\n db_session.rollback()\n return Response(status=500)\n else:\n db_session.commit()\n log.info({\"message\":\n (\"successfully recorded response from {}, scheduled \"\n \"SMS confirmation for appointment {}\").format(\n sms_from, appt.id),\n \"reminder_id\": appt.id})\n return Response(status=200)", "def ceilometer_callback(self, ch, method, properties, body):\n payload = json.loads(body)\n try:\n message_body = json.loads(payload['oslo.message'])\n samples = message_body['args']['data']\n #print \"--------------------------------------------------\"\n self.pool.spawn_n(self.zabbix_sender.consume_samples,samples)\n except Exception,e:\n log.warn(str(e))", "def test_broadcast_call(self, TwilioRestClient_mock):\n from sosbeacon.event.message import broadcast_call\n broadcast_call('84973796065')\n self.assertEqual(1, TwilioRestClient_mock.call_count)", "def message_callback(self, message):\n message_data = json.loads(message)\n\n if message_data.get('command') == 'error':\n return self.command_error(message_data)\n\n if 'device_type' in message_data and not message_data['device_type'].startswith(self.device_filter):\n return\n\n # Try to find a matching command and execute it\n command_name = message_data['command']\n command_data = message_data.get('data', {})\n device_name = message_data.get('name')\n\n command_handler_name = 'command_{}'.format(command_name)\n if not hasattr(self, command_handler_name):\n logging.info(\"{} does not support command {}\".format(\n 
self,\n command_name\n ))\n return\n\n command_handler = getattr(self, command_handler_name)\n return command_handler(device_name, command_data)", "def handle(self, message):\n print(\"You received a message:\")\n print(message)\n # Overwrite this function to do something with the message!", "def VoiceMessageReceived(self,prefix,session,data,TCP=False):\n pass", "def get_message():\n # Only run xray in the AWS Lambda environment\n if runs_on_aws_lambda():\n xray_subsegment = xray_recorder.current_subsegment()\n xray_subsegment.put_annotation(\"key\", \"value\")\n # Sample metadata\n # subsegment.put_metadata(\"operation\", \"metadata\", \"python object/json\")\n xray_recorder.end_subsegment()", "def application_message(self, bus, msg):\n\t\tmsgtype = msg.structure.get_name()\n\t\tif msgtype == 'partial_result':\n\t\t\tself.partial_result(msg.structure['hyp'], msg.structure['uttid'])\n\t\telif msgtype == 'result':\n\t\t\tself.final_result(msg.structure['hyp'], msg.structure['uttid'])\n\t\t\tself.pipeline.set_state(gst.STATE_PAUSED)", "def process_message(self, msg, src):", "def ProcessCall(self, Call):\n Call.MarkAsSeen()\n\n self.SayByVoice(Call, self.Messages['welcome_phrase'] % Call.PartnerDisplayName)\n self.SayByVoice(Call, self.Messages['tell_rules'])\n\n # record wav file with buddy's speech\n TemporaryFileWAV = tempfile.NamedTemporaryFile(prefix= Call.PartnerHandle +\"_record_\", suffix=\".wav\", delete=False)\n TemporaryFileWAV.close()\n\n Call.OutputDevice(Skype4Py.callIoDeviceTypeFile, TemporaryFileWAV.name)\n\n # give 10 seconds for user to speak\n time.sleep(10)\n\n # terminate speech recording\n Call.OutputDevice(Skype4Py.callIoDeviceTypeFile, None)\n\n self.SayByVoice(Call, self.Messages['10_seconds_passed'])\n\n # convert wav into the flac using http://flac.sourceforge.net/ binary\n ChromeRecognizeURL = \"https://www.google.com/speech-api/v1/recognize?xjerr=1&client=chromium&lang=en-US\"\n ConvertCommand = \"flac --channels=1 --sample-rate=16000 %s\" % TemporaryFileWAV.name\n subprocess.call(ConvertCommand)\n\n TemporaryFileFlacName = TemporaryFileWAV.name.replace('.wav','.flac')\n TemporaryFileFlac = open(TemporaryFileFlacName,\"rb\")\n\n # send flac to the google recognize API (warning, this API is unofficial, use only for testing)\n GoogleRecognizeAPIURL = \"https://www.google.com/speech-api/v1/recognize?xjerr=1&client=chromium&lang=en-US\"\n GoogleRecognizeRequest = urllib2.Request(GoogleRecognizeAPIURL, TemporaryFileFlac.read(), {'Content-Type': 'audio/x-flac; rate=16000'})\n DataAnswer = json.loads(urllib2.urlopen(GoogleRecognizeRequest).read())\n\n TemporaryFileFlac.close()\n\n # closest variant is always first in results\n if len(DataAnswer['hypotheses']) > 0:\n ClosestVariant = DataAnswer['hypotheses'][0]['utterance']\n else:\n ClosestVariant = \"nothing\"\n\n self.SayByVoice(Call, \"You asked: %s\" % ClosestVariant)\n self.SayByVoice(Call, \"My answer is: %s\" % self.AI.respond(ClosestVariant, Call.PartnerHandle))\n self.SayByVoice(Call, \"Goodbye!\")\n\n # clean rubbish and finish the call\n os.remove(TemporaryFileWAV.name)\n os.remove(TemporaryFileFlacName)\n\n Call.Finish()", "def ServerSyncReceived(self,message):", "def on_message(self, message):\n\n # Start an infinite loop when this is called\n if message == \"read_camera\":\n self.camera_loop = PeriodicCallback(self.loop, 10)\n self.camera_loop.start()\n\n # Extensibility for other methods\n else:\n print(\"Unsupported function: \" + message)", "def handle_message(self, data, channel):\n pass", "def 
onAdaptorData(self, message):\n #logging.debug(\"%s onadaptorData, message: %s\", ModuleName, message)\n if message[\"characteristic\"] == \"acceleration\":\n for a in self.accel:\n if a.id == self.idToName[message[\"id\"]]: \n a.processAccel(message)\n break\n elif message[\"characteristic\"] == \"temperature\":\n for t in self.temp:\n if t.id == self.idToName[message[\"id\"]]:\n t.processTemp(message)\n break\n elif message[\"characteristic\"] == \"ir_temperature\":\n for t in self.irTemp:\n if t.id == self.idToName[message[\"id\"]]:\n t.processIrTemp(message)\n break\n elif message[\"characteristic\"] == \"gyro\":\n for g in self.gyro:\n if g.id == self.idToName[message[\"id\"]]:\n g.processGyro(message)\n break\n elif message[\"characteristic\"] == \"magnetometer\":\n for g in self.magnet:\n if g.id == self.idToName[message[\"id\"]]:\n g.processMagnet(message)\n break\n elif message[\"characteristic\"] == \"buttons\":\n for b in self.buttons:\n if b.id == self.idToName[message[\"id\"]]:\n b.processButtons(message)\n break\n elif message[\"characteristic\"] == \"humidity\":\n for b in self.humidity:\n if b.id == self.idToName[message[\"id\"]]:\n b.processHumidity(message)\n break\n elif message[\"characteristic\"] == \"binary_sensor\":\n for b in self.binary:\n if b.id == self.idToName[message[\"id\"]]:\n b.processBinary(message)\n break\n elif message[\"characteristic\"] == \"power\":\n for b in self.power:\n if b.id == self.idToName[message[\"id\"]]:\n b.processPower(message)\n break\n elif message[\"characteristic\"] == \"battery\":\n for b in self.battery:\n if b.id == self.idToName[message[\"id\"]]:\n b.processBattery(message)\n break\n elif message[\"characteristic\"] == \"connected\":\n for b in self.connected:\n if b.id == self.idToName[message[\"id\"]]:\n b.processConnected(message)\n break\n elif message[\"characteristic\"] == \"luminance\":\n for b in self.luminance:\n if b.id == self.idToName[message[\"id\"]]:\n b.processLuminance(message)\n break", "def onMessage(self, message):\n raise NotImplementedError", "def run(self):\n #use subprocess for your bindings when develop a new functionality\n fulldate = datetime.now().strftime(\"%A, %d. 
%B %Y %I:%M%p\")\n\n hours = datetime.now().strftime(\"%I\")\n minutes = datetime.now().strftime(\"%I\")\n\n if self.req_from == 'jabber':\n response = {'request': self.request\n ,'text' : fulldate\n ,'jmsg' : fulldate\n ,'continue' : 0\n ,'type':'response' }\n\n if self.req_from == 'julius':\n response = {'request': self.request\n ,'say': \"IT'S, %d O'CLOCK AND %d MINUTES\" % ( int(hours), int(minutes))\n ,'text' : fulldate\n ,'continue' : 0\n ,'type' : 'response' }\n\n return response\n #import subprocess\n #s = subprocess.Popen(['ffmpeg', '-i', speech, flac ] , stderr=subprocess.STDOUT, stdout=subprocess.PIPE).communicate()[0]", "def onMessage(self):\n \"\"\"\n Validates that the received message is from a student and then broadcasts the message to the rest of the class.\n\n @param self: self is the instance of this object.\n @param message: the message that is received\n @param student: the student that sent the message\n \"\"\"\n pass", "def _process_message(self, obj):\n pass", "def _handle_msg(self, msg):\n data = msg['content']['data']\n method = data['method']\n\n if method == 'update':\n if 'state' in data:\n state = data['state']\n if 'buffer_paths' in data:\n _put_buffers(state, data['buffer_paths'], msg['buffers'])\n self.set_state(state)\n\n # Handle a state request.\n elif method == 'request_state':\n self.send_state()\n\n # Handle a custom msg from the front-end.\n elif method == 'custom':\n if 'content' in data:\n self._handle_custom_msg(data['content'], msg['buffers'])\n\n # Catch remainder.\n else:\n self.log.error('Unknown front-end to back-end widget msg with method \"%s\"' % method)", "def respond_to_message(self):\n\n MessageEventHandler(self.state, self.meta_data, self.message_data).handle_events(events=self.events)\n data = Converter(self.state).get_messages(meta_data=self.meta_data, message_data=self.message_data)\n\n outgoing_messages = data.get(\"messages\", [])\n events_to_publish = data.get(\"publish_events\", [])\n\n agent_messages = [message[\"message\"] for message in outgoing_messages if message[\"sending_to\"] == \"AGENT\"]\n user_messages = [message[\"message\"] for message in outgoing_messages if message[\"sending_to\"] == \"USER\"]\n\n agent_response = Util.send_messages(messages=agent_messages, sending_to=\"AGENT\")\n user_response = Util.send_messages(messages=user_messages, sending_to=\"USER\")\n\n if agent_response or user_response:\n\n Util.update_state(meta_data=self.meta_data, state=self.state)\n Util.log_events(meta_data=self.meta_data, state=self.state, events=events_to_publish)\n\n return 1", "def on_message_received(ch, method, properties, body):\n # the body contains the command flag followed by a colon ':' and the message for the drone\n # decode the body to utf8\n received_bytes = body.decode('utf-8')\n # split the received_bytes to get the command _flag and message\n recieved_message = received_bytes.split(':')\n # since rabbit mq body is a byte\n if (str(recieved_message[0]) == \"c01\"):\n # c01 - command center orders the drone to deliver a item\n print(\"Order Received from the command center to deliver an item to the following address \\n\", str(\n recieved_message[1]))\n time.sleep(2)\n # print in the drone's console that the item has been lift off\n print('\\nLifting off the Item to the delivery address.')\n print('\\nUpdating Status to the command centre ......')\n # Assume the drone has reached the delivery address . 
Now send a\n # message to the warehouse command center that it has reached the\n # delivery area\n time.sleep(5)\n rpc_sendback(\"c02\")\n # Assume the drone has delivered the item and issue the status message\n # to the command center\n time.sleep(5)\n rpc_sendback(\"c03\")\n # #Assume the drone has reached the parking spot and issue the message to the command center that is available for next instruction\n time.sleep(5)\n rpc_sendback(\"c04\")\n\n else:\n print(\"Received Instruction from Warehouse \" +\n str(recieved_message[1]))\n channel.basic_ack(delivery_tag=method.delivery_tag)\n # channel.start_consuming()", "def _process_message(self, json_object):\n\n message = json.loads(json_object)\n if message['type'] == \"relay\":\n self._process_relay(message)\n elif message['type'] == \"control\":\n self._process_control(message)\n else:\n print(\"ERROR Received message has invalid type\\n\")\n return", "def _handle_one_message(self):\n\n type, data = self.cxn.recv_message()\n\n if type.startswith(\"call\"):\n if len(data) != 3:\n message = (type, data)\n raise MessageError.invalid(message, \"incorrect number of args\")\n flags = {\n \"want_response\": type == \"call\",\n }\n call = Call(data[0], data[1], data[2], flags, self.client)\n self._handle_call(call)\n return False\n\n raise MessageError.bad_type(type)", "def main(msg: func.ServiceBusMessage):\r\n\r\n # Extract the method into a dictionary\r\n msg_dict = json.loads(msg.get_body().decode(\"utf-8\"))\r\n\r\n logging.info(f\"Python ServiceBus queue trigger processed message: {msg_dict}\")\r\n\r\n # Enable a connection with the IoT Hub. The connectionstring for the IoT Hub\r\n # is preloaded in the Azure Functions configurations.\r\n connectino_string_iothub = os.getenv(\"connectionStringIotHub\")\r\n registry_manager = IoTHubRegistryManager(connectino_string_iothub)\r\n\r\n # Settings for the method that the IoT Device should run upon receiving the message.\r\n callback_method = \"start_fan\"\r\n callback_payload = {}\r\n device_method = CloudToDeviceMethod(\r\n method_name=callback_method, payload=callback_payload\r\n )\r\n\r\n # Sending the actual cloud-to-device message and invoke a function on the IoT device.\r\n device_id = msg_dict[\"IoTHub\"][\"ConnectionDeviceId\"]\r\n response = registry_manager.invoke_device_method(device_id, device_method)\r\n\r\n print(\"\")\r\n print(\"Device Method called\")\r\n print(\"Device Method name : {0}\".format(callback_method))\r\n print(\"Device Method payload : {0}\".format(callback_payload))\r\n print(\"\")\r\n print(\"Response status : {0}\".format(response.status))\r\n print(\"Response payload : {0}\".format(response.payload))", "def _bright_received(self):\n self._call_subscribers(on_level=1)", "def sms():\n def send_sms(number, message):\n #get session bus\n try:\n session_bus = dbus.SessionBus()\n except dbus.exceptions.DBusException:\n click.echo(chalk.red('Have a display you must'))\n return\n\n #check for kdeconnect\n try:\n devices_dbus_obj = session_bus.get_object('org.kde.kdeconnect','/modules/kdeconnect/devices')\n except dbus.exceptions.DBusException:\n click.echo(chalk.red('kdeconnect not installed it appears'))\n return\n\n #get devices ids\n devices_xml = devices_dbus_obj.Introspect(dbus_interface='org.freedesktop.DBus.Introspectable')\n devices_xml = ET.fromstring(devices_xml)\n nodes = devices_xml.findall('node')\n if(len(nodes) is 0):\n click.echo(chalk.red('Devices there are not'))\n return\n deviceIDs = list()\n for node in nodes:\n 
deviceIDs.append(node.get('name'))\n\n #get devices properties\n deviceID_Props = dict()\n for ID in deviceIDs:\n try:\n device = session_bus.get_object('org.kde.kdeconnect', '/modules/kdeconnect/devices/' + ID)\n deviceProps = device.GetAll('', dbus_interface='org.freedesktop.DBus.Properties')\n deviceID_Props[ID] = deviceProps\n except dbus.exceptions.DBusException:\n #don't create an entry in the dictionary if the object, or a GetAll method does not exist\n pass\n if(len(deviceID_Props) is 0):\n click.echo(chalk.red('Devices there are not'))\n return\n\n #eliminate non sms devices\n devices_no_sms = list()\n for device in deviceID_Props:\n keeping = False\n for plugin in deviceID_Props[device]['supportedPlugins']:\n if('sms' in plugin):\n keeping = True\n if(not keeping):\n devices_no_sms.append(device)\n for device in devices_no_sms:\n del deviceID_Props[device]\n\n #if there are no devices that support sms\n if(len(deviceID_Props) is 0):\n click.echo(chalk.red('Devices that support sms there are not'))\n return\n #elif only one device was found that supports sms\n elif(len(deviceID_Props) is 1):\n click.echo(chalk.yellow('Device using: ' + str(list(deviceID_Props.values())[0]['name'])))\n sendMessage = session_bus.get_object('org.kde.kdeconnect', '/modules/kdeconnect/devices/' + str(list(deviceID_Props.keys())[0]) + '/sms')\n sendMessage.sendSms(number, message, dbus_interface='org.kde.kdeconnect.device.sms')\n return\n #otherwise get user to choose device\n else:\n choice_map = dict()\n for idx, device in enumerate(deviceID_Props, start=1):\n click.echo(chalk.green(str(idx) + ': ' + deviceID_Props[device]['name']))\n choice_map[str(idx)] = device\n choice = click.prompt(chalk.blue('Device, you must select: '), default='1', type=click.Choice(choice_map.keys()))\n #click.echo('you chose: ' + choice_map[the_chosen_device] + ' with id: ' + deviceNames_IDs[choice_map[the_chosen_device]])\n sendMessage = session_bus.get_object('org.kde.kdeconnect', '/modules/kdeconnect/devices/' + choice_map[choice] + '/sms')\n sendMessage.sendSms(number, message, dbus_interface='org.kde.kdeconnect.device.sms')\n return\n\n click.echo(chalk.blue('For whom you want to send an sms'))\n friend_name = input().strip()\n friend_name_lower = friend_name.lower()\n if os.path.isfile(PEOPLE_CONFIG_FILE_PATH):\n with open(PEOPLE_CONFIG_FILE_PATH) as fin:\n contents = yaml.load(fin)\n entries = contents['entries']\n for entry in entries:\n if(friend_name == entry['name'] or friend_name_lower == entry['name']):\n number = entry['mobile']\n break\n if('number' not in locals()):\n click.echo(chalk.red('Friend not found.'))\n else:\n if(len(number) is not 0):\n click.echo(chalk.blue('Message, you must enter: '))\n message = input(':')\n click.echo(chalk.yellow('Device to send sms to ' + number + ' looking for: '))\n send_sms(number, message)\n else:\n click.echo(chalk.red('Friends number not in people file, run `yoda people setup` to add it.'))\n else:\n click.echo(chalk.red('The People file does not exist, run `yoda people setup` to create an entry.'))", "def receive(self, message):", "def handleModuleMessage(self, data, datapathSendFcn):\n print (\"handler not implemented in silverline.\")", "async def callback(request):\n\tsignature = request.headers['X-Line-Signature']\n\tbody = await request.text()\n\ttry:\n\t\tevents = line_parser.parse(body, signature)\n\t\tfor event in events:\n\t\t\tawait _line_to_discord(event)\n\texcept LineBotApiError as e:\n\t\tprint(\"got exception from LINE Message API: 
{0}\".format(e.message))\n\t\tfor m in e.error.details:\n\t\t\tprint(\" {0} : {1}\".format(m.property, m.message))\n\t\treturn web.Response(status=200)\n\texcept InvalidSignatureError:\n\t\treturn web.Response(status=400)\n\n\treturn web.Response()", "def nxlog_callback(ch, method, properties, body):\n\n def nx_formatter(event: dict) -> Dict:\n \"\"\"\n Форматирование nx'овой обвязки\n connector_id и dt - обязательные поля в событии\n :param event:\n :return:\n \"\"\"\n\n def cast(message: dict):\n \"\"\"приведение типов\"\"\"\n for k, v in message.items():\n if isinstance(v, datetime):\n message[k] = message[k].isoformat()\n if isinstance(v, decimal.Decimal):\n message[k] = int(message[k])\n try:\n message[k] = int(message[k])\n except (ValueError, TypeError):\n pass\n if k in ['username']:\n message[k] = str(message[k])\n return message\n\n nx_attributes = get_nx_attributes(event['connector_id'])\n f_message = NxlogMessage(**nx_attributes)\n f_message.hostname = socket.gethostname()\n event_time = parse(event['dt'])\n f_message.event_time = event_time\n f_message.detection_time = event_time\n f_message.raw = event\n f_message.md5 = md5_from_raw(event)\n return cast(f_message.to_dict())\n\n rmq_message = orjson.loads(body)\n logger.debug(\"Received message from queue: %s\", rmq_message)\n metric_notify_counter(app_module=rmq_message['connector_id'], metric_name=\"stream-of-events\")\n\n # if event is already exists in redis, there's no need in sending to nxlog\n rmq_message_id = f\"{rmq_message['connector_id']}_{rmq_message['id']}_{md5_from_raw(rmq_message)}\"\n\n if env.redis.exists(rmq_message_id):\n ch.basic_ack(delivery_tag=method.delivery_tag)\n logger.debug(f\"{rmq_message['id']} already exist\")\n return\n\n nx_message = nx_formatter(rmq_message)\n logger.debug(\"Try to send event to NXLog [%s] %s\", nx_message['raw']['connector_id'], nx_message['raw'])\n\n if not env.nxlog_client:\n env.nxlog_client = NXLogClient(**env.nxlog_config['nx_collector'])\n if env.nxlog_client.send_event(nx_message):\n ch.basic_ack(delivery_tag=method.delivery_tag)\n metric_notify_counter(app_module=rmq_message['connector_id'],\n metric_name=f\"sent_messages_{nx_message['DevType']}\")\n\n # put into redis after successful sending\n env.redis.set(rmq_message_id, body, ex=1209600) # срок хранения данных в базе 14 дней\n metric_notify_counter(app_module=rmq_message['connector_id'], metric_name=\"received-events\")\n\n return" ]
[ "0.6835022", "0.60718656", "0.59854895", "0.59607", "0.5945775", "0.5932107", "0.5932107", "0.5917332", "0.5839464", "0.5817131", "0.5794533", "0.574719", "0.5722084", "0.5713493", "0.57134485", "0.5699287", "0.5698061", "0.56944233", "0.5692819", "0.5692426", "0.56885827", "0.5686601", "0.5684375", "0.56739146", "0.56652766", "0.56640935", "0.5655968", "0.5651607", "0.56513226", "0.5589333", "0.5586128", "0.55782646", "0.5572112", "0.5568191", "0.55655515", "0.5555582", "0.5550819", "0.5536466", "0.5535157", "0.55166185", "0.5512326", "0.5503031", "0.5503031", "0.5503031", "0.5495615", "0.5492256", "0.5486648", "0.54670316", "0.54670316", "0.54610324", "0.54561156", "0.54473877", "0.54366434", "0.5416855", "0.53739625", "0.53579897", "0.5356547", "0.53562313", "0.53519696", "0.5314771", "0.5311782", "0.5308953", "0.5288002", "0.5280093", "0.5263049", "0.52579546", "0.5257927", "0.5254823", "0.52538395", "0.52500975", "0.5248601", "0.5248563", "0.52433836", "0.5239468", "0.5228221", "0.52258724", "0.52215785", "0.5219365", "0.52173305", "0.5214549", "0.5211959", "0.521046", "0.52103657", "0.5206973", "0.52052903", "0.5205104", "0.5194728", "0.5193359", "0.5191489", "0.5186703", "0.518585", "0.5182849", "0.5163212", "0.5161922", "0.5160935", "0.51555026", "0.51531124", "0.5149464", "0.5148202", "0.51472366" ]
0.6912686
0
Formats |record| with color.
def format(self, record): msg = super(ColoredFormatter, self).format(record) color = self._COLOR_MAPPING.get(record.levelname) if self._use_colors and color: msg = '%s%s%s' % (color, msg, self._RESET) return msg
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def format(self, record):\n\n\t\t# Use copy.copy - c.f. https://stackoverflow.com/a/7961390\n\t\tcolored_record = copy.copy(record)\n\n\t\tcolor = None\n\t\ttry:\n\t\t\tcolor = record.color\n\t\texcept AttributeError as e:\n\t\t\tpass\n\n\t\tif color is not None:\n\t\t\tif color is None or not color or color == \"none\":\n\t\t\t\tpass\n\t\t\telif color == \"white\":\n\t\t\t\twhite = \"\\033[37m\"\n\t\t\t\tclear = \"\\033[0;0m\"\n\t\t\t\tcolored_record.msg = \"{0:s}{1:s}{2:s}\".format(\n\t\t\t\t\twhite,\n\t\t\t\t\tcolored_record.msg,\n\t\t\t\t\tclear,\n\t\t\t\t)\n\t\t\telse:\n\t\t\t\traise WCMIError(\"error: ConsoleFilter: unrecognized color `{0:s}'.\".format(str(color)))\n\n\t\treturn super().format(colored_record)", "def colored_formatter(record):\n\n colours = {\n \"info\": (\"blue\", \"normal\"),\n \"debug\": (\"magenta\", \"normal\"),\n \"warning\": (\"yellow\", \"normal\"),\n \"print\": (\"green\", \"normal\"),\n \"error\": (\"red\", \"bold\"),\n }\n\n levelname = record.levelname.lower()\n\n if levelname == \"error\":\n return\n\n if levelname.lower() in colours:\n levelname_color = colours[levelname][0]\n header = color_text(\"[{}]: \".format(levelname.upper()), levelname_color)\n\n message = record.getMessage()\n\n if levelname == \"warning\":\n warning_category_groups = re.match(r\"^\\w*?(.+?Warning) (.*)\", message)\n if warning_category_groups is not None:\n warning_category, warning_text = warning_category_groups.groups()\n\n warning_category_colour = color_text(\n \"({})\".format(warning_category), \"cyan\"\n )\n message = \"{} {}\".format(\n color_text(warning_text, \"\"), warning_category_colour\n )\n\n sys.__stdout__.write(\"{}{}\\n\".format(header, message))\n sys.__stdout__.flush()\n\n return", "def format(self, record):\n\n level_colors = {\n 'DEBUG': strc('DEBUG', 'yellow', 'bold'),\n 'INFO': strc('INFO', 'blue', 'bold'),\n 'WARNING': strc('WARNING', 'yellow', 'bold'),\n 'ERROR': strc('ERROR', 'red', 'bold'),\n 'CRITICAL': strc('CRITICAL', 'red', 'bold')}\n\n if record.levelname in level_colors.keys():\n record.levelname = level_colors[record.levelname]\n record.name = strc(record.name, 'black', 'bold')\n\n return logging.Formatter.format(self, record)", "def update_format(self, record):\n prefix = \"\\u001b[\"\n color = f\"{prefix}{self.color_map[record.levelno]}m\"\n bold = f\"{prefix}1m\"\n gray = f\"{prefix}1m{prefix}30m\"\n reset = f\"{prefix}0m\"\n self._style._fmt = (\n f\"%(asctime)s\"\n f\" {gray}│{reset} {color}%(levelname)-8s{reset} {gray}│{reset} \"\n )\n if hasattr(record, \"function\"):\n self._style._fmt += (\n f\"{gray}%(indent)s{reset}\"\n f\"{bold}%(function)s{reset}{gray}:{reset}\"\n \" %(message)s\"\n )\n else:\n self._style._fmt += \"%(indent)s%(message)s\"", "def format(self, record):\n log_fmt = self.FORMATS.get(record.levelno)\n return BaseFormatter(log_fmt).format(record)", "def format(self, record):\n message = record.getMessage()\n asctime = self.formatTime(record, self.datefmt)\n name = yellow(record.name)\n\n s = \"%(timestamp)s %(levelname)s %(name)s \" % {\n \"timestamp\": green(\"%s,%03d\" % (asctime, record.msecs), bold=True),\n \"levelname\": self.LEVELS[record.levelname],\n \"name\": name,\n }\n\n if \"\\n\" in message:\n indent_length = len(re_color_codes.sub(\"\", s))\n message = message.replace(\"\\n\", \"\\n\" + \" \" * indent_length)\n\n s += message\n return s", "def format(self, record):\n msg = logging.Formatter.format(self, record)\n label, color = self.label(record)\n if self.strip:\n return \"{:10s}{}\".format(label, 
sub(\"\\033\\\\[[0-9]+m\", \"\", msg, 0))\n else:\n return \"\\033[1;{}m{:10s}\\033[0m{}\".format(color, label, msg)", "def _log_format_onecolor(record):\n\n return LEVEL_COLORS.get(record.levelname)", "def format(self, record):\n message = super(ConsoleFormatter, self).format(record)\n color_code = self.color(self.log_colors, record.levelname)\n if hasattr(record, 'ctx'):\n metadata = record.ctx.invocation_metadata()\n for item in metadata:\n if item.key == 'author_name':\n setattr(record, 'user', item.value)\n elif item.key == 'correlation_id':\n setattr(record, 'correlationId', item.value)\n\n for key, value in record.__dict__.items():\n #this allows to have numeric keys\n if (key not in RESERVED_ATTR_HASH\n and not (hasattr(key, \"startswith\")\n and key.startswith('_'))):\n message = append(color_code=color_code, message=message, key=key, value=value)\n return message", "def format(self, record):\n\n\n if not hasattr(record, 'filename_'):\n record.file_indicator = '-'\n else:\n record.file_indicator = os.path.relpath(record.filename_.strip(),\n self.study_dir)\n record.line_indicator = self.format_aggregated(\n record,\n 'line_number',\n ' line %d:',\n ' lines [%s]:',\n optional=True)\n record.column_indicator = self.format_aggregated(\n record,\n 'column_number',\n ' column %d:',\n ' columns [%s]:',\n optional=True)\n record.cause_indicator = self.format_aggregated(\n record,\n 'cause',\n \"; value encountered: '%s'\",\n \"; values encountered: ['%s']\",\n join_string=\"', '\",\n optional=True)\n\n # format the string based on these fields\n formatted_result = super(LogfileStyleFormatter, self).format(record)\n\n # prepend an empty line if the filename is different than before\n current_filename = getattr(record, 'filename_', '')\n if (self.previous_filename is not None and\n current_filename != self.previous_filename):\n formatted_result = '\\n' + formatted_result\n self.previous_filename = current_filename\n\n return formatted_result", "def get_color(self, record):\n if record.level >= logbook.ERROR:\n return 'red'\n elif record.level >= logbook.NOTICE:\n return 'yellow'\n elif record.level >= logbook.INFO:\n return 'green'\n elif record.level >= logbook.DEBUG:\n return 'darkblue'\n return 'lightgray'", "def format(self, record):\n\n scrubbed = record[\"message\"]\n # scrubs any messages that match the message pattern\n if isinstance(scrubbed, dict):\n scrubbed = json.dumps(scrubbed)\n for search, replace in self.scrub_patterns.items():\n scrubbed = re.sub(search, replace, scrubbed)\n record[\"extra\"][\"scrubbed\"] = scrubbed\n\n if not record[\"extra\"].get(\"device\") or record[\"extra\"].get(\"device\") is None:\n record[\"extra\"][\"device\"] = \"\"\n else:\n record[\"extra\"][\"device\"] = f\"{record['extra']['device']} - \"\n return self.fmt", "def format(self, record):\n mappings = {\n 'asctime': create_timestamp,\n 'message': lambda r: r.msg,\n }\n\n formatters = self.parse()\n\n log_record = {}\n for formatter in formatters:\n try:\n log_record[formatter] = mappings[formatter](record)\n except KeyError:\n log_record[formatter] = record.__dict__[formatter]\n\n return json.dumps(log_record)", "def get_color(self, record):\n if record.level == CRITICAL:\n return Fore.RED + Style.DIM\n elif record.level == ERROR:\n return Fore.RED + Style.BRIGHT\n elif record.level == WARNING:\n return Fore.YELLOW + Style.DIM\n elif record.level == NOTICE:\n return Fore.CYAN + Style.BRIGHT\n elif record.level == DEBUG:\n return Fore.GREEN + Style.BRIGHT\n return Fore.WHITE", "def format(self, 
record):\n extra = {\n \"message\": record.getMessage(),\n \"time\": self.formatTime(record, self.datefmt),\n \"msecs\": record.msecs,\n \"name\": record.name,\n \"level\": record.levelname,\n }\n\n keys = filter(self.filterer, record.__dict__)\n extra.update({k: record.__dict__[k] for k in keys})\n return str(CustomEncoder().encode(extra))", "def format(self, record):\n row = [self.formatTime(record, self.datefmt), record.name, record.levelname]\n keys = filter(self.filterer, record.__dict__)\n extra = [record.__dict__[k] for k in keys]\n\n self.writer.writerow(row + extra + [record.getMessage()])\n data = self.output.getvalue()\n self.output.truncate(0)\n self.output.seek(0)\n return data.strip()", "def color_domain_record_cells(val):\n if isinstance(val, int):\n color = \"yellow\" if val < 3 else None\n elif isinstance(val, float):\n color = \"yellow\" if val > 4.30891 or val < 2.72120 else None\n else:\n color = None\n return f\"background-color: {color}\"", "def format(self, record: logging.LogRecord) -> str:\n return filter_datum(self.fields, self.REDACTION,\n super().format(record), self.SEPARATOR)", "def format(self, record: LogRecord) -> str:\n json_record: Dict = self.json_record(record.getMessage(), record)\n mutated_record: Dict = self.mutate_json_record(json_record)\n mutated_record = mutated_record if mutated_record is not None else json_record\n\n return self.to_json(mutated_record)", "def format(self, record):\n # Standard document\n document = {\n 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'),\n 'level': record.levelname,\n 'thread': record.thread,\n 'threadName': record.threadName,\n 'message': record.getMessage(),\n 'loggerName': record.name,\n 'fileName': record.pathname,\n 'module': record.module,\n 'method': record.funcName,\n 'lineNumber': record.lineno,\n 'hostname': socket.getfqdn(socket.gethostname()),\n 'ip': socket.gethostbyname(socket.gethostname())\n }\n # Standard document decorated with exception info\n if record.exc_info is not None:\n document.update({\n 'exception': {\n 'message': str(record.exc_info[1]),\n 'code': 0,\n 'stackTrace': self.formatException(record.exc_info)\n }\n })\n # Standard document decorated with extra contextual information\n if len(self.DEFAULT_PROPERTIES) != len(record.__dict__):\n contextual_extra = set(record.__dict__).difference(set(self.DEFAULT_PROPERTIES))\n if contextual_extra:\n for key in contextual_extra:\n document[key] = record.__dict__[key]\n return document", "def format(self, record):\n record.message = indent_string(record.getMessage())\n if \"%(asctime)\" in self._fmt:\n record.asctime = self.formatTime(record, self.datefmt)\n s = self._fmt % record.__dict__\n if record.exc_info:\n # Cache the traceback text to avoid converting it multiple times\n # (it's constant anyway)\n if not record.exc_text:\n record.exc_text = self.formatException(record.exc_info)\n if record.exc_text:\n if s[-1:] != \"\\n\":\n s = s + \"\\n\"\n s = \"{0} Exception:\\n {1}\".format(s, indent_string(record.exc_text))\n return s", "def formatter(record):\n\n lines = record[\"message\"].splitlines()\n prefix = (\n \"{time:YY-MM-DD HH:mm:ss.S} | {level.name:<8} | \"\n + \"{file}.{function}:{line} - \".format(**record)\n )\n indented = (\n lines[0] + \"\\n\" + \"\\n\".join(\" \" * len(prefix) + line for line in lines[1:])\n )\n record[\"message\"] = indented.strip()\n return (\n \"<g>{time:YY-MM-DD HH:mm:ss.S}</> | <lvl>{level.name:<8}</> | \"\n + \"<e>{file}.{function}:{line}</> - <lvl>{message}\\n</>{exception}\"\n )", "def 
format(self, record):\n return '[{}] {}'.format(QBShFormatter.LEVEL_DICT[record.levelname], record.getMessage())", "def emit(self, record):\n # Need to make a actual copy of the record\n # to prevent altering the message for other loggers\n myrecord = copy.copy(record)\n levelno = myrecord.levelno\n if levelno >= 50: # CRITICAL / FATAL\n front = '\\033[30;41m' # black/red\n elif levelno >= 40: # ERROR\n front = '\\033[30;41m' # black/red\n elif levelno >= 30: # WARNING\n front = '\\033[30;43m' # black/yellow\n elif levelno >= 20: # INFO\n front = '\\033[30;42m' # black/green\n elif levelno >= 10: # DEBUG\n front = '\\033[30;46m' # black/cyan\n else: # NOTSET and anything else\n front = '\\033[0m' # normal\n\n myrecord.levelname = '%s%s\\033[0m' % (front, myrecord.levelname)\n logging.StreamHandler.emit(self, myrecord)", "def format(self, record):\n data = record.__dict__.copy()\n\n # if record.args:\n # msg = record.msg % record.args\n # else:\n # msg = record.msg\n\n data.update(\n username=getpass.getuser(),\n time=datetime.now(),\n host=gethostname(),\n #args=tuple(unicode(arg) for arg in record.args)\n args=record.args\n )\n if 'exc_info' in data and data['exc_info']:\n data['exc_info'] = self.formatException(data['exc_info'])\n return data", "def get_formatted_record(self, record_format: str = None) -> str:\n if record_format:\n return record_format.format_map(defaultdict(str, **self.dict_values))\n raise RecordFormatError(\"Format string must be set\")", "def default_format(data, color):\n if color:\n out = '\\n\\n\\nTitle: {0}\\nDate: {1}\\nLink: {2}\\n\\nImages links: {3}'.format(\n colored(data['title'], 'green'),\n data['pubDate'],\n colored(data['link'], 'blue'),\n colored(data['media'], 'blue'),)\n else:\n out = '\\n\\n\\nTitle: {0}\\nDate: {1}\\nLink: {2}\\n\\nImages links: {3}'.format(\n data['title'],\n data['pubDate'],\n data['link'],\n data['media'],)\n return out", "def format(self, record):\n data = dict()\n\n data[\"category\"] = record.name\n data[\"timestamp\"] = datetime.datetime.utcnow()\\\n .replace(tzinfo=utc)\\\n .strftime('%Y-%m-%dT%H:%M:%SZ')\n data[\"level\"] = record.levelname\n data[\"message\"] = record.msg\n data[\"threadName\"] = record.threadName\n data[\"hostname\"] = self.hostname\n \n return data", "def format(self, record):\n message = {\n \"time\": datetime.utcfromtimestamp(record.created).isoformat(),\n \"level\": record.levelname,\n \"name\": record.name,\n \"message\": record.getMessage(),\n \"process\": record.process,\n \"thread\": record.threadName,\n \"hostname\": self.hostname,\n \"filename\": record.filename,\n \"function\": record.funcName,\n \"lineNo\": record.lineno,\n }\n\n if record.exc_info:\n message[\n \"exception\"\n ] = f\"{record.exc_info[0].__name__}: {record.exc_info[1]}\"\n message[\"traceback\"] = traceback.format_exc()\n\n return json.dumps(message, ensure_ascii=False)", "def transform(self, src_record):\n src_record.colon = ':'\n src_record.space = ' '\n src_record.sep = ' - '\n src_record.prefix = ''\n return \\\n (self.color_levelname \\\n (skip_repeat_line1 \\\n (src_record)))", "def formatTime(self, record, datefmt=None):\n ct = self.converter(record.created)\n _format = datefmt or self.default_time_format\n\n s = ct.strftime(_format)\n\n return s", "def format(self, record):\n # Standard document\n document = {\n 'timestamp': dt.datetime.utcnow(),\n 'level': record.levelname,\n 'thread': record.thread,\n 'threadName': record.threadName,\n 'message': record.getMessage(),\n 'loggerName': record.name,\n 'fileName': 
record.pathname,\n 'module': record.module,\n 'method': record.funcName,\n 'lineNumber': record.lineno\n }\n # Standard document decorated with exception info\n if record.exc_info is not None:\n document.update({\n 'exception': {\n 'message': str(record.exc_info[1]),\n 'code': 0,\n 'stackTrace': self.formatException(record.exc_info)\n }\n })\n # Standard document decorated with extra contextual information\n if len(self.DEFAULT_PROPERTIES) != len(record.__dict__):\n contextual_extra = set(record.__dict__).difference(\n set(self.DEFAULT_PROPERTIES))\n if contextual_extra:\n for key in contextual_extra:\n document[key] = record.__dict__[key]\n return document", "def format(self, record):\n data = {}\n\n data[\"category\"] = record.name\n data[\"timestamp\"] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')\n data[\"level\"] = record.levelname\n data[\"message\"] = record.msg\n data[\"threadName\"] = record.threadName\n return json.dumps(data)", "def _format(color: Union[List[int], str], style: str = '') -> QTextCharFormat:\n _color = QColor()\n if isinstance(color, str):\n _color.setNamedColor(color)\n else:\n _color.setRgb(color[0], color[1], color[2])\n\n f = QTextCharFormat()\n f.setForeground(_color)\n if 'bold' in style:\n f.setFontWeight(QFont.Bold)\n if 'italic' in style:\n f.setFontItalic(True)\n\n return f", "def format_time(self, record):\n record.dbtime = time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime(record.created))\n return", "def emit(self, record):\n try:\n msg = self.format(record)\n stream = self.stream\n fs = \"\\033[s\\033[?25l\\033[?6;7h\\033[%d;0f\\033E\\n%%s\\033[u\\033[?25h\" % (cols-2,) if active else \"%s\\n\"\n if not _unicode: #if no unicode support...\n stream.write(fs % msg)\n else:\n try:\n if (isinstance(msg, unicode) and\n getattr(stream, 'encoding', None)):\n ufs = u'\\033[s\\033[?25l\\033[?6;7h\\033[%d;0f\\033E\\n%%s\\033[u\\033[?25h' % (cols-2,) if active else u'%s\\n'\n try:\n stream.write(ufs % msg)\n except UnicodeEncodeError:\n #Printing to terminals sometimes fails. 
For example,\n #with an encoding of 'cp1251', the above write will\n #work if written to a stream opened or wrapped by\n #the codecs module, but fail when writing to a\n #terminal even when the codepage is set to cp1251.\n #An extra encoding step seems to be needed.\n stream.write((ufs % msg).encode(stream.encoding))\n else:\n stream.write(fs % msg)\n except UnicodeError:\n stream.write(fs % msg.encode(\"UTF-8\"))\n self.flush()\n except (KeyboardInterrupt, SystemExit):\n raise\n except TypeError:\n return\n except:\n self.handleError(record)", "def my_color_function(field):\n if field > 100000000:\n return \"#ff0000\"\n else:\n return \"#008000\"", "def format(self, record):\n # type: (LogRecord) -> str\n try:\n return str(getattr(self, record.levelname)(record))\n except AttributeError as err:\n raise RuntimeError('Unknown record level (name: %s)' % record.levelname) from err", "def format_time(self, record):\n record.dbtime = time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime(record.created))", "def emit_marker(record):\n logging.debug(\"Formatting individual record {}\".format(record))\n global individual_markers \n marker = record.copy()\n # logging.debug(\"Emitting individual marker: {}\".format(marker))\n individual_markers.append(marker)", "def color_levelname (self, record):\n record.levelname = '%s%s%s' % \\\n (self.term.bold_red if record.levelno >= 50 else \\\n self.term.bold_red if record.levelno >= 40 else \\\n self.term.bold_yellow if record.levelno >= 30 else \\\n self.term.bold_white if record.levelno >= 20 else \\\n self.term.yellow, record.levelname.title(), self.term.normal)\n return record", "def field_style(field_name, bushfire=None):\r\n if bushfire:\r\n try:\r\n value = getattr(bushfire, field_name)\r\n if field_name == \"dfes_incident_no\":\r\n return \"\" if value else \"color:red;\"\r\n else:\r\n return \"\"\r\n except:\r\n return \"\"\r\n else:\r\n return \"\"", "def format(self, record: logging.LogRecord = None) -> str:\n # s = super().format(record)\n s = None\n e = {}\n e['id'] = uuid.uuid4().hex\n e['message'] = record.getMessage()\n # log.warning('record.message: %r', record.getMessage())\n # log.warning('record.args: %r', record.args)\n e['created'] = record.created\n e['priority'] = record.levelname\n e['args'] = record.args\n e['source_code'] = {}\n e['source_code']['pathname'] = record.pathname\n e['source_code']['funcName'] = record.funcName\n e['source_code']['lineno'] = record.lineno\n ctx = record.args.get(PIPELINE_CONTEXT_KEY, None)\n if ctx:\n e[PIPELINE_CONTEXT_KEY] = ctx.toDict()\n # use array enclosure a[] to mainain the log file\n # yaml compliant as new events are appended\n # - event1:\n # - event2:\n # - ...\n a = [e]\n s = yaml.dump(a)\n return s", "def format(self, record):\n stack = inspect.stack(context=0)\n depth = len(stack)\n if self.baseline is None:\n self.baseline = depth\n if self.cut is None:\n filenames = map(lambda x: x.filename, stack)\n self.cut = self.identify_cut(filenames)\n\n # Inject custom information into the record\n record.indent = \".\" * (depth - self.baseline + self.manual_push)\n if depth > self.cut:\n record.function = stack[self.cut].function\n\n # Format the record using custom information\n self.update_format(record)\n out = super().format(record)\n\n # Remove custom information from the record\n del record.indent\n if hasattr(record, \"function\"):\n del record.function\n\n return out", "def format_time(self, record):\r\n record.created = time.strftime(\"%d.%m.%Y %H:%M:%S\", 
time.localtime(record.created))", "def colorize(self):\n return", "def color(value):\r\n return 'RGB({}, {}, {})'.format(value.red(), value.blue(), value.green())", "def _format (color, style=''):\n _format = QtGui.QTextCharFormat()\n if color != '':\n _format.setForeground(getattr(QtCore.Qt, color))\n if 'bold' in style:\n _format.setFontWeight(QtGui.QFont.Bold)\n if 'italic' in style:\n _format.setFontItalic(True)\n return _format", "def format(self, record: LogRecord) -> str:\n record.asctime = datetime.datetime.utcnow().strftime(\"%Y-%m-%dT%H:%M:%S.%fZ\")\n\n message = record.getMessage()\n if record.exc_info:\n eno = record.exc_info\n stacktrace = \"\".join(traceback.format_exception(None, eno[1], eno[2]))\n message += f\" excp: {stacktrace}\"\n if record.stack_info:\n stack = self.formatStack(record.stack_info)\n message += f\" trace: {stack}\"\n\n log_output = {\n \"tool\": type(self.checker).__name__,\n \"type\": \"infrastructure\",\n \"severity\": record.levelname,\n \"severityLevel\": max(0, record.levelno // 10 - 1),\n \"timestamp\": record.asctime,\n \"module\": record.module,\n \"function\": record.funcName,\n \"flag\": self.checker.flag,\n \"flagIndex\": self.checker.flag_idx,\n \"runId\": self.checker.run_id,\n \"roundId\": self.checker.round,\n \"relatedRoundId\": self.checker.flag_round,\n \"message\": message,\n \"teamName\": self.checker.team,\n \"teamId\": self.checker.team_id,\n \"serviceName\": self.checker.service_name,\n \"method\": self.checker.method,\n }\n\n return LOGGING_PREFIX + json.dumps(log_output)", "def format(color, style=''):\n _color = QColor()\n if type(color) is not str:\n _color.setRgb(color[0], color[1], color[2])\n else:\n _color.setNamedColor(color)\n\n _format = QTextCharFormat()\n _format.setForeground(_color)\n if 'bold' in style:\n _format.setFontWeight(QFont.Weight.Bold)\n if 'italic' in style:\n _format.setFontItalic(True)\n\n return _format", "def template(self, record):\n\n def _log_format_onecolor(record):\n \"\"\"\n Normal console output format\n \"\"\"\n\n return LEVEL_COLORS.get(record.levelname)\n\n def _log_format_notset(record, stylized=True):\n \"\"\"\n Default log format.\n \"\"\"\n\n reset = Style.RESET_ALL\n\n levelname = {\n 'style_before': LEVEL_COLORS.get(record.levelname) + Style.BRIGHT,\n 'format': '(%(levelname)s)',\n 'style_after': reset,\n 'prefix': '',\n 'suffix': '',\n }\n\n name = {\n 'style_before': Fore.WHITE + Style.DIM + Style.BRIGHT,\n 'format': '%(name)s',\n 'style_after': Fore.RESET + Style.RESET_ALL,\n 'prefix': ' ',\n 'suffix': ' ',\n }\n\n # format prefix + style_before + message + style_after + suffix\n result = reset\n for i in [levelname, name]:\n result += f\"{i['prefix']}{i['style_before']}{i['format']}{i['style_after']}{i['suffix']}\"\n result += reset\n\n return result\n\n # Template Switcher\n templates = {\n 'NOTSET': _log_format_notset,\n 'INFO': _log_format_onecolor,\n 'DELIMITER': _log_format_onecolor,\n 'TOPIC': _log_format_onecolor,\n 'WARNING': _log_format_onecolor,\n }\n\n return templates.get(record.levelname, _log_format_notset)(record)", "def format(self, data):", "def change_color_by_tag(member):\n if DIFF_TAG in member:\n if member[DIFF_TAG] == 'deleted':\n out.change_color('RED')\n print '-',\n elif member[DIFF_TAG] == 'added':\n out.change_color('GREEN')\n print '+',\n else:\n out.change_color('BLACK')\n print ' ',", "def _format_time(record, datefmt=None):\n time_tuple = time.localtime(record.created)\n tz_name = time.tzname[time_tuple.tm_isdst]\n return 
'%(date_time)s-%(millis)03d-%(tz_name)s' % dict(\n date_time=time.strftime('%Y%m%d-%H%M%S', time_tuple),\n millis=record.msecs,\n tz_name=tz_name,\n )", "def emit(self, record):\n msg = self.format(record)\n self.console.print(msg)", "def format_row(self, row):\n raise NotImplementedError()", "def format(self):\n ...", "def fmt_color(fmt, color):\n\n if(fmt == 'rgb'):\n r, g, b = color\n return \"rgb({}, {}, {})\".format(r, g, b)\n elif(fmt == 'hex'):\n hexval = hex(color)[2:]\n while(len(hexval) < 6):\n hexval = \"0\" + hexval\n return \"#\" + hexval\n elif(fmt == 'hsl'):\n h, s, l = color\n return \"hsl({}, {}%, {}%)\".format(h, s, l)\n elif(fmt == 'cmyk'):\n c, m, y, k = color\n return \"cmyk({}%, {}%, {}%, {}%)\".format(c, m, y, k)", "def asformat(self, format):", "def emit(self, record):\n try:\n msg = self.format(record)\n log_level = record.levelno\n self.write_log_buffer(msg, log_level)\n except Exception:\n self.handleError(record)", "def format_cell_updated(self, cell, value=None):\n self.is_not_used()\n if value is not None:\n cell.value = value\n\n cell.fill = PatternFill(start_color='7fffd4', end_color='7fffd4', fill_type='solid')\n cell.font = Font(name='Ubuntu', size=11, color='555555', bold=False, italic=False)", "def format_color(\n color: Union[ColorInputType, Any],\n warn_if_invalid: bool = True\n) -> Union[ColorType, Any]:\n if not isinstance(color, ColorInputInstance):\n return color\n if not isinstance(color, pygame.Color):\n try:\n if isinstance(color, VectorInstance) and 3 <= len(color) <= 4:\n if PYGAME_V2:\n for j in color:\n if not isinstance(j, int):\n raise ValueError('color cannot contain floating point values')\n c = pygame.Color(*color)\n else:\n c = pygame.Color(color)\n except ValueError:\n if warn_if_invalid:\n warn(f'invalid color value \"{color}\"')\n else:\n raise\n return color\n else:\n c = color\n return c.r, c.g, c.b, c.a", "def bgcolor(v=''):\n return str(v)", "def format(self, record, *args, **kwargs):\n if (\n (\n hasattr(record, 'line_number') or\n hasattr(record, 'line_number_list') or\n hasattr(record, 'column_number') or\n hasattr(record, 'column_number_list'))\n and not hasattr(record, 'filename_')):\n raise ValueError(\n 'Tried to log about a line/column with no filename')\n return super(ValidationMessageFormatter, self).format(record,\n *args,\n **kwargs)", "def UseColor(self, use_color):\n self.use_color = use_color\n self._formatter.UseColor(use_color)", "def colorize(lead, num, color):\n if num != 0 and ANSIBLE_COLOR and color is not None:\n return \"%s%s%-15s\" % (stringc(lead, color), stringc(\"=\", color), stringc(str(num), color))\n else:\n return \"%s=%-4s\" % (lead, str(num))", "def serialize_to_json(self, record, pretty=False):\n return self._json_serializer.to_json(record)", "def format_data(self, data):", "def format(cls, result, record_delimiter=';', quote_mark='\"'):\n record_delimiter = quote_mark + record_delimiter + quote_mark\n ret = ''\n if result.is_empty():\n return ret\n\n ret += quote_mark + record_delimiter.join(\n [s.replace(quote_mark, 2*quote_mark)\n for s in result.get_long_names()]) + quote_mark + '\\n'\n\n for row in result.rows:\n ret += quote_mark + record_delimiter.join(\n [s.replace(quote_mark, 2*quote_mark) for s in row])\\\n + quote_mark + '\\n'\n return ret", "def emit(self, record):\n try:\n msg = self.format(record)\n log_level = record.levelno\n self.write_log(msg, log_level)\n except Exception:\n self.handleError(record)", "def process_record(self, record):\n raise NotImplementedError('Process 
record needs to be customized')", "def _color(self, args):", "def colored(self):\n return colored(str(self), **self.color_opts)", "def handle(cls, record):\n print(datetime.datetime.now(), record, flush=True)", "def color(self, s, fg=None, style=None):\n return LogStr(s, fg=fg, style=style)", "def format(color, style='', bgcolor=''):\n _color = QtGui.QColor()\n _color.setNamedColor(color)\n _format = QtGui.QTextCharFormat()\n _format.setForeground(_color)\n if 'bold' in style:\n _format.setFontWeight(QtGui.QFont.Bold)\n if 'italic' in style:\n _format.setFontItalic(True)\n if bgcolor != '':\n _color.setNamedColor(bgcolor)\n _format.setBackground(_color)\n\n return _format", "def _colorstr(self, args):", "def fill(self, color):\n self.format.fill(self, color)", "def format_color_name(string, frame_name):\n if frame_name == \"primary\":\n color = \"red\"\n else:\n color = \"green\"\n return format_color(string, color)", "def format_aggregated(record,\n field_name,\n single_fmt='%s',\n multiple_fmt='[%s]',\n join_string=', ',\n max_join=3,\n optional=False):\n attr_val = getattr(record, field_name, None)\n attr_list = getattr(record, field_name + '_list', None)\n if attr_val is not None:\n attr_indicator = single_fmt % attr_val\n elif attr_list is not None:\n # treat None as 'format all of them, no maximum'\n if max_join is None:\n max_join = len(attr_list)\n string_list = list(str(val) for val in attr_list[:max_join])\n num_skipped = len(attr_list) - len(string_list)\n if num_skipped != 0:\n string_list.append('(%d more)' % num_skipped)\n attr_indicator = multiple_fmt % join_string.join(string_list)\n elif optional:\n attr_indicator = ''\n else:\n raise ValueError(\n \"Tried to format an absent non-optional log field: '%s'\" %\n field_name)\n return attr_indicator", "def format(self, row):\n return json.dumps(row.print_fields)", "def metric_recorded(self, record):\n if record.name in self.headers and self.current_row is not None:\n if record.name == \"learning_rate\" and not record.is_scalar:\n # record is a list of scalars\n value = \",\".join([f\"{lr:.4f}\" for lr in record.value])\n elif record.is_scalar and isinstance(record.value, int):\n value = str(record.value)\n else:\n assert record.is_scalar\n\n value = f\"{record.value:.4f}\"\n\n self.current_row[record.name] = value", "def reformat(ctx):\n pass", "def format_color(string, color):\n cs = \"\\x1b[38;2;{};{};{}m{}\\x1b[0m\"\n\n # my colors\n if color == \"red1\":\n r, g, b = 215, 0, 0\n elif color == \"green1\":\n r, g, b = 0, 255, 0\n elif color == \"blue1\":\n r, g, b = 50, 50, 255\n\n # list from https://www.rapidtables.com/web/color/RGB_Color.html\n elif color == \"Black\":\n r, g, b = 0, 0, 0\n elif color == \"White\":\n r, g, b = 255, 255, 255\n elif color == \"Red\":\n r, g, b = 255, 0, 0\n elif color == \"Lime\":\n r, g, b = 0, 255, 0\n elif color == \"Blue\":\n r, g, b = 0, 0, 255\n elif color == \"Yellow\":\n r, g, b = 255, 255, 0\n elif color == \"Cyan\":\n r, g, b = 0, 255, 255\n elif color == \"Magenta\":\n r, g, b = 255, 0, 255\n elif color == \"Silver\":\n r, g, b = 192, 192, 192\n elif color == \"Gray\":\n r, g, b = 128, 128, 128\n elif color == \"Maroon\":\n r, g, b = 128, 0, 0\n elif color == \"Olive\":\n r, g, b = 128, 128, 0\n elif color == \"Green\":\n r, g, b = 0, 128, 0\n elif color == \"Purple\":\n r, g, b = 128, 0, 128\n elif color == \"Teal\":\n r, g, b = 0, 128, 128\n elif color == \"Navy\":\n r, g, b = 0, 0, 128\n elif color == \"maroon\":\n r, g, b = 128, 0, 0\n elif color == \"dark red\":\n r, g, b = 
139, 0, 0\n elif color == \"brown\":\n r, g, b = 165, 42, 42\n elif color == \"firebrick\":\n r, g, b = 178, 34, 34\n elif color == \"crimson\":\n r, g, b = 220, 20, 60\n elif color == \"red\":\n r, g, b = 255, 0, 0\n elif color == \"tomato\":\n r, g, b = 255, 99, 71\n elif color == \"coral\":\n r, g, b = 255, 127, 80\n elif color == \"indian red\":\n r, g, b = 205, 92, 92\n elif color == \"light coral\":\n r, g, b = 240, 128, 128\n elif color == \"dark salmon\":\n r, g, b = 233, 150, 122\n elif color == \"salmon\":\n r, g, b = 250, 128, 114\n elif color == \"light salmon\":\n r, g, b = 255, 160, 122\n elif color == \"orange red\":\n r, g, b = 255, 69, 0\n elif color == \"dark orange\":\n r, g, b = 255, 140, 0\n elif color == \"orange\":\n r, g, b = 255, 165, 0\n elif color == \"gold\":\n r, g, b = 255, 215, 0\n elif color == \"dark golden rod\":\n r, g, b = 184, 134, 11\n elif color == \"golden rod\":\n r, g, b = 218, 165, 32\n elif color == \"pale golden rod\":\n r, g, b = 238, 232, 170\n elif color == \"dark khaki\":\n r, g, b = 189, 183, 107\n elif color == \"khaki\":\n r, g, b = 240, 230, 140\n elif color == \"olive\":\n r, g, b = 128, 128, 0\n elif color == \"yellow\":\n r, g, b = 255, 255, 0\n elif color == \"yellow green\":\n r, g, b = 154, 205, 50\n elif color == \"dark olive green\":\n r, g, b = 85, 107, 47\n elif color == \"olive drab\":\n r, g, b = 107, 142, 35\n elif color == \"lawn green\":\n r, g, b = 124, 252, 0\n elif color == \"chart reuse\":\n r, g, b = 127, 255, 0\n elif color == \"green yellow\":\n r, g, b = 173, 255, 47\n elif color == \"dark green\":\n r, g, b = 0, 100, 0\n elif color == \"green\":\n r, g, b = 0, 128, 0\n elif color == \"forest green\":\n r, g, b = 34, 139, 34\n elif color == \"lime\":\n r, g, b = 0, 255, 0\n elif color == \"lime green\":\n r, g, b = 50, 205, 50\n elif color == \"light green\":\n r, g, b = 144, 238, 144\n elif color == \"pale green\":\n r, g, b = 152, 251, 152\n elif color == \"dark sea green\":\n r, g, b = 143, 188, 143\n elif color == \"medium spring green\":\n r, g, b = 0, 250, 154\n elif color == \"spring green\":\n r, g, b = 0, 255, 127\n elif color == \"sea green\":\n r, g, b = 46, 139, 87\n elif color == \"medium aqua marine\":\n r, g, b = 102, 205, 170\n elif color == \"medium sea green\":\n r, g, b = 60, 179, 113\n elif color == \"light sea green\":\n r, g, b = 32, 178, 170\n elif color == \"dark slate gray\":\n r, g, b = 47, 79, 79\n elif color == \"teal\":\n r, g, b = 0, 128, 128\n elif color == \"dark cyan\":\n r, g, b = 0, 139, 139\n elif color == \"aqua\":\n r, g, b = 0, 255, 255\n elif color == \"cyan\":\n r, g, b = 0, 255, 255\n elif color == \"light cyan\":\n r, g, b = 224, 255, 255\n elif color == \"dark turquoise\":\n r, g, b = 0, 206, 209\n elif color == \"turquoise\":\n r, g, b = 64, 224, 208\n elif color == \"medium turquoise\":\n r, g, b = 72, 209, 204\n elif color == \"pale turquoise\":\n r, g, b = 175, 238, 238\n elif color == \"aqua marine\":\n r, g, b = 127, 255, 212\n elif color == \"powder blue\":\n r, g, b = 176, 224, 230\n elif color == \"cadet blue\":\n r, g, b = 95, 158, 160\n elif color == \"steel blue\":\n r, g, b = 70, 130, 180\n elif color == \"corn flower blue\":\n r, g, b = 100, 149, 237\n elif color == \"deep sky blue\":\n r, g, b = 0, 191, 255\n elif color == \"dodger blue\":\n r, g, b = 30, 144, 255\n elif color == \"light blue\":\n r, g, b = 173, 216, 230\n elif color == \"sky blue\":\n r, g, b = 135, 206, 235\n elif color == \"light sky blue\":\n r, g, b = 135, 206, 250\n elif color == 
\"midnight blue\":\n r, g, b = 25, 25, 112\n elif color == \"navy\":\n r, g, b = 0, 0, 128\n elif color == \"dark blue\":\n r, g, b = 0, 0, 139\n elif color == \"medium blue\":\n r, g, b = 0, 0, 205\n elif color == \"blue\":\n r, g, b = 0, 0, 255\n elif color == \"royal blue\":\n r, g, b = 65, 105, 225\n elif color == \"blue violet\":\n r, g, b = 138, 43, 226\n elif color == \"indigo\":\n r, g, b = 75, 0, 130\n elif color == \"dark slate blue\":\n r, g, b = 72, 61, 139\n elif color == \"slate blue\":\n r, g, b = 106, 90, 205\n elif color == \"medium slate blue\":\n r, g, b = 123, 104, 238\n elif color == \"medium purple\":\n r, g, b = 147, 112, 219\n elif color == \"dark magenta\":\n r, g, b = 139, 0, 139\n elif color == \"dark violet\":\n r, g, b = 148, 0, 211\n elif color == \"dark orchid\":\n r, g, b = 153, 50, 204\n elif color == \"medium orchid\":\n r, g, b = 186, 85, 211\n elif color == \"purple\":\n r, g, b = 128, 0, 128\n elif color == \"thistle\":\n r, g, b = 216, 191, 216\n elif color == \"plum\":\n r, g, b = 221, 160, 221\n elif color == \"violet\":\n r, g, b = 238, 130, 238\n elif color == \"magenta\":\n r, g, b = 255, 0, 255\n elif color == \"orchid\":\n r, g, b = 218, 112, 214\n elif color == \"medium violet red\":\n r, g, b = 199, 21, 133\n elif color == \"pale violet red\":\n r, g, b = 219, 112, 147\n elif color == \"deep pink\":\n r, g, b = 255, 20, 147\n elif color == \"hot pink\":\n r, g, b = 255, 105, 180\n elif color == \"light pink\":\n r, g, b = 255, 182, 193\n elif color == \"pink\":\n r, g, b = 255, 192, 203\n elif color == \"antique white\":\n r, g, b = 250, 235, 215\n elif color == \"beige\":\n r, g, b = 245, 245, 220\n elif color == \"bisque\":\n r, g, b = 255, 228, 196\n elif color == \"blanched almond\":\n r, g, b = 255, 235, 205\n elif color == \"wheat\":\n r, g, b = 245, 222, 179\n elif color == \"corn silk\":\n r, g, b = 255, 248, 220\n elif color == \"lemon chiffon\":\n r, g, b = 255, 250, 205\n elif color == \"light golden rod yellow\":\n r, g, b = 250, 250, 210\n elif color == \"light yellow\":\n r, g, b = 255, 255, 224\n elif color == \"saddle brown\":\n r, g, b = 139, 69, 19\n elif color == \"sienna\":\n r, g, b = 160, 82, 45\n elif color == \"chocolate\":\n r, g, b = 210, 105, 30\n elif color == \"peru\":\n r, g, b = 205, 133, 63\n elif color == \"sandy brown\":\n r, g, b = 244, 164, 96\n elif color == \"burly wood\":\n r, g, b = 222, 184, 135\n elif color == \"tan\":\n r, g, b = 210, 180, 140\n elif color == \"rosy brown\":\n r, g, b = 188, 143, 143\n elif color == \"moccasin\":\n r, g, b = 255, 228, 181\n elif color == \"navajo white\":\n r, g, b = 255, 222, 173\n elif color == \"peach puff\":\n r, g, b = 255, 218, 185\n elif color == \"misty rose\":\n r, g, b = 255, 228, 225\n elif color == \"lavender blush\":\n r, g, b = 255, 240, 245\n elif color == \"linen\":\n r, g, b = 250, 240, 230\n elif color == \"old lace\":\n r, g, b = 253, 245, 230\n elif color == \"papaya whip\":\n r, g, b = 255, 239, 213\n elif color == \"sea shell\":\n r, g, b = 255, 245, 238\n elif color == \"mint cream\":\n r, g, b = 245, 255, 250\n elif color == \"slate gray\":\n r, g, b = 112, 128, 144\n elif color == \"light slate gray\":\n r, g, b = 119, 136, 153\n elif color == \"light steel blue\":\n r, g, b = 176, 196, 222\n elif color == \"lavender\":\n r, g, b = 230, 230, 250\n elif color == \"floral white\":\n r, g, b = 255, 250, 240\n elif color == \"alice blue\":\n r, g, b = 240, 248, 255\n elif color == \"ghost white\":\n r, g, b = 248, 248, 255\n elif color == 
\"honeydew\":\n r, g, b = 240, 255, 240\n elif color == \"ivory\":\n r, g, b = 255, 255, 240\n elif color == \"azure\":\n r, g, b = 240, 255, 255\n elif color == \"snow\":\n r, g, b = 255, 250, 250\n elif color == \"black\":\n r, g, b = 0, 0, 0\n elif color == \"dim gray\":\n r, g, b = 105, 105, 105\n elif color == \"gray\":\n r, g, b = 128, 128, 128\n elif color == \"dark gray\":\n r, g, b = 169, 169, 169\n elif color == \"silver\":\n r, g, b = 192, 192, 192\n elif color == \"light gray\":\n r, g, b = 211, 211, 211\n elif color == \"gainsboro\":\n r, g, b = 220, 220, 220\n elif color == \"white smoke\":\n r, g, b = 245, 245, 245\n elif color == \"white\":\n r, g, b = 255, 255, 255\n else:\n r, g, b = 255, 255, 255\n\n return cs.format(r, g, b, string)", "def __init__(self, fmt=BASIC_FORMAT, datefmt=None, style='%', record_custom_attrs=None, mix_extra=False, mix_extra_position='tail', skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, sort_keys=False, **kw):\n if style not in _STYLES:\n raise ValueError('`style` must be one of: %s' % ','.join(\n _STYLES.keys()))\n if mix_extra_position not in _MIX_EXTRA_ORDER:\n raise ValueError('`mix_extra_position` must be one of: %s' % ','.join(\n _MIX_EXTRA_ORDER))\n # compatible python2 start\n if sys.version_info < (3, 0):\n kw.update(encoding=encoding)\n logging.Formatter.__init__(\n self, fmt='', datefmt=datefmt)\n else:\n logging.Formatter.__init__(\n self, fmt='', datefmt=datefmt, style=style)\n # compatible python2 end\n\n self.json_fmt = self.parseFmt(fmt)\n self.record_custom_attrs = record_custom_attrs\n self._style = _STYLES[style](self.json_fmt)\n self._style._fmt = ''\n self.mix_extra = mix_extra\n self.mix_extra_position = mix_extra_position\n\n self.checkRecordCustomAttrs(self.record_custom_attrs)\n\n # support `json.dumps` parameters start\n self.skipkeys = skipkeys\n self.ensure_ascii = ensure_ascii\n self.check_circular = check_circular\n self.allow_nan = allow_nan\n self.cls = cls\n self.indent = indent\n self.separators = separators\n self.encoding = encoding\n self.default = default\n self.sort_keys = sort_keys\n self.kw = kw\n # support `json.dumps` parameters end", "def dump_record(record):\n rec = E.record()\n\n leader = record.get('leader')\n if leader:\n rec.append(E.leader(leader))\n\n if isinstance(record, GroupableOrderedDict):\n items = record.iteritems(with_order=False, repeated=True)\n else:\n items = iteritems(record)\n\n for df, subfields in items:\n # Control fields\n if len(df) == 3:\n if isinstance(subfields, string_types):\n controlfield = E.controlfield(subfields)\n controlfield.attrib['tag'] = df[0:3]\n rec.append(controlfield)\n elif isinstance(subfields, (list, tuple, set)):\n for subfield in subfields:\n controlfield = E.controlfield(subfield)\n controlfield.attrib['tag'] = df[0:3]\n rec.append(controlfield)\n else:\n # Skip leader.\n if df == 'leader':\n continue\n\n if not isinstance(subfields, (list, tuple, set)):\n subfields = (subfields,)\n\n df = df.replace('_', ' ')\n for subfield in subfields:\n if not isinstance(subfield, (list, tuple, set)):\n subfield = [subfield]\n\n for s in subfield:\n datafield = E.datafield()\n datafield.attrib['tag'] = df[0:3]\n datafield.attrib['ind1'] = df[3]\n datafield.attrib['ind2'] = df[4]\n\n if isinstance(s, GroupableOrderedDict):\n items = s.iteritems(with_order=False, repeated=True)\n elif isinstance(s, dict):\n items = iteritems(s)\n else:\n 
datafield.append(E.subfield(s))\n\n items = tuple()\n\n for code, value in items:\n if not isinstance(value, string_types):\n for v in value:\n datafield.append(E.subfield(v, code=code))\n else:\n datafield.append(E.subfield(value, code=code))\n\n rec.append(datafield)\n return rec", "def __repr__(self):\r\n return f\"Color('{self.color}')\"", "def reduce_field_abrs(values):\n if \"c\" in values:\n values[\"color\"] = parse_color(values.pop(\"c\"))\n if \"ls\" in values:\n values[\"linestyle\"] = values.pop(\"ls\")\n if \"lw\" in values:\n values[\"linewidth\"] = values.pop(\"lw\")\n return values", "def emit(self, record):\n try:\n if record.exc_info:\n _, exc, *_ = record.exc_info\n if hasattr(exc, \"__pretty_exc__\"):\n try:\n self.emit_pretty_exception(exc, verbose=_is_verbose())\n if not _is_verbose():\n return\n # pylint: disable-next=broad-except\n except Exception: # noqa: BLE001, S110 # nosec B110\n pass\n\n msg = self.format(record)\n Tqdm.write(msg, file=self.stream, end=getattr(self, \"terminator\", \"\\n\"))\n self.flush()\n except (BrokenPipeError, RecursionError):\n raise\n except Exception: # noqa: BLE001, pylint: disable=broad-except\n self.handleError(record)", "def colourise(value, background=False):\n if background:\n return mark_safe(\"\".join((r\"\\cellcolor{\", COLOURUPS.get(value, \"white\"),\n \"}{\", value, \"}\")))\n else:\n return mark_safe(\"\".join((r\"\\textcolor{\", COLOURUPS.get(value, \"purple\"),\n \"}{\", value, \"}\")))", "def anyTextToColor(self, mystr, r=None):\n\n if len(mystr) < 3:\n # pad up with zeros\n while len(mystr) % 3 != 0:\n mystr += \"0\"\n\n i = 0\n sum1 = 0\n sum2 = 0\n sum3 = 0\n for c in mystr:\n if i % 3 == 0:\n sum1 += int( str(ord(c)) + str(i)[::-1])\n if i % 3 == 1:\n sum2 += int(str(ord(c)) + str(i)[::-1])\n if i % 3 == 2:\n sum3 += int(str(ord(c)) + str(i)[::-1])\n i += 1\n\n x1 = sum1 % 255\n x2 = sum2 % 255\n x3 = sum3 % 255\n\n if r is not None:\n x1 = r\n\n # if we wants to force a shade of green\n # x2 = 255\n\n outstr = \"%x%x%x\" % (x1, x2, x3)\n\n while len(outstr) < 6:\n outstr += \"a\"\n\n return outstr", "def setFormat( self, fmt, style = '{' ):\n formatter = logging.Formatter( fmt, style = style )\n for handler in self.logger.handlers:\n handler.setFormatter( formatter )", "def add_formatter(self, regexp, conv, userdata=None):\n if isinstance(regexp, str):\n regexp = re.compile(regexp)\n self._formatters.append([regexp, conv, userdata])", "def _style_colours(self):\n\n pass", "def data_prettified(self, instance):\n\n # Convert the data to sorted, indented JSON\n response = json.dumps(instance.data, sort_keys=True, indent=2)\n\n # Truncate the data. 
Alter as needed\n response = response\n\n # Get the Pygments formatter\n formatter = HtmlFormatter(style='colorful')\n\n # Highlight the data\n response = highlight(response, JsonLexer(), formatter)\n\n # Get the stylesheet\n style = \"<style>\" + formatter.get_style_defs() + \"</style><br>\"\n\n # Safe the output\n return mark_safe(style + response)", "def format(self, item):\n raise NotImplementedError()", "def formatRecord(record, year, month, date_start, date_end, tier_report=False):\n\n addQuotes = lambda s : \"'\" + s + \"'\"\n\n rd = {}\n\n rd[HOST] = addQuotes(record[HOST]) if HOST in record else addQuotes(record[TIER])\n rd[VO_NAME] = addQuotes(record[VO_NAME]) if record[VO_NAME] else 'null'\n if not tier_report:\n rd[USERSN] = addQuotes(record[USERSN])\n rd[VO_ISSUER] = addQuotes(record[VO_ISSUER]) if record[VO_ISSUER] else \"''\"\n rd[VO_GROUP] = addQuotes(record[VO_GROUP]) if record[VO_GROUP] else \"''\"\n rd[VO_ROLE] = addQuotes(record[VO_ROLE]) if record[VO_ROLE] else \"''\"\n rd[N_JOBS] = record[N_JOBS]\n rd[CPU_DURATION] = int(record[CPU_DURATION])\n rd[WALL_DURATION] = int(record[WALL_DURATION])\n rd[KSI2K_CPU_DURATION] = int(record[KSI2K_CPU_DURATION]) if record[KSI2K_CPU_DURATION] is not None else 'null'\n rd[KSI2K_WALL_DURATION] = int(record[KSI2K_WALL_DURATION]) if record[KSI2K_WALL_DURATION] is not None else 'null'\n rd[MONTH] = month\n rd[YEAR] = year\n rd[DATE_START] = addQuotes(date_start)\n rd[DATE_END] = addQuotes(date_end)\n\n # proper db entry, full\n if tier_report:\n base = '''%(host)s, %(vo_name)s, ''' + \\\n '''%(n_jobs)i, %(cpu_duration)s, %(ksi2k_cpu_duration)s, %(wall_duration)s, %(ksi2k_wall_duration)s, ''' + \\\n '''%(month)s, %(year)s, %(date_start)s, %(date_end)s'''\n else:\n base = '''%(host)s, %(vo_name)s, %(usersn)s, %(vo_group)s, %(vo_role)s, ''' + \\\n '''%(n_jobs)i, %(cpu_duration)s, %(ksi2k_cpu_duration)s, %(wall_duration)s, %(ksi2k_wall_duration)s, ''' + \\\n '''%(month)s, %(year)s, %(date_start)s, %(date_end)s'''\n\n s = base % rd\n return s", "def print_color(line, color=Colors.DEFAULT):\n sys.stdout.write(\"{}{}{}\".format(color.value, line, Colors.DEFAULT.value))", "def _logging_handler(self, record):\n if self.enable:\n message = self.log.handlers[0].format(record)\n self._log_lines.append(str(message))\n self.widget.object = \"<br/>\".join(self._log_lines[::-1])", "def _recordInfo(record, outfmt, fmtdict = None) :\n if fmtdict is None :\n fmtdict = _GB_RECORD_FMTDICT\n return [fmtdict[x](record) for x in outfmt]" ]
[ "0.8311548", "0.7502058", "0.74881405", "0.7452882", "0.7222441", "0.7179237", "0.7129312", "0.70246935", "0.6647282", "0.6643303", "0.6622566", "0.6554629", "0.6488578", "0.64701414", "0.64188516", "0.6313016", "0.6285745", "0.61440796", "0.59593135", "0.59322673", "0.5924928", "0.5848499", "0.5826485", "0.5812966", "0.58063215", "0.5797361", "0.57933277", "0.57707405", "0.57416284", "0.5711762", "0.56982654", "0.568053", "0.56750494", "0.567425", "0.5655015", "0.5653265", "0.5650104", "0.5639318", "0.5601521", "0.55719566", "0.5571929", "0.5483412", "0.547635", "0.54600567", "0.54384726", "0.5372695", "0.5364958", "0.53341544", "0.5313119", "0.5272459", "0.5255793", "0.5200826", "0.5197604", "0.518427", "0.5183915", "0.51733136", "0.51675457", "0.5162919", "0.51450807", "0.51355743", "0.5127961", "0.5108546", "0.5083594", "0.5069106", "0.5065812", "0.5051088", "0.5030209", "0.50248027", "0.50198597", "0.50142974", "0.50077856", "0.50008905", "0.49594587", "0.49543402", "0.49315894", "0.49314225", "0.49215564", "0.49167863", "0.490634", "0.4894656", "0.48945805", "0.48897287", "0.48803192", "0.4876009", "0.4856552", "0.48302937", "0.4816071", "0.48093623", "0.48087668", "0.4803311", "0.47999018", "0.47979575", "0.4796737", "0.47851253", "0.4784408", "0.47826853", "0.47809508", "0.4768527", "0.47632167", "0.4760409" ]
0.76215637
1
Setup the logging module.
def setup_logging(debug=False, quiet=0): fmt = '%(asctime)s: %(levelname)-7s: ' if debug: fmt += '%(filename)s:%(funcName)s: ' fmt += '%(message)s' # 'Sat, 05 Oct 2013 18:58:50 -0400 (EST)' datefmt = '%a, %d %b %Y %H:%M:%S %z' tzname = time.strftime('%Z', time.localtime()) if tzname and ' ' not in tzname and len(tzname) <= 5: # If the name is verbose, don't include it. Some systems like to use # "Eastern Daylight Time" which is much too chatty. datefmt += f' ({tzname})' if debug: level = logging.DEBUG elif quiet <= 0: level = logging.INFO elif quiet <= 1: level = logging.WARNING elif quiet <= 2: level = logging.ERROR elif quiet <= 3: level = logging.CRITICAL formatter = ColoredFormatter(fmt, datefmt) handler = logging.StreamHandler(stream=sys.stdout) handler.setFormatter(formatter) logger = logging.getLogger() logger.addHandler(handler) logger.setLevel(level)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __setup_logging(self):\n\n loglevel = logging.INFO\n if self.config[\"verbose\"]:\n loglevel = logging.DEBUG\n\n FORMAT = '[%(asctime)s %(filename)s:%(lineno)s %(levelname)s] %(message)s'\n if self.config[\"log\"]:\n logging.basicConfig(format=FORMAT, level=loglevel, filename=self.config[\"log\"])\n else:\n logging.basicConfig(format=FORMAT, level=loglevel)", "def _configure_logging(self):\n pass", "def setupLogging():\n global enabled, dummyInstance\n from pyemma.util.config import conf_values\n args = conf_values['Logging']\n\n if args.enabled:\n if args.tofile and args.file:\n filename = args.file\n else:\n filename = None\n try:\n logging.basicConfig(level=args.level,\n format=args.format,\n datefmt='%d-%m-%y %H:%M:%S',\n filename=filename,\n filemode='a')\n except IOError as ie:\n import warnings\n warnings.warn('logging could not be initialized, because of %s' % ie)\n return\n \"\"\" in case we want to log to both file and stream, add a separate handler\"\"\"\n if args.toconsole and args.tofile:\n ch = logging.StreamHandler()\n ch.setLevel(args.level)\n ch.setFormatter(logging.Formatter(args.format))\n logging.getLogger('').addHandler(ch)\n else:\n dummyInstance = dummyLogger()\n\n enabled = args.enabled", "def setup_logging():\n logging.basicConfig(format='%(levelname)s: %(message)s', level=LOGLEVEL)", "def _setup_logging(self):\n if self.app_config_has(\"logging\"):\n log_config = self.app_config()[\"logging\"]\n filename_list = [\n v['filename'] for k, v in\n _find_config_tree(log_config, \"filename\")\n ]\n # pre-create directory in advance for all loggers\n for file in filename_list:\n file_dir = os.path.dirname(file)\n if file_dir and not os.path.isdir(file_dir):\n os.makedirs(file_dir, exist_ok=True)\n dictConfig(log_config)\n else:\n log = getLogger()\n handler = StreamHandler()\n formatter = Formatter(\n \"%(asctime)s-%(threadName)s-%(name)s-%(levelname)s-%(message)s\"\n )\n handler.setFormatter(formatter)\n log.addHandler(handler)\n log.setLevel(DEBUG)\n msg = (\"Starting \" + os.path.basename(__name__) +\n \" version \" + __version__ + \" on \" +\n \"_\".join(uname()).replace(\" \", \"_\"))\n logger = getLogger(__name__)\n logger.debug(msg)", "def setup_logging():\n log.setup('keystone')", "def setup_logging():\r\n import ConfigParser # change this to configparser for Python 3\r\n # import logging\r\n import logging.config\r\n global logger\r\n\r\n try:\r\n \tlogging.config.fileConfig(\"celog.conf\")\r\n except ConfigParser.NoSectionError: \r\n\t# if there is no configuration file setup a default configuration\r\n logging.basicConfig(filename='code_extract.log',level= _logging_level,\r\n\t\t\tformat='%(asctime)s %(levelname)s - %(message)s',\r\n\t\t\tdatefmt='%Y %b %d, %a %H:%M:%S'\r\n\t\t\t)\r\n \r\n logger = logging.getLogger('%s' % __name__)\r\n\r\n logger.debug('logger ready')", "def setup_logging():\n product_name = \"plasma\"\n logging.setup(cfg.CONF, product_name)\n LOG.info(\"Logging enabled!\")\n LOG.debug(\"command line: %s\", \" \".join(sys.argv))", "def setup():\n config['global']['log.access_file'] = ''\n config['global']['log.error_file'] = ''\n config['global']['log.screen'] = False\n log_level = getattr(logging, config.log_level)\n logging.root.setLevel(logging.NOTSET)\n file_log.setLevel(log_level)\n logging.root.addHandler(file_log)\n if config.log_screen:\n console_log.setLevel(log_level)\n logging.root.addHandler(console_log)", "def setupLogging(self):\n\t\ttry:\n\t\t\tself.logger = logging.getLogger(__name__)\n\t\t\thandler = 
RotatingFileHandler(self.logFile, maxBytes=500000, backupCount=5)\n\t\t\tformat = \"%(asctime)s %(levelname)-8s %(message)s\"\n\t\t\thandler.setFormatter(logging.Formatter(format))\n\t\t\thandler.setLevel(logging.INFO)\n\t\t\tself.logger.addHandler(handler)\n\t\t\tself.logger.setLevel(logging.INFO)\n\t\texcept Exception as err:\n\t\t\terrorStr = 'Error initializing log file, ',err\n\t\t\tprint(errorStr)\n\t\t\texit(1)", "def _initialize_logging(self):\n LOG_CFG = os.environ.get('LOG_CFG', 'LOCAL')\n configure_logging(LOG_CFG)\n self.logger = logging.getLogger(self.__class__.__name__)", "def setup_logger():\n root = logging.getLogger()\n root.setLevel(LOGGING_LEVEL)\n formatter = logging.Formatter('%(asctime)s - %(message)s')\n ch = logging.StreamHandler(sys.stdout)\n ch.setLevel(LOGGING_LEVEL)\n ch.setFormatter(formatter)\n root.addHandler(ch)", "def setup_logging():\n lvl = os.getenv(\"LOG_LEVEL\")\n path = os.getenv(\"LOG_PATH\")\n\n logger = get_logger()\n logger.setLevel(lvl)\n\n filehandler = logging.FileHandler(path)\n filehandler.setLevel(lvl)\n filehandler.setFormatter(logging.Formatter(\n \"[%(asctime)s] %(levelname)s: %(message)s\",\n datefmt=\"%Y-%d-%m %H:%M:%S\"\n ))\n\n streamhandler = logging.StreamHandler()\n streamhandler.setLevel(lvl)\n streamhandler.setFormatter(logging.Formatter(\"%(message)s\"))\n\n logger.addHandler(filehandler)\n logger.addHandler(streamhandler)", "def setup_log(self):\n self.logger, _ = get_logger(\"datatransform\")", "def setup_logger() -> None:\n LOGGER.setLevel(logging.DEBUG)\n formatter = logging.Formatter('%(levelname)s \\t|%(asctime)s \\t| %(name)s \\t| %(message)s')\n\n if not check_if_dir_exists(FILENAMES.LOG_DIR):\n os.mkdir(to_abs_file_path(FILENAMES.LOG_DIR))\n\n file_handler: logging.FileHandler = logging.FileHandler(to_abs_file_path(FILENAMES.LOG), mode='w')\n file_handler.setLevel(logging.INFO)\n file_handler.setFormatter(formatter)\n\n console_handler: logging.StreamHandler = logging.StreamHandler()\n console_handler.setLevel(logging.WARNING)\n\n LOGGER.addHandler(file_handler)\n LOGGER.addHandler(console_handler)\n LOGGER.info('Filehandler and Console_Handler were born, let\\'s start logging')", "def setup_logger():\n LOG_DIR = unicode( os.environ.get(u'usep_gh__LOG_DIR') )\n LOG_LEVEL = unicode( os.environ.get(u'usep_gh__LOG_LEVEL') )\n filename = u'%s/usep_gh_handler.log' % LOG_DIR\n formatter = logging.Formatter( u'[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s' )\n logger = logging.getLogger( __name__ )\n # logger = logging.getLogger( u'usep_gh_handler' )\n level_dict = { u'debug': logging.DEBUG, u'info':logging.INFO }\n logger.setLevel( level_dict[LOG_LEVEL] )\n file_handler = logging.FileHandler( filename )\n file_handler.setFormatter( formatter )\n logger.addHandler( file_handler )\n logger.debug( u'in utils.log_helper.setup_logger(); log initialized at %s' % unicode(datetime.datetime.now()) )\n return logger", "def log_setup(self):\n # Logger initialisation\n logger = logging.getLogger(self.app_name)\n logger.setLevel(logging.DEBUG)\n\n # Creating console handler and set level to debug\n ch = logging.StreamHandler()\n ch.setLevel(logging.DEBUG)\n\n # Creating formatter\n formatter = logging.Formatter(\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\n )\n\n # Adding formatter to ch\n ch.setFormatter(formatter)\n\n # Adding ch to logger\n logger.addHandler(ch)\n\n # Setting the Logger Level (INFO)\n logger.setLevel(logging.INFO)\n\n return logger", "def setup_class(cls):\n if 
os.path.exists(logfilename):\n os.remove(logfilename)\n log = logutils.get_logger(__name__)\n log.root.handlers = []\n logutils.config(mode='standard', console_lvl='stdinfo',\n file_name=logfilename)", "def setup_logging():\n formatter = logging.Formatter(LOG_FORMAT)\n level = logging.INFO\n\n file_handler = logging.FileHandler('db.log')\n file_handler.setFormatter(formatter)\n file_handler.setLevel(level)\n\n console_handler = logging.StreamHandler()\n console_handler.setFormatter(formatter)\n console_handler.setLevel(level)\n\n logger = logging.getLogger()\n logger.addHandler(file_handler)\n logger.addHandler(console_handler)\n\n logger.setLevel(level)", "def initialize_logging():\n\n print 'Setting up logging...'\n\n log_level = app.config['LOGGING_LEVEL']\n # Set up default logging for submodules to use STDOUT\n # datefmt='%m/%d/%Y %I:%M:%S %p'\n fmt = '[%(asctime)s] %(levelname)s in %(module)s: %(message)s'\n logging.basicConfig(stream=sys.stdout, level=log_level, format=fmt)\n\n # Make a new log handler that uses STDOUT\n handler = logging.StreamHandler(sys.stdout)\n handler.setFormatter(logging.Formatter(fmt))\n handler.setLevel(log_level)\n\n # Remove the Flask default handlers and use our own\n handler_list = list(app.logger.handlers)\n\n for log_handler in handler_list:\n app.logger.removeHandler(log_handler)\n\n app.logger.addHandler(handler)\n app.logger.setLevel(log_level)\n app.logger.info('Logging handler established')", "def _setup_logger():\n root = logging.getLogger()\n root.setLevel(logging.INFO)\n\n log_handle = logging.StreamHandler(sys.stdout)\n formatter = logging.Formatter(\n \"[%(levelname)s] (%(asctime)s) - %(message)s\", datefmt=\"%Y-%m-%d %H:%M:%S\"\n )\n log_handle.setFormatter(formatter)\n root.addHandler(log_handle)\n\n logging.info(\"Initializing snakes\")", "def setup_logging():\n\n log = os.environ.get('ZHMC_LOG', None)\n\n if log is None:\n log = DEFAULT_LOG\n\n log_components = LOGGER_NAMES.keys()\n\n for lc in log_components:\n reset_logger(lc)\n\n handler = logging.StreamHandler(stream=sys.stderr)\n fs = '%(levelname)s %(name)s: %(message)s'\n handler.setFormatter(logging.Formatter(fs))\n\n log_specs = log.split(',')\n for log_spec in log_specs:\n\n # ignore extra ',' at begin, end or in between\n if log_spec == '':\n continue\n\n try:\n log_comp, log_level = log_spec.split('=', 1)\n except ValueError:\n raise ValueError(\"Missing '=' in COMP=LEVEL specification \"\n \"in ZHMC_LOG variable: {}\".format(log_spec))\n\n level = getattr(logging, log_level.upper(), None)\n if level is None:\n raise ValueError(\"Invalid level in COMP=LEVEL specification \"\n \"in ZHMC_LOG variable: {}\".format(log_spec))\n\n if log_comp not in log_components:\n raise ValueError(\"Invalid component in COMP=LEVEL specification \"\n \"in ZHMC_LOG variable: {}\".format(log_spec))\n\n setup_logger(log_comp, handler, level)", "def setup_logger(self):\n setup_logger(logger, 'mayavi.log', mode=self.log_mode)", "def setup_logging():\n if not app.debug:\n if app.config.get('LOG_CFG'):\n # initialize the Flask logger (removes all handlers)\n _ = app.logger\n dictConfig(app.config.get('LOG_CFG'))\n else:\n # capability with previous config settings\n # Should have LOG_FILE and LOG_LEVEL set\n if app.config.get('LOG_FILE') is not None:\n handler = RotatingFileHandler(app.config.get('LOG_FILE'), maxBytes=10000000, backupCount=100)\n else:\n handler = StreamHandler(stream=sys.stderr)\n\n handler.setFormatter(\n Formatter('%(asctime)s %(levelname)s: %(message)s '\n '[in 
%(pathname)s:%(lineno)d]')\n )\n app.logger.setLevel(app.config.get('LOG_LEVEL', DEBUG))\n app.logger.addHandler(handler)", "def init_logging():\n global logger\n logger = logging.getLogger('autogen_quartus')", "def setup_logging(self) -> None:\n logger.setup_logging(self.settings)\n base_format = self.settings.core.logging_format\n base_datefmt = self.settings.core.logging_datefmt\n\n # configure channel logging if required by configuration\n if self.settings.core.logging_channel:\n channel_level = self.settings.core.logging_channel_level\n channel_format = self.settings.core.logging_channel_format or base_format\n channel_datefmt = self.settings.core.logging_channel_datefmt or base_datefmt\n channel_params = {}\n if channel_format:\n channel_params['fmt'] = channel_format\n if channel_datefmt:\n channel_params['datefmt'] = channel_datefmt\n formatter = logger.ChannelOutputFormatter(**channel_params)\n handler = logger.IrcLoggingHandler(self, channel_level)\n handler.setFormatter(formatter)\n\n # set channel handler to `sopel` logger\n LOGGER = logging.getLogger('sopel')\n LOGGER.addHandler(handler)", "def setup_logging(self):\n console_handler = logging.StreamHandler()\n request_logging.assign_request_filter(console_handler,\n self.additional_fields)\n logging.basicConfig(level=self.level,\n format=self.format_string,\n handlers=[console_handler])\n for handler in logging.root.handlers:\n handler.setFormatter(RedactionFormatter(handler.formatter))\n logger = logging.getLogger(__name__)\n logger.info('Established logging defaults')\n self._setup_log_levels()", "def _setup_logging(self):\n global log\n\n # Parse the ini file to validate it\n parser = ConfigParser.ConfigParser()\n parser.read(self.ini_file)\n\n # Check for the presence of [loggers] in self.ini_file\n if not parser.has_section('loggers'):\n self._fail('Config file does not have [loggers] section', use_log=False)\n\n logging.config.fileConfig(self.ini_file)\n\n # Use \"name.pid\" to avoid importer confusions in the logs\n logger_name = 'debexpo.importer.%s' % os.getpid()\n log = logging.getLogger(logger_name)", "def _setup_logging(config):\n if config.debug:\n logging.basicConfig(\n format=\"%(asctime)s - %(levelname)s - %(message)s\", level=logging.DEBUG\n )\n else:\n logging.basicConfig(\n format=\"%(asctime)s - %(levelname)s - %(message)s\", level=logging.INFO\n )", "def setup_logging():\n logger = logging.getLogger()\n logger.level = logging.DEBUG\n stream_handler = logging.StreamHandler(sys.stdout)\n logger.addHandler(stream_handler)", "def _setup_logging():\n logging.Formatter.converter = time.gmtime\n logging.basicConfig(\n format='%(asctime)s %(message)s',\n level=logging.DEBUG,\n filename='conduit-proxy.log')\n\n console = logging.StreamHandler()\n console.setLevel(logging.INFO)\n logging.getLogger().addHandler(console)", "def setup_logging():\n\n coloredlogs.install(\n level=DEBUG, fmt=\"%(asctime)s %(name)s[%(process)d] %(levelname)s %(message)s\"\n )", "def setup_logger():\n formatter = ColoredFormatter(\n (\n '%(log_color)s%(levelname)-5s%(reset)s '\n '%(yellow)s[%(asctime)s]%(reset)s'\n '%(green)s %(name)s %(purple)s %(filename)s %(purple)s %(funcName)s %(purple)s:%(lineno)d%(reset)s '\n '%(bold_blue)s%(message)s%(reset)s'\n ),\n datefmt='%y-%m-%d %H;%M:%S',\n log_colors={\n 'DEBUG': 'blue',\n 'INFO': 'yellow',\n 'WARNING': 'red',\n 'ERROR': 'blue,bg_bold_red',\n 'CRITICAL': 'red,bg_white',\n }\n )\n\n logger = logging.getLogger('shen-yue-is-beautiful')\n handler = logging.StreamHandler()\n 
handler.setFormatter(formatter)\n logger.addHandler(handler)\n logger.setLevel(logging.DEBUG)\n\n return logger", "def setup_logging(log_file):\n\tglobal logger\n\tif log_file:\n\t\tlogging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',filename=log_file,filemode='w',level=logging.INFO)\n\telse:\n\t\tlogging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',level=logging.INFO)\n\tlogger = logging.getLogger('default')", "def initLogging(self):\n logging.basicConfig(level=self.loglevel, stream=sys.stderr)", "def setup_logging():\n formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n console = logging.StreamHandler(sys.stdout)\n console.setLevel(logging.DEBUG)\n console.setFormatter(formatter)\n root = logging.getLogger()\n root.addHandler(console)\n root.setLevel(logging.DEBUG)", "def setupLogger():\n logging.basicConfig(level=logging.DEBUG,\n format='%(asctime)s %(name)s %(levelname)s: %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S',\n filename='prepareToSubmit.log',\n filemode='w')\n # define a Handler which writes INFO messages or higher to the sys.stderr\n console = logging.StreamHandler()\n console.setLevel(logging.INFO)\n # set a format which is simpler for console use\n formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')\n # tell the handler to use this format\n console.setFormatter(formatter)\n # add the handler to the root logger\n logging.getLogger('').addHandler(console)", "def setup_logging():\n\n # make sure all messages are propagated to the top-level logger\n LOG.setLevel(logging.DEBUG)\n err_format = logging.Formatter(TERM.RED + \"%(asctime)s | %(name)s | %(levelname)s | %(message)s\" + TERM.NORMAL)\n out_format = logging.Formatter(\"%(asctime)s | %(name)s | %(levelname)s | %(message)s\")\n\n gen_log = logging.getLogger(\"pickup.generator_profile\")\n tgt_log = logging.getLogger(\"pickup.target_profile\")\n\n if not OPTIONS.quiet:\n stdout_handler = logging.StreamHandler(sys.stdout)\n\n if OPTIONS.debug:\n stdout_handler.setLevel(logging.DEBUG)\n else:\n stdout_handler.setLevel(logging.INFO)\n\n stdout_handler.addFilter( ReverseLevelFilter(logging.INFO) )\n stdout_handler.setFormatter(out_format)\n LOG.addHandler(stdout_handler)\n gen_log.addHandler(stdout_handler)\n tgt_log.addHandler(stdout_handler)\n\n stderr_handler = logging.StreamHandler(sys.stderr)\n stderr_handler.setLevel(logging.WARNING)\n stderr_handler.setFormatter(err_format)\n\n if not exists(\"logs\"):\n os.makedirs(\"logs\")\n os.chmod(\"logs\", 0700)\n\n LOG_FILE = join(\"logs\", \"pickup.log\")\n debug_handler = RotatingFileHandler(LOG_FILE,\n maxBytes=100000, backupCount=5)\n\n if exists(LOG_FILE):\n os.chmod(LOG_FILE, 0600)\n\n debug_handler.setLevel(logging.DEBUG)\n debug_handler.setFormatter(out_format)\n\n LOG.addHandler(stderr_handler)\n LOG.addHandler(debug_handler)\n\n # plugin loggers\n gen_log.setLevel(logging.DEBUG)\n gen_log.addHandler(stderr_handler)\n gen_log.addHandler(debug_handler)\n\n tgt_log.setLevel(logging.DEBUG)\n tgt_log.addHandler(stderr_handler)\n tgt_log.addHandler(debug_handler)", "def setup_class(self):\n # Initialize instance variable(s)\n self.log = logging.getLogger()\n self.log.level = logging.DEBUG", "def setup_logfile():\r\n from core.general.appinit import log_init\r\n log_init(\r\n 'general',\r\n 'django_api'\r\n )", "def init_logging():\n global logger\n logging.basicConfig(\n format='%(levelname)s - %(message)s',\n )\n logger = logging.getLogger('runner')\n 
logger.setLevel(os.environ.get('LOGGING_LEVEL', 'INFO'))", "def logging_setup(args, log_dir):\n timestamp_file = datetime.now().strftime(\"%Y%m%d-%H.%M_rcf_abb.log\")\n log_file = Path(log_dir) / timestamp_file\n\n handlers = []\n\n if not args.skip_logfile:\n handlers.append(log.FileHandler(log_file, mode=\"a\"))\n if not args.quiet:\n handlers.append(log.StreamHandler(sys.stdout))\n\n log.basicConfig(\n level=log.DEBUG if args.debug else log.INFO,\n format=\"%(asctime)s:%(levelname)s:%(funcName)s:%(message)s\",\n handlers=handlers,\n )", "def initLogger(self):\n loglevel = self.loglevels[self.loglevel]\n log_format = '%(asctime)s name=%(name)s loglevel=%(levelname)s message=%(message)s'\n logging.basicConfig(format=log_format,\n level=loglevel)\n \tmultiprocessing.log_to_stderr(loglevel)", "def _init():\n global logger\n logger = logging.getLogger(\"Log\")", "def setup_logging(log_basedir=\"logs\"):\n BASEDIR = os.path.abspath(os.path.dirname(__file__))\n LOGDIR = os.path.join(BASEDIR,log_basedir)\n \n # Check if the logs directory exists and is writable\n if not os.path.isdir(LOGDIR):\n print('ERROR: Log directory {} does not exist.'.format(LOGDIR))\n sys.exit(1)\n if not os.access(LOGDIR, os.W_OK):\n print('ERROR: No permissions to write to log directory {}.'.format(LOGDIR))\n sys.exit(1)\n\n # Set the log message format\n fmt = '%(levelname)s - %(asctime)s.%(msecs).03d %(process)d [%(filename)s:%(lineno)d] %(message)s'\n datefmt = '%m%d %H:%M:%S'\n formatter = logging.Formatter(fmt, datefmt)\n\n # Log to console\n console_handler = logging.StreamHandler(sys.stdout)\n console_handler.setLevel(logging.DEBUG)\n console_handler.setFormatter(formatter)\n\n root = logging.getLogger()\n root.setLevel(logging.DEBUG)\n root.addHandler(console_handler)\n\n # Log to file, use a rotating file\n file_name = os.path.join(LOGDIR, '{}.log'.format(\"flask_api_otrs\") )\n\n file_handler = logging.handlers.RotatingFileHandler(file_name, backupCount=7)\n file_handler.setFormatter(formatter)\n root.addHandler(file_handler)", "async def setup(self):\n\t\tlogging.config.dictConfig(self.log_settings['log'])\n\t\tself.logger = logging.getLogger('Responder3')\n\t\tself.create_dir_strucutre()\n\n\t\tif 'handlers' in self.log_settings:\n\t\t\tasync for handlerclass, handler in self.get_handlers():\n\t\t\t\tawait self.start_extension(handlerclass, self.log_settings[self.log_settings['handlers'][handler]])", "def setUp(self):\n self.logger = logging.getLogger(glutil.root_package_name)\n self.orig_handlers = self.logger.handlers\n self.logger.handlers = []\n self.level = self.logger.level\n self.logger.level = logging.DEBUG\n\n self.rt_logger = logging.getLogger()\n self.orig_root_handlers = self.rt_logger.handlers\n self.rt_logger.handlers = []\n self.root_level = self.rt_logger.level\n self.rt_logger.level = logging.CRITICAL", "def setup():\n global log_handler\n\n if vaex.settings.main.logging.setup:\n logger.setLevel(logging.DEBUG)\n\n # create console handler and accept all loglevels\n if vaex.settings.main.logging.rich:\n from rich.logging import RichHandler\n log_handler = RichHandler()\n else:\n log_handler = logging.StreamHandler()\n\n # create formatter\n formatter = logging.Formatter('%(levelname)s:%(threadName)s:%(name)s:%(message)s')\n\n\n # add formatter to console handler\n log_handler.setFormatter(formatter)\n log_handler.setLevel(logging.DEBUG)\n\n # add console handler to logger\n logger.addHandler(log_handler)\n\n logging.getLogger(\"vaex\").setLevel(logging.ERROR) # default to higest level\n 
_set_log_level(vaex.settings.main.logging.error, logging.ERROR)\n _set_log_level(vaex.settings.main.logging.warning, logging.WARNING)\n _set_log_level(vaex.settings.main.logging.info, logging.INFO)\n _set_log_level(vaex.settings.main.logging.debug, logging.DEBUG)\n # VAEX_DEBUG behaves similar to VAEX_LOGGING_DEBUG, but has more effect\n DEBUG_MODE = os.environ.get('VAEX_DEBUG', '')\n if DEBUG_MODE:\n _set_log_level(DEBUG_MODE, logging.DEBUG)", "def setup_logging():\n logging.basicConfig(\n filename=os.getenv(\"SERVICE_LOG\", \"server.log\"),\n level=logging.DEBUG,\n format=\"%(levelname)s: %(asctime)s pid:%(process)s module:%(module)s %(message)s\",\n datefmt=\"%d/%m/%y %H:%M:%S\",\n )", "def setup_logging(\n module,\n default_level=logging.INFO,\n env_key='LOG_CFG',\n logpath=os.getcwd(),\n config_path=None\n):\n\n if not os.path.exists(os.path.dirname(logpath)):\n os.makedirs(os.path.dirname(logpath))\n timestamp = datetime.datetime.now().strftime(\"%Y-%m-%d_%H:%M\")\n fpath = os.path.join(logpath, module, timestamp)\n\n path = config_path if config_path is not None else os.getenv(env_key, None)\n if path is not None and os.path.exists(path):\n with open(path, 'rt') as f:\n config = yaml.safe_load(f.read())\n for h in config['handlers'].values():\n if h['class'] == 'logging.FileHandler':\n h['filename'] = os.path.join(logpath, module, timestamp, h['filename'])\n touch(h['filename'])\n for f in config['filters'].values():\n if '()' in f:\n f['()'] = globals()[f['()']]\n logging.config.dictConfig(config)\n else:\n lpath=os.path.join(logpath, timestamp)\n if not os.path.exists(lpath):\n os.makedirs(lpath)\n logging.basicConfig(level=default_level, filename=os.path.join(lpath,\"base.log\"))", "def _begin_logging(self):\n logconf.set_up_root_logger(self.opts.logfile)", "def setup(log_level, log_name):\n\n # Log format string for flake8 compliance\n log_fmt = ('%(levelname)-8s %(asctime)s%(filename)s:%(lineno)-4s '\n '%(message)s')\n\n # Configure logging\n config = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'formatters': {\n 'default': {\n 'format': log_fmt,\n 'datefmt': '%Y-%m-%d %H:%M:%S',\n },\n },\n 'handlers': {\n 'console': {\n 'class': 'logging.StreamHandler',\n 'formatter': 'default',\n },\n },\n 'loggers': {\n 'createtransfers': {\n 'level': log_level,\n 'handlers': ['console'],\n },\n },\n }\n\n logger = logging.getLogger(log_name)\n logging.config.dictConfig(config)\n return logger", "def setup_logging(module=None, level=logging.INFO): # pragma: no cover\n logger = logging.getLogger(module or '')\n logger.setLevel(level)\n logging.Formatter.converter = time.gmtime\n formatter = logging.Formatter(\n '%(asctime)s - %(name)s - %(processName)s - %(levelname)s - %(message)s'\n )\n stream_handler = logging.StreamHandler(sys.stderr)\n stream_handler.setLevel(level)\n stream_handler.setFormatter(formatter)\n logger.addHandler(stream_handler)\n return logger", "def _configure_logging(self):\r\n self._logger = logging.getLogger('AWSIoTPythonSDK.core')\r\n self._logger.setLevel(logging.ERROR)\r\n self._streamHandler = logging.StreamHandler()\r\n self._formatter = logging.Formatter(\r\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\r\n self._streamHandler.setFormatter(self._formatter)\r\n self._logger.addHandler(self._streamHandler)", "def setup_logger():\n now = datetime.now()\n logging.basicConfig(level=logging.DEBUG)\n logging.getLogger(\"requests\").setLevel(logging.WARNING)\n logging.getLogger(\"urllib3\").setLevel(logging.WARNING)\n logging.info(f\"Script run on: 
{now}\")", "def setup_logging( cfg ):\n global _LOGGING_FORMAT_, _DATE_FORMAT_\n format,date = _LOGGING_FORMAT_,_DATE_FORMAT_\n \n if not cfg.get('logging', True):\n logging.basicConfig(handler=logging.NullHandler)\n return\n \n #check passed in cfgs if formats changed\n if cfg.get('log_format', False):\n format = cfg.get('log_format')\n if cfg.get('log_date_format',False):\n date = cfg.get('log_date_format')\n \n if cfg.get('log_debug', False):\n logging.basicConfig(level=logging.DEBUG,\n format=format,\n datefmt=date,\n filename=cfg.get('log_path', 'errors.log'))\n console = logging.StreamHandler()\n console.setLevel(logging.DEBUG)\n logging.getLogger().addHandler(console)\n \n elif cfg.get('log_warnings', False):\n logging.basicConfig(level=logging.WARNING,\n format=format,\n datefmt=date,\n filename=cfg.get('log_path','errors.log'))\n \n else:# Errors are always logged. deal.\n logging.basicConfig(level=logging.ERROR,\n format=format,\n datefmt=date,\n filename=cfg.get('log_path','errors.log'))", "def setup_logging(config: Any) -> Logger:\n green = \"\\033[32m\"\n reset = \"\\033[0m\"\n logger = setup_logger(\n name=f\"{green}[ignite]{reset}\",\n level=logging.DEBUG if config.debug else logging.INFO,\n format=\"%(name)s: %(message)s\",\n filepath=config.output_dir / \"training-info.log\",\n )\n return logger", "def _configure_logging(self):\n self.log_level = Scaffold.LOG_LEVEL_MAP.get(self.log_level, ERROR)\n formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n\n # assign the windmill instance logger\n #logging.basicConfig()\n self.log = logging.getLogger(self.name)\n self.log.setLevel(self.log_level)\n\n if self.log_path:\n file_path = None\n if self.log_path.endswith('.log'):\n file_path = self.log_path\n else:\n file_path = os.path.join(self.log_path, self.name + '.log')\n assert file_path\n file_handler = logging.FileHandler(file_path)\n file_handler.setLevel(self.log_level)\n file_handler.setFormatter(formatter)\n self.log.addHandler(file_handler)\n\n # if we are in verbose mode, then we send log output to console\n if self.verbose:\n # add the console logger for verbose mode\n console_handler = logging.StreamHandler()\n console_handler.setLevel(self.log_level)\n console_handler.setFormatter(formatter)\n self.log.addHandler(console_handler)\n\n self.log.info('Logging configured for: %s', self.name)", "def setupLogger(self):\n self.logger = logging.getLogger('SIMULATOR' + str(self.iSimulatorID))\n self.logger.setLevel(logging.DEBUG)\n formatter = logging.Formatter(\"%(asctime)s - %(name)s - %(levelname)s - %(message)s\")\n #add formatter to ch and fh\n\n #fh = logging.FileHandler('log.apistub')\n #fh.setLevel(logging.DEBUG)\n #fh.setFormatter(formatter)\n sh = logging.StreamHandler()\n sh.setLevel(logging.DEBUG)\n sh.setFormatter(formatter)\n\n #self.logger.addHandler(fh)\n self.logger.addHandler(sh)\n self.logger.disabled = BLOGGING_DISABLED", "def setup_logging(log_dir: Optional[str] = None) -> None:\n config: Dict[str, Any] = {\n \"version\": 1,\n \"disable_existing_loggers\": True,\n \"formatters\": {\"console\": {\"format\": \"%(asctime)s:\\t%(message)s\"}},\n \"handlers\": {\n \"console\": {\n \"level\": \"WARNING\",\n \"class\": \"logging.StreamHandler\",\n \"formatter\": \"console\",\n \"stream\": \"ext://sys.stdout\",\n }\n },\n \"loggers\": {\n LOG_NAME: {\"handlers\": [\"console\"], \"level\": \"DEBUG\", \"propagate\": False}\n },\n }\n if log_dir is not None:\n config[\"loggers\"][LOG_NAME][\"handlers\"].append(\"file\")\n 
config[\"formatters\"][\"file\"] = {\n \"format\": \"%(asctime)s - %(levelname)s - %(name)s - %(message)s\"\n }\n config[\"handlers\"][\"file\"] = {\n \"level\": \"DEBUG\",\n \"class\": \"logging.handlers.RotatingFileHandler\",\n \"formatter\": \"file\",\n \"filename\": os.path.join(log_dir, LOG_NAME + \".log\"),\n \"maxBytes\": 1000000,\n \"backupCount\": 3,\n }\n logging.config.dictConfig(config)", "def configure(cls):\n logger = logging.getLogger()\n logger.setLevel(logging.INFO)\n logger_handler = logging.StreamHandler()\n logger.addHandler(logger_handler)\n logger_handler.setFormatter(logging.Formatter('%(message)s'))\n cls.logger = logger", "def init_logging():\n\n logger = logging.getLogger()\n handler = logging.StreamHandler()\n formatter = logging.Formatter(\n '%(asctime)s %(levelname)-8s %(message)s')\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n logger.setLevel(logging.INFO)", "def init_logger(self):\n\n if self.args.log_level:\n log_level = getattr(logging, self.args.log_level)\n if coloredlogs:\n coloredlogs.install(level=log_level, fmt=LOG_FMT)\n else:\n logging.basicConfig(level=log_level)\n ch = logging.StreamHandler()\n formatter = logging.Formatter(LOG_FMT)\n ch.setFormatter(formatter)\n elif coloredlogs:\n coloredlogs.install(level='INFO', fmt=LOG_FMT)\n\n if coloredlogs:\n effective_level = coloredlogs.get_level()\n else:\n effective_level = logger.getEffectiveLevel()\n\n # make sure warning and error display at any effective level\n if effective_level > logging.WARNING:\n self.warning = logger.critical\n else:\n self.warning = logger.warning\n\n if effective_level > logging.ERROR:\n self.error = logger.critical\n else:\n self.error = logger.error\n\n self.info = logger.info\n self.debug = logger.debug\n self.exception = logger.exception\n self.critical = logger.critical", "def setup_logging(global_bootstrap_options):\n if get_logging_handlers():\n raise AssertionError(\"setup_logging should not be called while Handlers are installed.\")\n\n ignores = global_bootstrap_options.ignore_pants_warnings\n global_level = global_bootstrap_options.level\n level = LogLevel.ERROR if getattr(global_bootstrap_options, \"quiet\", False) else global_level\n log_dir = global_bootstrap_options.logdir\n\n Native().init_rust_logging(level.level, global_bootstrap_options.log_show_rust_3rdparty)\n setup_logging_to_stderr(level, warnings_filter_regexes=ignores)\n if log_dir:\n setup_logging_to_file(global_level, log_dir=log_dir, warnings_filter_regexes=ignores)", "def __init_logging(self):\n\n logger = logging.getLogger('__name__')\n if os.path.exists(constants.LOG_FILE):\n logger.setLevel(logging.DEBUG)\n logger_file_handler = logging.FileHandler(constants.LOG_FILE)\n logger_formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(message)s')\n logger_file_handler.setFormatter(logger_formatter)\n logger.addHandler(logger_file_handler)\n else:\n logger.disabled = True", "def init_logs(self):\n\n handler = logging.FileHandler(self.app.config['LOG'])\n handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))\n self.app.logger.addHandler(handler)\n if self.app.config.get(\"LOG_LEVEL\") == \"DEBUG\":\n self.app.logger.setLevel(logging.DEBUG)\n elif self.app.config.get(\"LOG_LEVEL\") == \"WARN\":\n self.app.logger.setLevel(logging.WARN)\n else:\n self.app.logger.setLevel(logging.INFO)\n self.app.logger.info('Startup with log: %s' % self.app.config['LOG'])", "def setup_global_logging():\n\n global global_logging_started\n\n if global_logging_started:\n 
return\n\n orig_logger_class = logging.getLoggerClass()\n logging.setLoggerClass(StreamTeeLogger)\n try:\n stdout_logger = logging.getLogger(__name__ + '.stdout')\n stderr_logger = logging.getLogger(__name__ + '.stderr')\n finally:\n logging.setLoggerClass(orig_logger_class)\n\n stdout_logger.setLevel(logging.INFO)\n stderr_logger.setLevel(logging.ERROR)\n stdout_logger.set_stream(sys.stdout)\n stderr_logger.set_stream(sys.stderr)\n sys.stdout = stdout_logger\n sys.stderr = stderr_logger\n\n exception_logger = logging.getLogger(__name__ + '.exc')\n sys.excepthook = LoggingExceptionHook(exception_logger)\n\n logging.captureWarnings(True)\n\n rawinput = 'input'\n builtins._original_raw_input = getattr(builtins, rawinput)\n setattr(builtins, rawinput, global_logging_raw_input)\n\n global_logging_started = True", "def setup_logger():\n\n global _logger\n global _has_logbook\n\n if _has_logbook:\n _logger = Logger('UoM_WIFI')\n try:\n log_path = join(sys.argv[1], '%s.log' % USERNAME)\n except IndexError:\n log_path = join(split(abspath(__file__))[0], '%s.log' % USERNAME)\n\n # because the log file is owned by root, if this program is ran by a\n # regular user, we need to prevent it from crashing by writing to a file\n # owned by root\n try:\n # create the handler\n log_handler = RotatingFileHandler(log_path)\n\n # push the context object to the application stack\n log_handler.push_application()\n except IOError:\n _has_logbook = False", "def setUpModule():\n logging.basicConfig()\n # logPoint('module %s' % __name__)", "def _setup_logging(self, log_level_name: str):\n log_level_name = log_level_name.upper()\n if log_level_name not in ['CRITICAL','ERROR', 'WARNING', 'INFO',\n 'DEBUG']:\n print('Invalid debug level: {}'.format(log_level_name))\n sys.exit(0)\n\n log_level = getattr(logging, log_level_name)\n\n logger = logging.getLogger(ROOT_NAMESPACE)\n logger.setLevel(log_level)\n\n ch = logging.StreamHandler()\n ch.setLevel(log_level)\n formatter = logging.Formatter(\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n ch.setFormatter(formatter)\n\n logger.addHandler(ch)", "def setup_logging():\n log_format = '%(asctime)-15s %(levelname)s: %(message)s'\n logging.basicConfig(format=log_format, level=logging.DEBUG,\n filename='counting_consumer.out')", "def setup_logging():\n name_json = 'logging_config.json'\n path_json = os.path.join(os.path.dirname(__file__), name_json)\n with open(path_json, 'r') as f_json:\n dict_config = json.load(f_json)\n logging.config.dictConfig(dict_config)", "def setUp(self):\n self.logger = logging.getLogger(\"dbs test logger\")", "def setup_logging(use_syslog=False):\n\n LOG.setLevel(logging.INFO)\n if use_syslog:\n ch = SysLogHandler()\n else:\n ch = logging.StreamHandler(sys.stdout)\n ch.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d] '\n '%(levelname)s: %(message)s'))\n LOG.addHandler(ch)", "def configure_logger():\n logger = logging.getLogger()\n handler = logging.StreamHandler()\n formatter = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n logger.setLevel(logging.INFO)", "def setup_logger():\n mc_logger = logging.getLogger('chess_logger')\n mc_logger.setLevel(logging.DEBUG)\n console_handler = logging.StreamHandler()\n formatter = logging.Formatter(fmt='%(asctime)s %(levelname)-8s %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S')\n console_handler.setFormatter(formatter)\n mc_logger.addHandler(console_handler)", "def setup_logger(logLevel=\"DEBUG\"):\n logroot = 
logging.getLogger(\"c\")\n logroot.propagate = False\n logroot.setLevel(logLevel)\n\n module_console_handler = logging.StreamHandler()\n\n # log_format_module = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\n # log_format_module = \"%(name)s - %(levelname)s: %(message)s\"\n # log_format_module = '%(levelname)s: %(message)s'\n log_format_module = \"%(name)s: %(message)s\"\n # log_format_module = \"%(message)s\"\n\n formatter = logging.Formatter(log_format_module)\n module_console_handler.setFormatter(formatter)\n\n logroot.addHandler(module_console_handler)\n\n logging.addLevelName(5, \"TRACE\")\n # use it like this\n # logroot.log(5, 'Exceedingly verbose debug')\n\n # example log line\n logg = logging.getLogger(f\"c.{__name__}.setup_logger\")\n logg.debug(f\"Done setting up logger\")", "def setup_logger(config):\n filename = config[\"LOGGER_FILE\"]\n log_dir = '/'.join(filename.split('/')[0:-1]) + \"/\"\n\n check_and_create_directory(log_dir)\n\n level = config[\"LOGGER_LOGLEVEL\"].upper()\n filemode = 'a'\n _format = '%(asctime)s %(name)8s %(module)15s %(funcName)12s %(' \\\n 'levelname)7s: %(message)s'\n _dateformat = '(%d.%m.%Y, %H:%M:%S)'\n\n logging.basicConfig(filename=filename, filemode=filemode, level=level,\n format=_format, datefmt=_dateformat)\n\n logging.getLogger(\"requests\").setLevel(logging.WARNING)\n logging.getLogger(\"werkzeug\").setLevel(logging.WARNING)\n\n # Display log simultaneously on console\n if config[\"CONSOLE_LOGGING\"]:\n add_terminal_logging(_format, level)", "def configure_logging():\n\n level = logging.INFO\n logging.getLogger().setLevel(level)\n logging.basicConfig(\n level=level,\n format=(\n \"[%(asctime)s][%(levelname)s][%(filename)s:%(lineno)d]\"\n + \"[%(processName)s] %(message)s\"\n ),\n )", "def init() -> None:\n\n\t\tif Logging.logger:\n\t\t\treturn\n\n\t\tLogging.enableFileLogging \t\t= Configuration.get('logging.enableFileLogging')\n\t\tLogging.enableScreenLogging\t\t= Configuration.get('logging.enableScreenLogging')\n\t\tLogging.stackTraceOnError\t\t= Configuration.get('logging.stackTraceOnError')\n\t\tLogging.enableBindingsLogging\t= Configuration.get('logging.enableBindingsLogging')\n\t\tLogging.queueSize\t\t\t\t= Configuration.get('logging.queueSize')\n\n\t\tLogging._configureColors(Configuration.get('cse.console.theme'))\n\n\t\tLogging.logger\t\t\t\t\t= logging.getLogger('logging')\t\t\t# general logger\n\t\tLogging.loggerConsole\t\t\t= logging.getLogger('rich')\t\t\t\t# Rich Console logger\n\t\tLogging._console\t\t\t\t= Console()\t\t\t\t\t\t\t\t# Console object\n\t\tLogging._richHandler\t\t\t= ACMERichLogHandler()\n\n\t\tLogging.setLogLevel(Configuration.get('logging.level'))\t\t\t\t\t# Assign the initial log level\n\n\t\t# Add logging queue\n\t\tLogging.queue = Queue(maxsize = Logging.queueSize)\n\t\tLogging.queueOn()\n\n\t\t# List of log handlers\n\t\tLogging._handlers = [ Logging._richHandler ]\n\t\t#Logging._handlers = [ ACMERichLogHandler() ]\n\n\t\t# Log to file only when file logging is enabled\n\t\tif Logging.enableFileLogging:\n\t\t\tfrom ..services import CSE as CSE\n\n\t\t\tlogpath = Configuration.get('logging.path')\n\t\t\tos.makedirs(logpath, exist_ok = True)# create log directory if necessary\n\t\t\tlogfile = f'{logpath}/cse-{CSE.cseType.name}.log'\n\t\t\tlogfp = logging.handlers.RotatingFileHandler(logfile,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t maxBytes = Configuration.get('logging.size'),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t backupCount = 
Configuration.get('logging.count'))\n\t\t\tlogfp.setLevel(Logging.logLevel)\n\t\t\tlogfp.setFormatter(logging.Formatter('%(levelname)s %(asctime)s %(message)s'))\n\t\t\tLogging.logger.addHandler(logfp) \n\t\t\tLogging._handlers.append(logfp)\n\n\t\t# config the logging system\n\t\tlogging.basicConfig(level = Logging.logLevel, format = '%(message)s', datefmt = '[%X]', handlers = Logging._handlers)\n\n\t\t# Start worker to handle logs in the background\n\t\tfrom ..helpers.BackgroundWorker import BackgroundWorkerPool\n\t\tLogging._logWorker = BackgroundWorkerPool.newActor(Logging.loggingActor, name = 'loggingWorker', ignoreException = True)\n\t\tLogging._logWorker.start()\t# Yes, this could be in one line but the _logworker attribute may not be assigned yet before the \n\t\t\t\t\t\t\t\t\t# actor callback is executed, and this might result in a None exception\n\n\t\t# React on config update. Only assig if it hasn't assigned before\n\t\tfrom ..services import CSE\n\t\tif not CSE.event.hasHandler(CSE.event.configUpdate, Logging.configUpdate):\t\t# type: ignore [attr-defined]\n\t\t\tCSE.event.addHandler(CSE.event.configUpdate, Logging.configUpdate)\t\t\t# type: ignore", "def _setup_logging(self, logger=None, **kwargs):\n if logger or len(logging.root.handlers) != 0:\n self._custom_logger = True\n else:\n # log_level is the only bootstrap config item\n boot_config = load_config(bootstrap=True, **kwargs)\n logging.config.dictConfig(default_config(level=boot_config.log_level))\n\n self._custom_logger = False\n\n return logger or logging.getLogger(__name__)", "def initialize_logging(log_level=logging.INFO):\n if not app.debug:\n print('Setting up logging...')\n\n # Set up default logging for submodules to use STDOUT\n # datefmt='%m/%d/%Y %I:%M:%S %p'\n fmt = '[%(asctime)s] %(levelname)s in %(module)s: %(message)s'\n logging.basicConfig(stream=sys.stdout, level=log_level, format=fmt)\n\n # Make a new log handler that uses STDOUT\n handler = logging.StreamHandler(sys.stdout)\n handler.setFormatter(logging.Formatter(fmt))\n handler.setLevel(log_level)\n\n # Remove the Flask default handlers and use our own\n handler_list = list(app.logger.handlers)\n for log_handler in handler_list:\n app.logger.removeHandler(log_handler)\n app.logger.addHandler(handler)\n app.logger.setLevel(log_level)\n app.logger.propagate = False\n app.logger.info('Logging handler established')", "def init_logging():\n app.logger.addHandler(logging.StreamHandler())\n log_level = app.config['LOG_LEVEL']\n app.logger.setLevel(getattr(logging, log_level))", "def initialize_logging(log_level=logging.INFO):\n if not app.debug:\n print 'Setting up logging...'\n # Set up default logging for submodules to use STDOUT\n # datefmt='%m/%d/%Y %I:%M:%S %p'\n fmt = '[%(asctime)s] %(levelname)s in %(module)s: %(message)s'\n logging.basicConfig(stream=sys.stdout, level=log_level, format=fmt)\n # Make a new log handler that uses STDOUT\n handler = logging.StreamHandler(sys.stdout)\n handler.setFormatter(logging.Formatter(fmt))\n handler.setLevel(log_level)\n # Remove the Flask default handlers and use our own\n handler_list = list(app.logger.handlers)\n for log_handler in handler_list:\n app.logger.removeHandler(log_handler)\n app.logger.addHandler(handler)\n app.logger.setLevel(log_level)\n app.logger.info('Logging handler established')", "def init_logging():\n logger.setLevel(logging.DEBUG)\n # set a common log format\n logFormatter = logging.Formatter(\"%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s\")\n # setup our rotating 
file handler and assign our common formatter to it\n rotating_file_handler = RotatingFileHandler('my_log.log', maxBytes=200000, backupCount=10)\n rotating_file_handler.setFormatter(logFormatter)\n logger.addHandler(rotating_file_handler)\n \n if DEBUG:\n # print to stdout if we are debugging\n stream_handler = logging.StreamHandler(sys.stdout)\n stream_handler.setFormatter(logFormatter)\n logger.addHandler(stream_handler)", "def setup_logging_with_config(config: DynaBox):\n global logger\n logger = setup_logging_threatbus(config, logger_name)", "def setup_logging():\n for name, logger in loggers.items():\n logger.setLevel(LOGGING_MAPPING.get(options.logging, logging.DEBUG))\n handler = logging.FileHandler(\n getattr(options, '{}_log_file_path'.format(name))\n )\n formatter = logging.Formatter(\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n handler.setFormatter(formatter)\n logger.addHandler(handler)", "def setup(self, cfg: Config) -> None:\n super().setup(cfg)\n\n if DEBUG:\n self.loglevel = logging.DEBUG\n else:\n self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO)\n\n self.error_log.setLevel(self.loglevel)", "def setup_logging(log_level=logging.INFO, log_filename=None) -> Logger:\n logger = logging.getLogger()\n\n # Set log format to dislay the logger name to hunt down verbose logging modules\n fmt = \"%(name)-25s %(levelname)-8s %(message)s\"\n\n # Use colored logging output for console\n coloredlogs.install(level=log_level, fmt=fmt, logger=logger)\n\n # Quiet some internal logs\n logging.getLogger(\"dex_ohlcv.eventscanner\").setLevel(logging.INFO)\n\n # Disable logging of JSON-RPC requests and reploes\n logging.getLogger(\"web3.RequestManager\").setLevel(logging.WARNING)\n logging.getLogger(\"web3.providers.HTTPProvider\").setLevel(logging.WARNING)\n # logging.getLogger(\"web3.RequestManager\").propagate = False\n\n # Disable all internal debug logging of requests and urllib3\n # E.g. 
HTTP traffic\n logging.getLogger(\"requests\").setLevel(logging.WARNING)\n logging.getLogger(\"urllib3\").setLevel(logging.WARNING)\n\n # IPython notebook internal\n logging.getLogger(\"asyncio\").setLevel(logging.WARNING)\n\n # Datadog tracer agent\n # https://ddtrace.readthedocs.io/en/stable/basic_usage.html\n logging.getLogger(\"ddtrace\").setLevel(logging.INFO)\n\n # Flooding of OpenAPI spec debug notes on startup\n logging.getLogger(\"openapi_spec_validator\").setLevel(logging.WARNING)\n\n if log_filename:\n # Append to the log file\n handler = logging.FileHandler(log_filename, 'w+')\n formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n\n return logger", "def initialize_logging(log_level=logging.INFO):\n if not app.debug:\n print('Setting up logging...')\n # Set up default logging for submodules to use STDOUT\n # datefmt='%m/%d/%Y %I:%M:%S %p'\n fmt = '[%(asctime)s] %(levelname)s in %(module)s: %(message)s'\n logging.basicConfig(stream=sys.stdout, level=log_level, format=fmt)\n # Make a new log handler that uses STDOUT\n handler = logging.StreamHandler(sys.stdout)\n handler.setFormatter(logging.Formatter(fmt))\n handler.setLevel(log_level)\n # Remove the Flask default handlers and use our own\n handler_list = list(app.logger.handlers)\n for log_handler in handler_list:\n app.logger.removeHandler(log_handler)\n app.logger.addHandler(handler)\n app.logger.setLevel(log_level)\n app.logger.info('Logging handler established')", "def _initialize_logging(self):\n if self._custom_logger:\n self._logger.debug(\"Skipping logging init: custom logger detected\")\n return\n\n try:\n log_config = self._ez_client.get_logging_config(\n local=bool(self._config.runner_id)\n )\n except Exception as ex:\n self._logger.warning(\n \"Unable to retrieve logging configuration from Beergarden, the default \"\n \"configuration will be used instead. Caused by: {0}\".format(ex)\n )\n return\n\n try:\n configure_logging(\n log_config,\n namespace=self._system.namespace,\n system_name=self._system.name,\n system_version=self._system.version,\n instance_name=self._config.instance_name,\n )\n except Exception as ex:\n # Reset to default config as logging can be seriously wrong now\n logging.config.dictConfig(default_config(level=self._config.log_level))\n\n self._logger.exception(\n \"Error encountered during logging configuration. This most likely \"\n \"indicates an issue with the Beergarden server plugin logging \"\n \"configuration. The default configuration will be used instead. 
Caused \"\n \"by: {0}\".format(ex)\n )\n return\n\n # Finally, log uncaught exceptions using the configuration instead of stderr\n self._set_exception_hook(self._logger)", "def _init_logger(self):\n self.logger = logging.getLogger('WSClientAPILogger')\n self.logger.setLevel(logging.DEBUG)\n self.logger_handler = logging.FileHandler(self.__class__.__name__ + '.log')\n self.logger_handler.setLevel(logging.DEBUG)\n self.logger_formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s',\n datefmt='%d-%m %H:%M:%S')\n self.logger_handler.setFormatter(self.logger_formatter)\n self.logger.addHandler(self.logger_handler)", "def _set_logging():\n log_level = os.getenv(\"TRIKI_LOG_LEVEL\", \"INFO\")\n quiet = os.getenv(\"TRIKI_NO_LOG_FILE\")\n handlers = [logging.StreamHandler()]\n if not quiet:\n handlers.append(logging.FileHandler(\"triki_click_analysis.log\"))\n logging.basicConfig(\n level=log_level,\n format=\"%(asctime)-15s %(levelname)s: %(message)s\",\n handlers=handlers,\n )", "def setup_logger(log=None, level='INFO'):\n if not log:\n log = logging.getLogger()\n if not log.handlers:\n channel = logging.StreamHandler()\n channel.setFormatter(DebugLogFormatter())\n\n log.setLevel(level)\n log.addHandler(channel)\n\n # setup styling for repo loggers\n repo_logger = logging.getLogger('libvcs')\n channel = logging.StreamHandler()\n channel.setFormatter(RepoLogFormatter())\n channel.addFilter(RepoFilter())\n repo_logger.setLevel(level)\n repo_logger.addHandler(channel)", "def setup_logs(arg_log_dir, log_level='debug'):\n assert log_level.lower() in ('debug', 'info', 'warning', 'error', 'critical')\n global logger\n cl_logger = log.LogManager(app_name=APP_NAME,\n log_name=__name__,\n log_dir=arg_log_dir)\n logger = cl_logger.logger\n logger.setLevel(log_level.upper())", "def _init_logging(self):\n # Setup logging variable\n self.log = logging.getLogger(\"collection-log\")\n self.log.setLevel(logging.INFO)\n self.formatter = logging.Formatter(\"%(asctime)s %(message)s\", \"%Y-%m-%d %H:%M:%S\")\n\n # Log to stdout\n streamhandler = logging.StreamHandler()\n streamhandler.setLevel(logging.INFO)\n streamhandler.setFormatter(self.formatter)\n self.log.addHandler(streamhandler)", "def loggerSetup(logLevel=logging.INFO):\n logger = logging.getLogger(__name__)\n outHandler = logging.StreamHandler(sys.stdout)\n outHandler.setFormatter(logging.Formatter(\"%(asctime)s:%(levelname)s:%(module)s: %(message)s\"))\n outHandler.setLevel(logLevel)\n logger.addHandler(outHandler)\n logger.setLevel(logLevel)\n return logger", "def setup_logging(verbose=False):\n\n logger = logging.getLogger()\n logger.setLevel(logging.INFO if not verbose else logging.DEBUG)", "def setup_logger(args):\n\timport logging\n\timport sys\n\timport pplogger\n\n\tlogger = None\n\tif args.NoLogger:\n\t\tlogger = pplogger.Logger(name=current_script_name, log_dir=args.output_dir, log_format=1, enabled=False).get()\n\telse:\n\t\tcurrent_script_name = os.path.basename(__file__).replace('.py','')\n\t\tlogger = pplogger.Logger(name=current_script_name, log_dir=args.output_dir, log_format=1, enabled=True).get() # gives logname --> snapsnap_query.py\n\t\tlogger.setLevel(logging.DEBUG)\n\t\t## This works. 
Exceptions are written to the log AND printed to sys.stderr\n\t\t## An alternative solution is to make one big \"try except\" block in main:\n\t\tdef handleException(excType, excValue, traceback, logger=logger):\n\t\t\tlogger.error(\"Logging an uncaught exception\", exc_info=(excType, excValue, traceback))\n\t\tsys.excepthook = handleException\n\treturn logger", "def log_setup():\n logger = logging.getLogger('diskover')\n logger_warn = logging.getLogger('diskover_warn')\n eslogger = logging.getLogger('elasticsearch')\n diskover_eslogger = logging.getLogger('diskover_elasticsearch')\n loglevel = config['logLevel'].get()\n if options.debug:\n loglevel = 'DEBUG'\n if loglevel == 'DEBUG':\n loglevel = logging.DEBUG\n elif loglevel == 'INFO':\n loglevel = logging.INFO\n else:\n loglevel = logging.WARN\n logformat = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\n if logtofile:\n # create log file name using top dir names and datestamp\n treedirsstr = ''\n if args:\n n = 1\n dirs = args[0:]\n x = len(dirs)\n for d in dirs:\n if d != '/':\n d = d.rstrip('/')\n treedirsstr += os.path.basename(d)\n if n < x:\n treedirsstr += '_'\n n += 1\n else:\n treedirsstr = os.path.basename(os.getcwd())\n logfiletime = datetime.now().isoformat()\n logname = 'diskover_' + treedirsstr + '_' + logfiletime + '.log'\n logfile = os.path.join(logdir, logname)\n handler_file = logging.FileHandler(logfile)\n handler_file.setFormatter(logging.Formatter(logformat))\n logger.setLevel(loglevel)\n logger.addHandler(handler_file)\n # console logging\n handler_con = logging.StreamHandler()\n handler_con.setFormatter(logging.Formatter(logformat))\n logger.addHandler(handler_con)\n # warnings log\n logname_warn = 'diskover_' + treedirsstr + '_' + logfiletime + '_warnings.log'\n logfile_warn = os.path.join(logdir, logname_warn)\n handler_warnfile = logging.FileHandler(logfile_warn)\n handler_warnfile.setFormatter(logging.Formatter(logformat))\n logger_warn.setLevel(logging.WARN)\n logger_warn.addHandler(handler_warnfile)\n # es logger\n eslogger.setLevel(logging.WARN)\n eslogger.addHandler(handler_file)\n eslogger.addHandler(handler_con)\n # diskover es logger\n diskover_eslogger.setLevel(loglevel)\n diskover_eslogger.addHandler(handler_file)\n diskover_eslogger.addHandler(handler_con)\n else:\n handler_file = None\n handler_warnfile = None\n handler_con = None\n logging.basicConfig(format=logformat, level=loglevel)\n eslogger.setLevel(logging.WARN)\n return logger, logger_warn, loglevel, logformat, \\\n handler_file, handler_warnfile, handler_con", "def setup_logging(log_level=logging.DEBUG):\n logging.basicConfig(level=log_level)\n fmt = \"%(asctime)s %(levelname)s (%(threadName)s) \" \"[%(name)s] %(message)s\"\n colorfmt = \"%(log_color)s{}%(reset)s\".format(fmt)\n datefmt = \"%Y-%m-%d %H:%M:%S\"\n\n try:\n from colorlog import ColoredFormatter\n\n logging.getLogger().handlers[0].setFormatter(\n ColoredFormatter(\n colorfmt,\n datefmt=datefmt,\n reset=True,\n log_colors={\n \"DEBUG\": \"cyan\",\n \"INFO\": \"green\",\n \"WARNING\": \"yellow\",\n \"ERROR\": \"red\",\n \"CRITICAL\": \"red\",\n },\n )\n )\n except ImportError:\n pass\n\n logger = logging.getLogger(\"\")\n logger.setLevel(log_level)" ]
[ "0.8276232", "0.8171842", "0.7997143", "0.79586464", "0.7953245", "0.7951404", "0.79431313", "0.787981", "0.78718454", "0.7863005", "0.785225", "0.7807847", "0.7795416", "0.77918327", "0.77759767", "0.77299064", "0.77242017", "0.7685312", "0.76751614", "0.7672853", "0.76626015", "0.76351655", "0.761925", "0.760317", "0.7538775", "0.752675", "0.7512444", "0.746848", "0.74585116", "0.7457683", "0.744855", "0.74194956", "0.74001276", "0.73991203", "0.73977876", "0.73959565", "0.7392454", "0.7383097", "0.7382037", "0.73681116", "0.73614764", "0.73566616", "0.7347925", "0.7345382", "0.7320901", "0.7320099", "0.73140925", "0.73059845", "0.7302085", "0.73016024", "0.72901773", "0.72718656", "0.72618127", "0.72597796", "0.7244514", "0.72415656", "0.72330403", "0.7232687", "0.7222446", "0.7220377", "0.7209516", "0.72093505", "0.7200301", "0.71888715", "0.71767443", "0.71703166", "0.71596605", "0.7152738", "0.7146792", "0.7140492", "0.7124764", "0.7112772", "0.71050346", "0.71048903", "0.7101193", "0.70962155", "0.709452", "0.70801747", "0.70756185", "0.7075565", "0.70754504", "0.70724094", "0.7071534", "0.7069642", "0.7058093", "0.70543844", "0.7040389", "0.7034424", "0.7031352", "0.70284027", "0.7028325", "0.7027194", "0.70178723", "0.70136255", "0.6982933", "0.69703424", "0.6969428", "0.6955249", "0.694606", "0.69321203", "0.69311625" ]
0.0
-1
Parse all the |args| and save the results to |namespace|.
def parse_args(self, args=None, namespace=None): # This will call our parse_known_args below, so don't use setup_logging. namespace = argparse.ArgumentParser.parse_args( self, args=args, namespace=namespace) return namespace
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse(self, args):\n pass", "def parse_arguments(args):", "def parse_args(self, args: List[str]) -> Namespace:\n parser = self._to_parser()\n args = parser.parse_args(args)\n if hasattr(args, 'dm_commands'):\n if args.dm_commands is not None:\n args.dm_commands = parse_commands(args.dm_commands)\n else:\n args.dm_commands = list()\n if hasattr(args, 'dm_options'):\n if args.dm_options is not None:\n args.dm_options = parse_options(args.dm_options)\n else:\n args.dm_options = dict()\n LOG.debug(f\"Arguments: {args}.\")\n return args", "def parse(self, args_str=None):\n self._namespace = vars(self._parser.parse_args(args_str))", "def parse_args(self, args):\n raise Exception(\"Not implemented\")", "def _parse(self, args):\n parser = self._create_parser()\n return parser.parse(args)", "def __parse_args(self):\n for argument in self.args:\n source_arg = re.match(\"^(--source=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n input_arg = re.match(\"^(--input=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n stats_arg = re.match(\"^(--stats=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n help_arg = re.match(\"^--help$\", argument)\n vars_arg = re.match(\"^--vars$\", argument)\n insts_arg = re.match(\"^--insts$\", argument)\n if source_arg:\n self.sourceFile = source_arg.group(2)\n self.passedArgs.append(\"source\")\n elif input_arg:\n self.inputFile = input_arg.group(2)\n self.passedArgs.append(\"input\")\n elif help_arg:\n print(\"napoveda\")\n sys.exit(0)\n elif stats_arg:\n self.statsFile = stats_arg.group(2)\n self.passedArgs.append(\"stats\")\n elif vars_arg:\n self.passedArgs.append(\"vars\")\n if self.first_stat_arg is None:\n self.first_stat_arg = \"vars\"\n elif insts_arg:\n self.passedArgs.append(\"insts\")\n if self.first_stat_arg is None:\n self.first_stat_arg = \"insts\"\n else:\n raise ArgError(\"Unknown argument or format of the argument! (\" + argument + \")\")", "def parse_args(args=None):\n return AP.parse_args(args=args)", "def parse_args(self):\n return self.__process_args__(self.parser.parse_args())", "def parse_args(self, args=None, namespace=None):\n\n arguments = self._parser.parse_args(args, namespace)\n\n if arguments.usage:\n self._action = _formulate_action(\n ProgramUsageAction,\n parser=self._parser,\n exitf=self._parser.exit)\n\n elif arguments.version:\n self._action = _formulate_action(\n ShowVersionAction,\n prog=self._parser.prog,\n ver=self.versionString,\n year=self.yearString,\n author=self.authorName,\n license=self.programLicense,\n exitf=self._parser.exit)\n\n else:\n self._action = _formulate_action(\n DefaultAction,\n prog=self._parser.prog,\n exitf=self._parser.exit,\n imagefile=arguments.imagefile)", "def parse_args(args=None):\n\t\treturn _get_args_parser().parse_args(args)", "def ParseArgs() -> argparse.Namespace:\n parser = argparse.ArgumentParser()\n\n parser.add_argument(\n '--input', dest='input', help='Japanese phonetic reading file')\n parser.add_argument(\n '--output_token_array',\n dest='output_token_array',\n help='Output token array file.')\n parser.add_argument(\n '--output_string_array',\n dest='output_string_array',\n help='Output string array file.')\n return parser.parse_args()", "def parse_args(args: Optional[Sequence[str]] = None) -> Namespace:\n\n parser = ArgumentParser(\n description=(\n \"Takes a snapshot of a WordPress website remotely and \"\n \"stores it to either a local or a remote location. 
This \"\n \"requires rsync, php-cli and mysqldump\"\n )\n )\n\n parser.add_argument(\n \"source\",\n help=(\n \"Source directory for the WordPress installation dir. Syntax: \"\n \"`/var/www` or `user@host:/var/www`\"\n ),\n type=parse_location,\n )\n parser.add_argument(\n \"backup_dir\", help=\"Directory to store the snapshot\", type=parse_location\n )\n parser.add_argument(\n \"-n\",\n \"--snapshot-base-name\",\n help=\"Base name for the snapshot file. Defaults to DB name.\",\n )\n parser.add_argument(\n \"-t\",\n \"--file-name-template\",\n help=\"Template for snapshot file name. Defaults to: `{base}_{time}.tar.gz`\",\n default=\"{base}_{time}.tar.gz\",\n )\n parser.add_argument(\n \"-c\",\n \"--compression-mode\",\n help=\"Compression mode for tar (gzip, bzip2, lzip, xz). Defaults to: gzip\",\n default=\"gzip\",\n const=\"gzip\",\n nargs=\"?\",\n choices=[\"gzip\", \"bzip2\", \"lzip\", \"xz\"],\n )\n parser.add_argument(\n \"--db-host\",\n help=(\n \"Optional IP address of the database server, if IP of the wpconfig.php is a local one.\"\n ),\n default=None,\n const=None,\n nargs=\"?\",\n )\n parser.add_argument(\n \"--maintenance-mode\",\n help=(\n \"Activate maintenance mode before copying files to prevent conflicting file changes.\"\n ),\n action=\"store_true\",\n )\n parser.add_argument(\n \"--exclude\",\n help=(\n \"Exclude source files/directories, given as PATTERN. See tar command manual page.\"\n ),\n action=\"append\",\n )\n parser.add_argument(\n \"--exclude-tag-all\",\n help=(\n \"Exclude source directories and all its content, where FILE is inside. See tar command manual page.\"\n ),\n action=\"append\",\n )\n\n parsed_args = parser.parse_args(args)\n\n # apply compression mode to file name template\n if parsed_args.compression_mode == \"bzip2\":\n parsed_args.file_name_template = re.sub(\".gz\", \".bz2\", parsed_args.file_name_template)\n parsed_args.backup_dir.set_compression_mode(parsed_args.compression_mode)\n elif parsed_args.compression_mode == \"lzip\":\n parsed_args.file_name_template = re.sub(\".gz\", \".lz\", parsed_args.file_name_template)\n parsed_args.backup_dir.set_compression_mode(parsed_args.compression_mode)\n elif parsed_args.compression_mode == \"xz\":\n parsed_args.file_name_template = re.sub(\".gz\", \".xz\", parsed_args.file_name_template)\n parsed_args.backup_dir.set_compression_mode(parsed_args.compression_mode)\n\n return parsed_args", "def parse_args():\n global Args\n parser = argparse.ArgumentParser()\n subparsers = parser.add_subparsers()\n pars_simulation(subparsers)\n pars_analyze(subparsers)\n Args = parser.parse_args()", "def parse_known_args(self, args=None, namespace=None):\n namespace, unknown_args = argparse.ArgumentParser.parse_known_args(\n self, args=args, namespace=namespace)\n setup_logging(debug=namespace.debug, quiet=namespace.quiet)\n return (namespace, unknown_args)", "def update_args(self, args):\n self.args = self.parser.parse_args(args)", "def parse_args(args):\n\n parser = argparse.ArgumentParser(description=\"Scrape jobs and store results.\")\n\n parser.add_argument(\"--version\",\n action=\"version\",\n version=\"scrape-jobs {ver}\".format(ver=__version__))\n\n parser.add_argument(\"-v\",\n \"--verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action=\"store_const\",\n const=logging.INFO)\n\n parser.add_argument(\"-vv\",\n \"--very-verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action=\"store_const\",\n const=logging.DEBUG)\n\n default_file = 
str(Path.cwd().joinpath(config.CONFIG_FILENAME).absolute())\n parser.add_argument(\"-c\",\n dest=\"config_file\",\n action=\"store\",\n default=default_file,\n type=str,\n help=f\"defaults to '{default_file}'\")\n\n parser.add_argument(dest=\"site\",\n action=\"store\",\n choices={\"seek.com.au\", \"linkedin.com\"},\n type=str,\n help=\"site to scrape\")\n\n return parser.parse_args(args)", "def _parse_args(args=None):\n parser = argparse.ArgumentParser()\n\n # general\n parser.add_argument(\"-f\", \"--files\",\n help=\"Files from analysis, separated by comma\",\n metavar=\"TwissFile\", dest=\"files\", required=True)\n parser.add_argument(\"--twissfile\",\n help=\"Twiss file to use\",\n metavar=\"/path/to/twiss.dat\", dest=\"twissfile\", required=True)\n parser.add_argument(\"-o\", \"--output\",\n help=\"Output path, where to store the results\",\n metavar=\"<path>\", default=\"./\", dest=\"output_path\")\n parser.add_argument(\"-t\", \"--algorithm\",\n help=\"Which algorithm to use {:s}\".format(ALGO_CHOICES),\n metavar=\"ALGORITHM\", default=ALGO_CHOICES[0], dest=\"algorithm\",\n choices=ALGO_CHOICES)\n parser.add_argument(\"-d\", \"--deltapScalingFactor\",\n help=\"Scaling factor for deltap, remember final value must be in MAD units\",\n metavar=\"<deltapScalingFactor>\", default=1.0, type=float, dest=\"deltap_scaling_factor\")\n\n # parse arguments\n accel_cls, remain_args = manager.get_accel_class_from_args(args)\n options = parser.parse_args(remain_args)\n source_files = [f.strip() for f in options.files.split(',')]\n\n # put all arguments into one dict\n options_dict = {\n \"accel_cls\": accel_cls,\n \"source_files\": source_files,\n }\n options_dict.update(options.__dict__)\n\n options_dict.pop(\"files\") # is \"source_files\" now\n\n return options_dict", "def run(self, args: argparse.Namespace) -> None:\n pass", "def parse(self, args=None, namespace=None):\n orginal_args = sys.argv[1:]\n subparsers, args = self._fix_parsers()\n subparser = list(set(subparsers.keys()) & set(args))\n known, unknown = self.parse_known_args(args, namespace)\n\n if \"-h\" in unknown or \"--help\" in unknown:\n if len(orginal_args) == 1 and (\"-h\" in unknown or \"--help\" in unknown):\n self.print_message(self.title+\"\\n\")\n self.print_help()\n exit(0)\n elif len(subparser) == 1:\n subparsers[subparser[0]].print_help()\n exit(0)\n if unknown:\n msg = 'unrecognized arguments: %s'\n self.error(msg % ' '.join(unknown))\n\n return known", "def parse_args():\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument(\n \"-i\", \"--input\", required=True, action=\"store\", dest=\"f_in\", help=\"input file\"\n )\n parser.add_argument(\n \"-o\",\n \"--output\",\n required=True,\n action=\"store\",\n dest=\"f_out\",\n help=\"stem of output file\",\n )\n\n parser.add_argument(\n \"-d\",\n \"--dir\",\n required=True,\n action=\"store\",\n dest=\"dir\",\n help=\"directory to save output files\",\n )\n\n parser.add_argument(\n \"-t\",\n \"--threshold\",\n required=False,\n action=\"store\",\n dest=\"thres\",\n default=0.85,\n help=\"threshold for the scoring function\",\n )\n\n parser.add_argument(\n \"-l\",\n \"--language\",\n required=True,\n action=\"store\",\n dest=\"lang\",\n help=\"provide language in order to set stop words\",\n )\n\n parser.add_argument(\n \"-min\",\n \"--minimum\",\n required=False,\n action=\"store\",\n dest=\"min\",\n default=100,\n help=\"minimum number of occurrences to be considered as ngram\",\n )\n\n parser.add_argument(\n \"--trigram\",\n required=False,\n 
action=\"store_true\",\n dest=\"trigram\",\n help=\"extracting trigrams in addition to bigrams\",\n )\n\n return parser.parse_args()", "def parse_args() -> argparse.Namespace:\n parser = argparse.ArgumentParser(description='X sandbox.')\n # 一次可以测试多个函数\n parser.add_argument('-fl', '--function_name_list', type=str,\n dest='function_name_list', required=True,\n help='test objective function names list.')\n # 权重列表 for ScalarizedObjective.\n parser.add_argument('-wl', '--weight_list', type=str,\n dest='weight_list', required=True,\n help='weight list.')\n # 协方差矩阵\n parser.add_argument('-cm', '--covariance_matrix', type=str,\n dest='covariance_matrix', required=True,\n help='covariance matrix.')\n # 是否估计协方差\n parser.add_argument('-ec', '--evaluate_covariance', type=int,\n dest='evaluate_covariance', default=0,\n help='0: False; 1: True(evaluate covariance)')\n # # 桶数\n # parser.add_argument('-nb', '--num_bucket', type=int,\n # dest='num_bucket', required=False,\n # default=1, help='number of bucket.')\n # # 样本量\n # parser.add_argument('-ns', '--num_sample', type=int,\n # dest='num_sample', required=False,\n # default=1, help='number of sample.')\n # 随机迭代次数\n parser.add_argument('-ii', '--init_iter', type=int,\n dest='init_iter', required=False,\n default=1, help='iteration of sobol generation.')\n # 随机迭代时每次迭代的组数\n parser.add_argument('-ibs', '--init_batch_size', type=int,\n dest='init_batch_size', required=False,\n default=1, help='number of sobol generation.')\n # BO更新的迭代次数\n parser.add_argument('-ui', '--update_iter', type=int,\n dest='update_iter', required=False,\n default=20, help='number of GP(N)EI generation.')\n # 实验组数\n parser.add_argument('-bs', '--batch_size', type=int,\n dest='batch_size', required=False,\n default=1, help='number of trial each iter.')\n # 方差系数 deprecated\n parser.add_argument('-vc', '--var_coef', type=int,\n dest='var_coef', required=False,\n default=1, help='variance coef.')\n # 方差计算方法\n parser.add_argument('-vct', '--var_compute_type', type=int,\n dest='var_compute_type', required=False,\n default=1, help='variance compute type: 0 1 2 3 4')\n\n # 采样数\n parser.add_argument('-nr', '--num_random', type=int,\n dest='num_random', required=False,\n default=10000, help='num_random for gen samples')\n # 桶数\n parser.add_argument('-nb', '--num_bucket', type = int,\n dest='num_bucket', required=False,\n default=50, help='number of bucket.')\n # 对照组数\n # parser.add_argument('-nc', '--num_control', type=int,\n # dest='num_control', required=False,\n # default=0, help='number of control arms each iter.')\n # 保存路径\n parser.add_argument('-sp', '--save_path', type=str, dest='save_path',\n default=\"/mnt/wfs/mmcommwfssz/project_wx-td-itil-exp/\" + \\\n \"bo_test_output/covariance_test\",\n help=(\"helper directory.\"))\n return parser.parse_args()", "def parse_args(args):\n\n parser = argparse.ArgumentParser(\n description=\"\"\"Generates and runs an afni_proc.py script to preprocess resting state fMRI data\"\"\",\n formatter_class=argparse.RawDescriptionHelpFormatter)\n\n # Optional Flags\n parser.add_argument(\"-t\", \"--trs_remove\", action=\"store\", default=5, type=int, metavar='TRs',\n help=\"\"\"number of trs to remove at the beginning of the epi data\n (default = 5 trs)\"\"\")\n parser.add_argument(\"-d\", \"--dim_voxel\", action=\"store\", default=2.0, type=float, metavar='MM',\n help=\"voxel dimensions in mm that processed epi will be resampled to (default = 2.0 mm)\")\n parser.add_argument(\"-b\", \"--bandpass\", action=\"store\", default=[0.01, 0.25], nargs=2, 
type=float, metavar=\"F\",\n help=\"bandpass frequencies lower and upper limits (default = 0.01 0.25)\")\n parser.add_argument(\"-v\", \"--volumes\", action=\"store\", default=0, type=int, metavar=\"V\",\n help=\"\"\"truncate the epi data to the inputted number of volumes, useful if subjects have data \n with different numbers of volumes (default = no truncation)\"\"\")\n parser.add_argument(\"-f\", \"--fwhm\", action=\"store\", default=5.0, type=float, metavar=\"MM\",\n help=\"the full width half maximum that is used when blurring (default = 5.0 mm)\")\n parser.add_argument(\"-c\", \"--cores\", action=\"store\", default=cpu_count(), type=int, metavar=\"C\",\n help=\"number of cores supplied to 3dDeconvolve (default = all cores)\")\n parser.add_argument(\"-s\", \"--subj_id\", action=\"store\", default=\"sub\", metavar=\"SUB\",\n help=\"text file of subject ids (default = sub)\")\n parser.add_argument(\"-T\", \"--time_step\", action=\"store\", default=0, type=float, metavar=\"TS\",\n help=\"set the time step for bandpassing (default = ts in header info\")\n\n parser.add_argument(\"-g\", \"--global_signal_regression\", action=\"store_false\", default=True,\n help=\"do not perform global signal regression (default = perform gsr)\")\n\n parser.add_argument(\"-r\", \"--rerun\", action=\"store_true\", default=False,\n help=\"\"\"rerun preprocessing, override and delete previous results in \n 'Processed' folder (default = don't override)\"\"\")\n parser.add_argument(\"-m\", \"--motion_param\", action=\"store_true\", default=False,\n help=\"use 12 motion parameters for regression (default = 6 motion parameters)\")\n parser.add_argument(\"-G\", \"--gm_blur\", action=\"store_true\", default=False,\n help=\"blur only in grey matter mask (default = blur in whole brain)\")\n parser.add_argument(\"-n\", \"--nl_reg\", action=\"store_true\", default=False,\n help=\"use non-linear warp between anatomical and MNI template (default = linear warp)\")\n\n # Required Inputs\n required = parser.add_argument_group(\"required arguments\")\n required.add_argument(\"-e\", \"--epi\", action=\"store\", required=True,\n help=\"text file of paths to raw epi data\")\n required.add_argument(\"-a\", \"--anat\", action=\"store\", required=True,\n help=\"text file of paths to raw anatomical data\")\n required.add_argument(\"-o\", \"--out_dir\", action=\"store\", required=True, metavar=\"OUT\",\n help=\"text file of paths to output directory\")\n result = parser.parse_args(args)\n\n # Make sure inputted parameters are legal\n assert (os.path.isfile(result.epi)), \"{} does not exist or is not a file\".format(result.epi)\n assert (os.path.isfile(result.anat)), \"{} does not exist or is not a file\".format(result.ant)\n assert (result.trs_remove >= 0), \"Cannot remove negative trs\"\n assert (result.dim_voxel >= 0), \"Cannot have a negative voxel dimension\"\n assert (np.all(np.array(result.bandpass) > 0)), \"Cannot have a negative frequency limit for bandpassing\"\n assert (result.volumes > -1), \"Number of volumes must be greater than 0\"\n assert (result.cores > 0), \"Number of cores used must be greater than 0\"\n assert (result.time_step > -1), \"Time step must be greater than 0\"\n\n return result", "def parse_args(self, argv=None):\n self.opts, self.args = self.cli_parser.parse_args(argv)\n self._begin_logging()\n if argv is None:\n argv = sys.argv\n logger.info(' '.join(argv))\n self._process_input_files()\n self._construct_links_of_interest()\n self._open_output_files()\n data = self._construct_data_struct()\n return 
data", "def parse_args():\n parser = argparse.ArgumentParser(description='Parse flags to configure the json parsing')\n parser.add_argument(\"-f\", \"--format\", help=\"output format: (csv|tsv|json)\", choices=[\"csv\", \"tsv\", \"json\"],\n default=\"tsv\")\n parser.add_argument(\"-p\", \"--parallelized\", help=\"save output in parallelized or single file format\",\n action=\"store_true\")\n parser.add_argument(\"-i\", \"--input\", help=\"folder where input documents are\", default=\"data\")\n parser.add_argument(\"-o\", \"--output\", help=\"folder where output documents are\", default=\"cleaned\")\n parser.add_argument(\"-d\", \"--documentformat\", help=\"combine all features into a single text per post\",\n action=\"store_true\")\n parser.add_argument(\"-pa\", \"--partitions\", help=\"number of spark partitions\",\n default=1)\n args = parser.parse_args()\n return args", "def parseArgs(args):\n parser= argparse.ArgumentParser(description = __doc__)\n parser.add_argument (\"manifestFile\",\n help = \" The input file. \",\n action = \"store\")\n parser.add_argument (\"outputDir\",\n help = \" The output directory, usually named kallistoOut. \",\n action = \"store\")\n parser.add_argument (\"--isMouse\",\n help = \" This is mouse data, use a mouse transcriptome.\",\n action = \"store_true\")\n parser.add_argument (\"--single\",\n help = \" This is single data (not paired end data).\",\n action = \"store_true\")\n parser.add_argument (\"--test\",\n help = \" Generate the jobList then stop.\",\n action = \"store_true\")\n parser.add_argument (\"--bootstrap\",\n help = \" Enable bootstrapping, please provide an integer. Defaults to 10. \", \n action = \"store\",\n type = int)\n parser.add_argument (\"--verbose\",\n help = \" Spit out messages during runtime. \",\n action = \"store_true\")\n\n parser.set_defaults(single = False)\n parser.set_defaults(verbose = False)\n parser.set_defaults(isMouse = False)\n parser.set_defaults(bootstrap = 10)\n options = parser.parse_args()\n return options", "def read_args(self):\n parser = argparse.ArgumentParser()\n parser.add_argument('--algorithm', '-a', default='onelayer',\n choices=['onelayer', 'randomforest', 'sos'],\n help='which algorithm to run')\n parser.add_argument('--format', '-f', default='pcap',\n choices=['netflow', 'pcap'],\n help='which format are the files to process in')\n parser.add_argument('--operation', '-o', default='eval',\n choices=['eval', 'train', 'test'],\n help='which operation to run')\n parser.add_argument('--sos_model', '-s', default='networkml/trained_models/sos/SoSmodel',\n help='path to SoSmodel')\n parser.add_argument('--trained_model', '-m', default='networkml/trained_models/onelayer/OneLayerModel.pkl',\n help='path to the trained model file')\n parser.add_argument('--path', '-p', default='/pcaps',\n help='path to file or directory of files to process')\n parser.add_argument('--save', '-w', default='networkml/trained_models/onelayer/OneLayerModel.pkl',\n help='path to save the trained model, if training')\n\n self.args = parser.parse_args()\n return", "def parse_args():\n parser = argparse.ArgumentParser(description='Args')\n parser.add_argument('--out_dir', default='_out', type=str, help='output folder')\n args = parser.parse_args()\n\n return args", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Command line bot application, e.g. 
bot how do you work?\")\n parser.add_argument(\n '--version',\n action='version',\n version='nlpia_bot {ver}'.format(ver=__version__))\n parser.add_argument(\n '--name',\n default=\"bot\",\n dest=\"nickname\",\n help=\"IRC nick or CLI command name for the bot\",\n type=str,\n metavar=\"STR\")\n parser.add_argument(\n '--personality',\n default=\"\",\n dest=\"personality\",\n help=\"comma-separated personalities to load into bot: search_movie,pattern_greet,search_ds,generate_spanish\",\n type=str,\n metavar=\"STR\")\n parser.add_argument(\n '-v',\n '--verbose',\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action='store_const',\n const=logging.INFO)\n parser.add_argument(\n '-vv',\n '--very-verbose',\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action='store_const',\n const=logging.DEBUG)\n parser.add_argument(\n 'words',\n type=str,\n nargs='+',\n help=\"Words to pass to bot as an utterance or conversational statement requiring a bot reply or action.\")\n return parser.parse_args(args)", "def add_args(self, parser):", "def _parse_args(self, args : dict):\n result = {}\n for key, value in args.items():\n if key in self._subparsers:\n # if it's a list, it is because it's a preset\n if isinstance(value, list):\n result[key] = value[0]\n else:\n result[key] = self._subparsers[key]._parse_args(value)\n elif key in self._actions:\n result[key] = self._actions[key](value)\n else:\n raise ValueError(f\"Unknown argument {key}\")\n\n return result", "def parse_args(args):\n parser = argparse.ArgumentParser(description='Parse your tophat align summary')\n parser.add_argument(\"-v\", \"--verbose\", help=\"increase output verbosity\", action='store_true')\n parser.add_argument(\"-m\", \"--mode\", help=\"input type is a directory or file\", choices=['file', 'directory'], default='directory')\n parser.add_argument(\"-i\", \"--input\", metavar='</dir> || file1,file2... 
', help=\"provide a list of files or a directory\")\n parser.add_argument(\"-o\", \"--out\", metavar='outfile.tsv', help=\"name of outputfile\")\n\n return parser.parse_args(args)", "def _parse_args(self, prepared_args):\n pass", "def parse_args():\n parser = argparse.ArgumentParser(description=\"Normalize the BraTS data set\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n\n input_options = parser.add_argument_group(\"Input\")\n input_options.add_argument('--brats', required=True, help=\"BraTS root data set directory\")\n input_options.add_argument('--year', required=True, type=int, default=2018, help=\"BraTS year\")\n\n output_options = parser.add_argument_group(\"Output\")\n output_options.add_argument('--output', required=True, help=\"Output directory of normalized data set\")\n\n general_options_group = parser.add_argument_group(\"General\")\n general_options_group.add_argument(\"--pool-size\", type=int, default=8, help=\"Size of worker pool\")\n\n logging_options_group = parser.add_argument_group(\"Logging\")\n logging_options_group.add_argument('--log', dest=\"log_level\", default=\"WARNING\", help=\"Logging level\")\n logging_options_group.add_argument('--log-file', default=\"normalize.log\", help=\"Log file\")\n\n args = parser.parse_args()\n\n # Setup the logger\n global logger\n logger = logging.getLogger('root')\n\n # Logging level configuration\n log_level = getattr(logging, args.log_level.upper())\n if not isinstance(log_level, int):\n raise ValueError('Invalid log level: %s' % args.log_level)\n logger.setLevel(log_level)\n\n log_formatter = logging.Formatter('[%(asctime)s][%(levelname)s][%(funcName)s] - %(message)s')\n\n # For the log file...\n file_handler = logging.FileHandler(args.log_file)\n file_handler.setFormatter(log_formatter)\n logger.addHandler(file_handler)\n\n # For the console\n console_handler = logging.StreamHandler(sys.stdout)\n console_handler.setFormatter(log_formatter)\n logger.addHandler(console_handler)\n return args", "def parse_args(args):\n \n # Construct the parser (which is stored in parser)\n # Module docstring lives in __doc__\n # See http://python-forum.com/pythonforum/viewtopic.php?f=3&t=36847\n # And a formatter class so our examples in the docstring look good. Isn't it\n # convenient how we already wrapped it to 80 characters?\n # See http://docs.python.org/library/argparse.html#formatter-class\n parser = argparse.ArgumentParser(description=__doc__, \n formatter_class=argparse.RawDescriptionHelpFormatter)\n \n # General options\n parser.add_argument(\"hal\",\n help=\"HAL file to evaluate\")\n parser.add_argument(\"--truth\",\n help=\"MAF file of a true alignment for precision and recall\")\n parser.add_argument(\"--beds\", nargs=\"*\",\n help=\"BED file(s) of genes on the genomes in the HAL\")\n parser.add_argument(\"--coverage_file\", type=argparse.FileType(\"w\"),\n default = sys.stdout,\n help=\"file to save average coverage vs. 
the reference in (one number)\")\n parser.add_argument(\"--precision_recall_file\", type=argparse.FileType(\"w\"),\n default = sys.stdout,\n help=\"TSV file to save precision and recall in (two numbers)\")\n parser.add_argument(\"--gene_category_file\", type=argparse.FileType(\"w\"),\n default = sys.stdout,\n help=\"file to save categories and counts for genes in\")\n parser.add_argument(\"--tag\", nargs=\"*\", default=[],\n help=\"extra columns to tag all result TSV lines with at the front\")\n \n # The command line arguments start with the program name, which we don't\n # want to treat as an argument for argparse. So we remove it.\n args = args[1:]\n \n return parser.parse_args(args)", "def Args(parser):", "def parse_args() -> argparse.Namespace:\n\n parser = argparse.ArgumentParser(\n description=\"THE FOLLOWING SCRIPT SHOWS SNAPSHOT OPERATIONS USING REST API.\", )\n parser.add_argument(\n \"-c\", \"--cluster\", required=True, help=\"API server IP:port details\")\n parser.add_argument(\n \"-u\",\n \"--api_user\",\n default=\"admin\",\n help=\"API Username\")\n parser.add_argument(\"-p\", \"--api_pass\", help=\"API Password\")\n parsed_args = parser.parse_args()\n\n # collect the password without echo if not already provided\n if not parsed_args.api_pass:\n parsed_args.api_pass = getpass()\n\n return parsed_args", "def _parse_args(args: List) -> argparse.Namespace:\r\n\r\n parser = argparse.ArgumentParser(\r\n description=\"This CLI program is used to test out the SpikerStream interface.\"\r\n )\r\n\r\n parser.add_argument(\r\n \"-p\", \"--p\", \"-plot\", \"--plot\",\r\n choices=[\"matplotlib\", \"pyqtgraph\"],\r\n default=\"matplotlib\",\r\n help=\"The plotting library.\",\r\n dest=\"plot_type\"\r\n )\r\n\r\n parser.add_argument(\r\n \"-i\", \"--i\", \"-input\", \"--input\",\r\n choices=[\"spikerbox\", \"audio\"],\r\n default=\"audio\",\r\n help=\"The input stream.\",\r\n dest=\"stream_type\"\r\n )\r\n\r\n parser.add_argument(\r\n \"-s\", \"--s\", \"-serialport\", \"--serialport\",\r\n nargs=\"?\",\r\n default=\"COM1\",\r\n help=\"The serial port the SpikerBox is attached to.\",\r\n dest=\"serial_port\"\r\n )\r\n\r\n parser.add_argument(\r\n \"-c\", \"--c\", \"-chunk\", \"--chunk\",\r\n nargs=\"?\",\r\n default=10000,\r\n help=\"The chunk size when using the SpikerBox stream. 
20,000 = 1 second.\",\r\n dest=\"chunk_size\"\r\n )\r\n\r\n return parser.parse_args(args)", "def parse_args():\n parser = argparse.ArgumentParser(description=\"Evaluate the tumor segmentation model\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n\n input_options = parser.add_argument_group(\"Input\")\n input_options.add_argument(\"--save-path\", required=True, help=\"Tensorflow save path\")\n input_options.add_argument(\"--model\", required=True, help=\"File to save trained model in\")\n\n output_options = parser.add_argument_group(\"Output\")\n output_options.add_argument(\"-o\", \"--output\", required=True, help=\"Output directory to store plots\")\n\n info_options = parser.add_argument_group(\"Info\")\n info_options.add_argument(\"--config\", required=False, type=str, help=\"Configuration file\")\n info_options.add_argument(\"-params\", \"--params\", type=str, help=\"Hyperparameters json file\")\n\n logging_options = parser.add_argument_group(\"Logging\")\n logging_options.add_argument('--log', dest=\"log_level\", default=\"DEBUG\", help=\"Logging level\")\n\n args = parser.parse_args()\n\n # Setup the logger\n global logger\n logger = logging.getLogger('root')\n\n # Logging level configuration\n log_level = getattr(logging, args.log_level.upper())\n if not isinstance(log_level, int):\n raise ValueError('Invalid log level: %s' % args.log_level)\n\n log_formatter = logging.Formatter('[%(asctime)s][%(levelname)s][%(funcName)s] - %(message)s')\n\n # For the console\n console_handler = logging.StreamHandler(sys.stdout)\n console_handler.setFormatter(log_formatter)\n logger.addHandler(console_handler)\n\n logger.setLevel(log_level)\n\n return args", "def parseArguments(args=None):\n\n # parse command line arguments\n parser = argparse.ArgumentParser(description='collection creator')\n parser.add_argument( 'config_file', action=\"store\" )\n parser.add_argument( 'out_path', action=\"store\" )\n\n return parser.parse_args(args)", "def parse_args(arglist):\n help = dedent(\"\"\"\n Run FIR model on subject data\n \"\"\")\n parser = tools.parser\n parser.description = help\n parser.formatter_class = argparse.RawDescriptionHelpFormatter\n parser.add_argument(\"-extract_info\", help=\"info for experiment to extract\")\n parser.add_argument(\"-mask_type\", help=\"mask or func?\")\n parser.add_argument(\"-mask_name\", help=\"name of mask in sub's mask directory\")\n return parser.parse_args(arglist)", "def __init__(self, args: argparse.Namespace):\n self._args = args", "def __init__(self, args=None, namespace=None):\n self.parse_args(args, namespace)\n if not namespace:\n self.__set_path_name__()", "def _parse_args(self):\n parser = argparse.ArgumentParser()\n _, args = parser.parse_known_args()\n self.args = [a for a in args if a != '']", "def process_command_line_arguments() -> Namespace:\n\n parser = build_parser()\n arguments = parser.parse_args()\n\n return arguments", "def parse_args():\n parser = MyParser(description='Data processing and analytics library \\\n for OpenStack Browbeat perf data')\n\n parser.add_argument('-s', '--summary', dest=\"days\", type=int, default=-1,\n help='-s N summary of last N days of results')\n\n parser.add_argument('--summary-uuid', dest=\"summary_uuid\", type=str,\n default=None,\n help='--summary-uuid UUID summary of a specific uuid')\n\n parser.add_argument('--short-summary', dest=\"short_days\", type=int,\n default=-1,\n help='--short-summary N gives \\\n summary of last N days of results but uses cockroach \\\n db so only provides with basic 
summary')\n\n parser.add_argument('--upload-timesummary', dest=\"timeseries_uuid\",\n type=str, default=None,\n help='--upload-timesummary UUID \\\n uploads the features computed from data obtained from\\\n graphite. ')\n\n parser.add_argument('--upload-logsummary', dest=\"loggin_uuid\",\n type=str, default=None,\n help='--upload-logsummary UUID \\\n uploads the log summary to crdb \\\n currently just summarizes over entire timeperiod. ')\n\n parser.add_argument('-u', '--update-db', dest='update', type=bool,\n default=False,\n help='-u True pushes data to cockroach db')\n\n parser.add_argument('--update-clf', dest=\"clf_days\", type=int,\n default=-1,\n help='--update-clf 60 will update all classifiers \\\n listed in config file under classifier_lists \\\n using data from last 60 days')\n\n parser.add_argument('--test-clf', dest=\"test_days\", type=int,\n default=-1,\n help='--test-clf 60 will train all classifiers \\\n listed in config file under classifier_lists \\\n using data from last 60 days and then test it \\\n and display metrics')\n\n parser.add_argument('-v', '--osp-version', dest='version', type=str,\n default=None,\n help='-v 11-tripleo only returns hits for that \\\n OpenStack version, \\\n only supported by summary right now')\n\n parser.add_argument('-c', '--config', dest='config', type=str,\n default=pkg_resources.resource_filename('bml',\n \"config.yml\"),\n help='-c <config file path> use custom config file')\n\n args = parser.parse_args()\n return args", "def _parse_args() -> argparse.Namespace:\n desc = 'Pretty print CBOR file output by AST exporter.'\n parser = argparse.ArgumentParser(description=desc)\n parser.add_argument('cbor', type=argparse.FileType('rb'),\n help=\"cbor file to pretty print.\")\n parser.add_argument(\"--indent\", \"-i\", dest=\"indent\",\n type=int, default=2, nargs='?',\n help=\"spaces per indent.\")\n parser.add_argument(\"--depth\", \"-d\", dest=\"depth\",\n type=int, default=2, nargs='?',\n help=\"max level of indentation.\")\n return parser.parse_args()", "def post(self):\n args = parser.parse_args()", "def parse_arguments(input_args: List[str]) -> argparse.Namespace:\n description = \"Tool to output OWASP Cornucopia playing cards into different file types and languages. \"\n description += \"\\nExample usage: $ ./cornucopia/convert.py -t docx -l es \"\n description += \"\\nExample usage: c:\\\\cornucopia\\\\scripts\\\\convert.py -t idml -l fr \"\n description += \"-o 'my_output_folder/owasp_cornucopia_edition_language_version.idml'\"\n parser = argparse.ArgumentParser(description=description, formatter_class=argparse.RawTextHelpFormatter)\n parser.add_argument(\n \"-i\",\n \"--inputfile\",\n type=str,\n default=\"\",\n help=(\n \"Input (template) file to use.\"\n f\"\\nDefault={convert_vars.DEFAULT_TEMPLATE_FILENAME}.(docx|idml)\"\n \"\\nTemplate type is dependent on output type (-t) or file (-o) specified.\"\n ),\n )\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument(\n \"-t\",\n \"--outputfiletype\",\n type=str,\n choices=convert_vars.FILETYPE_CHOICES,\n help=\"Type of file to output. Default = docx. If specified, this overwrites the output file extension\",\n )\n parser.add_argument(\n \"-o\",\n \"--outputfile\",\n default=\"\",\n type=str,\n help=(\n \"Specify a path and name of output file to generate. (caution: existing file will be overwritten). 
\"\n f\"\\ndefault = {convert_vars.DEFAULT_OUTPUT_FILENAME}.(docx|pdf|idml)\"\n ),\n )\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument(\n # parser.add_argument(\n \"-l\",\n \"--language\",\n type=str,\n choices=convert_vars.LANGUAGE_CHOICES,\n default=\"en\",\n help=(\n \"Output language to produce. [`en`, `es`, `fr`, `pt-br`, `template`] \"\n \"\\nTemplate will attempt to create a template from the english input file and \"\n \"\\nreplacing strings with the template lookup codes\"\n ),\n )\n parser.add_argument(\n \"-d\",\n \"--debug\",\n action=\"store_true\",\n help=\"Output additional information to debug script\",\n )\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument(\n # parser.add_argument(\n \"-s\",\n \"--style\",\n type=str,\n choices=convert_vars.STYLE_CHOICES,\n default=\"static\",\n help=(\n \"Output style to produce. [`static` or `dynamic`] \"\n \"\\nStatic cards have the mappings printed on them, dynamic ones a QRCode that points to an maintained list.\"\n ),\n )\n parser.add_argument(\n \"-u\",\n \"--url\",\n default=\"https://copi.securedelivery.io/cards\",\n type=str,\n help=(\n \"Specify a URL to use in generating dynamic cards. (caution: URL will be suffixed with / and the card ID). \"\n ),\n )\n args = parser.parse_args(input_args)\n return args", "def parse_args(args):\n\n parser = argparse.ArgumentParser(description=\"Add meta data to one or more netCDF files\")\n\n parser.add_argument(\"-m\",\"--metafiles\", help=\"One or more meta-data files in YAML format\", action='append')\n parser.add_argument(\"-l\",\"--metalist\", help=\"File containing a list of meta-data files\", action='append')\n parser.add_argument(\"-v\",\"--verbose\", help=\"Verbose output\", action='store_true')\n parser.add_argument(\"files\", help=\"netCDF files\", nargs='+')\n\n return parser.parse_args(args)", "def parse_args(args):\n # If called as a main function, this processes command line arguments\n # as main. 
If this is called as part of an action\n if isinstance(args, list):\n parser = argparse.ArgumentParser(description=description)\n else:\n parser = args\n # add required parameters for this application\n parser.add_argument(\"operands\",\n nargs='+',\n type=float,\n help=\"List of operands.\")\n # add options for this application\n parser.add_argument(\n '-v',\n '--verbose',\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action='store_const',\n const=logging.INFO)\n parser.add_argument(\n '-vv',\n '--very-verbose',\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action='store_const',\n const=logging.DEBUG)\n if isinstance(args, list):\n return parser.parse_args(args)", "def main():\n args = parse_args()\n process_args(args)", "def parse_args():\n import argparse\n\n #argument\n parser =argparse.ArgumentParser()\n\n parser.add_argument('--in_list', help = 'path to input list.')\n parser.add_argument('--out_list', help = 'path for saving list.')\n args = parser.parse_args()\n\n return args", "def parse_args(args):\n parser = argparse.ArgumentParser(description=\"Just a Fibonacci demonstration\")\n parser.add_argument(\n \"--version\",\n action=\"version\",\n version=\"lj506 {ver}\".format(ver=__version__),\n )\n parser.add_argument(\n \"-v\",\n \"--verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action=\"store_const\",\n const=logging.INFO,\n )\n parser.add_argument(\n \"-vv\",\n \"--very-verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action=\"store_const\",\n const=logging.DEBUG,\n )\n\n parser.add_argument('--dir', '-d', help=\"Download directory\", default=Path.cwd())\n\n return parser.parse_args(args)", "def parse(self, args: list[str], prog: Optional[str] = None) -> argparse.Namespace:\n prog = prog or Path(sys.argv[0]).name\n try:\n return self._parse(\n args,\n argparse.Namespace(),\n self._overrides.copy(),\n prog,\n )\n except _HelpError:\n self.exit(0, self.format_help(prog))", "def parse_args():\n parser = argparse.ArgumentParser(\n description=\"Nuvoton post build command\"\n )\n\n subparsers = parser.add_subparsers(description=\"The action to perform\")\n\n parser_tfm_sign_image_tgt = subparsers.add_parser(\n \"tfm_sign_image_tgt\",\n help=\"Sign secure and non-secure images together\"\n )\n \n parser_tfm_sign_image_tgt.add_argument(\n \"--tfm-import-path\",\n help=\"Path containing the TF-M bootloader, layouts and signing keys\",\n required=True\n )\n\n parser_tfm_sign_image_tgt.add_argument(\n \"--signing_key\",\n help=\"File name of key for signing secure binary or secure/non-secure binaries together\",\n required=True\n )\n\n parser_tfm_sign_image_tgt.add_argument(\n \"--non-secure-bin\",\n help=\"Path to the non-secure binary\",\n required=True\n )\n\n parser_tfm_sign_image_tgt.set_defaults(func=tfm_sign_image_tgt)\n\n return parser.parse_args()", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Export DICOMs in Orthanc Study to BIDS-ready directory structure \")\n parser.add_argument(\n '--version',\n action='version',\n version='bnctools {ver}'.format(ver=__version__))\n parser.add_argument(\n '-v',\n '--verbose',\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action='store_const',\n const=logging.INFO)\n parser.add_argument(\n '-vv',\n '--very-verbose',\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action='store_const',\n const=logging.DEBUG)\n return parser.parse_args(args)", "def parse_args(*args, **kwargs):\n parser = argparse.ArgumentParser(add_help=False)\n\n 
parser.add_argument(\n \"--help\",\n help=\"show usage information\",\n action=\"help\",\n )\n\n parser.add_argument(\n \"--source\",\n metavar=\"host[:port]\",\n help=\"\"\"Hostname of the mongod server from which oplog\n operations are going to be pulled. Called \"--from\"\n in mongooplog.\"\"\",\n )\n\n parser.add_argument(\n '--oplogns',\n default='local.oplog.rs',\n help=\"Source namespace for oplog\",\n )\n\n parser.add_argument(\n \"--dest\",\n metavar=\"host[:port]\",\n help=\"\"\"\n Hostname of the mongod server (or replica set as\n <set name>/s1,s2) to which oplog operations\n are going to be applied. Default is \"localhost\".\n Called \"--host\" in mongooplog.\n \"\"\",\n )\n\n parser.add_argument(\n \"-w\",\n \"--window\",\n dest=\"start_ts\",\n metavar=\"WINDOW\",\n type=compose(\n Timestamp.for_window,\n delta_from_seconds,\n pytimeparse.parse,\n ),\n help=\"\"\"Time window to query, like \"3 days\" or \"24:00\"\n (24 hours, 0 minutes).\"\"\",\n )\n\n parser.add_argument(\n \"-f\",\n \"--follow\",\n action=\"store_true\",\n help=\"\"\"Wait for new data in oplog. Makes the utility\n polling oplog forever (until interrupted). New data\n is going to be applied immediately with at most one\n second delay.\"\"\",\n )\n\n parser.add_argument(\n \"--ns\",\n nargs=\"*\",\n default=[],\n action=Extend,\n help=\"\"\"Process only these namespaces, ignoring all others.\n Space separated list of strings in form of ``dname``\n or ``dbname.collection``. May be specified multiple times.\n \"\"\",\n )\n\n parser.add_argument(\n \"-x\",\n \"--exclude\",\n nargs=\"*\",\n default=[],\n action=Extend,\n help=\"\"\"List of space separated namespaces which should be\n ignored. Can be in form of ``dname`` or ``dbname.collection``.\n May be specified multiple times.\n \"\"\",\n )\n\n parser.add_argument(\n \"--rename\",\n nargs=\"*\",\n default=[],\n metavar=\"ns_old=ns_new\",\n type=RenameSpec.from_spec,\n action=Extend,\n help=\"\"\"\n Rename database(s) and/or collection(s). 
Operations on\n namespace ``ns_old`` from the source server will be\n applied to namespace ``ns_new`` on the destination server.\n May be specified multiple times.\n \"\"\",\n )\n\n parser.add_argument(\n \"--dry-run\",\n default=False,\n action=\"store_true\",\n help=\"Suppress application of ops.\",\n )\n\n parser.add_argument(\n \"--resume-file\",\n metavar=\"FILENAME\",\n type=ResumeFile,\n default=NullResumeFile(),\n help=\"\"\"Read from and write to this file the last processed\n timestamp.\"\"\",\n )\n\n jaraco.logging.add_arguments(parser)\n\n args = parser.parse_args(*args, **kwargs)\n args.rename = Renamer(args.rename)\n\n args.start_ts = args.start_ts or args.resume_file.read()\n\n return args", "def parse_args():\n parser = argparse.ArgumentParser(\"cat_stats.py\")\n parser.add_argument(\"folder\", \n help = \"folder where all the stats files are located\")\n return parser.parse_args()", "def parse_args():\n\n areas = list(default_config['areas'].keys())\n\n class ListAreas(argparse.Action):\n \"\"\"Helper class for argparse to list available areas and exit\"\"\"\n\n def __call__(self, parser, namespace, values, option_string=None):\n print(\"\\n\".join(areas))\n parser.exit()\n\n parser = argparse.ArgumentParser(parents=[kcs_parser],\n conflict_handler='resolve')\n\n parser.add_argument('files', nargs='+', help=\"Input files\")\n parser.add_argument('--area', action='append', required=True,\n choices=areas, help=\"One or more area names\")\n parser.add_argument('--template',\n help=\"Output path template, including subdirectory\")\n parser.add_argument('-v', '--verbosity', action='count',\n default=0, help=\"Verbosity level\")\n parser.add_argument('-P', '--nproc', type=int, default=1,\n help=\"Number of simultaneous processes\")\n parser.add_argument('--list-areas', action=ListAreas, nargs=0,\n help=\"List availabe areas and quit\")\n parser.add_argument('--regrid', action='store_true',\n help=\"Regrid the data (to a 1x1 deg. grid)\")\n parser.add_argument('--no-save-results', action='store_true',\n help=\"Store the resulting extracted datasets on disk\")\n parser.add_argument('--no-average-area', action='store_true',\n help=\"Don't average the extracted areas\")\n parser.add_argument('--tempdir')\n parser.add_argument('--subdir-per-realization', action='store_true')\n parser.add_argument('--ignore-common-warnings', action='store_true')\n\n args = parser.parse_args()\n setup_logging(args.verbosity)\n read_config(args.config)\n\n if args.template is None:\n args.template = default_config['data']['extraction']['template']\n args.save_result = not args.no_save_results\n args.average_area = not args.no_average_area\n args.area = {name: default_config['areas'][name] for name in args.area}\n args.area = {key: None if value == 'global' else value for key, value in args.area.items()}\n return args", "def parse_args(args):\n\n parser = argparse.ArgumentParser(\n description=\"Convert downloaded ERA image data into time series format.\")\n parser.add_argument(\"dataset_root\",\n help='Root of local filesystem where the image data is stored.')\n parser.add_argument(\"timeseries_root\",\n help='Root of local filesystem where the time series should be stored.')\n parser.add_argument(\"start\", type=mkdate,\n help=(\"Startdate in format YYYY-MM-DD\"))\n parser.add_argument(\"end\", type=mkdate,\n help=(\"Enddate in format YYYY-MM-DD\"))\n parser.add_argument(\"variables\", metavar=\"variables\",\n nargs=\"+\",\n help=(\"Short name of variables as stored in the images, which are reshuffled. 
\"\n \"See documentation on image download for resp. ERA products, \"\n \"for more information on variable names of the product. \"))\n parser.add_argument(\"--mask_seapoints\", type=bool, default=False,\n help=(\"Replace points over water with nan. This option needs the \"\n \"LandSeaMask (lsm) variable in the image data (will use mask from first available file). \"\n \"To use a dynamic LSM, reshuffle the LSM variable to time series.\"))\n parser.add_argument(\"--h_steps\", type=int, default=None, nargs='+',\n help=(\"Time steps (full hours) of images that will be reshuffled (must be in the images). \"\n \"By default 6H images (starting at 0:00 UTC) will be reshuffled.\"))\n parser.add_argument(\"--imgbuffer\", type=int, default=50,\n help=(\"How many images to read at once. Bigger numbers make the \"\n \"conversion faster but consume more memory. Choose this according to your \"\n \"system and the size of a single image.\"))\n args = parser.parse_args(args)\n\n print(\"ERA Interim data is deprecated. Use ERA5 instead.\")\n print(\"Converting data from {} to {} into {}.\"\n .format(args.start.isoformat(), args.end.isoformat(), args.timeseries_root))\n\n return args", "def safe_parse_args(self, args=None):\n args = self.set_default_subparser('run', args)\n return self.parse_args(args)", "def getargs(parser: argparse.ArgumentParser) -> argparse.Namespace:\n parser.add_argument(\n '-servers', type=str, default='',\n help=\"\"\"\n Hostname or IP and port of Kafka broker producing stream.\n [KAFKA_IPPORT/KAFKA_IPPORT_SIM]\n \"\"\")\n parser.add_argument(\n '-topic', type=str, default='',\n help=\"\"\"\n Name of Kafka topic stream to read from.\n [KAFKA_TOPIC/KAFKA_TOPIC_SIM]\n \"\"\")\n parser.add_argument(\n '-schema', type=str, default='',\n help=\"\"\"\n Schema to decode the alert. Should be avro file.\n [FINK_ALERT_SCHEMA]\"\"\")\n parser.add_argument(\n '-startingoffsets_stream', type=str, default='',\n help=\"\"\"From which stream offset you want to start pulling data when\n building the raw database: latest, earliest, or custom.\n [KAFKA_STARTING_OFFSET]\n \"\"\")\n parser.add_argument(\n '-online_data_prefix', type=str, default='',\n help=\"\"\"Path prefix to store online data, e.g. /path/to/online.\n This would then contain automatically {raw, science}/year=/month=/day=\n [ONLINE_DATA_PREFIX]\n \"\"\")\n parser.add_argument(\n '-agg_data_prefix', type=str, default='',\n help=\"\"\"Path prefix to store archive data, e.g. /path/to/archive.\n This would then contain automatically {raw, science}/year=/month=/day=\n [AGG_DATA_PREFIX]\n \"\"\")\n parser.add_argument(\n '-science_db_name', type=str, default='',\n help=\"\"\"\n The name of the HBase table\n [SCIENCE_DB_NAME]\n \"\"\")\n parser.add_argument(\n '-science_db_catalogs', type=str, default='',\n help=\"\"\"\n The path for HBase table catalogs. Must exist.\n [SCIENCE_DB_CATALOGS]\n \"\"\")\n parser.add_argument(\n '-log_level', type=str, default='',\n help=\"\"\"\n The minimum level of log: OFF, DEBUG, INFO, WARN, ERROR, CRITICAL\n [LOG_LEVEL]\n \"\"\")\n parser.add_argument(\n '-finkwebpath', type=str, default='',\n help=\"\"\"\n Folder to store UI data for display.\n [FINK_UI_PATH]\n \"\"\")\n parser.add_argument(\n '-tinterval', type=int, default=0,\n help=\"\"\"\n Time interval between two monitoring. In seconds.\n [FINK_TRIGGER_UPDATE]\n \"\"\")\n parser.add_argument(\n '-tinterval_kafka', type=float, default=0.0,\n help=\"\"\"\n Time interval between two messages are published. 
In seconds.\n [TIME_INTERVAL]\n \"\"\")\n parser.add_argument(\n '-exit_after', type=int, default=None,\n help=\"\"\"\n Stop the service after `exit_after` seconds.\n This primarily for use on Travis, to stop service after some time.\n Use that with `fink start service --exit_after <time>`. Default is None.\n \"\"\")\n parser.add_argument(\n '-datasimpath', type=str, default='',\n help=\"\"\"\n Folder containing simulated alerts to be published by Kafka.\n [FINK_DATA_SIM]\n \"\"\")\n parser.add_argument(\n '-poolsize', type=int, default=5,\n help=\"\"\"\n Maximum number of alerts to send. If the poolsize is\n bigger than the number of alerts in `datapath`, then we replicate\n the alerts. Default is 5.\n [POOLSIZE]\n \"\"\")\n parser.add_argument(\n '-distribution_servers', type=str, default='',\n help=\"\"\"\n Kafka bootstrap servers for alert redistribution\n [DISTRIBUTION_SERVERS]\n \"\"\")\n parser.add_argument(\n '-distribution_topic', type=str, default='',\n help=\"\"\"\n Kafka topic for Alert redistribution\n [DISTRIBUTION_TOPIC]\n \"\"\")\n parser.add_argument(\n '-distribution_schema', type=str, default='',\n help=\"\"\"\n The path where the avro schema for alert distribution is stored\n [DISTRIBUTION_SCHEMA]\n \"\"\")\n parser.add_argument(\n '-startingOffset_dist', type=str, default='',\n help=\"\"\"From which offset(timestamp) you want to start the\n distribution service.\n Options are: latest, earliest or a custom timestamp\n [DISTRIBUTION_OFFSET]\n \"\"\")\n parser.add_argument(\n '-checkpointpath_dist', type=str, default='',\n help=\"\"\"\n The path of file in which to store the offset for distribution service.\n This file will store the timestamp up-till which the science db is\n scanned and alerts have been distributed.\n [DISTRIBUTION_OFFSET_FILE]\n \"\"\")\n parser.add_argument(\n '-distribution_rules_xml', type=str, default='',\n help=\"\"\"\n The path to distribution-rules.xml which stores user defined rules to\n filter the distribution stream\n [DISTRIBUTION_RULES_XML]\n \"\"\")\n parser.add_argument(\n '-slack_channels', type=str, default='',\n help=\"\"\"\n Text file with list of slack channels to which automatic alerts\n must be sent for e.g. based on cross-match type\n [SLACK_CHANNELS]\n \"\"\")\n parser.add_argument(\n '-night', type=str, default='',\n help=\"\"\"\n YYYYMMDD night\n [NIGHT]\n \"\"\")\n parser.add_argument(\n '-fs', type=str, default='',\n help=\"\"\"\n Filesystem: local or hdfs.\n [FS_KIND]\n \"\"\")\n parser.add_argument(\n '-datapath', type=str, default='',\n help=\"\"\"\n Directory on disk for saving temporary alert data.\n [DATA_PREFIX]\n \"\"\")\n parser.add_argument(\n '--save_science_db_catalog_only', action='store_true',\n help=\"\"\"\n If True, save only the catalog on disk and do not push\n data on HBase. Default is False.\n [SAVE_SCIENCE_DB_CATALOG_ONLY]\n \"\"\")\n parser.add_argument(\n '-index_table', type=str, default='',\n help=\"\"\"\n Name of the rowkey for index table\n [INDEXTABLE]\n \"\"\")\n parser.add_argument(\n '-tns_folder', type=str, default='',\n help=\"\"\"\n Folder to store logs and keys for TNS submission\n [TNS_FOLDER]\n \"\"\")\n parser.add_argument(\n '--tns_sandbox', action='store_true',\n help=\"\"\"\n If True, push to TNS sandbox. 
Default is False.\n [TNS_SANDBOX]\n \"\"\")\n parser.add_argument(\n '-substream_prefix', type=str, default='fink_',\n help=\"\"\"\n Prefix for outgoing substreams\n [SUBSTREAM_PREFIX]\n \"\"\")\n parser.add_argument(\n '-fink_fat_output', type=str, default='',\n help=\"\"\"\n Folder that contains fink-fat output parquet files\n [FINK_FAT_OUTPUT]\n \"\"\")\n parser.add_argument(\n '-producer', type=str, default='ztf',\n help=\"\"\"\n Name of the alert producer. Currently available: ztf, elasticc, sims\n [PRODUCER]\n \"\"\")\n parser.add_argument(\n '-noscience', type=bool, default=False,\n help=\"\"\"\n Disable execution of science modules\n \"\"\")\n parser.add_argument(\n '-tns_raw_output', type=str, default='',\n help=\"\"\"\n Folder that contains raw TNS catalog\n [TNS_RAW_OUTPUT]\n \"\"\")\n args = parser.parse_args(None)\n return args", "def parse_args():\n parser = argparse.ArgumentParser(description=\"Run NCF..\")\n parser.add_argument(\n \"--config_file\",\n nargs=\"?\",\n type=str,\n default=\"../configs/ncf_default.json\",\n help=\"Specify the config file name. Only accept a file from ../configs/\",\n )\n # If the following settings are specified with command line,\n # These settings will used to update the parameters received from the config file.\n parser.add_argument(\n \"--dataset\",\n nargs=\"?\",\n type=str,\n help=\"Options are: tafeng, dunnhunmby and instacart\",\n )\n parser.add_argument(\n \"--data_split\",\n nargs=\"?\",\n type=str,\n help=\"Options are: leave_one_out and temporal\",\n )\n parser.add_argument(\n \"--root_dir\", nargs=\"?\", type=str, help=\"working directory\",\n )\n parser.add_argument(\n \"--emb_dim\", nargs=\"?\", type=int, help=\"Dimension of the embedding.\"\n )\n parser.add_argument(\"--lr\", nargs=\"?\", type=float, help=\"Intial learning rate.\")\n parser.add_argument(\"--max_epoch\", nargs=\"?\", type=int, help=\"Number of max epoch.\")\n parser.add_argument(\n \"--batch_size\", nargs=\"?\", type=int, help=\"Batch size for training.\"\n )\n parser.add_argument(\"--optimizer\", nargs=\"?\", type=str, help=\"OPTI\")\n parser.add_argument(\"--activator\", nargs=\"?\", type=str, help=\"activator\")\n parser.add_argument(\"--alpha\", nargs=\"?\", type=float, help=\"ALPHA\")\n return parser.parse_args()", "def parse_args():\n description = 'Use the nhlapi/Game class to retrieve information about a game in the NHL.'\n epilog = 'Example use: game.py 2018020131 --boxScore'\n\n # Standard options for each nhlapi interface\n parser = argparse.ArgumentParser(description=description, epilog=epilog)\n parser.add_argument('--humanReadable', help='output in easier to read format for users',\n action='store_true')\n parser.add_argument(\n '--log', default='/dev/null', type=str,\n help='the file where the output should be written')\n\n # Optional user supplied values\n parser.add_argument('gameId', help='the game ID', type=int)\n\n # The data available from this api:\n for stat in Game.STATS:\n parser.add_argument('--' + stat, help='retrieve ' + stat + ' data', action='store_true')\n\n args = parser.parse_args()\n\n if args.log:\n log_format = '%(asctime)s %(levelname)s: %(message)s'\n logging.basicConfig(filename=args.log,\n format=log_format,\n level=logging.DEBUG)\n\n game = Game(args.gameId)\n if not game:\n print('game with id: {} not found'.format(args.gameId))\n return args\n\n args_vars = vars(args)\n for arg in args_vars:\n if arg in Game.STATS and args_vars[arg]:\n if args.liveDiffTime:\n game.load_ext_url(Game.STATS[arg], 
diff_time=args.liveDiffTime)\n else:\n game.load_ext_url(Game.STATS[arg])\n\n if args.humanReadable:\n output = json.dumps(game.content, indent=1)\n else:\n output = game.content\n print(output)\n\n result = 'retrieved data for id: {}'.format(args.gameId)\n info(result)\n\n return args", "def parse_args(self):\n return Args(self.args)", "def parse_args():\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawDescriptionHelpFormatter,\n description=\"\"\"\nNenG - Nash Equilibrium Noncooperative games.\nTool for computing Nash equilibria in noncooperative games.\nSpecifically:\nAll pure Nash equilibria in all games (--method=pne).\nAll mixed Nash equilibria in two-players games (--method=support_enumeration).\nOne sample mixed Nash equilibria in n-players games (--method={CMAES,L-BFGS-B,SLSQP}).\n\"\"\")\n pa = parser.add_argument\n pa('-f', '--file', required=True, help=\"File where game in nfg format is saved.\")\n pa('-m', '--method', default='CMAES', choices=game.Game.METHODS,\n help=\"Method to use for computing Nash equlibria.\")\n pa('-e', '--elimination', action='store_true', default=False,\n help=\"Use Iterative Elimination of Strictly Dominated Strategies before computing NE.\")\n pa('-p', '--payoff', action='store_true', default=False,\n help=\"Print also players payoff with each Nash equilibrium.\")\n pa('-c', '--checkNE', action='store_true', default=False,\n help=\"After computation check if found strategy profile is really Nash equilibrium.\")\n pa('-t', '--trim', choices=('normalization', 'penalization'), default='normalization',\n help=\"Method for keeping strategy profile in probability distribution universum.\")\n pa('-l', '--log', default=\"WARNING\",\n choices=(\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"),\n help=\"Level of logs to save/print\")\n pa('--log-file', default=None, help='Log file. 
If omitted log is printed to stdout.')\n return parser.parse_args()", "def Parse(self, args):\n unparsed = []\n\n skip_parse = False\n\n for arg in args:\n if arg == '--':\n skip_parse = True\n continue\n\n if skip_parse:\n unparsed.append(arg)\n continue\n\n match = RE_FLAG.match(arg)\n if match is None:\n unparsed.append(arg)\n continue\n\n key = match.group(1)\n value = match.group(2)\n\n if key not in self._defs:\n unparsed.append(arg)\n continue\n\n self._defs[key].Parse(value)\n\n self._unparsed = tuple(unparsed)\n return True", "def parse_args():\n\n parser = argparse.ArgumentParser()\n img_path = '/users/gpu/haribala/code/datasets/CUB_200_2011/CUB_200_2011/images/001.Black_footed_Albatross/' # Black_Footed_Albatross_0001_796111.jpg\n output_path = os.path.join('.', 'outputs', 'arch4', 'img2txt_samples', '001.Black_footed_Albatross')\n encoder_path = os.path.join('.', 'outputs', 'arch4', 'img2txt_2020_01_16_09_34_45', 'encoder_400_40.pth')\n decoder_path = os.path.join('.', 'outputs', 'arch4', 'img2txt_2020_01_16_09_34_45', 'decoder_400_40.pth')\n\n parser.add_argument('--images', type=str, default=img_path, help='Directory containing the input images.')\n parser.add_argument('--outputs', type=str, default=output_path, help='Directory to store outputs.')\n parser.add_argument('--encoder_path', type=str, default=encoder_path, help='Path to trained encoder weights.')\n parser.add_argument('--decoder_path', type=str, default=decoder_path, help='Path to trained decoder weights.')\n\n # Model parameters (should be same as paramters in train.py)\n parser.add_argument('--embed_size', type=int , default=256, help='dimension of word embedding vectors')\n parser.add_argument('--hidden_size', type=int , default=512, help='dimension of lstm hidden states')\n parser.add_argument('--num_layers', type=int , default=1, help='number of layers in lstm')\n\n args = parser.parse_args()\n\n return args", "def parse_args(argv: list[str]) -> argparse.Namespace:\n os_release = platform.freedesktop_os_release()\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"-c\",\n \"--chroot\",\n default=os_release[\"VERSION_CODENAME\"],\n help=\"Specify a chroot or active session to use. (default: use current distribution\"\n \" codename '%(default)s')\",\n )\n parser.add_argument(\n \"-d\",\n \"--directory\",\n default=os.getcwd(),\n help=\"Change to directory inside the chroot before running the command or login shell.\"\n \" Use the current directory if it exists in the chroot. Otherwise fall back to the user's\"\n \" home directory (and create the directory if it is missing).\",\n )\n parser.add_argument(\n \"-u\",\n \"--user\",\n default=getpass.getuser(),\n help=\"Run as a different user. The default is to run as %(default)s and fallback to root\"\n \" if that user does not exist in the chroot.\",\n )\n parser.add_argument(\n \"-p\",\n \"--packages\",\n default=[],\n action=\"append\",\n help=\"List of comma- or space-separated packages that should be installed\"\n \" without recommends. Can be specified multiple times.\",\n )\n parser.add_argument(\n \"--ppa\",\n default=[],\n action=\"append\",\n help=\"PPA APT sources that should be added. 
Can be specified multiple times.\",\n )\n parser.add_argument(\n \"-e\", \"--enable-proposed\", action=\"store_true\", help=\"Enable -proposed pocket.\"\n )\n parser.add_argument(\n \"--proposed-uri\",\n default=\"http://archive.ubuntu.com/ubuntu\",\n help=\"Sources list URI to use for -proposed (default: %(default)s)\",\n )\n parser.add_argument(\n \"--proposed-components\",\n default=\"main,universe\",\n help=\"List of comma- or space-separated components to use for -proposed\"\n \" (default: %(default)s)\",\n )\n\n args = parser.parse_args(argv)\n args.packages = [p for packages in args.packages for p in re.split(\"[, ]\", packages)]\n args.proposed_components = re.split(\"[, ]\", args.proposed_components)\n\n return args", "def parse_args() -> argparse.Namespace:\n parser = argparse.ArgumentParser(\n prog=\"python3 console_run.py\",\n description=\"Collects 辣条/亲密度 on live.bilibili.com using selenium webdriver\",\n )\n\n parser.add_argument(\n \"-r\", \"--room\", type=int, help=\"default room\", default=22198526,\n )\n\n # runtime behaviors\n parser.add_argument(\n \"--headless\", help=\"do not show the browser\", action=\"store_true\",\n )\n\n parser.add_argument(\n \"-d\", \"--disable-image\", help=\"do not show the images\", action=\"store_true\",\n )\n\n # log settings\n parser.add_argument(\n \"--silent\", help=\"do not print log to console\", action=\"store_true\",\n )\n\n parser.add_argument(\n \"-l\", \"--log\", help=\"save log to the log file\", action=\"store_true\",\n )\n\n # arg check\n parser.add_argument(\n \"--skip-check\", help=\"skip the arg check\", action=\"store_true\",\n )\n\n # timer\n parser.add_argument(\n \"-s\", \"--second\", type=int, help=\"planned running time in seconds\", default=0,\n )\n parser.add_argument(\n \"-m\", \"--minute\", type=int, help=\"planned running time in minutes\", default=0,\n )\n\n # paths\n parser.add_argument(\n \"--log-path\", type=str, help=\"path of the log file\", default=\"./log.txt\",\n )\n\n parser.add_argument(\n \"--driver-path\",\n type=str,\n help=\"path of the geckodriver. If it's not install, \"\n \"see https://github.com/mozilla/geckodriver/releases for more information\",\n default=\"/usr/local/bin/geckodriver\",\n )\n\n return parser.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser(\n description='Convert CVAT XML annotations to YOLO format'\n )\n\n parser.add_argument(\n '--cvat-xml', metavar='FILE', required=True,\n help='input file with CVAT annotation in xml format'\n )\n\n parser.add_argument(\n '--image-dir', metavar='DIRECTORY', required=False,\n help='directory which contains original images'\n )\n\n parser.add_argument(\n '--output-dir', metavar='DIRECTORY', required=True,\n help='directory for output annotations in YOLO format'\n )\n\n parser.add_argument(\n '--username', metavar='USERNAME', required=False,\n help='Username from CVAT Login page, required to download images'\n )\n\n parser.add_argument(\n '--password', metavar='PASSWORD', required=False,\n help='Password from CVAT Login page, required to download images'\n )\n\n parser.add_argument(\n '--labels', metavar='ILABELS', required=False,\n help='Labels (separated by comma) to extract. 
Example: car,truck,motorcycle'\n )\n\n return parser.parse_args()", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Convert MERRA2 images to time series format.\")\n parser.add_argument(\n \"dataset_root\",\n help='Root of local filesystem where the data is stored.')\n parser.add_argument(\n \"timeseries_root\",\n help='Root of local filesystem where the timeseries will be stored.')\n parser.add_argument(\"start\", type=mkdate, help=(\n \"Startdate. Either in format YYYY-MM-DD or YYYY-MM-DDTHH:MM.\"))\n parser.add_argument(\"end\", type=mkdate, help=(\n \"Enddate. Either in format YYYY-MM-DD or YYYY-MM-DDTHH:MM.\"))\n parser.add_argument(\"parameters\", metavar=\"parameters\",\n nargs=\"+\",\n help=(\"Parameters to download in numerical format.\"))\n\n parser.add_argument(\"--temporal_sampling\", type=int, default=6,\n help=(\n \"The temporal sampling of the output time series.\"\n \"Integers between 1 (1-hourly resolution) and 24\"\n \"(daily resolution) are possible.\"))\n\n parser.add_argument(\n \"--imgbuffer\",\n type=int,\n default=50,\n help=(\n \"How many images to read at once. Bigger numbers make the \"\n \"conversion faster but consume more memory.\"))\n\n args = parser.parse_args(args)\n # set defaults that can not be handled by argparse\n print(\"Converting data from {} to {} into folder {}.\".format(\n args.start.isoformat(), args.end.isoformat(), args.timeseries_root))\n return args", "def parse_args(self):\n parser = argparse.ArgumentParser()\n parser.add_argument('-d', '--data', dest='data',\n help='Generate requested amount of test data.',\n type=int, nargs='+')\n parser.add_argument('-c', '--check', action='store_true',\n dest='check', help='Check files without modifying them.',\n default=False)\n args = parser.parse_args()\n self.arg_data = args.data\n self.arg_check = args.check\n\n if xc.arg_data: # did the user request to generate test data?\n choice = input(Fore.YELLOW + 'This option will ' + Fore.RED +\n '*OVERWRITE ALL FILES* ' + Fore.YELLOW + 'you sure (y/n)? 
')\n if choice.upper() == 'Y':\n self.test_data_row_count = int(self.arg_data[0])\n xc.generate_test_data()\n else:\n xc.arg_data = False\n else:\n self.process_dump_files()", "def _input_args(self, args: List[str]):\n assert self._call is None, f\"You need to specify all inputs before calling `{self._call}`\"\n assert isinstance(args, list), f\"{args} is a {type(args)}, expected a list of strings!\"\n assert len(args) > 0, f\"Expected a non-empty argument list!\"\n assert all(isinstance(a, str) for a in args), f\"Expected a list of strings, not {[type(a) for a in args]}!\"\n # all arguments could potentially be filenames that we write to, so let's just add them\n self._write_files |= set(args)\n # add dummy argument zero\n args = [\"\"] + args\n # allocate args in memory\n arg_strings = [self._str(a, \"arg\") for a in args]\n # allocate a pointer array for argv\n self.data += [f\"argv: .word \" + \" \".join(\"0\" for _ in range(len(args)))]\n # load argc and argv\n self._args += [\"\", \"# argument count in a0\", f\"li a0, {len(args)}\"]\n self._args += [\"\", \"# load pointers to argument strings into argv\", f\"la a1, argv\"]\n for ii, aa in enumerate(arg_strings):\n self._args += [f\"la t1, {aa}\", f\"sw t1, {ii * 4}(a1)\"]", "def parse():\n\n args = sys.argv\n if os.name == 'nt' and args and 'python' in os.path.basename(args[0]).lower():\n args = args[2:]\n else:\n args = args[1:]\n args = vars(parser.parse_args(args))\n \n # set the global verbosity level of the script\n script.set_verbosity(args['verbosity']) \n \n return args", "def parse_args(args=None):\n\n parser = argparse.ArgumentParser(description=ds.ARGPARSER['description'])\n parser.add_argument('input',\n help=ds.ARGPARSE_INPUT['help'])\n parser.add_argument('output',\n nargs='?',\n help=ds.ARGPARSE_OUTPUT['help'],\n default=ds.ARGPARSE_OUTPUT['default'])\n parser.add_argument('-X', '--overwrite',\n help=ds.ARGPARSE_OVERWRITE['help'],\n action='store_true')\n parser.add_argument('-e', '--extensions',\n nargs='+',\n default=ds.ARGPARSE_EXTENSION['default'],\n help=ds.ARGPARSE_EXTENSION['help'])\n parser.add_argument('-w', '--wrapper',\n help=ds.ARGPARSE_WRAPPER['help'],\n default=ds.ARGPARSE_WRAPPER['default'], )\n parser.add_argument('-v', '--verbose',\n help=ds.ARGPARSE_VERBOSE['help'],\n action='store_true')\n parser.add_argument('-r', '-R',\n help=ds.ARGPARSE_RECURSIVE['help'],\n action='store_true',\n dest='recursive')\n parser.add_argument('--version',\n action='version',\n version=ah.__version__)\n\n if args is not None:\n return parser.parse_args(args)\n else:\n return parser.parse_args()", "def parse_arguments(args):\n parser = argparse.ArgumentParser()\n parser.add_argument(\n '-d', '--debug',\n help=\"Activates debug mode\",\n action=\"store_const\", dest=\"loglevel\", const=logging.DEBUG,\n default=logging.WARNING,\n )\n parser.add_argument(\n '-v', '--verbose',\n help=\"Activates verbose mode\",\n action=\"store_const\", dest=\"loglevel\", const=logging.INFO,\n )\n parser.add_argument(\n '-m', '--model',\n help=\"Path to model input file (e.g. model.json)\",\n action=\"store\", dest=\"model\",\n default='/'.join([os.path.dirname(__file__), '../../data/model.json'])\n )\n parser.add_argument(\n 'text',\n help=\"Text to be translated\",\n )\n parser.add_argument(\n '-i', '--implementation',\n help=\"Chosen method (e.g. CavnarTrenkleImpl)\",\n action=\"store\", dest=\"implementation\",\n default='CavnarTrenkleImpl'\n )\n parser.add_argument(\n '-o', '--output',\n help=\"Output results file in JSON (e.g. 
results.json)\",\n action=\"store\", dest=\"output_file\",\n default=None\n )\n # This argument is a json object which will be mapped to dict\n parser.add_argument(\n '--predict-args',\n help=\"Arguments for the prediction method (JSON format)\",\n action=\"store\", dest=\"predict_args\",\n type=json.loads\n )\n\n return vars(parser.parse_args(args))", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Just a Fibonacci demonstration\")\n parser.add_argument(\n \"--version\",\n action=\"version\",\n version=\"bytespread {ver}\".format(ver=__version__))\n parser.add_argument(\n \"-d\",\n dest=\"directory\",\n required=True,\n help=\"The directly to analyse\")\n\n parser.add_argument(\n \"-w\",\n dest=\"wildcard\",\n default=\"*\",\n required=False,\n help=\"Wildcard for file match within the directory (default: *)\")\n\n parser.add_argument(\n \"-c\",\n dest=\"clusters\",\n default=32,\n required=False,\n type=int,\n help=\"Number of clusters (default: 32)\")\n\n parser.add_argument(\n \"-b\",\n dest=\"bricks\",\n default=100,\n required=False,\n type=int,\n help=\"Number bricks to show for the longest column (default: 100)\")\n\n parser.add_argument(\n \"-r\",\n dest=\"recursive\",\n action='store_true',\n required=False,\n help=\"Recursive within the provided folder (default: false)\")\n\n return parser.parse_args(args)", "def normalize_args(args: Namespace) -> Namespace:\n if args.threads <= 0:\n args.threads = os.cpu_count()\n if not args.threads:\n logger.warning('It was not possible to determine the number of CPUs in your system. '\n 'Only one will be used, this will decrease the amount of downloads.')\n args.threads = 1\n if args.page <= 0:\n args.page = 1\n if args.max_pages <= 0:\n args.max_pages = 1\n if args.page > args.max_pages:\n args.max_pages = args.page\n\n if args.only_favorites and not args.user_inputs:\n logger.warn(f\"You're passing --favorites/-f flag without any user input.\")\n args.only_favorites = False\n\n if args.gen_pdf and not args.album_inputs and not args.user_inputs and not args.search_download:\n logger.warn(f\"You're passing --pdf flag without any album/user input or search download.\")\n args.gen_pdf = False\n\n args.keyword = args.keyword.strip() if args.keyword else None\n\n if args.album_inputs:\n inputs = inputs_string_to_list(args.album_inputs)\n args.albums_ids = extract_ids_from_list(inputs, extract_album_id)\n else:\n args.albums_ids = None\n\n if args.user_inputs:\n inputs = inputs_string_to_list(args.user_inputs)\n args.users_ids = extract_ids_from_list(inputs, extract_user_id)\n else:\n args.users_ids = None\n\n return args", "def parseArgs(args):\n parser = argparse.ArgumentParser(description = \"Scrapes baseball-reference.com for player statistics\")\n\n parser.add_argument(\"-d\", \"--domain\", help=\"domain to scrape for statistics. Default is baseball-reference.com\", nargs=1, default=[\"http://www.baseball-reference.com\"])\n parser.add_argument(\"-f\", \"--filename\", help=\"database file to store data in\", required=True, nargs=1, type=argparse.FileType(\"r+\"))\n parser.add_argument(\"-r\", \"--reset\", help=\"removes database before scraping all data from baseball-reference. Conflicts with -u. One of -r and -u must be specified\", action=\"store_true\")\n parser.add_argument(\"-u\", \"--update\", help=\"scrapes baseball-reference and adds all new information to the database. Conflicts with -r. 
One of -r and -u must be specified\", action=\"store_true\")\n parser.add_argument(\"--verbose\", help=\"enables verbose output\", action=\"store_true\")\n parser.add_argument(\"--version\", help=\"prints out version and exits\", action=\"version\", version=\"%(prog)s ({version})\".format(version=__version__))\n\n parsedArgs = parser.parse_args()\n\n if parsedArgs.reset == parsedArgs.update:\n parser.error(\"-r and -u are conflicting flags. Exactly one must be specified\")\n parser.print_help()\n\n return parsedArgs", "def parse_args(args: List[str]) -> Optional[argparse.Namespace]:\n\n root = argparse.ArgumentParser(description=inspect.cleandoc('''\n Small cross-platform Python app that can create and update PlatformIO projects from STM32CubeMX .ioc files. It\n uses STM32CubeMX to generate a HAL-framework-based code and alongside creates PlatformIO project with compatible\n parameters to stick them both together. Both CLI and GUI editions are available. Visit\n https://github.com/ussserrr/stm32pio for more information. Use 'stm32pio [command] -h' to see help on the\n particular command'''))\n\n # Global arguments (there is also an automatically added '-h, --help' option)\n root.add_argument('--version', action='version', version=f\"stm32pio {stm32pio.core.util.get_version()}\")\n root.add_argument('-v', '--verbose', help=\"enable verbose output (default level: INFO)\", action='count', default=1)\n\n sub = root.add_subparsers(dest='command', title='commands', description=\"valid commands\", help=\"available actions\")\n\n # Primary operations\n init = sub.add_parser('init', help=\"create config .INI file to check and tweak parameters before proceeding\")\n generate = sub.add_parser('generate', help=\"generate CubeMX code only\")\n pio_init = sub.add_parser('pio_init', help=\"create new compatible PlatformIO project\")\n patch = sub.add_parser('patch', help=\"tweak the project so both CubeMX and PlatformIO could work together\")\n new = sub.add_parser('new', help=\"generate CubeMX code, create PlatformIO project and glue them together\")\n status = sub.add_parser('status', help=\"inspect the project current state\")\n validate = sub.add_parser('validate', help=\"verify current environment based on the config values\")\n clean = sub.add_parser('clean', help=\"clean-up the project (by default, no files will be deleted immediately \"\n \"without your confirmation)\")\n gui = sub.add_parser('gui', help=\"start the graphical version of the application. All arguments will \"\n \"be passed forward, see its own --help for more information\")\n\n # Assign options to commands\n for command in [init, generate, pio_init, patch, new, status, validate, clean, gui]:\n command.add_argument('-d', '--directory', dest='path', default=Path.cwd(),\n help=\"path to the project (current directory, if not given)\")\n for command in [init, pio_init, new, gui]:\n command.add_argument('-b', '--board', dest='board', default='', help=\"PlatformIO board name. \" + board_hint)\n for command in [init, generate, new]:\n command.add_argument('-e', '--start-editor', dest='editor',\n help=\"start the specified editor after an action (e.g. 
subl, code, atom, etc.)\")\n for command in [generate, new]:\n command.add_argument('-c', '--with-build', action='store_true', help=\"build the project after code generation\")\n for command in [init, new]:\n command.add_argument('-s', '--store-content', action='store_true',\n help=\"save folder initial contents as a cleanup ignore list\")\n clean.add_argument('-s', '--store-content', action='store_true',\n help=\"save project folder contents as a cleanup ignore list and exit\")\n clean.add_argument('-q', '--quiet', action='store_true',\n help=\"suppress the caution about the content removal (be sure of what you are doing!)\")\n\n if len(args) == 0:\n root.print_help()\n return None\n\n return root.parse_args(args)", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Lookup table generator for Image Comparison\")\n parser.add_argument(\n \"--version\",\n action=\"version\",\n version=\"lookuptable {ver}\".format(ver=__version__))\n parser.add_argument(\n \"-f\",\n \"--folder\",\n dest=\"imagefolder\",\n help=\"path to image folder\",\n type=str,\n metavar=\"STRING\")\n parser.add_argument(\n \"-v\",\n \"--verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action=\"store_const\",\n const=logging.INFO)\n parser.add_argument(\n \"-vv\",\n \"--very-verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action=\"store_const\",\n const=logging.DEBUG)\n return parser.parse_args(args)", "def _parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('out', nargs='*', help='Create a plot for all provided'\n ' output files')\n parser.add_argument('--yscale', '-y', help='Y-axis scale',\n default='linear')\n parser.add_argument('--hits', help='Draw hits', dest='hits', action='store_true')\n parser.add_argument('--no-hits', help='Do not draw hits', dest='hits', action='store_false')\n parser.set_defaults(hits=True)\n\n parser.add_argument('--ltrim', help='Remove a number of smallest latency values from the plot', default=0, type=int)\n parser.add_argument('--rtrim', help='Remove a number of biggest latency values from the plot', default=0, type=int)\n\n parser.add_argument('--misses', help='Draw misses', dest='misses', action='store_true')\n parser.add_argument('--no-misses', help='Do not draw misses', dest='misses', action='store_false')\n parser.set_defaults(misses=True)\n\n args = parser.parse_args()\n if not args.out:\n parser.error('at least one output need to be provided')\n return args", "def parse_args():\n from argparse import ArgumentParser\n ap = ArgumentParser(prog=__exe__, description=__purpose__)\n ap.add_argument(\n '--host', dest='host', default=None,\n help='Host for XNAT. 
Default: env XNAT_HOST.')\n ap.add_argument(\n '-u', '--username', dest='username', default=None,\n help='Username for XNAT.')\n ap.add_argument('project', help='Project Label')\n ap.add_argument('session', help='Session Label')\n ap.add_argument(\n 'proc_suffix', help='Proc name suffix', nargs='?', default='')\n ap.add_argument(\n '-sd', '--subjects_dir', dest='subjects_dir',\n help='Subjects Directory',\n default=os.environ.get('SUBJECTS_DIR', '/tmp'))\n return ap.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser(description=\"crnn process\")\n parser.add_argument(\"--dir_name\", type=str,\n default=None, help=\"infer input dir\")\n parser.add_argument('--res_dir_name', default='./output', type=str,\n help='the folder to save the result')\n args_opt = parser.parse_args()\n return args_opt", "def get_args() -> Namespace:\n\n parser = ArgumentParser(description='A Minecraft RCON client.')\n parser.add_argument('server', help='the server to connect to')\n parser.add_argument(\n '-t', '--timeout', type=float, help='connection timeout in seconds')\n parser.add_argument(\n '-d', '--debug', action='store_true',\n help='print additional debug information')\n subparsers = parser.add_subparsers(dest='action')\n command_parser = subparsers.add_parser(\n 'exec', help='execute commands on the server')\n command_parser.add_argument(\n 'command', help='command to execute on the server')\n command_parser.add_argument(\n 'argument', nargs='*', default=(), help='arguments for the command')\n say_parser = subparsers.add_parser(\n 'say', help='broadcast a message on the server')\n say_parser.add_argument('message', help='the message to broadcast')\n fortune_parser = subparsers.add_parser(\n 'fortune', help='send a fortune to the players on the server')\n fortune_parser.add_argument(\n '-l', '--long', action='store_true', help='generate ling fortunes')\n fortune_parser.add_argument(\n '-o', '--offensive', action='store_true',\n help='generate offensive fortunes')\n datetime_parser = subparsers.add_parser(\n 'datetime',\n help='sends the current date and time to the players on the server')\n datetime_parser.add_argument(\n '-f', '--format', default='%c', help='the datetime format')\n subparsers.add_parser('in-use', help='checks whether the server is in use')\n shutdown_parser = subparsers.add_parser(\n 'idle-shutdown', help='shuts down the server if it is not in use')\n shutdown_parser.add_argument(\n '-s', '--sudo', action='store_true',\n help='invoke the shutdown command using sudo')\n shutdown_parser.add_argument(\n '-u', '--unit', default='minecraft@{server}.service',\n help='the systemd unit template')\n return parser.parse_args()", "def __parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('-f', '--force', action=\"store_true\", default=False,\n help='overwrite existing database files during import')\n parser.add_argument('-e', '--extension', action=\"store\", default='txt',\n help='specify file extension. default is \"txt\"')\n parser.add_argument('-d', '--delimiter', action=\"store\", default='\\t',\n help='specify column delimiter. default is tab (\\\\t)')\n parser.add_argument('-m', '--mark', action=\"store\", default='.',\n help='specify decimal mark for numeric data. default is'\n ' dot (.)')\n parser.add_argument('-o', '--outformat', action=\"store\", default='npz',\n help='specify output database format. default is \"npz\"'\n ' for numpy database. 
use \"mat\" for matlab '\n ' database format.')\n parser.add_argument('-r', '--recursive', action=\"store_true\", default=False,\n help='recursively walk through all sub-directories of'\n ' current working directory')\n parser.add_argument('-p', '--pcs', action=\"store_true\", default=True,\n help='indicate if files are pcs files.')\n parser.add_argument('-c', '--colheadlines', action=\"store\", default='1',\n help='number of lines spanned by the column headers')\n args = parser.parse_args()\n return args", "def parse_args(self):\n \n # check args:\n # XXX: make them position independent\n if not os.path.isdir(self.params.R_source_folder):\n raise gc3libs.exceptions.InvalidUsage(\n \"Invalid path to R scripts folder: '%s'. Path not found\"\n % self.params.R_source_folder)\n # XXX: shall we check/validate the content ( presence of valid R scripts ) ?\n\n self.log.info(\"source dir: %s\" % self.params.R_source_folder)\n\n if not os.path.exists(self.params.command_file):\n raise gc3libs.exceptions.InvalidUsage(\n \"gc_gps command file '%s' does not exist;\"\n % self.params.command_file)\n gc3libs.utils.test_file(self.params.command_file, os.R_OK,\n gc3libs.exceptions.InvalidUsage)\n\n if self.params.input_dir and not os.path.isdir(self.params.input_dir):\n raise gc3libs.exceptions.InvalidUsage(\n \"Input folder '%s' does not exists\"\n % self.params.input_dir)\n\n self.log.info(\"Command file: %s\" % self.params.command_file)\n self.log.info(\"R source dir: %s\" % self.params.R_source_folder)\n if self.params.input_dir:\n self.log.info(\"Input data dir: '%s'\" % self.params.input_dir)", "def parse_args():\n parser = argparse.ArgumentParser(\"Plot time series figures.\")\n parser.add_argument('--log-file', type=str, nargs=\"+\", required=True,\n help=\"path to a testing log file.\")\n parser.add_argument('--trace-file', type=str, default=None,\n help=\"path to a trace file.\")\n parser.add_argument('--save-dir', type=str, default=None,\n help=\"path to save.\")\n parser.add_argument('--noise', type=float, default=0)\n\n args, unknown = parser.parse_known_args()\n return args", "def parse_args(args):\r\n\r\n parser = argparse.ArgumentParser(description=__doc__)\r\n\r\n parser.add_argument(\r\n \"--seed\",\r\n type=int,\r\n help=\"Random number generator seed for replicability\",\r\n default=12,\r\n )\r\n parser.add_argument(\"--data-file\", type=str, default=\"_output/data.npz\")\r\n parser.add_argument(\r\n \"--num-classes\",\r\n type=int,\r\n default=0,\r\n help=\"Number of classes in classification. 
Should be zero if doing regression\",\r\n )\r\n parser.add_argument(\r\n \"--fit-dnn\", action=\"store_true\", default=False, help=\"Fit DNN vs CNNC\"\r\n )\r\n parser.add_argument(\r\n \"--do-binary\", action=\"store_true\", default=False, help=\"fit binary outcome\"\r\n )\r\n parser.add_argument(\r\n \"--data-path\", type=str\r\n )\r\n parser.add_argument(\r\n \"--num-tf\", type=int, default=2\r\n )\r\n parser.add_argument(\r\n \"--exclude-tf\", type=int, default=1\r\n )\r\n parser.add_argument(\r\n \"--batch-size\", type=int, default=32\r\n )\r\n parser.add_argument(\r\n \"--n-layers\", type=int, default=2, help=\"Number of hidden layers\"\r\n )\r\n parser.add_argument(\r\n \"--n-hidden\", type=int, default=10, help=\"Number of hidden nodes per layer\"\r\n )\r\n parser.add_argument(\r\n \"--dropout-rate\", type=float, default=0.15, help=\"probability of dropping out a node\"\r\n )\r\n parser.add_argument(\r\n \"--epochs\", type=int, default=40, help=\"Number of Adam epochs\"\r\n )\r\n parser.add_argument(\"--log-file\", type=str, default=\"_output/log_nn.txt\")\r\n parser.add_argument(\"--out-model-file\", type=str, default=\"_output/nn.pt\")\r\n args = parser.parse_args()\r\n\r\n assert args.num_classes != 1\r\n\r\n return args", "def parse_args():\n help_text = \"\"\"\n Analyzer of the frequency of use of nouns in the headings of posts on hubr.com\n \"\"\"\n parser = argparse.ArgumentParser(\n description=help_text\n )\n parser.add_argument(\n '-p',\n '--pages',\n type=int,\n dest='page_count',\n default=PAGE_COUNT,\n help=f'Number of pages to parse, default is {PAGE_COUNT}.'\n )\n parser.add_argument(\n '-s',\n '--start',\n type=int,\n default=PAGE_START,\n dest='start_page',\n help=f'Start page number, default is {PAGE_START}.',\n )\n parser.add_argument(\n '-t',\n '--top',\n type=int,\n default=TOP_SIZE,\n dest='top_size',\n help=f'The size of the top noun, default is {TOP_SIZE}.',\n )\n\n return parser.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser(\n description='Aggregate Elasticsearch Log data.')\n parser.add_argument(\n '--host',\n default='https://logging-es',\n type=str,\n action='store',\n help='Host name or IP of the Elasticsearch server.'\n )\n parser.add_argument(\n '--port',\n default=9200,\n type=int,\n action='store',\n help='Port number of the Elasticsearch server.'\n )\n parser.add_argument(\n '--ca_certs',\n default='secret/admin-ca',\n type=str,\n action='store',\n help='Path to the CA certificates file'\n )\n parser.add_argument(\n '--cert',\n default='secret/admin-cert',\n type=str,\n action='store',\n help='Path to the client certificate file'\n )\n parser.add_argument(\n '--key',\n default='secret/admin-key',\n type=str,\n action='store',\n help='Path to the client key file'\n )\n\n return parser.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser()\n\n parser.add_argument(\"-pn\", \"--projectname\", type=str, required=True)\n parser.add_argument(\"-gp\", \"--generated_file_path\", type=str, required=True)\n parser.add_argument(\"-fs\", \"--filesize\", type=int, required=False)\n parser.add_argument(\"-rc\", \"--rowcount\", type=int, required=False)\n parser.add_argument(\"-ll\", \"--logging_level\", type=str, required=False)\n\n parser.add_argument(\"-cjp\", \"--config_json_path\", type=str, required=False)\n parser.add_argument(\"-dfl\", \"--data_files_location\", type=str, required=False)\n parser.add_argument(\"-drc\", \"--default_rowcount\", type=int, required=False)\n parser.add_argument(\"-fen\", 
\"--file_encoding\", type=str, required=False)\n parser.add_argument(\"-fle\", \"--file_line_ending\", type=str, required=False)\n\n parsed = parser.parse_args()\n\n project_name = parsed.projectname\n generated_file_path = parsed.generated_file_path\n file_size = parsed.filesize\n row_count = parsed.rowcount\n logging_level = parsed.logging_level\n config_json_path = parsed.config_json_path\n data_files_location = parsed.data_files_location\n default_rowcount = parsed.default_rowcount\n file_encoding = parsed.file_encoding\n file_line_ending = parsed.file_line_ending\n\n project_scope_kwargs = {\n \"project_name\": project_name,\n \"data_files_location\": data_files_location,\n \"config_json_path\": config_json_path,\n \"default_rowcount\": default_rowcount,\n }\n file_scope_kwargs = {\n \"generated_file_path\": generated_file_path,\n \"file_size\": file_size,\n \"row_count\": row_count,\n \"file_encoding\": file_encoding,\n \"file_line_ending\": file_line_ending,\n }\n return logging_level, project_scope_kwargs, file_scope_kwargs", "def parseArguments():\n # Create argument parser\n parser = argparse.ArgumentParser()\n\n # Optional arguments\n parser.add_argument(\"-t\", \"--test\", help=\"Optionally test algorithm on subsample of the data. Set to 1 for testing\", type=int, default=0)\n\n parser.add_argument(\"--cores\", help=\"Optimized code for a server with a lot of RAM, set to the number of available cores\", type=int, default=40)\n\n\n # Print version\n parser.add_argument(\"--version\", action=\"version\", version='%(prog)s - Version 2.0') #version 1.0 is for the observations in June 2018\n #version 1.1 contains the optimizations made after the june observations (mainly the switch to stackmags)\n #version 1.2 changed sim class to NOT include the list of failed candidates (not qsos)\n #... copied changes made to crossval version\n #version 1.5 added check for duplicate quasars and remove them\n #version 1.6 new simulated quasars (december)\n ##-------------------\n #version 2.0: combined training of classifier and regressor, streamlined input\n #version 2.1: Tryied to updates excluded area to a little more than stripe 82 but decided not to keep it, so no change\n\n # Parse arguments\n args = parser.parse_args()\n\n return args", "def parse_args(self):\n\n # Parse the arguments themselves.\n args = vars( self.parser.parse_args() )\n\n return args", "def parse_args():\n parser = argparse.ArgumentParser(description='Crawl an Android app store for apk files.')\n parser.add_argument('--store', dest='api', choices=['GooglePlay', 'F-Droid'], required=True,\n help='Specifies the store to crawl. 
At the moment only Google Play is supported.')\n parser.add_argument('--meta', dest='meta', required=False, action='store_const', default=False, const=True,\n help='If set, no apps will be downloaded, but the meta_data will be saved.')\n parser.add_argument('--basedir', dest='base_dir', type=str, default=os.getenv('HOME'),\n required=False, help='Specifies the base path for both logs and apk_downloads.')\n parser.add_argument('--credentials', dest='credentials', type=str, required=False, default=None,\n help='Specifies the path to a credential file in .toml format.')\n parser.add_argument('--limit', dest='limit', type=int, required=False, default=None,\n help='Specifies the maximum number of apks per category to download.')\n return parser.parse_args()", "def handle_args():\n parser = argparse.ArgumentParser(\n description=\"\"\"Script to download archives from the NLM public\n FTP server.\n \"\"\")\n # Server settings\n server_settings = parser.add_argument_group('FTP SERVER SETTINGS', '')\n server_settings.add_argument(\n '-n', '--netrc', default='~/.netrc',\n help=\"\"\"netrc file containing login parameters for the NLM\n server. See `man 5 netrc` for details on generating this\n file or read nlm_data_import/netrc/example.netrc.\n \"\"\")\n server_settings.add_argument(\n 'server_data_dir',\n help='Directory containing desired files on the NLM FTP server')\n server_settings.add_argument(\n '-l', '--limit', type=int, default=0,\n help='Only download LIMIT files.')\n\n # Download settings\n local_settings = parser.add_argument_group('LOCAL SETTINGS', '')\n local_settings.add_argument(\n '-d', '--download_database', default='~/.ftp_download_db',\n help='Path to SQLite database detailing past downloads')\n local_settings.add_argument(\n '-o', '--output_dir', default='~/medline_data',\n help='Directory where downloads will be saved')\n local_settings.add_argument(\n '-x', '--export_dir', default='~/medline_data_exports',\n help=\"\"\"Directory where data to be retrieved by the\n `hypothesis_graph application server are staged.\n \"\"\")\n # Sending debug emails (requires the send_ses_messages module - see\n # setup.py)\n debugging_settings = parser.add_argument_group('DEBUGGING SETTINGS', '')\n debugging_settings.add_argument(\n '--email_debugging', default=False, action='store_true',\n help=\"Send debugging emails. Defaults to FALSE.\")\n debugging_settings.add_argument(\n '--from_email', required=False, help=\"FROM field for debugging emails\")\n debugging_settings.add_argument(\n '--to_email', required=False, help=\"TO field for debugging emails\")\n\n return parser.parse_args()", "def parse_train_args() -> Namespace:\n parser = ArgumentParser()\n add_train_args(parser)\n args = parser.parse_args()\n modify_train_args(args)\n\n return args", "def parse_args() -> argparse.Namespace:\n desc = 'run integration tests.'\n parser = argparse.ArgumentParser(description=desc)\n parser.add_argument(\n '--only', dest='regex', type=regex,\n default='.*', help=\"Regular expression to filter which tests to run\"\n )\n parser.add_argument('-j', '--jobs', type=int, dest=\"jobs\",\n default=multiprocessing.cpu_count(),\n help='max number of concurrent jobs')\n parser.add_argument('-v', '--verbose', default=False, dest=\"verbose\",\n help='enable verbose output')\n return parser.parse_args()" ]
[ "0.72242314", "0.70827246", "0.7079136", "0.6844468", "0.684225", "0.678038", "0.6626821", "0.65809435", "0.65164214", "0.6471258", "0.64519733", "0.64393944", "0.64139444", "0.63585967", "0.631252", "0.62846994", "0.62844545", "0.6282284", "0.6273448", "0.6238388", "0.6209527", "0.6202339", "0.61969113", "0.6193125", "0.61844075", "0.6182045", "0.6180894", "0.6178742", "0.61711085", "0.61677444", "0.61428213", "0.61412925", "0.6118593", "0.6118082", "0.61168575", "0.6111032", "0.6105158", "0.6101413", "0.6085709", "0.6081346", "0.60678744", "0.6067235", "0.60667574", "0.6065296", "0.60636556", "0.60557073", "0.6048412", "0.6042964", "0.6039945", "0.60352784", "0.60330683", "0.6023513", "0.6018534", "0.60127133", "0.6002855", "0.5996186", "0.59879315", "0.5973374", "0.59715647", "0.59647727", "0.5964497", "0.59629667", "0.5958664", "0.5953677", "0.5951104", "0.5946469", "0.5943969", "0.5941952", "0.5941917", "0.5940079", "0.59397405", "0.5939624", "0.59383714", "0.5936238", "0.5924443", "0.5922961", "0.5906431", "0.5903975", "0.59007084", "0.58974534", "0.5895843", "0.58934844", "0.5892317", "0.5891882", "0.58913386", "0.5886797", "0.58849204", "0.58795255", "0.5877486", "0.5875252", "0.5863361", "0.5860831", "0.5856632", "0.58563423", "0.58557385", "0.5843815", "0.5842788", "0.5838448", "0.5829904", "0.58246017" ]
0.70003927
3
Parse all the |args| and save the results to |namespace|.
def parse_known_args(self, args=None, namespace=None):
    namespace, unknown_args = argparse.ArgumentParser.parse_known_args(
        self, args=args, namespace=namespace)
    setup_logging(debug=namespace.debug, quiet=namespace.quiet)
    return (namespace, unknown_args)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse(self, args):\n pass", "def parse_arguments(args):", "def parse_args(self, args: List[str]) -> Namespace:\n parser = self._to_parser()\n args = parser.parse_args(args)\n if hasattr(args, 'dm_commands'):\n if args.dm_commands is not None:\n args.dm_commands = parse_commands(args.dm_commands)\n else:\n args.dm_commands = list()\n if hasattr(args, 'dm_options'):\n if args.dm_options is not None:\n args.dm_options = parse_options(args.dm_options)\n else:\n args.dm_options = dict()\n LOG.debug(f\"Arguments: {args}.\")\n return args", "def parse_args(self, args=None, namespace=None):\n # This will call our parse_known_args below, so don't use setup_logging.\n namespace = argparse.ArgumentParser.parse_args(\n self, args=args, namespace=namespace)\n return namespace", "def parse(self, args_str=None):\n self._namespace = vars(self._parser.parse_args(args_str))", "def parse_args(self, args):\n raise Exception(\"Not implemented\")", "def _parse(self, args):\n parser = self._create_parser()\n return parser.parse(args)", "def __parse_args(self):\n for argument in self.args:\n source_arg = re.match(\"^(--source=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n input_arg = re.match(\"^(--input=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n stats_arg = re.match(\"^(--stats=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n help_arg = re.match(\"^--help$\", argument)\n vars_arg = re.match(\"^--vars$\", argument)\n insts_arg = re.match(\"^--insts$\", argument)\n if source_arg:\n self.sourceFile = source_arg.group(2)\n self.passedArgs.append(\"source\")\n elif input_arg:\n self.inputFile = input_arg.group(2)\n self.passedArgs.append(\"input\")\n elif help_arg:\n print(\"napoveda\")\n sys.exit(0)\n elif stats_arg:\n self.statsFile = stats_arg.group(2)\n self.passedArgs.append(\"stats\")\n elif vars_arg:\n self.passedArgs.append(\"vars\")\n if self.first_stat_arg is None:\n self.first_stat_arg = \"vars\"\n elif insts_arg:\n self.passedArgs.append(\"insts\")\n if self.first_stat_arg is None:\n self.first_stat_arg = \"insts\"\n else:\n raise ArgError(\"Unknown argument or format of the argument! 
(\" + argument + \")\")", "def parse_args(args=None):\n return AP.parse_args(args=args)", "def parse_args(self):\n return self.__process_args__(self.parser.parse_args())", "def parse_args(self, args=None, namespace=None):\n\n arguments = self._parser.parse_args(args, namespace)\n\n if arguments.usage:\n self._action = _formulate_action(\n ProgramUsageAction,\n parser=self._parser,\n exitf=self._parser.exit)\n\n elif arguments.version:\n self._action = _formulate_action(\n ShowVersionAction,\n prog=self._parser.prog,\n ver=self.versionString,\n year=self.yearString,\n author=self.authorName,\n license=self.programLicense,\n exitf=self._parser.exit)\n\n else:\n self._action = _formulate_action(\n DefaultAction,\n prog=self._parser.prog,\n exitf=self._parser.exit,\n imagefile=arguments.imagefile)", "def parse_args(args=None):\n\t\treturn _get_args_parser().parse_args(args)", "def ParseArgs() -> argparse.Namespace:\n parser = argparse.ArgumentParser()\n\n parser.add_argument(\n '--input', dest='input', help='Japanese phonetic reading file')\n parser.add_argument(\n '--output_token_array',\n dest='output_token_array',\n help='Output token array file.')\n parser.add_argument(\n '--output_string_array',\n dest='output_string_array',\n help='Output string array file.')\n return parser.parse_args()", "def parse_args(args: Optional[Sequence[str]] = None) -> Namespace:\n\n parser = ArgumentParser(\n description=(\n \"Takes a snapshot of a WordPress website remotely and \"\n \"stores it to either a local or a remote location. This \"\n \"requires rsync, php-cli and mysqldump\"\n )\n )\n\n parser.add_argument(\n \"source\",\n help=(\n \"Source directory for the WordPress installation dir. Syntax: \"\n \"`/var/www` or `user@host:/var/www`\"\n ),\n type=parse_location,\n )\n parser.add_argument(\n \"backup_dir\", help=\"Directory to store the snapshot\", type=parse_location\n )\n parser.add_argument(\n \"-n\",\n \"--snapshot-base-name\",\n help=\"Base name for the snapshot file. Defaults to DB name.\",\n )\n parser.add_argument(\n \"-t\",\n \"--file-name-template\",\n help=\"Template for snapshot file name. Defaults to: `{base}_{time}.tar.gz`\",\n default=\"{base}_{time}.tar.gz\",\n )\n parser.add_argument(\n \"-c\",\n \"--compression-mode\",\n help=\"Compression mode for tar (gzip, bzip2, lzip, xz). Defaults to: gzip\",\n default=\"gzip\",\n const=\"gzip\",\n nargs=\"?\",\n choices=[\"gzip\", \"bzip2\", \"lzip\", \"xz\"],\n )\n parser.add_argument(\n \"--db-host\",\n help=(\n \"Optional IP address of the database server, if IP of the wpconfig.php is a local one.\"\n ),\n default=None,\n const=None,\n nargs=\"?\",\n )\n parser.add_argument(\n \"--maintenance-mode\",\n help=(\n \"Activate maintenance mode before copying files to prevent conflicting file changes.\"\n ),\n action=\"store_true\",\n )\n parser.add_argument(\n \"--exclude\",\n help=(\n \"Exclude source files/directories, given as PATTERN. See tar command manual page.\"\n ),\n action=\"append\",\n )\n parser.add_argument(\n \"--exclude-tag-all\",\n help=(\n \"Exclude source directories and all its content, where FILE is inside. 
See tar command manual page.\"\n ),\n action=\"append\",\n )\n\n parsed_args = parser.parse_args(args)\n\n # apply compression mode to file name template\n if parsed_args.compression_mode == \"bzip2\":\n parsed_args.file_name_template = re.sub(\".gz\", \".bz2\", parsed_args.file_name_template)\n parsed_args.backup_dir.set_compression_mode(parsed_args.compression_mode)\n elif parsed_args.compression_mode == \"lzip\":\n parsed_args.file_name_template = re.sub(\".gz\", \".lz\", parsed_args.file_name_template)\n parsed_args.backup_dir.set_compression_mode(parsed_args.compression_mode)\n elif parsed_args.compression_mode == \"xz\":\n parsed_args.file_name_template = re.sub(\".gz\", \".xz\", parsed_args.file_name_template)\n parsed_args.backup_dir.set_compression_mode(parsed_args.compression_mode)\n\n return parsed_args", "def parse_args():\n global Args\n parser = argparse.ArgumentParser()\n subparsers = parser.add_subparsers()\n pars_simulation(subparsers)\n pars_analyze(subparsers)\n Args = parser.parse_args()", "def update_args(self, args):\n self.args = self.parser.parse_args(args)", "def parse_args(args):\n\n parser = argparse.ArgumentParser(description=\"Scrape jobs and store results.\")\n\n parser.add_argument(\"--version\",\n action=\"version\",\n version=\"scrape-jobs {ver}\".format(ver=__version__))\n\n parser.add_argument(\"-v\",\n \"--verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action=\"store_const\",\n const=logging.INFO)\n\n parser.add_argument(\"-vv\",\n \"--very-verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action=\"store_const\",\n const=logging.DEBUG)\n\n default_file = str(Path.cwd().joinpath(config.CONFIG_FILENAME).absolute())\n parser.add_argument(\"-c\",\n dest=\"config_file\",\n action=\"store\",\n default=default_file,\n type=str,\n help=f\"defaults to '{default_file}'\")\n\n parser.add_argument(dest=\"site\",\n action=\"store\",\n choices={\"seek.com.au\", \"linkedin.com\"},\n type=str,\n help=\"site to scrape\")\n\n return parser.parse_args(args)", "def _parse_args(args=None):\n parser = argparse.ArgumentParser()\n\n # general\n parser.add_argument(\"-f\", \"--files\",\n help=\"Files from analysis, separated by comma\",\n metavar=\"TwissFile\", dest=\"files\", required=True)\n parser.add_argument(\"--twissfile\",\n help=\"Twiss file to use\",\n metavar=\"/path/to/twiss.dat\", dest=\"twissfile\", required=True)\n parser.add_argument(\"-o\", \"--output\",\n help=\"Output path, where to store the results\",\n metavar=\"<path>\", default=\"./\", dest=\"output_path\")\n parser.add_argument(\"-t\", \"--algorithm\",\n help=\"Which algorithm to use {:s}\".format(ALGO_CHOICES),\n metavar=\"ALGORITHM\", default=ALGO_CHOICES[0], dest=\"algorithm\",\n choices=ALGO_CHOICES)\n parser.add_argument(\"-d\", \"--deltapScalingFactor\",\n help=\"Scaling factor for deltap, remember final value must be in MAD units\",\n metavar=\"<deltapScalingFactor>\", default=1.0, type=float, dest=\"deltap_scaling_factor\")\n\n # parse arguments\n accel_cls, remain_args = manager.get_accel_class_from_args(args)\n options = parser.parse_args(remain_args)\n source_files = [f.strip() for f in options.files.split(',')]\n\n # put all arguments into one dict\n options_dict = {\n \"accel_cls\": accel_cls,\n \"source_files\": source_files,\n }\n options_dict.update(options.__dict__)\n\n options_dict.pop(\"files\") # is \"source_files\" now\n\n return options_dict", "def run(self, args: argparse.Namespace) -> None:\n pass", "def parse(self, args=None, 
namespace=None):\n orginal_args = sys.argv[1:]\n subparsers, args = self._fix_parsers()\n subparser = list(set(subparsers.keys()) & set(args))\n known, unknown = self.parse_known_args(args, namespace)\n\n if \"-h\" in unknown or \"--help\" in unknown:\n if len(orginal_args) == 1 and (\"-h\" in unknown or \"--help\" in unknown):\n self.print_message(self.title+\"\\n\")\n self.print_help()\n exit(0)\n elif len(subparser) == 1:\n subparsers[subparser[0]].print_help()\n exit(0)\n if unknown:\n msg = 'unrecognized arguments: %s'\n self.error(msg % ' '.join(unknown))\n\n return known", "def parse_args():\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument(\n \"-i\", \"--input\", required=True, action=\"store\", dest=\"f_in\", help=\"input file\"\n )\n parser.add_argument(\n \"-o\",\n \"--output\",\n required=True,\n action=\"store\",\n dest=\"f_out\",\n help=\"stem of output file\",\n )\n\n parser.add_argument(\n \"-d\",\n \"--dir\",\n required=True,\n action=\"store\",\n dest=\"dir\",\n help=\"directory to save output files\",\n )\n\n parser.add_argument(\n \"-t\",\n \"--threshold\",\n required=False,\n action=\"store\",\n dest=\"thres\",\n default=0.85,\n help=\"threshold for the scoring function\",\n )\n\n parser.add_argument(\n \"-l\",\n \"--language\",\n required=True,\n action=\"store\",\n dest=\"lang\",\n help=\"provide language in order to set stop words\",\n )\n\n parser.add_argument(\n \"-min\",\n \"--minimum\",\n required=False,\n action=\"store\",\n dest=\"min\",\n default=100,\n help=\"minimum number of occurrences to be considered as ngram\",\n )\n\n parser.add_argument(\n \"--trigram\",\n required=False,\n action=\"store_true\",\n dest=\"trigram\",\n help=\"extracting trigrams in addition to bigrams\",\n )\n\n return parser.parse_args()", "def parse_args() -> argparse.Namespace:\n parser = argparse.ArgumentParser(description='X sandbox.')\n # 一次可以测试多个函数\n parser.add_argument('-fl', '--function_name_list', type=str,\n dest='function_name_list', required=True,\n help='test objective function names list.')\n # 权重列表 for ScalarizedObjective.\n parser.add_argument('-wl', '--weight_list', type=str,\n dest='weight_list', required=True,\n help='weight list.')\n # 协方差矩阵\n parser.add_argument('-cm', '--covariance_matrix', type=str,\n dest='covariance_matrix', required=True,\n help='covariance matrix.')\n # 是否估计协方差\n parser.add_argument('-ec', '--evaluate_covariance', type=int,\n dest='evaluate_covariance', default=0,\n help='0: False; 1: True(evaluate covariance)')\n # # 桶数\n # parser.add_argument('-nb', '--num_bucket', type=int,\n # dest='num_bucket', required=False,\n # default=1, help='number of bucket.')\n # # 样本量\n # parser.add_argument('-ns', '--num_sample', type=int,\n # dest='num_sample', required=False,\n # default=1, help='number of sample.')\n # 随机迭代次数\n parser.add_argument('-ii', '--init_iter', type=int,\n dest='init_iter', required=False,\n default=1, help='iteration of sobol generation.')\n # 随机迭代时每次迭代的组数\n parser.add_argument('-ibs', '--init_batch_size', type=int,\n dest='init_batch_size', required=False,\n default=1, help='number of sobol generation.')\n # BO更新的迭代次数\n parser.add_argument('-ui', '--update_iter', type=int,\n dest='update_iter', required=False,\n default=20, help='number of GP(N)EI generation.')\n # 实验组数\n parser.add_argument('-bs', '--batch_size', type=int,\n dest='batch_size', required=False,\n default=1, help='number of trial each iter.')\n # 方差系数 deprecated\n parser.add_argument('-vc', '--var_coef', type=int,\n dest='var_coef', required=False,\n 
default=1, help='variance coef.')\n # 方差计算方法\n parser.add_argument('-vct', '--var_compute_type', type=int,\n dest='var_compute_type', required=False,\n default=1, help='variance compute type: 0 1 2 3 4')\n\n # 采样数\n parser.add_argument('-nr', '--num_random', type=int,\n dest='num_random', required=False,\n default=10000, help='num_random for gen samples')\n # 桶数\n parser.add_argument('-nb', '--num_bucket', type = int,\n dest='num_bucket', required=False,\n default=50, help='number of bucket.')\n # 对照组数\n # parser.add_argument('-nc', '--num_control', type=int,\n # dest='num_control', required=False,\n # default=0, help='number of control arms each iter.')\n # 保存路径\n parser.add_argument('-sp', '--save_path', type=str, dest='save_path',\n default=\"/mnt/wfs/mmcommwfssz/project_wx-td-itil-exp/\" + \\\n \"bo_test_output/covariance_test\",\n help=(\"helper directory.\"))\n return parser.parse_args()", "def parse_args(args):\n\n parser = argparse.ArgumentParser(\n description=\"\"\"Generates and runs an afni_proc.py script to preprocess resting state fMRI data\"\"\",\n formatter_class=argparse.RawDescriptionHelpFormatter)\n\n # Optional Flags\n parser.add_argument(\"-t\", \"--trs_remove\", action=\"store\", default=5, type=int, metavar='TRs',\n help=\"\"\"number of trs to remove at the beginning of the epi data\n (default = 5 trs)\"\"\")\n parser.add_argument(\"-d\", \"--dim_voxel\", action=\"store\", default=2.0, type=float, metavar='MM',\n help=\"voxel dimensions in mm that processed epi will be resampled to (default = 2.0 mm)\")\n parser.add_argument(\"-b\", \"--bandpass\", action=\"store\", default=[0.01, 0.25], nargs=2, type=float, metavar=\"F\",\n help=\"bandpass frequencies lower and upper limits (default = 0.01 0.25)\")\n parser.add_argument(\"-v\", \"--volumes\", action=\"store\", default=0, type=int, metavar=\"V\",\n help=\"\"\"truncate the epi data to the inputted number of volumes, useful if subjects have data \n with different numbers of volumes (default = no truncation)\"\"\")\n parser.add_argument(\"-f\", \"--fwhm\", action=\"store\", default=5.0, type=float, metavar=\"MM\",\n help=\"the full width half maximum that is used when blurring (default = 5.0 mm)\")\n parser.add_argument(\"-c\", \"--cores\", action=\"store\", default=cpu_count(), type=int, metavar=\"C\",\n help=\"number of cores supplied to 3dDeconvolve (default = all cores)\")\n parser.add_argument(\"-s\", \"--subj_id\", action=\"store\", default=\"sub\", metavar=\"SUB\",\n help=\"text file of subject ids (default = sub)\")\n parser.add_argument(\"-T\", \"--time_step\", action=\"store\", default=0, type=float, metavar=\"TS\",\n help=\"set the time step for bandpassing (default = ts in header info\")\n\n parser.add_argument(\"-g\", \"--global_signal_regression\", action=\"store_false\", default=True,\n help=\"do not perform global signal regression (default = perform gsr)\")\n\n parser.add_argument(\"-r\", \"--rerun\", action=\"store_true\", default=False,\n help=\"\"\"rerun preprocessing, override and delete previous results in \n 'Processed' folder (default = don't override)\"\"\")\n parser.add_argument(\"-m\", \"--motion_param\", action=\"store_true\", default=False,\n help=\"use 12 motion parameters for regression (default = 6 motion parameters)\")\n parser.add_argument(\"-G\", \"--gm_blur\", action=\"store_true\", default=False,\n help=\"blur only in grey matter mask (default = blur in whole brain)\")\n parser.add_argument(\"-n\", \"--nl_reg\", action=\"store_true\", default=False,\n help=\"use non-linear warp 
between anatomical and MNI template (default = linear warp)\")\n\n # Required Inputs\n required = parser.add_argument_group(\"required arguments\")\n required.add_argument(\"-e\", \"--epi\", action=\"store\", required=True,\n help=\"text file of paths to raw epi data\")\n required.add_argument(\"-a\", \"--anat\", action=\"store\", required=True,\n help=\"text file of paths to raw anatomical data\")\n required.add_argument(\"-o\", \"--out_dir\", action=\"store\", required=True, metavar=\"OUT\",\n help=\"text file of paths to output directory\")\n result = parser.parse_args(args)\n\n # Make sure inputted parameters are legal\n assert (os.path.isfile(result.epi)), \"{} does not exist or is not a file\".format(result.epi)\n assert (os.path.isfile(result.anat)), \"{} does not exist or is not a file\".format(result.ant)\n assert (result.trs_remove >= 0), \"Cannot remove negative trs\"\n assert (result.dim_voxel >= 0), \"Cannot have a negative voxel dimension\"\n assert (np.all(np.array(result.bandpass) > 0)), \"Cannot have a negative frequency limit for bandpassing\"\n assert (result.volumes > -1), \"Number of volumes must be greater than 0\"\n assert (result.cores > 0), \"Number of cores used must be greater than 0\"\n assert (result.time_step > -1), \"Time step must be greater than 0\"\n\n return result", "def parse_args(self, argv=None):\n self.opts, self.args = self.cli_parser.parse_args(argv)\n self._begin_logging()\n if argv is None:\n argv = sys.argv\n logger.info(' '.join(argv))\n self._process_input_files()\n self._construct_links_of_interest()\n self._open_output_files()\n data = self._construct_data_struct()\n return data", "def parse_args():\n parser = argparse.ArgumentParser(description='Parse flags to configure the json parsing')\n parser.add_argument(\"-f\", \"--format\", help=\"output format: (csv|tsv|json)\", choices=[\"csv\", \"tsv\", \"json\"],\n default=\"tsv\")\n parser.add_argument(\"-p\", \"--parallelized\", help=\"save output in parallelized or single file format\",\n action=\"store_true\")\n parser.add_argument(\"-i\", \"--input\", help=\"folder where input documents are\", default=\"data\")\n parser.add_argument(\"-o\", \"--output\", help=\"folder where output documents are\", default=\"cleaned\")\n parser.add_argument(\"-d\", \"--documentformat\", help=\"combine all features into a single text per post\",\n action=\"store_true\")\n parser.add_argument(\"-pa\", \"--partitions\", help=\"number of spark partitions\",\n default=1)\n args = parser.parse_args()\n return args", "def parseArgs(args):\n parser= argparse.ArgumentParser(description = __doc__)\n parser.add_argument (\"manifestFile\",\n help = \" The input file. \",\n action = \"store\")\n parser.add_argument (\"outputDir\",\n help = \" The output directory, usually named kallistoOut. \",\n action = \"store\")\n parser.add_argument (\"--isMouse\",\n help = \" This is mouse data, use a mouse transcriptome.\",\n action = \"store_true\")\n parser.add_argument (\"--single\",\n help = \" This is single data (not paired end data).\",\n action = \"store_true\")\n parser.add_argument (\"--test\",\n help = \" Generate the jobList then stop.\",\n action = \"store_true\")\n parser.add_argument (\"--bootstrap\",\n help = \" Enable bootstrapping, please provide an integer. Defaults to 10. \", \n action = \"store\",\n type = int)\n parser.add_argument (\"--verbose\",\n help = \" Spit out messages during runtime. 
\",\n action = \"store_true\")\n\n parser.set_defaults(single = False)\n parser.set_defaults(verbose = False)\n parser.set_defaults(isMouse = False)\n parser.set_defaults(bootstrap = 10)\n options = parser.parse_args()\n return options", "def read_args(self):\n parser = argparse.ArgumentParser()\n parser.add_argument('--algorithm', '-a', default='onelayer',\n choices=['onelayer', 'randomforest', 'sos'],\n help='which algorithm to run')\n parser.add_argument('--format', '-f', default='pcap',\n choices=['netflow', 'pcap'],\n help='which format are the files to process in')\n parser.add_argument('--operation', '-o', default='eval',\n choices=['eval', 'train', 'test'],\n help='which operation to run')\n parser.add_argument('--sos_model', '-s', default='networkml/trained_models/sos/SoSmodel',\n help='path to SoSmodel')\n parser.add_argument('--trained_model', '-m', default='networkml/trained_models/onelayer/OneLayerModel.pkl',\n help='path to the trained model file')\n parser.add_argument('--path', '-p', default='/pcaps',\n help='path to file or directory of files to process')\n parser.add_argument('--save', '-w', default='networkml/trained_models/onelayer/OneLayerModel.pkl',\n help='path to save the trained model, if training')\n\n self.args = parser.parse_args()\n return", "def parse_args():\n parser = argparse.ArgumentParser(description='Args')\n parser.add_argument('--out_dir', default='_out', type=str, help='output folder')\n args = parser.parse_args()\n\n return args", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Command line bot application, e.g. bot how do you work?\")\n parser.add_argument(\n '--version',\n action='version',\n version='nlpia_bot {ver}'.format(ver=__version__))\n parser.add_argument(\n '--name',\n default=\"bot\",\n dest=\"nickname\",\n help=\"IRC nick or CLI command name for the bot\",\n type=str,\n metavar=\"STR\")\n parser.add_argument(\n '--personality',\n default=\"\",\n dest=\"personality\",\n help=\"comma-separated personalities to load into bot: search_movie,pattern_greet,search_ds,generate_spanish\",\n type=str,\n metavar=\"STR\")\n parser.add_argument(\n '-v',\n '--verbose',\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action='store_const',\n const=logging.INFO)\n parser.add_argument(\n '-vv',\n '--very-verbose',\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action='store_const',\n const=logging.DEBUG)\n parser.add_argument(\n 'words',\n type=str,\n nargs='+',\n help=\"Words to pass to bot as an utterance or conversational statement requiring a bot reply or action.\")\n return parser.parse_args(args)", "def add_args(self, parser):", "def _parse_args(self, args : dict):\n result = {}\n for key, value in args.items():\n if key in self._subparsers:\n # if it's a list, it is because it's a preset\n if isinstance(value, list):\n result[key] = value[0]\n else:\n result[key] = self._subparsers[key]._parse_args(value)\n elif key in self._actions:\n result[key] = self._actions[key](value)\n else:\n raise ValueError(f\"Unknown argument {key}\")\n\n return result", "def parse_args(args):\n parser = argparse.ArgumentParser(description='Parse your tophat align summary')\n parser.add_argument(\"-v\", \"--verbose\", help=\"increase output verbosity\", action='store_true')\n parser.add_argument(\"-m\", \"--mode\", help=\"input type is a directory or file\", choices=['file', 'directory'], default='directory')\n parser.add_argument(\"-i\", \"--input\", metavar='</dir> || file1,file2... 
', help=\"provide a list of file or direcotry\")\n parser.add_argument(\"-o\", \"--out\", metavar='outfile.tsv', help=\"name of outputfile\")\n\n return parser.parse_args(args)", "def _parse_args(self, prepared_args):\n pass", "def parse_args():\n parser = argparse.ArgumentParser(description=\"Normalize the BraTS data set\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n\n input_options = parser.add_argument_group(\"Input\")\n input_options.add_argument('--brats', required=True, help=\"BraTS root data set directory\")\n input_options.add_argument('--year', required=True, type=int, default=2018, help=\"BraTS year\")\n\n output_options = parser.add_argument_group(\"Output\")\n output_options.add_argument('--output', required=True, help=\"Output directory of normalized data set\")\n\n general_options_group = parser.add_argument_group(\"General\")\n general_options_group.add_argument(\"--pool-size\", type=int, default=8, help=\"Size of worker pool\")\n\n logging_options_group = parser.add_argument_group(\"Logging\")\n logging_options_group.add_argument('--log', dest=\"log_level\", default=\"WARNING\", help=\"Logging level\")\n logging_options_group.add_argument('--log-file', default=\"normalize.log\", help=\"Log file\")\n\n args = parser.parse_args()\n\n # Setup the logger\n global logger\n logger = logging.getLogger('root')\n\n # Logging level configuration\n log_level = getattr(logging, args.log_level.upper())\n if not isinstance(log_level, int):\n raise ValueError('Invalid log level: %s' % args.log_level)\n logger.setLevel(log_level)\n\n log_formatter = logging.Formatter('[%(asctime)s][%(levelname)s][%(funcName)s] - %(message)s')\n\n # For the log file...\n file_handler = logging.FileHandler(args.log_file)\n file_handler.setFormatter(log_formatter)\n logger.addHandler(file_handler)\n\n # For the console\n console_handler = logging.StreamHandler(sys.stdout)\n console_handler.setFormatter(log_formatter)\n logger.addHandler(console_handler)\n return args", "def parse_args(args):\n \n # Construct the parser (which is stored in parser)\n # Module docstring lives in __doc__\n # See http://python-forum.com/pythonforum/viewtopic.php?f=3&t=36847\n # And a formatter class so our examples in the docstring look good. Isn't it\n # convenient how we already wrapped it to 80 characters?\n # See http://docs.python.org/library/argparse.html#formatter-class\n parser = argparse.ArgumentParser(description=__doc__, \n formatter_class=argparse.RawDescriptionHelpFormatter)\n \n # General options\n parser.add_argument(\"hal\",\n help=\"HAL file to evaluate\")\n parser.add_argument(\"--truth\",\n help=\"MAF file of a true alignment for precision and recall\")\n parser.add_argument(\"--beds\", nargs=\"*\",\n help=\"BED file(s) of genes on the genomes in the HAL\")\n parser.add_argument(\"--coverage_file\", type=argparse.FileType(\"w\"),\n default = sys.stdout,\n help=\"file to save average coverage vs. 
the reference in (one number)\")\n parser.add_argument(\"--precision_recall_file\", type=argparse.FileType(\"w\"),\n default = sys.stdout,\n help=\"TSV file to save precision and recall in (two numbers)\")\n parser.add_argument(\"--gene_category_file\", type=argparse.FileType(\"w\"),\n default = sys.stdout,\n help=\"file to save categories and counts for genes in\")\n parser.add_argument(\"--tag\", nargs=\"*\", default=[],\n help=\"extra columns to tag all result TSV lines with at the front\")\n \n # The command line arguments start with the program name, which we don't\n # want to treat as an argument for argparse. So we remove it.\n args = args[1:]\n \n return parser.parse_args(args)", "def Args(parser):", "def parse_args() -> argparse.Namespace:\n\n parser = argparse.ArgumentParser(\n description=\"THE FOLLOWING SCRIPT SHOWS SNAPSHOT OPERATIONS USING REST API.\", )\n parser.add_argument(\n \"-c\", \"--cluster\", required=True, help=\"API server IP:port details\")\n parser.add_argument(\n \"-u\",\n \"--api_user\",\n default=\"admin\",\n help=\"API Username\")\n parser.add_argument(\"-p\", \"--api_pass\", help=\"API Password\")\n parsed_args = parser.parse_args()\n\n # collect the password without echo if not already provided\n if not parsed_args.api_pass:\n parsed_args.api_pass = getpass()\n\n return parsed_args", "def _parse_args(args: List) -> argparse.Namespace:\r\n\r\n parser = argparse.ArgumentParser(\r\n description=\"This CLI program is used to test out the SpikerStream interface.\"\r\n )\r\n\r\n parser.add_argument(\r\n \"-p\", \"--p\", \"-plot\", \"--plot\",\r\n choices=[\"matplotlib\", \"pyqtgraph\"],\r\n default=\"matplotlib\",\r\n help=\"The plotting library.\",\r\n dest=\"plot_type\"\r\n )\r\n\r\n parser.add_argument(\r\n \"-i\", \"--i\", \"-input\", \"--input\",\r\n choices=[\"spikerbox\", \"audio\"],\r\n default=\"audio\",\r\n help=\"The input stream.\",\r\n dest=\"stream_type\"\r\n )\r\n\r\n parser.add_argument(\r\n \"-s\", \"--s\", \"-serialport\", \"--serialport\",\r\n nargs=\"?\",\r\n default=\"COM1\",\r\n help=\"The serial port the SpikerBox is attached to.\",\r\n dest=\"serial_port\"\r\n )\r\n\r\n parser.add_argument(\r\n \"-c\", \"--c\", \"-chunk\", \"--chunk\",\r\n nargs=\"?\",\r\n default=10000,\r\n help=\"The chunk size when using the SpikerBox stream. 
20,000 = 1 second.\",\r\n dest=\"chunk_size\"\r\n )\r\n\r\n return parser.parse_args(args)", "def parse_args():\n parser = argparse.ArgumentParser(description=\"Evaluate the tumor segmentation model\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n\n input_options = parser.add_argument_group(\"Input\")\n input_options.add_argument(\"--save-path\", required=True, help=\"Tensorflow save path\")\n input_options.add_argument(\"--model\", required=True, help=\"File to save trained model in\")\n\n output_options = parser.add_argument_group(\"Output\")\n output_options.add_argument(\"-o\", \"--output\", required=True, help=\"Output directory to store plots\")\n\n info_options = parser.add_argument_group(\"Info\")\n info_options.add_argument(\"--config\", required=False, type=str, help=\"Configuration file\")\n info_options.add_argument(\"-params\", \"--params\", type=str, help=\"Hyperparameters json file\")\n\n logging_options = parser.add_argument_group(\"Logging\")\n logging_options.add_argument('--log', dest=\"log_level\", default=\"DEBUG\", help=\"Logging level\")\n\n args = parser.parse_args()\n\n # Setup the logger\n global logger\n logger = logging.getLogger('root')\n\n # Logging level configuration\n log_level = getattr(logging, args.log_level.upper())\n if not isinstance(log_level, int):\n raise ValueError('Invalid log level: %s' % args.log_level)\n\n log_formatter = logging.Formatter('[%(asctime)s][%(levelname)s][%(funcName)s] - %(message)s')\n\n # For the console\n console_handler = logging.StreamHandler(sys.stdout)\n console_handler.setFormatter(log_formatter)\n logger.addHandler(console_handler)\n\n logger.setLevel(log_level)\n\n return args", "def parseArguments(args=None):\n\n # parse command line arguments\n parser = argparse.ArgumentParser(description='collection creator')\n parser.add_argument( 'config_file', action=\"store\" )\n parser.add_argument( 'out_path', action=\"store\" )\n\n return parser.parse_args(args)", "def parse_args(arglist):\n help = dedent(\"\"\"\n Run FIR model on subject data\n \"\"\")\n parser = tools.parser\n parser.description = help\n parser.formatter_class = argparse.RawDescriptionHelpFormatter\n parser.add_argument(\"-extract_info\", help=\"info for experiment to extract\")\n parser.add_argument(\"-mask_type\", help=\"mask or func?\")\n parser.add_argument(\"-mask_name\", help=\"name of mask in sub's mask directory\")\n return parser.parse_args(arglist)", "def __init__(self, args: argparse.Namespace):\n self._args = args", "def __init__(self, args=None, namespace=None):\n self.parse_args(args, namespace)\n if not namespace:\n self.__set_path_name__()", "def _parse_args(self):\n parser = argparse.ArgumentParser()\n _, args = parser.parse_known_args()\n self.args = [a for a in args if a != '']", "def process_command_line_arguments() -> Namespace:\n\n parser = build_parser()\n arguments = parser.parse_args()\n\n return arguments", "def parse_args():\n parser = MyParser(description='Data processing and analytics library \\\n for OpenStack Browbeat perf data')\n\n parser.add_argument('-s', '--summary', dest=\"days\", type=int, default=-1,\n help='-s N summary of last N days of results')\n\n parser.add_argument('--summary-uuid', dest=\"summary_uuid\", type=str,\n default=None,\n help='--summary-uuid UUID summary of a specific uuid')\n\n parser.add_argument('--short-summary', dest=\"short_days\", type=int,\n default=-1,\n help='--short-summary N gives \\\n summary of last N days of results but uses cockroach \\\n db so only provides with basic 
summary')\n\n parser.add_argument('--upload-timesummary', dest=\"timeseries_uuid\",\n type=str, default=None,\n help='--upload-timesummary UUID \\\n uploads the features computed from data obtained from\\\n graphite. ')\n\n parser.add_argument('--upload-logsummary', dest=\"loggin_uuid\",\n type=str, default=None,\n help='--upload-logsummary UUID \\\n uploads the log summary to crdb \\\n currently just summarizes over entire timeperiod. ')\n\n parser.add_argument('-u', '--update-db', dest='update', type=bool,\n default=False,\n help='-u True pushes data to cockroach db')\n\n parser.add_argument('--update-clf', dest=\"clf_days\", type=int,\n default=-1,\n help='--update-clf 60 will update all classifiers \\\n listed in config file under classifier_lists \\\n using data from last 60 days')\n\n parser.add_argument('--test-clf', dest=\"test_days\", type=int,\n default=-1,\n help='--test-clf 60 will train all classifiers \\\n listed in config file under classifier_lists \\\n using data from last 60 days and then test it \\\n and display metrics')\n\n parser.add_argument('-v', '--osp-version', dest='version', type=str,\n default=None,\n help='-v 11-tripleo only returns hits for that \\\n OpenStack version, \\\n only supported by summary right now')\n\n parser.add_argument('-c', '--config', dest='config', type=str,\n default=pkg_resources.resource_filename('bml',\n \"config.yml\"),\n help='-c <config file path> use custom config file')\n\n args = parser.parse_args()\n return args", "def _parse_args() -> argparse.Namespace:\n desc = 'Pretty print CBOR file output by AST exporter.'\n parser = argparse.ArgumentParser(description=desc)\n parser.add_argument('cbor', type=argparse.FileType('rb'),\n help=\"cbor file to pretty print.\")\n parser.add_argument(\"--indent\", \"-i\", dest=\"indent\",\n type=int, default=2, nargs='?',\n help=\"spaces per indent.\")\n parser.add_argument(\"--depth\", \"-d\", dest=\"depth\",\n type=int, default=2, nargs='?',\n help=\"max level of indentation.\")\n return parser.parse_args()", "def post(self):\n args = parser.parse_args()", "def parse_arguments(input_args: List[str]) -> argparse.Namespace:\n description = \"Tool to output OWASP Cornucopia playing cards into different file types and languages. \"\n description += \"\\nExample usage: $ ./cornucopia/convert.py -t docx -l es \"\n description += \"\\nExample usage: c:\\\\cornucopia\\\\scripts\\\\convert.py -t idml -l fr \"\n description += \"-o 'my_output_folder/owasp_cornucopia_edition_language_version.idml'\"\n parser = argparse.ArgumentParser(description=description, formatter_class=argparse.RawTextHelpFormatter)\n parser.add_argument(\n \"-i\",\n \"--inputfile\",\n type=str,\n default=\"\",\n help=(\n \"Input (template) file to use.\"\n f\"\\nDefault={convert_vars.DEFAULT_TEMPLATE_FILENAME}.(docx|idml)\"\n \"\\nTemplate type is dependent on output type (-t) or file (-o) specified.\"\n ),\n )\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument(\n \"-t\",\n \"--outputfiletype\",\n type=str,\n choices=convert_vars.FILETYPE_CHOICES,\n help=\"Type of file to output. Default = docx. If specified, this overwrites the output file extension\",\n )\n parser.add_argument(\n \"-o\",\n \"--outputfile\",\n default=\"\",\n type=str,\n help=(\n \"Specify a path and name of output file to generate. (caution: existing file will be overwritten). 
\"\n f\"\\ndefault = {convert_vars.DEFAULT_OUTPUT_FILENAME}.(docx|pdf|idml)\"\n ),\n )\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument(\n # parser.add_argument(\n \"-l\",\n \"--language\",\n type=str,\n choices=convert_vars.LANGUAGE_CHOICES,\n default=\"en\",\n help=(\n \"Output language to produce. [`en`, `es`, `fr`, `pt-br`, `template`] \"\n \"\\nTemplate will attempt to create a template from the english input file and \"\n \"\\nreplacing strings with the template lookup codes\"\n ),\n )\n parser.add_argument(\n \"-d\",\n \"--debug\",\n action=\"store_true\",\n help=\"Output additional information to debug script\",\n )\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument(\n # parser.add_argument(\n \"-s\",\n \"--style\",\n type=str,\n choices=convert_vars.STYLE_CHOICES,\n default=\"static\",\n help=(\n \"Output style to produce. [`static` or `dynamic`] \"\n \"\\nStatic cards have the mappings printed on them, dynamic ones a QRCode that points to an maintained list.\"\n ),\n )\n parser.add_argument(\n \"-u\",\n \"--url\",\n default=\"https://copi.securedelivery.io/cards\",\n type=str,\n help=(\n \"Specify a URL to use in generating dynamic cards. (caution: URL will be suffixed with / and the card ID). \"\n ),\n )\n args = parser.parse_args(input_args)\n return args", "def parse_args(args):\n\n parser = argparse.ArgumentParser(description=\"Add meta data to one or more netCDF files\")\n\n parser.add_argument(\"-m\",\"--metafiles\", help=\"One or more meta-data files in YAML format\", action='append')\n parser.add_argument(\"-l\",\"--metalist\", help=\"File containing a list of meta-data files\", action='append')\n parser.add_argument(\"-v\",\"--verbose\", help=\"Verbose output\", action='store_true')\n parser.add_argument(\"files\", help=\"netCDF files\", nargs='+')\n\n return parser.parse_args(args)", "def parse_args(args):\n # If called as a main function, this processes command line arguments\n # as main. 
If this is called as part of an action\n if isinstance(args, list):\n parser = argparse.ArgumentParser(description=description)\n else:\n parser = args\n # add required parameters for this application\n parser.add_argument(\"operands\",\n nargs='+',\n type=float,\n help=\"List of operands.\")\n # add options for this application\n parser.add_argument(\n '-v',\n '--verbose',\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action='store_const',\n const=logging.INFO)\n parser.add_argument(\n '-vv',\n '--very-verbose',\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action='store_const',\n const=logging.DEBUG)\n if isinstance(args, list):\n return parser.parse_args(args)", "def main():\n args = parse_args()\n process_args(args)", "def parse_args():\n import argparse\n\n #argument\n parser =argparse.ArgumentParser()\n\n parser.add_argument('--in_list', help = 'path to input list.')\n parser.add_argument('--out_list', help = 'path for saving list.')\n args = parser.parse_args()\n\n return args", "def parse_args(args):\n parser = argparse.ArgumentParser(description=\"Just a Fibonacci demonstration\")\n parser.add_argument(\n \"--version\",\n action=\"version\",\n version=\"lj506 {ver}\".format(ver=__version__),\n )\n parser.add_argument(\n \"-v\",\n \"--verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action=\"store_const\",\n const=logging.INFO,\n )\n parser.add_argument(\n \"-vv\",\n \"--very-verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action=\"store_const\",\n const=logging.DEBUG,\n )\n\n parser.add_argument('--dir', '-d', help=\"Download directory\", default=Path.cwd())\n\n return parser.parse_args(args)", "def parse(self, args: list[str], prog: Optional[str] = None) -> argparse.Namespace:\n prog = prog or Path(sys.argv[0]).name\n try:\n return self._parse(\n args,\n argparse.Namespace(),\n self._overrides.copy(),\n prog,\n )\n except _HelpError:\n self.exit(0, self.format_help(prog))", "def parse_args():\n parser = argparse.ArgumentParser(\n description=\"Nuvoton post build command\"\n )\n\n subparsers = parser.add_subparsers(description=\"The action to perform\")\n\n parser_tfm_sign_image_tgt = subparsers.add_parser(\n \"tfm_sign_image_tgt\",\n help=\"Sign secure and non-secure images together\"\n )\n \n parser_tfm_sign_image_tgt.add_argument(\n \"--tfm-import-path\",\n help=\"Path containing the TF-M bootloader, layouts and signing keys\",\n required=True\n )\n\n parser_tfm_sign_image_tgt.add_argument(\n \"--signing_key\",\n help=\"File name of key for signing secure binary or secure/non-secure binaries together\",\n required=True\n )\n\n parser_tfm_sign_image_tgt.add_argument(\n \"--non-secure-bin\",\n help=\"Path to the non-secure binary\",\n required=True\n )\n\n parser_tfm_sign_image_tgt.set_defaults(func=tfm_sign_image_tgt)\n\n return parser.parse_args()", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Export DICOMs in Orthanc Study to BIDS-ready directory structure \")\n parser.add_argument(\n '--version',\n action='version',\n version='bnctools {ver}'.format(ver=__version__))\n parser.add_argument(\n '-v',\n '--verbose',\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action='store_const',\n const=logging.INFO)\n parser.add_argument(\n '-vv',\n '--very-verbose',\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action='store_const',\n const=logging.DEBUG)\n return parser.parse_args(args)", "def parse_args(*args, **kwargs):\n parser = argparse.ArgumentParser(add_help=False)\n\n 
parser.add_argument(\n \"--help\",\n help=\"show usage information\",\n action=\"help\",\n )\n\n parser.add_argument(\n \"--source\",\n metavar=\"host[:port]\",\n help=\"\"\"Hostname of the mongod server from which oplog\n operations are going to be pulled. Called \"--from\"\n in mongooplog.\"\"\",\n )\n\n parser.add_argument(\n '--oplogns',\n default='local.oplog.rs',\n help=\"Source namespace for oplog\",\n )\n\n parser.add_argument(\n \"--dest\",\n metavar=\"host[:port]\",\n help=\"\"\"\n Hostname of the mongod server (or replica set as\n <set name>/s1,s2) to which oplog operations\n are going to be applied. Default is \"localhost\".\n Called \"--host\" in mongooplog.\n \"\"\",\n )\n\n parser.add_argument(\n \"-w\",\n \"--window\",\n dest=\"start_ts\",\n metavar=\"WINDOW\",\n type=compose(\n Timestamp.for_window,\n delta_from_seconds,\n pytimeparse.parse,\n ),\n help=\"\"\"Time window to query, like \"3 days\" or \"24:00\"\n (24 hours, 0 minutes).\"\"\",\n )\n\n parser.add_argument(\n \"-f\",\n \"--follow\",\n action=\"store_true\",\n help=\"\"\"Wait for new data in oplog. Makes the utility\n polling oplog forever (until interrupted). New data\n is going to be applied immediately with at most one\n second delay.\"\"\",\n )\n\n parser.add_argument(\n \"--ns\",\n nargs=\"*\",\n default=[],\n action=Extend,\n help=\"\"\"Process only these namespaces, ignoring all others.\n Space separated list of strings in form of ``dname``\n or ``dbname.collection``. May be specified multiple times.\n \"\"\",\n )\n\n parser.add_argument(\n \"-x\",\n \"--exclude\",\n nargs=\"*\",\n default=[],\n action=Extend,\n help=\"\"\"List of space separated namespaces which should be\n ignored. Can be in form of ``dname`` or ``dbname.collection``.\n May be specified multiple times.\n \"\"\",\n )\n\n parser.add_argument(\n \"--rename\",\n nargs=\"*\",\n default=[],\n metavar=\"ns_old=ns_new\",\n type=RenameSpec.from_spec,\n action=Extend,\n help=\"\"\"\n Rename database(s) and/or collection(s). 
Operations on\n namespace ``ns_old`` from the source server will be\n applied to namespace ``ns_new`` on the destination server.\n May be specified multiple times.\n \"\"\",\n )\n\n parser.add_argument(\n \"--dry-run\",\n default=False,\n action=\"store_true\",\n help=\"Suppress application of ops.\",\n )\n\n parser.add_argument(\n \"--resume-file\",\n metavar=\"FILENAME\",\n type=ResumeFile,\n default=NullResumeFile(),\n help=\"\"\"Read from and write to this file the last processed\n timestamp.\"\"\",\n )\n\n jaraco.logging.add_arguments(parser)\n\n args = parser.parse_args(*args, **kwargs)\n args.rename = Renamer(args.rename)\n\n args.start_ts = args.start_ts or args.resume_file.read()\n\n return args", "def parse_args():\n parser = argparse.ArgumentParser(\"cat_stats.py\")\n parser.add_argument(\"folder\", \n help = \"folder where all the stats files are located\")\n return parser.parse_args()", "def parse_args():\n\n areas = list(default_config['areas'].keys())\n\n class ListAreas(argparse.Action):\n \"\"\"Helper class for argparse to list available areas and exit\"\"\"\n\n def __call__(self, parser, namespace, values, option_string=None):\n print(\"\\n\".join(areas))\n parser.exit()\n\n parser = argparse.ArgumentParser(parents=[kcs_parser],\n conflict_handler='resolve')\n\n parser.add_argument('files', nargs='+', help=\"Input files\")\n parser.add_argument('--area', action='append', required=True,\n choices=areas, help=\"One or more area names\")\n parser.add_argument('--template',\n help=\"Output path template, including subdirectory\")\n parser.add_argument('-v', '--verbosity', action='count',\n default=0, help=\"Verbosity level\")\n parser.add_argument('-P', '--nproc', type=int, default=1,\n help=\"Number of simultaneous processes\")\n parser.add_argument('--list-areas', action=ListAreas, nargs=0,\n help=\"List availabe areas and quit\")\n parser.add_argument('--regrid', action='store_true',\n help=\"Regrid the data (to a 1x1 deg. grid)\")\n parser.add_argument('--no-save-results', action='store_true',\n help=\"Store the resulting extracted datasets on disk\")\n parser.add_argument('--no-average-area', action='store_true',\n help=\"Don't average the extracted areas\")\n parser.add_argument('--tempdir')\n parser.add_argument('--subdir-per-realization', action='store_true')\n parser.add_argument('--ignore-common-warnings', action='store_true')\n\n args = parser.parse_args()\n setup_logging(args.verbosity)\n read_config(args.config)\n\n if args.template is None:\n args.template = default_config['data']['extraction']['template']\n args.save_result = not args.no_save_results\n args.average_area = not args.no_average_area\n args.area = {name: default_config['areas'][name] for name in args.area}\n args.area = {key: None if value == 'global' else value for key, value in args.area.items()}\n return args", "def parse_args(args):\n\n parser = argparse.ArgumentParser(\n description=\"Convert downloaded ERA image data into time series format.\")\n parser.add_argument(\"dataset_root\",\n help='Root of local filesystem where the image data is stored.')\n parser.add_argument(\"timeseries_root\",\n help='Root of local filesystem where the time series should be stored.')\n parser.add_argument(\"start\", type=mkdate,\n help=(\"Startdate in format YYYY-MM-DD\"))\n parser.add_argument(\"end\", type=mkdate,\n help=(\"Enddate in format YYYY-MM-DD\"))\n parser.add_argument(\"variables\", metavar=\"variables\",\n nargs=\"+\",\n help=(\"Short name of variables as stored in the images, which are reshuffled. 
\"\n \"See documentation on image download for resp. ERA products, \"\n \"for more information on variable names of the product. \"))\n parser.add_argument(\"--mask_seapoints\", type=bool, default=False,\n help=(\"Replace points over water with nan. This option needs the \"\n \"LandSeaMask (lsm) variable in the image data (will use mask from first available file). \"\n \"To use a dynamic LSM, reshuffle the LSM variable to time series.\"))\n parser.add_argument(\"--h_steps\", type=int, default=None, nargs='+',\n help=(\"Time steps (full hours) of images that will be reshuffled (must be in the images). \"\n \"By default 6H images (starting at 0:00 UTC) will be reshuffled.\"))\n parser.add_argument(\"--imgbuffer\", type=int, default=50,\n help=(\"How many images to read at once. Bigger numbers make the \"\n \"conversion faster but consume more memory. Choose this according to your \"\n \"system and the size of a single image.\"))\n args = parser.parse_args(args)\n\n print(\"ERA Interim data is deprecated. Use ERA5 instead.\")\n print(\"Converting data from {} to {} into {}.\"\n .format(args.start.isoformat(), args.end.isoformat(), args.timeseries_root))\n\n return args", "def safe_parse_args(self, args=None):\n args = self.set_default_subparser('run', args)\n return self.parse_args(args)", "def getargs(parser: argparse.ArgumentParser) -> argparse.Namespace:\n parser.add_argument(\n '-servers', type=str, default='',\n help=\"\"\"\n Hostname or IP and port of Kafka broker producing stream.\n [KAFKA_IPPORT/KAFKA_IPPORT_SIM]\n \"\"\")\n parser.add_argument(\n '-topic', type=str, default='',\n help=\"\"\"\n Name of Kafka topic stream to read from.\n [KAFKA_TOPIC/KAFKA_TOPIC_SIM]\n \"\"\")\n parser.add_argument(\n '-schema', type=str, default='',\n help=\"\"\"\n Schema to decode the alert. Should be avro file.\n [FINK_ALERT_SCHEMA]\"\"\")\n parser.add_argument(\n '-startingoffsets_stream', type=str, default='',\n help=\"\"\"From which stream offset you want to start pulling data when\n building the raw database: latest, earliest, or custom.\n [KAFKA_STARTING_OFFSET]\n \"\"\")\n parser.add_argument(\n '-online_data_prefix', type=str, default='',\n help=\"\"\"Path prefix to store online data, e.g. /path/to/online.\n This would then contain automatically {raw, science}/year=/month=/day=\n [ONLINE_DATA_PREFIX]\n \"\"\")\n parser.add_argument(\n '-agg_data_prefix', type=str, default='',\n help=\"\"\"Path prefix to store archive data, e.g. /path/to/archive.\n This would then contain automatically {raw, science}/year=/month=/day=\n [AGG_DATA_PREFIX]\n \"\"\")\n parser.add_argument(\n '-science_db_name', type=str, default='',\n help=\"\"\"\n The name of the HBase table\n [SCIENCE_DB_NAME]\n \"\"\")\n parser.add_argument(\n '-science_db_catalogs', type=str, default='',\n help=\"\"\"\n The path for HBase table catalogs. Must exist.\n [SCIENCE_DB_CATALOGS]\n \"\"\")\n parser.add_argument(\n '-log_level', type=str, default='',\n help=\"\"\"\n The minimum level of log: OFF, DEBUG, INFO, WARN, ERROR, CRITICAL\n [LOG_LEVEL]\n \"\"\")\n parser.add_argument(\n '-finkwebpath', type=str, default='',\n help=\"\"\"\n Folder to store UI data for display.\n [FINK_UI_PATH]\n \"\"\")\n parser.add_argument(\n '-tinterval', type=int, default=0,\n help=\"\"\"\n Time interval between two monitoring. In seconds.\n [FINK_TRIGGER_UPDATE]\n \"\"\")\n parser.add_argument(\n '-tinterval_kafka', type=float, default=0.0,\n help=\"\"\"\n Time interval between two messages are published. 
In seconds.\n [TIME_INTERVAL]\n \"\"\")\n parser.add_argument(\n '-exit_after', type=int, default=None,\n help=\"\"\"\n Stop the service after `exit_after` seconds.\n This primarily for use on Travis, to stop service after some time.\n Use that with `fink start service --exit_after <time>`. Default is None.\n \"\"\")\n parser.add_argument(\n '-datasimpath', type=str, default='',\n help=\"\"\"\n Folder containing simulated alerts to be published by Kafka.\n [FINK_DATA_SIM]\n \"\"\")\n parser.add_argument(\n '-poolsize', type=int, default=5,\n help=\"\"\"\n Maximum number of alerts to send. If the poolsize is\n bigger than the number of alerts in `datapath`, then we replicate\n the alerts. Default is 5.\n [POOLSIZE]\n \"\"\")\n parser.add_argument(\n '-distribution_servers', type=str, default='',\n help=\"\"\"\n Kafka bootstrap servers for alert redistribution\n [DISTRIBUTION_SERVERS]\n \"\"\")\n parser.add_argument(\n '-distribution_topic', type=str, default='',\n help=\"\"\"\n Kafka topic for Alert redistribution\n [DISTRIBUTION_TOPIC]\n \"\"\")\n parser.add_argument(\n '-distribution_schema', type=str, default='',\n help=\"\"\"\n The path where the avro schema for alert distribution is stored\n [DISTRIBUTION_SCHEMA]\n \"\"\")\n parser.add_argument(\n '-startingOffset_dist', type=str, default='',\n help=\"\"\"From which offset(timestamp) you want to start the\n distribution service.\n Options are: latest, earliest or a custom timestamp\n [DISTRIBUTION_OFFSET]\n \"\"\")\n parser.add_argument(\n '-checkpointpath_dist', type=str, default='',\n help=\"\"\"\n The path of file in which to store the offset for distribution service.\n This file will store the timestamp up-till which the science db is\n scanned and alerts have been distributed.\n [DISTRIBUTION_OFFSET_FILE]\n \"\"\")\n parser.add_argument(\n '-distribution_rules_xml', type=str, default='',\n help=\"\"\"\n The path to distribution-rules.xml which stores user defined rules to\n filter the distribution stream\n [DISTRIBUTION_RULES_XML]\n \"\"\")\n parser.add_argument(\n '-slack_channels', type=str, default='',\n help=\"\"\"\n Text file with list of slack channels to which automatic alerts\n must be sent for e.g. based on cross-match type\n [SLACK_CHANNELS]\n \"\"\")\n parser.add_argument(\n '-night', type=str, default='',\n help=\"\"\"\n YYYYMMDD night\n [NIGHT]\n \"\"\")\n parser.add_argument(\n '-fs', type=str, default='',\n help=\"\"\"\n Filesystem: local or hdfs.\n [FS_KIND]\n \"\"\")\n parser.add_argument(\n '-datapath', type=str, default='',\n help=\"\"\"\n Directory on disk for saving temporary alert data.\n [DATA_PREFIX]\n \"\"\")\n parser.add_argument(\n '--save_science_db_catalog_only', action='store_true',\n help=\"\"\"\n If True, save only the catalog on disk and do not push\n data on HBase. Default is False.\n [SAVE_SCIENCE_DB_CATALOG_ONLY]\n \"\"\")\n parser.add_argument(\n '-index_table', type=str, default='',\n help=\"\"\"\n Name of the rowkey for index table\n [INDEXTABLE]\n \"\"\")\n parser.add_argument(\n '-tns_folder', type=str, default='',\n help=\"\"\"\n Folder to store logs and keys for TNS submission\n [TNS_FOLDER]\n \"\"\")\n parser.add_argument(\n '--tns_sandbox', action='store_true',\n help=\"\"\"\n If True, push to TNS sandbox. 
Default is False.\n [TNS_SANDBOX]\n \"\"\")\n parser.add_argument(\n '-substream_prefix', type=str, default='fink_',\n help=\"\"\"\n Prefix for outgoing substreams\n [SUBSTREAM_PREFIX]\n \"\"\")\n parser.add_argument(\n '-fink_fat_output', type=str, default='',\n help=\"\"\"\n Folder that contains fink-fat output parquet files\n [FINK_FAT_OUTPUT]\n \"\"\")\n parser.add_argument(\n '-producer', type=str, default='ztf',\n help=\"\"\"\n Name of the alert producer. Currently available: ztf, elasticc, sims\n [PRODUCER]\n \"\"\")\n parser.add_argument(\n '-noscience', type=bool, default=False,\n help=\"\"\"\n Disable execution of science modules\n \"\"\")\n parser.add_argument(\n '-tns_raw_output', type=str, default='',\n help=\"\"\"\n Folder that contains raw TNS catalog\n [TNS_RAW_OUTPUT]\n \"\"\")\n args = parser.parse_args(None)\n return args", "def parse_args():\n parser = argparse.ArgumentParser(description=\"Run NCF..\")\n parser.add_argument(\n \"--config_file\",\n nargs=\"?\",\n type=str,\n default=\"../configs/ncf_default.json\",\n help=\"Specify the config file name. Only accept a file from ../configs/\",\n )\n # If the following settings are specified with command line,\n # These settings will used to update the parameters received from the config file.\n parser.add_argument(\n \"--dataset\",\n nargs=\"?\",\n type=str,\n help=\"Options are: tafeng, dunnhunmby and instacart\",\n )\n parser.add_argument(\n \"--data_split\",\n nargs=\"?\",\n type=str,\n help=\"Options are: leave_one_out and temporal\",\n )\n parser.add_argument(\n \"--root_dir\", nargs=\"?\", type=str, help=\"working directory\",\n )\n parser.add_argument(\n \"--emb_dim\", nargs=\"?\", type=int, help=\"Dimension of the embedding.\"\n )\n parser.add_argument(\"--lr\", nargs=\"?\", type=float, help=\"Intial learning rate.\")\n parser.add_argument(\"--max_epoch\", nargs=\"?\", type=int, help=\"Number of max epoch.\")\n parser.add_argument(\n \"--batch_size\", nargs=\"?\", type=int, help=\"Batch size for training.\"\n )\n parser.add_argument(\"--optimizer\", nargs=\"?\", type=str, help=\"OPTI\")\n parser.add_argument(\"--activator\", nargs=\"?\", type=str, help=\"activator\")\n parser.add_argument(\"--alpha\", nargs=\"?\", type=float, help=\"ALPHA\")\n return parser.parse_args()", "def parse_args():\n description = 'Use the nhlapi/Game class to retrieve information about a game in the NHL.'\n epilog = 'Example use: game.py 2018020131 --boxScore'\n\n # Standard options for each nhlapi interface\n parser = argparse.ArgumentParser(description=description, epilog=epilog)\n parser.add_argument('--humanReadable', help='output in easier to read format for users',\n action='store_true')\n parser.add_argument(\n '--log', default='/dev/null', type=str,\n help='the file where the output should be written')\n\n # Optional user supplied values\n parser.add_argument('gameId', help='the game ID', type=int)\n\n # The data available from this api:\n for stat in Game.STATS:\n parser.add_argument('--' + stat, help='retrieve ' + stat + ' data', action='store_true')\n\n args = parser.parse_args()\n\n if args.log:\n log_format = '%(asctime)s %(levelname)s: %(message)s'\n logging.basicConfig(filename=args.log,\n format=log_format,\n level=logging.DEBUG)\n\n game = Game(args.gameId)\n if not game:\n print('game with id: {} not found'.format(args.gameId))\n return args\n\n args_vars = vars(args)\n for arg in args_vars:\n if arg in Game.STATS and args_vars[arg]:\n if args.liveDiffTime:\n game.load_ext_url(Game.STATS[arg], 
diff_time=args.liveDiffTime)\n else:\n game.load_ext_url(Game.STATS[arg])\n\n if args.humanReadable:\n output = json.dumps(game.content, indent=1)\n else:\n output = game.content\n print(output)\n\n result = 'retrieved data for id: {}'.format(args.gameId)\n info(result)\n\n return args", "def parse_args(self):\n return Args(self.args)", "def parse_args():\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawDescriptionHelpFormatter,\n description=\"\"\"\nNenG - Nash Equilibrium Noncooperative games.\nTool for computing Nash equilibria in noncooperative games.\nSpecifically:\nAll pure Nash equilibria in all games (--method=pne).\nAll mixed Nash equilibria in two-players games (--method=support_enumeration).\nOne sample mixed Nash equilibria in n-players games (--method={CMAES,L-BFGS-B,SLSQP}).\n\"\"\")\n pa = parser.add_argument\n pa('-f', '--file', required=True, help=\"File where game in nfg format is saved.\")\n pa('-m', '--method', default='CMAES', choices=game.Game.METHODS,\n help=\"Method to use for computing Nash equlibria.\")\n pa('-e', '--elimination', action='store_true', default=False,\n help=\"Use Iterative Elimination of Strictly Dominated Strategies before computing NE.\")\n pa('-p', '--payoff', action='store_true', default=False,\n help=\"Print also players payoff with each Nash equilibrium.\")\n pa('-c', '--checkNE', action='store_true', default=False,\n help=\"After computation check if found strategy profile is really Nash equilibrium.\")\n pa('-t', '--trim', choices=('normalization', 'penalization'), default='normalization',\n help=\"Method for keeping strategy profile in probability distribution universum.\")\n pa('-l', '--log', default=\"WARNING\",\n choices=(\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"),\n help=\"Level of logs to save/print\")\n pa('--log-file', default=None, help='Log file. 
If omitted log is printed to stdout.')\n return parser.parse_args()", "def Parse(self, args):\n unparsed = []\n\n skip_parse = False\n\n for arg in args:\n if arg == '--':\n skip_parse = True\n continue\n\n if skip_parse:\n unparsed.append(arg)\n continue\n\n match = RE_FLAG.match(arg)\n if match is None:\n unparsed.append(arg)\n continue\n\n key = match.group(1)\n value = match.group(2)\n\n if key not in self._defs:\n unparsed.append(arg)\n continue\n\n self._defs[key].Parse(value)\n\n self._unparsed = tuple(unparsed)\n return True", "def parse_args():\n\n parser = argparse.ArgumentParser()\n img_path = '/users/gpu/haribala/code/datasets/CUB_200_2011/CUB_200_2011/images/001.Black_footed_Albatross/' # Black_Footed_Albatross_0001_796111.jpg\n output_path = os.path.join('.', 'outputs', 'arch4', 'img2txt_samples', '001.Black_footed_Albatross')\n encoder_path = os.path.join('.', 'outputs', 'arch4', 'img2txt_2020_01_16_09_34_45', 'encoder_400_40.pth')\n decoder_path = os.path.join('.', 'outputs', 'arch4', 'img2txt_2020_01_16_09_34_45', 'decoder_400_40.pth')\n\n parser.add_argument('--images', type=str, default=img_path, help='Directory containing the input images.')\n parser.add_argument('--outputs', type=str, default=output_path, help='Directory to store outputs.')\n parser.add_argument('--encoder_path', type=str, default=encoder_path, help='Path to trained encoder weights.')\n parser.add_argument('--decoder_path', type=str, default=decoder_path, help='Path to trained decoder weights.')\n\n # Model parameters (should be same as paramters in train.py)\n parser.add_argument('--embed_size', type=int , default=256, help='dimension of word embedding vectors')\n parser.add_argument('--hidden_size', type=int , default=512, help='dimension of lstm hidden states')\n parser.add_argument('--num_layers', type=int , default=1, help='number of layers in lstm')\n\n args = parser.parse_args()\n\n return args", "def parse_args() -> argparse.Namespace:\n parser = argparse.ArgumentParser(\n prog=\"python3 console_run.py\",\n description=\"Collects 辣条/亲密度 on live.bilibili.com using selenium webdriver\",\n )\n\n parser.add_argument(\n \"-r\", \"--room\", type=int, help=\"default room\", default=22198526,\n )\n\n # runtime behaviors\n parser.add_argument(\n \"--headless\", help=\"do not show the browser\", action=\"store_true\",\n )\n\n parser.add_argument(\n \"-d\", \"--disable-image\", help=\"do not show the images\", action=\"store_true\",\n )\n\n # log settings\n parser.add_argument(\n \"--silent\", help=\"do not print log to console\", action=\"store_true\",\n )\n\n parser.add_argument(\n \"-l\", \"--log\", help=\"save log to the log file\", action=\"store_true\",\n )\n\n # arg check\n parser.add_argument(\n \"--skip-check\", help=\"skip the arg check\", action=\"store_true\",\n )\n\n # timer\n parser.add_argument(\n \"-s\", \"--second\", type=int, help=\"planned running time in seconds\", default=0,\n )\n parser.add_argument(\n \"-m\", \"--minute\", type=int, help=\"planned running time in minutes\", default=0,\n )\n\n # paths\n parser.add_argument(\n \"--log-path\", type=str, help=\"path of the log file\", default=\"./log.txt\",\n )\n\n parser.add_argument(\n \"--driver-path\",\n type=str,\n help=\"path of the geckodriver. 
If it's not install, \"\n \"see https://github.com/mozilla/geckodriver/releases for more information\",\n default=\"/usr/local/bin/geckodriver\",\n )\n\n return parser.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser(\n description='Convert CVAT XML annotations to YOLO format'\n )\n\n parser.add_argument(\n '--cvat-xml', metavar='FILE', required=True,\n help='input file with CVAT annotation in xml format'\n )\n\n parser.add_argument(\n '--image-dir', metavar='DIRECTORY', required=False,\n help='directory which contains original images'\n )\n\n parser.add_argument(\n '--output-dir', metavar='DIRECTORY', required=True,\n help='directory for output annotations in YOLO format'\n )\n\n parser.add_argument(\n '--username', metavar='USERNAME', required=False,\n help='Username from CVAT Login page, required to download images'\n )\n\n parser.add_argument(\n '--password', metavar='PASSWORD', required=False,\n help='Password from CVAT Login page, required to download images'\n )\n\n parser.add_argument(\n '--labels', metavar='ILABELS', required=False,\n help='Labels (separated by comma) to extract. Example: car,truck,motorcycle'\n )\n\n return parser.parse_args()", "def parse_args(argv: list[str]) -> argparse.Namespace:\n os_release = platform.freedesktop_os_release()\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"-c\",\n \"--chroot\",\n default=os_release[\"VERSION_CODENAME\"],\n help=\"Specify a chroot or active session to use. (default: use current distribution\"\n \" codename '%(default)s')\",\n )\n parser.add_argument(\n \"-d\",\n \"--directory\",\n default=os.getcwd(),\n help=\"Change to directory inside the chroot before running the command or login shell.\"\n \" Use the current directory if it exists in the chroot. Otherwise fall back to the user's\"\n \" home directory (and create the directory if it is missing).\",\n )\n parser.add_argument(\n \"-u\",\n \"--user\",\n default=getpass.getuser(),\n help=\"Run as a different user. The default is to run as %(default)s and fallback to root\"\n \" if that user does not exist in the chroot.\",\n )\n parser.add_argument(\n \"-p\",\n \"--packages\",\n default=[],\n action=\"append\",\n help=\"List of comma- or space-separated packages that should be installed\"\n \" without recommends. Can be specified multiple times.\",\n )\n parser.add_argument(\n \"--ppa\",\n default=[],\n action=\"append\",\n help=\"PPA APT sources that should be added. 
Can be specified multiple times.\",\n )\n parser.add_argument(\n \"-e\", \"--enable-proposed\", action=\"store_true\", help=\"Enable -proposed pocket.\"\n )\n parser.add_argument(\n \"--proposed-uri\",\n default=\"http://archive.ubuntu.com/ubuntu\",\n help=\"Sources list URI to use for -proposed (default: %(default)s)\",\n )\n parser.add_argument(\n \"--proposed-components\",\n default=\"main,universe\",\n help=\"List of comma- or space-separated components to use for -proposed\"\n \" (default: %(default)s)\",\n )\n\n args = parser.parse_args(argv)\n args.packages = [p for packages in args.packages for p in re.split(\"[, ]\", packages)]\n args.proposed_components = re.split(\"[, ]\", args.proposed_components)\n\n return args", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Convert MERRA2 images to time series format.\")\n parser.add_argument(\n \"dataset_root\",\n help='Root of local filesystem where the data is stored.')\n parser.add_argument(\n \"timeseries_root\",\n help='Root of local filesystem where the timeseries will be stored.')\n parser.add_argument(\"start\", type=mkdate, help=(\n \"Startdate. Either in format YYYY-MM-DD or YYYY-MM-DDTHH:MM.\"))\n parser.add_argument(\"end\", type=mkdate, help=(\n \"Enddate. Either in format YYYY-MM-DD or YYYY-MM-DDTHH:MM.\"))\n parser.add_argument(\"parameters\", metavar=\"parameters\",\n nargs=\"+\",\n help=(\"Parameters to download in numerical format.\"))\n\n parser.add_argument(\"--temporal_sampling\", type=int, default=6,\n help=(\n \"The temporal sampling of the output time series.\"\n \"Integers between 1 (1-hourly resolution) and 24\"\n \"(daily resolution) are possible.\"))\n\n parser.add_argument(\n \"--imgbuffer\",\n type=int,\n default=50,\n help=(\n \"How many images to read at once. Bigger numbers make the \"\n \"conversion faster but consume more memory.\"))\n\n args = parser.parse_args(args)\n # set defaults that can not be handled by argparse\n print(\"Converting data from {} to {} into folder {}.\".format(\n args.start.isoformat(), args.end.isoformat(), args.timeseries_root))\n return args", "def parse_args(self):\n parser = argparse.ArgumentParser()\n parser.add_argument('-d', '--data', dest='data',\n help='Generate requested amount of test data.',\n type=int, nargs='+')\n parser.add_argument('-c', '--check', action='store_true',\n dest='check', help='Check files without modifying them.',\n default=False)\n args = parser.parse_args()\n self.arg_data = args.data\n self.arg_check = args.check\n\n if xc.arg_data: # did the user request to generate test data?\n choice = input(Fore.YELLOW + 'This option will ' + Fore.RED +\n '*OVERWRITE ALL FILES* ' + Fore.YELLOW + 'you sure (y/n)? 
')\n if choice.upper() == 'Y':\n self.test_data_row_count = int(self.arg_data[0])\n xc.generate_test_data()\n else:\n xc.arg_data = False\n else:\n self.process_dump_files()", "def parse():\n\n args = sys.argv\n if os.name == 'nt' and args and 'python' in os.path.basename(args[0]).lower():\n args = args[2:]\n else:\n args = args[1:]\n args = vars(parser.parse_args(args))\n \n # set the global verbosity level of the script\n script.set_verbosity(args['verbosity']) \n \n return args", "def _input_args(self, args: List[str]):\n assert self._call is None, f\"You need to specify all inputs before calling `{self._call}`\"\n assert isinstance(args, list), f\"{args} is a {type(args)}, expected a list of strings!\"\n assert len(args) > 0, f\"Expected a non-empty argument list!\"\n assert all(isinstance(a, str) for a in args), f\"Expected a list of strings, not {[type(a) for a in args]}!\"\n # all arguments could potentially be filenames that we write to, so let's just add them\n self._write_files |= set(args)\n # add dummy argument zero\n args = [\"\"] + args\n # allocate args in memory\n arg_strings = [self._str(a, \"arg\") for a in args]\n # allocate a pointer array for argv\n self.data += [f\"argv: .word \" + \" \".join(\"0\" for _ in range(len(args)))]\n # load argc and argv\n self._args += [\"\", \"# argument count in a0\", f\"li a0, {len(args)}\"]\n self._args += [\"\", \"# load pointers to argument strings into argv\", f\"la a1, argv\"]\n for ii, aa in enumerate(arg_strings):\n self._args += [f\"la t1, {aa}\", f\"sw t1, {ii * 4}(a1)\"]", "def parse_args(args=None):\n\n parser = argparse.ArgumentParser(description=ds.ARGPARSER['description'])\n parser.add_argument('input',\n help=ds.ARGPARSE_INPUT['help'])\n parser.add_argument('output',\n nargs='?',\n help=ds.ARGPARSE_OUTPUT['help'],\n default=ds.ARGPARSE_OUTPUT['default'])\n parser.add_argument('-X', '--overwrite',\n help=ds.ARGPARSE_OVERWRITE['help'],\n action='store_true')\n parser.add_argument('-e', '--extensions',\n nargs='+',\n default=ds.ARGPARSE_EXTENSION['default'],\n help=ds.ARGPARSE_EXTENSION['help'])\n parser.add_argument('-w', '--wrapper',\n help=ds.ARGPARSE_WRAPPER['help'],\n default=ds.ARGPARSE_WRAPPER['default'], )\n parser.add_argument('-v', '--verbose',\n help=ds.ARGPARSE_VERBOSE['help'],\n action='store_true')\n parser.add_argument('-r', '-R',\n help=ds.ARGPARSE_RECURSIVE['help'],\n action='store_true',\n dest='recursive')\n parser.add_argument('--version',\n action='version',\n version=ah.__version__)\n\n if args is not None:\n return parser.parse_args(args)\n else:\n return parser.parse_args()", "def parse_arguments(args):\n parser = argparse.ArgumentParser()\n parser.add_argument(\n '-d', '--debug',\n help=\"Activates debug mode\",\n action=\"store_const\", dest=\"loglevel\", const=logging.DEBUG,\n default=logging.WARNING,\n )\n parser.add_argument(\n '-v', '--verbose',\n help=\"Activates verbose mode\",\n action=\"store_const\", dest=\"loglevel\", const=logging.INFO,\n )\n parser.add_argument(\n '-m', '--model',\n help=\"Path to model input file (e.g. model.json)\",\n action=\"store\", dest=\"model\",\n default='/'.join([os.path.dirname(__file__), '../../data/model.json'])\n )\n parser.add_argument(\n 'text',\n help=\"Text to be translated\",\n )\n parser.add_argument(\n '-i', '--implementation',\n help=\"Chosen method (e.g. CavnarTrenkleImpl)\",\n action=\"store\", dest=\"implementation\",\n default='CavnarTrenkleImpl'\n )\n parser.add_argument(\n '-o', '--output',\n help=\"Output results file in JSON (e.g. 
results.json)\",\n action=\"store\", dest=\"output_file\",\n default=None\n )\n # This argument is a json object which will be mapped to dict\n parser.add_argument(\n '--predict-args',\n help=\"Arguments for the prediction method (JSON format)\",\n action=\"store\", dest=\"predict_args\",\n type=json.loads\n )\n\n return vars(parser.parse_args(args))", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Just a Fibonacci demonstration\")\n parser.add_argument(\n \"--version\",\n action=\"version\",\n version=\"bytespread {ver}\".format(ver=__version__))\n parser.add_argument(\n \"-d\",\n dest=\"directory\",\n required=True,\n help=\"The directly to analyse\")\n\n parser.add_argument(\n \"-w\",\n dest=\"wildcard\",\n default=\"*\",\n required=False,\n help=\"Wildcard for file match within the directory (default: *)\")\n\n parser.add_argument(\n \"-c\",\n dest=\"clusters\",\n default=32,\n required=False,\n type=int,\n help=\"Number of clusters (default: 32)\")\n\n parser.add_argument(\n \"-b\",\n dest=\"bricks\",\n default=100,\n required=False,\n type=int,\n help=\"Number bricks to show for the longest column (default: 100)\")\n\n parser.add_argument(\n \"-r\",\n dest=\"recursive\",\n action='store_true',\n required=False,\n help=\"Recursive within the provided folder (default: false)\")\n\n return parser.parse_args(args)", "def parseArgs(args):\n parser = argparse.ArgumentParser(description = \"Scrapes baseball-reference.com for player statistics\")\n\n parser.add_argument(\"-d\", \"--domain\", help=\"domain to scrape for statistics. Default is baseball-reference.com\", nargs=1, default=[\"http://www.baseball-reference.com\"])\n parser.add_argument(\"-f\", \"--filename\", help=\"database file to store data in\", required=True, nargs=1, type=argparse.FileType(\"r+\"))\n parser.add_argument(\"-r\", \"--reset\", help=\"removes database before scraping all data from baseball-reference. Conflicts with -u. One of -r and -u must be specified\", action=\"store_true\")\n parser.add_argument(\"-u\", \"--update\", help=\"scrapes baseball-reference and adds all new information to the database. Conflicts with -r. One of -r and -u must be specified\", action=\"store_true\")\n parser.add_argument(\"--verbose\", help=\"enables verbose output\", action=\"store_true\")\n parser.add_argument(\"--version\", help=\"prints out version and exits\", action=\"version\", version=\"%(prog)s ({version})\".format(version=__version__))\n\n parsedArgs = parser.parse_args()\n\n if parsedArgs.reset == parsedArgs.update:\n parser.error(\"-r and -u are conflicting flags. Exactly one must be specified\")\n parser.print_help()\n\n return parsedArgs", "def normalize_args(args: Namespace) -> Namespace:\n if args.threads <= 0:\n args.threads = os.cpu_count()\n if not args.threads:\n logger.warning('It was not possible to determine the number of CPUs in your system. 
'\n 'Only one will be used, this will decrease the amount of downloads.')\n args.threads = 1\n if args.page <= 0:\n args.page = 1\n if args.max_pages <= 0:\n args.max_pages = 1\n if args.page > args.max_pages:\n args.max_pages = args.page\n\n if args.only_favorites and not args.user_inputs:\n logger.warn(f\"You're passing --favorites/-f flag without any user input.\")\n args.only_favorites = False\n\n if args.gen_pdf and not args.album_inputs and not args.user_inputs and not args.search_download:\n logger.warn(f\"You're passing --pdf flag without any album/user input or search download.\")\n args.gen_pdf = False\n\n args.keyword = args.keyword.strip() if args.keyword else None\n\n if args.album_inputs:\n inputs = inputs_string_to_list(args.album_inputs)\n args.albums_ids = extract_ids_from_list(inputs, extract_album_id)\n else:\n args.albums_ids = None\n\n if args.user_inputs:\n inputs = inputs_string_to_list(args.user_inputs)\n args.users_ids = extract_ids_from_list(inputs, extract_user_id)\n else:\n args.users_ids = None\n\n return args", "def _parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('out', nargs='*', help='Create a plot for all provided'\n ' output files')\n parser.add_argument('--yscale', '-y', help='Y-axis scale',\n default='linear')\n parser.add_argument('--hits', help='Draw hits', dest='hits', action='store_true')\n parser.add_argument('--no-hits', help='Do not draw hits', dest='hits', action='store_false')\n parser.set_defaults(hits=True)\n\n parser.add_argument('--ltrim', help='Remove a number of smallest latency values from the plot', default=0, type=int)\n parser.add_argument('--rtrim', help='Remove a number of biggest latency values from the plot', default=0, type=int)\n\n parser.add_argument('--misses', help='Draw misses', dest='misses', action='store_true')\n parser.add_argument('--no-misses', help='Do not draw misses', dest='misses', action='store_false')\n parser.set_defaults(misses=True)\n\n args = parser.parse_args()\n if not args.out:\n parser.error('at least one output need to be provided')\n return args", "def parse_args(args: List[str]) -> Optional[argparse.Namespace]:\n\n root = argparse.ArgumentParser(description=inspect.cleandoc('''\n Small cross-platform Python app that can create and update PlatformIO projects from STM32CubeMX .ioc files. It\n uses STM32CubeMX to generate a HAL-framework-based code and alongside creates PlatformIO project with compatible\n parameters to stick them both together. Both CLI and GUI editions are available. Visit\n https://github.com/ussserrr/stm32pio for more information. 
Use 'stm32pio [command] -h' to see help on the\n particular command'''))\n\n # Global arguments (there is also an automatically added '-h, --help' option)\n root.add_argument('--version', action='version', version=f\"stm32pio {stm32pio.core.util.get_version()}\")\n root.add_argument('-v', '--verbose', help=\"enable verbose output (default level: INFO)\", action='count', default=1)\n\n sub = root.add_subparsers(dest='command', title='commands', description=\"valid commands\", help=\"available actions\")\n\n # Primary operations\n init = sub.add_parser('init', help=\"create config .INI file to check and tweak parameters before proceeding\")\n generate = sub.add_parser('generate', help=\"generate CubeMX code only\")\n pio_init = sub.add_parser('pio_init', help=\"create new compatible PlatformIO project\")\n patch = sub.add_parser('patch', help=\"tweak the project so both CubeMX and PlatformIO could work together\")\n new = sub.add_parser('new', help=\"generate CubeMX code, create PlatformIO project and glue them together\")\n status = sub.add_parser('status', help=\"inspect the project current state\")\n validate = sub.add_parser('validate', help=\"verify current environment based on the config values\")\n clean = sub.add_parser('clean', help=\"clean-up the project (by default, no files will be deleted immediately \"\n \"without your confirmation)\")\n gui = sub.add_parser('gui', help=\"start the graphical version of the application. All arguments will \"\n \"be passed forward, see its own --help for more information\")\n\n # Assign options to commands\n for command in [init, generate, pio_init, patch, new, status, validate, clean, gui]:\n command.add_argument('-d', '--directory', dest='path', default=Path.cwd(),\n help=\"path to the project (current directory, if not given)\")\n for command in [init, pio_init, new, gui]:\n command.add_argument('-b', '--board', dest='board', default='', help=\"PlatformIO board name. \" + board_hint)\n for command in [init, generate, new]:\n command.add_argument('-e', '--start-editor', dest='editor',\n help=\"start the specified editor after an action (e.g. 
subl, code, atom, etc.)\")\n for command in [generate, new]:\n command.add_argument('-c', '--with-build', action='store_true', help=\"build the project after code generation\")\n for command in [init, new]:\n command.add_argument('-s', '--store-content', action='store_true',\n help=\"save folder initial contents as a cleanup ignore list\")\n clean.add_argument('-s', '--store-content', action='store_true',\n help=\"save project folder contents as a cleanup ignore list and exit\")\n clean.add_argument('-q', '--quiet', action='store_true',\n help=\"suppress the caution about the content removal (be sure of what you are doing!)\")\n\n if len(args) == 0:\n root.print_help()\n return None\n\n return root.parse_args(args)", "def parse_args(args):\n parser = argparse.ArgumentParser(\n description=\"Lookup table generator for Image Comparison\")\n parser.add_argument(\n \"--version\",\n action=\"version\",\n version=\"lookuptable {ver}\".format(ver=__version__))\n parser.add_argument(\n \"-f\",\n \"--folder\",\n dest=\"imagefolder\",\n help=\"path to image folder\",\n type=str,\n metavar=\"STRING\")\n parser.add_argument(\n \"-v\",\n \"--verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action=\"store_const\",\n const=logging.INFO)\n parser.add_argument(\n \"-vv\",\n \"--very-verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action=\"store_const\",\n const=logging.DEBUG)\n return parser.parse_args(args)", "def parse_args():\n from argparse import ArgumentParser\n ap = ArgumentParser(prog=__exe__, description=__purpose__)\n ap.add_argument(\n '--host', dest='host', default=None,\n help='Host for XNAT. Default: env XNAT_HOST.')\n ap.add_argument(\n '-u', '--username', dest='username', default=None,\n help='Username for XNAT.')\n ap.add_argument('project', help='Project Label')\n ap.add_argument('session', help='Session Label')\n ap.add_argument(\n 'proc_suffix', help='Proc name suffix', nargs='?', default='')\n ap.add_argument(\n '-sd', '--subjects_dir', dest='subjects_dir',\n help='Subjects Directory',\n default=os.environ.get('SUBJECTS_DIR', '/tmp'))\n return ap.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser(description=\"crnn process\")\n parser.add_argument(\"--dir_name\", type=str,\n default=None, help=\"infer input dir\")\n parser.add_argument('--res_dir_name', default='./output', type=str,\n help='the folder to save the result')\n args_opt = parser.parse_args()\n return args_opt", "def get_args() -> Namespace:\n\n parser = ArgumentParser(description='A Minecraft RCON client.')\n parser.add_argument('server', help='the server to connect to')\n parser.add_argument(\n '-t', '--timeout', type=float, help='connection timeout in seconds')\n parser.add_argument(\n '-d', '--debug', action='store_true',\n help='print additional debug information')\n subparsers = parser.add_subparsers(dest='action')\n command_parser = subparsers.add_parser(\n 'exec', help='execute commands on the server')\n command_parser.add_argument(\n 'command', help='command to execute on the server')\n command_parser.add_argument(\n 'argument', nargs='*', default=(), help='arguments for the command')\n say_parser = subparsers.add_parser(\n 'say', help='broadcast a message on the server')\n say_parser.add_argument('message', help='the message to broadcast')\n fortune_parser = subparsers.add_parser(\n 'fortune', help='send a fortune to the players on the server')\n fortune_parser.add_argument(\n '-l', '--long', action='store_true', help='generate ling fortunes')\n 
fortune_parser.add_argument(\n '-o', '--offensive', action='store_true',\n help='generate offensive fortunes')\n datetime_parser = subparsers.add_parser(\n 'datetime',\n help='sends the current date and time to the players on the server')\n datetime_parser.add_argument(\n '-f', '--format', default='%c', help='the datetime format')\n subparsers.add_parser('in-use', help='checks whether the server is in use')\n shutdown_parser = subparsers.add_parser(\n 'idle-shutdown', help='shuts down the server if it is not in use')\n shutdown_parser.add_argument(\n '-s', '--sudo', action='store_true',\n help='invoke the shutdown command using sudo')\n shutdown_parser.add_argument(\n '-u', '--unit', default='minecraft@{server}.service',\n help='the systemd unit template')\n return parser.parse_args()", "def __parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('-f', '--force', action=\"store_true\", default=False,\n help='overwrite existing database files during import')\n parser.add_argument('-e', '--extension', action=\"store\", default='txt',\n help='specify file extension. default is \"txt\"')\n parser.add_argument('-d', '--delimiter', action=\"store\", default='\\t',\n help='specify column delimiter. default is tab (\\\\t)')\n parser.add_argument('-m', '--mark', action=\"store\", default='.',\n help='specify decimal mark for numeric data. default is'\n ' dot (.)')\n parser.add_argument('-o', '--outformat', action=\"store\", default='npz',\n help='specify output database format. default is \"npz\"'\n ' for numpy database. use \"mat\" for matlab '\n ' database format.')\n parser.add_argument('-r', '--recursive', action=\"store_true\", default=False,\n help='recursively walk through all sub-directories of'\n ' current working directory')\n parser.add_argument('-p', '--pcs', action=\"store_true\", default=True,\n help='indicate if files are pcs files.')\n parser.add_argument('-c', '--colheadlines', action=\"store\", default='1',\n help='number of lines spanned by the column headers')\n args = parser.parse_args()\n return args", "def parse_args(self):\n \n # check args:\n # XXX: make them position independent\n if not os.path.isdir(self.params.R_source_folder):\n raise gc3libs.exceptions.InvalidUsage(\n \"Invalid path to R scripts folder: '%s'. 
Path not found\"\n % self.params.R_source_folder)\n # XXX: shall we check/validate the content ( presence of valid R scripts ) ?\n\n self.log.info(\"source dir: %s\" % self.params.R_source_folder)\n\n if not os.path.exists(self.params.command_file):\n raise gc3libs.exceptions.InvalidUsage(\n \"gc_gps command file '%s' does not exist;\"\n % self.params.command_file)\n gc3libs.utils.test_file(self.params.command_file, os.R_OK,\n gc3libs.exceptions.InvalidUsage)\n\n if self.params.input_dir and not os.path.isdir(self.params.input_dir):\n raise gc3libs.exceptions.InvalidUsage(\n \"Input folder '%s' does not exists\"\n % self.params.input_dir)\n\n self.log.info(\"Command file: %s\" % self.params.command_file)\n self.log.info(\"R source dir: %s\" % self.params.R_source_folder)\n if self.params.input_dir:\n self.log.info(\"Input data dir: '%s'\" % self.params.input_dir)", "def parse_args():\n parser = argparse.ArgumentParser(\"Plot time series figures.\")\n parser.add_argument('--log-file', type=str, nargs=\"+\", required=True,\n help=\"path to a testing log file.\")\n parser.add_argument('--trace-file', type=str, default=None,\n help=\"path to a trace file.\")\n parser.add_argument('--save-dir', type=str, default=None,\n help=\"path to save.\")\n parser.add_argument('--noise', type=float, default=0)\n\n args, unknown = parser.parse_known_args()\n return args", "def parse_args(args):\r\n\r\n parser = argparse.ArgumentParser(description=__doc__)\r\n\r\n parser.add_argument(\r\n \"--seed\",\r\n type=int,\r\n help=\"Random number generator seed for replicability\",\r\n default=12,\r\n )\r\n parser.add_argument(\"--data-file\", type=str, default=\"_output/data.npz\")\r\n parser.add_argument(\r\n \"--num-classes\",\r\n type=int,\r\n default=0,\r\n help=\"Number of classes in classification. 
Should be zero if doing regression\",\r\n )\r\n parser.add_argument(\r\n \"--fit-dnn\", action=\"store_true\", default=False, help=\"Fit DNN vs CNNC\"\r\n )\r\n parser.add_argument(\r\n \"--do-binary\", action=\"store_true\", default=False, help=\"fit binary outcome\"\r\n )\r\n parser.add_argument(\r\n \"--data-path\", type=str\r\n )\r\n parser.add_argument(\r\n \"--num-tf\", type=int, default=2\r\n )\r\n parser.add_argument(\r\n \"--exclude-tf\", type=int, default=1\r\n )\r\n parser.add_argument(\r\n \"--batch-size\", type=int, default=32\r\n )\r\n parser.add_argument(\r\n \"--n-layers\", type=int, default=2, help=\"Number of hidden layers\"\r\n )\r\n parser.add_argument(\r\n \"--n-hidden\", type=int, default=10, help=\"Number of hidden nodes per layer\"\r\n )\r\n parser.add_argument(\r\n \"--dropout-rate\", type=float, default=0.15, help=\"probability of dropping out a node\"\r\n )\r\n parser.add_argument(\r\n \"--epochs\", type=int, default=40, help=\"Number of Adam epochs\"\r\n )\r\n parser.add_argument(\"--log-file\", type=str, default=\"_output/log_nn.txt\")\r\n parser.add_argument(\"--out-model-file\", type=str, default=\"_output/nn.pt\")\r\n args = parser.parse_args()\r\n\r\n assert args.num_classes != 1\r\n\r\n return args", "def parse_args():\n help_text = \"\"\"\n Analyzer of the frequency of use of nouns in the headings of posts on hubr.com\n \"\"\"\n parser = argparse.ArgumentParser(\n description=help_text\n )\n parser.add_argument(\n '-p',\n '--pages',\n type=int,\n dest='page_count',\n default=PAGE_COUNT,\n help=f'Number of pages to parse, default is {PAGE_COUNT}.'\n )\n parser.add_argument(\n '-s',\n '--start',\n type=int,\n default=PAGE_START,\n dest='start_page',\n help=f'Start page number, default is {PAGE_START}.',\n )\n parser.add_argument(\n '-t',\n '--top',\n type=int,\n default=TOP_SIZE,\n dest='top_size',\n help=f'The size of the top noun, default is {TOP_SIZE}.',\n )\n\n return parser.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser(\n description='Aggregate Elasticsearch Log data.')\n parser.add_argument(\n '--host',\n default='https://logging-es',\n type=str,\n action='store',\n help='Host name or IP of the Elasticsearch server.'\n )\n parser.add_argument(\n '--port',\n default=9200,\n type=int,\n action='store',\n help='Port number of the Elasticsearch server.'\n )\n parser.add_argument(\n '--ca_certs',\n default='secret/admin-ca',\n type=str,\n action='store',\n help='Path to the CA certificates file'\n )\n parser.add_argument(\n '--cert',\n default='secret/admin-cert',\n type=str,\n action='store',\n help='Path to the client certificate file'\n )\n parser.add_argument(\n '--key',\n default='secret/admin-key',\n type=str,\n action='store',\n help='Path to the client key file'\n )\n\n return parser.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser()\n\n parser.add_argument(\"-pn\", \"--projectname\", type=str, required=True)\n parser.add_argument(\"-gp\", \"--generated_file_path\", type=str, required=True)\n parser.add_argument(\"-fs\", \"--filesize\", type=int, required=False)\n parser.add_argument(\"-rc\", \"--rowcount\", type=int, required=False)\n parser.add_argument(\"-ll\", \"--logging_level\", type=str, required=False)\n\n parser.add_argument(\"-cjp\", \"--config_json_path\", type=str, required=False)\n parser.add_argument(\"-dfl\", \"--data_files_location\", type=str, required=False)\n parser.add_argument(\"-drc\", \"--default_rowcount\", type=int, required=False)\n parser.add_argument(\"-fen\", 
\"--file_encoding\", type=str, required=False)\n parser.add_argument(\"-fle\", \"--file_line_ending\", type=str, required=False)\n\n parsed = parser.parse_args()\n\n project_name = parsed.projectname\n generated_file_path = parsed.generated_file_path\n file_size = parsed.filesize\n row_count = parsed.rowcount\n logging_level = parsed.logging_level\n config_json_path = parsed.config_json_path\n data_files_location = parsed.data_files_location\n default_rowcount = parsed.default_rowcount\n file_encoding = parsed.file_encoding\n file_line_ending = parsed.file_line_ending\n\n project_scope_kwargs = {\n \"project_name\": project_name,\n \"data_files_location\": data_files_location,\n \"config_json_path\": config_json_path,\n \"default_rowcount\": default_rowcount,\n }\n file_scope_kwargs = {\n \"generated_file_path\": generated_file_path,\n \"file_size\": file_size,\n \"row_count\": row_count,\n \"file_encoding\": file_encoding,\n \"file_line_ending\": file_line_ending,\n }\n return logging_level, project_scope_kwargs, file_scope_kwargs", "def parseArguments():\n # Create argument parser\n parser = argparse.ArgumentParser()\n\n # Optional arguments\n parser.add_argument(\"-t\", \"--test\", help=\"Optionally test algorithm on subsample of the data. Set to 1 for testing\", type=int, default=0)\n\n parser.add_argument(\"--cores\", help=\"Optimized code for a server with a lot of RAM, set to the number of available cores\", type=int, default=40)\n\n\n # Print version\n parser.add_argument(\"--version\", action=\"version\", version='%(prog)s - Version 2.0') #version 1.0 is for the observations in June 2018\n #version 1.1 contains the optimizations made after the june observations (mainly the switch to stackmags)\n #version 1.2 changed sim class to NOT include the list of failed candidates (not qsos)\n #... copied changes made to crossval version\n #version 1.5 added check for duplicate quasars and remove them\n #version 1.6 new simulated quasars (december)\n ##-------------------\n #version 2.0: combined training of classifier and regressor, streamlined input\n #version 2.1: Tryied to updates excluded area to a little more than stripe 82 but decided not to keep it, so no change\n\n # Parse arguments\n args = parser.parse_args()\n\n return args", "def parse_args(self):\n\n # Parse the arguments themselves.\n args = vars( self.parser.parse_args() )\n\n return args", "def parse_args():\n parser = argparse.ArgumentParser(description='Crawl an Android app store for apk files.')\n parser.add_argument('--store', dest='api', choices=['GooglePlay', 'F-Droid'], required=True,\n help='Specifies the store to crawl. 
At the moment only Google Play is supported.')\n parser.add_argument('--meta', dest='meta', required=False, action='store_const', default=False, const=True,\n help='If set, no apps will be downloaded, but the meta_data will be saved.')\n parser.add_argument('--basedir', dest='base_dir', type=str, default=os.getenv('HOME'),\n required=False, help='Specifies the base path for both logs and apk_downloads.')\n parser.add_argument('--credentials', dest='credentials', type=str, required=False, default=None,\n help='Specifies the path to a credential file in .toml format.')\n parser.add_argument('--limit', dest='limit', type=int, required=False, default=None,\n help='Specifies the maximum number of apks per category to download.')\n return parser.parse_args()", "def handle_args():\n parser = argparse.ArgumentParser(\n description=\"\"\"Script to download archives from the NLM public\n FTP server.\n \"\"\")\n # Server settings\n server_settings = parser.add_argument_group('FTP SERVER SETTINGS', '')\n server_settings.add_argument(\n '-n', '--netrc', default='~/.netrc',\n help=\"\"\"netrc file containing login parameters for the NLM\n server. See `man 5 netrc` for details on generating this\n file or read nlm_data_import/netrc/example.netrc.\n \"\"\")\n server_settings.add_argument(\n 'server_data_dir',\n help='Directory containing desired files on the NLM FTP server')\n server_settings.add_argument(\n '-l', '--limit', type=int, default=0,\n help='Only download LIMIT files.')\n\n # Download settings\n local_settings = parser.add_argument_group('LOCAL SETTINGS', '')\n local_settings.add_argument(\n '-d', '--download_database', default='~/.ftp_download_db',\n help='Path to SQLite database detailing past downloads')\n local_settings.add_argument(\n '-o', '--output_dir', default='~/medline_data',\n help='Directory where downloads will be saved')\n local_settings.add_argument(\n '-x', '--export_dir', default='~/medline_data_exports',\n help=\"\"\"Directory where data to be retrieved by the\n `hypothesis_graph application server are staged.\n \"\"\")\n # Sending debug emails (requires the send_ses_messages module - see\n # setup.py)\n debugging_settings = parser.add_argument_group('DEBUGGING SETTINGS', '')\n debugging_settings.add_argument(\n '--email_debugging', default=False, action='store_true',\n help=\"Send debugging emails. Defaults to FALSE.\")\n debugging_settings.add_argument(\n '--from_email', required=False, help=\"FROM field for debugging emails\")\n debugging_settings.add_argument(\n '--to_email', required=False, help=\"TO field for debugging emails\")\n\n return parser.parse_args()", "def parse_train_args() -> Namespace:\n parser = ArgumentParser()\n add_train_args(parser)\n args = parser.parse_args()\n modify_train_args(args)\n\n return args", "def parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument(\n '--zarr_dir',\n type=str,\n help='path to directory of zarr files',\n )\n parser.add_argument(\n '--tiff_dir',\n type=str,\n help='path to directory of tiff files',\n )\n parser.add_argument(\n '--output_dir',\n type=str,\n help='path to directory for writing',\n )\n parser.add_argument(\n '--config_path',\n type=str,\n default=None,\n help='path to yaml preprocess config file',\n )\n \n args = parser.parse_args()\n return args" ]
[ "0.72235036", "0.70820785", "0.70796216", "0.70001507", "0.68445134", "0.6841379", "0.67815137", "0.6625614", "0.65816057", "0.65165967", "0.64699036", "0.64527774", "0.643961", "0.64133555", "0.6358838", "0.62851185", "0.62849003", "0.62825257", "0.6272746", "0.6238674", "0.6209278", "0.6200426", "0.61960703", "0.6193129", "0.61839855", "0.61820817", "0.6180517", "0.61782736", "0.6171707", "0.61673903", "0.61426985", "0.6141705", "0.6117948", "0.6117747", "0.6117373", "0.6109977", "0.61047155", "0.6100426", "0.6084233", "0.6082338", "0.60675555", "0.6066118", "0.6065386", "0.60650045", "0.6063709", "0.6054844", "0.6048415", "0.6043252", "0.6039616", "0.60353535", "0.60328346", "0.6022277", "0.60183483", "0.6012241", "0.6003073", "0.599443", "0.59881264", "0.59734213", "0.5971298", "0.59645396", "0.5964268", "0.5962429", "0.59580815", "0.5952351", "0.5950386", "0.59461516", "0.59430695", "0.59419304", "0.5940391", "0.5939525", "0.59394467", "0.5939441", "0.59380066", "0.59357256", "0.59233177", "0.59226197", "0.59063077", "0.5904057", "0.58999395", "0.58967996", "0.58967805", "0.58922774", "0.5892134", "0.5892029", "0.5891886", "0.58862895", "0.5883805", "0.5879362", "0.58773905", "0.5874698", "0.58627105", "0.58611184", "0.58560735", "0.5855937", "0.58548236", "0.5844498", "0.5842331", "0.5837936", "0.5829513", "0.58242595" ]
0.6311945
15
Add our custom/consistent set of command line flags.
def add_common_arguments(self, short_options=True):
    getopts = lambda *args: args if short_options else args[1:]
    self.add_argument(*getopts('-d', '--debug'), action='store_true',
                      help='Run with debug output.')
    self.add_argument(*getopts('-q', '--quiet'), action='count', default=0,
                      help='Use once to hide info messages, twice to hide '
                           'warnings, and thrice to hide errors.')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def AddFlags(arg_parser):\n common_flags.DefineAppsDomainFlagWithDefault(arg_parser)\n common_flags.DefineVerboseFlagWithDefaultFalse(arg_parser)\n\n arg_parser.add_argument(\n '--long_list', '-l', action='store_true', default=False,\n help='Show more columns of output.')\n arg_parser.add_argument(\n '--plus_domains', '-p', action='store_true', default=False,\n help='Show output from Google Plus Domains Profile.')\n arg_parser.add_argument(\n '--user_email', '-u', required=True,\n help='User email address [REQUIRED].',\n type=validators.EmailValidatorType())", "def add_command_line_arguments(self, parser):\n # parser.add_option(...)\n pass", "def add_extra_args(self):\n self.parser.add_argument(\"--region\", required=False)\n self.parser.add_argument(\"--zone\", required=False)\n self.parser.add_argument(\"--network\", required=False)", "def fiddle_with_flags():\n flags['c++'] += '-arch x86_64 -bundle'\n flags['c'] += '-arch x86_64'", "def test_addFlags(self):\n self._flagsTest('addFlags', b'+FLAGS')", "def add_extra_compiler_flags(self, op):\n if is_listing(op):\n for ii in op:\n self.add_extra_compiler_flags(ii)\n elif not op in self.__include_directories and not op in self.__definitions:\n self.__compiler_flags_extra += [op]", "def Args(parser):\n flags.AddRegion(parser)\n flags.AddCluster(parser)", "def add_args(self, parser):", "def add_arguments(cls):\n return [\n (('--yes',), dict(action='store_true', help='clean .git repo')),\n (('--variable', '-s'),\n dict(nargs='+', help='set extra variable,format is name:value')),\n (('--skip-builtin',),\n dict(action='store_true', help='skip replace builtin variable')),\n\n (('--dir',), dict(nargs='?', default=os.getcwd(),\n help='set working directory')),\n (('--debug',), dict(action='store_true', help='open debug mode')),\n (('--dry-run',), dict(action='store_true',\n help='print command instead execute it')),\n (('--verbose', '-v'), dict(action='count')),\n ]", "def command_line_arguments():\n _parser.add_argument('-l', '--list', nargs='+',\n help='<Required> Set flag', required=True)\n _parser.add_argument(\"-A\", \"--access\", required=True,\n help=\"access to host => grant/revoke\")", "def add_args(parser):\n parser.add_argument(\n \"--zero-infinity\", action=\"store_true\", help=\"zero inf loss\"\n )\n try:\n parser.add_argument(\n \"--remove-bpe\",\n \"--post-process\",\n default=\"letter\",\n help=\"remove BPE tokens before scoring (can be set to sentencepiece, letter, and more)\",\n )\n except:\n pass # this option might have been added from eval args ", "def setup_flags(self):\n self.io_args.color = self.io_args.color_full\n self.io_args.rig_in = self.io_args.rig\n self.io_args.matches = os.path.join(self.io_args.output_root, \"matches.json\")\n self.io_args.rig_out = os.path.join(self.io_args.output_root, \"rig.json\")", "def _get_flags(args: Sequence[str]) -> Dict[str, bool]:\n flags = {}\n for arg in args:\n if arg.startswith(FLAG_MARKER):\n flag_name = arg[len(FLAG_MARKER):]\n if flag_name and flag_name not in OMIT_FLAGS:\n flags[flag_name] = True\n else:\n break # Ignore flags after initial CLI call\n return flags", "def _set_fflags(target, fc=\"gfortran\", argv=True, osname=None, verbose=False):\n fflags = None\n\n if fc is not None:\n fflags = []\n # get lower case OS string\n if osname is None:\n osname = _get_osname()\n\n # remove target .exe extension, if necessary\n target = _get_base_app_name(target)\n\n # remove .exe extension if necessary\n fc = _get_base_app_name(fc)\n\n if target == \"mp7\":\n if fc == 
\"gfortran\":\n fflags.append(\"-ffree-line-length-512\")\n elif target == \"gsflow\":\n if fc == \"ifort\":\n if osname == \"win32\":\n fflags += [\n \"-fp:source\",\n \"-names:lowercase\",\n \"-assume:underscore\",\n ]\n else:\n pass\n elif fc == \"gfortran\":\n fflags += [\"-O1\", \"-fno-second-underscore\"]\n opt = \"-fallow-argument-mismatch\"\n if _check_gnu_switch_available(\n opt, compiler=fc, verbose=verbose\n ):\n fflags += [\n opt,\n ]\n elif target in (\n \"mf2000\",\n \"mt3dms\",\n \"swtv4\",\n ):\n if fc == \"gfortran\":\n opt = \"-fallow-argument-mismatch\"\n if _check_gnu_switch_available(\n opt, compiler=fc, verbose=verbose\n ):\n fflags += [\n opt,\n ]\n elif target in (\n \"mf6\",\n \"libmf6\",\n \"zbud6\",\n ):\n if fc == \"gfortran\":\n fflags += [\n \"-Wtabs\",\n \"-Wline-truncation\",\n \"-Wunused-label\",\n \"-Wunused-variable\",\n \"-pedantic\",\n \"-std=f2008\",\n \"-Wcharacter-truncation\",\n ]\n\n # add additional fflags from the command line\n if argv:\n for idx, arg in enumerate(sys.argv):\n if \"--fflags\" in arg.lower():\n s = sys.argv[idx + 1]\n delim = \" -\"\n if \" /\" in s:\n delim = \" /\"\n fflags += s.split(delim)\n\n # write fortran flags\n if len(fflags) < 1:\n fflags = None\n else:\n if verbose:\n msg = (\n \"{} fortran code \".format(target)\n + \"will be built with the following predefined flags:\\n\"\n )\n msg += \" {}\\n\".format(\" \".join(fflags))\n print(msg)\n\n return fflags", "def add_token_flags(parser):\n parser.add_argument('--name', '-n', help='name of service')\n parser.add_argument('--owner', '-o', help='owner of service')\n parser.add_argument('--version', '-v', help='version of service')\n parser.add_argument('--cmd', '-C', help='command to start service')\n parser.add_argument('--cmd-type', '-t', help='command type of service (e.g. \"shell\")', dest='cmd-type')\n parser.add_argument('--cpus', '-c', help='cpus to reserve for service', type=float)\n parser.add_argument('--mem', '-m', help='memory (in MiB) to reserve for service', type=int)\n parser.add_argument('--ports', help='number of ports to reserve for service', type=int)", "def AddDatabaseFlagsFlag(parser):\n help_text = \"\"\"\\\n Comma-separated list of database flags to set on the AlloyDB primary\n instance. Use an equals sign to separate the flag name and value. Flags\n without values, like skip_grant_tables, can be written out without a value,\n e.g., `skip_grant_tables=`. Use on/off values for booleans. 
View AlloyDB's\n documentation for allowed flags (e.g., `--database-flags\n max_allowed_packet=55555,skip_grant_tables=,log_output=1`).\n \"\"\"\n parser.add_argument(\n '--database-flags',\n type=arg_parsers.ArgDict(),\n metavar='FLAG=VALUE',\n help=help_text)", "def add_args(parser, args):\n for arg in args:\n parser.add_argument('--' + arg, **global_args_dict[arg])\n return parser", "def add_arguments(parser):\n for arg, properties in AgentArgs.OPTIONS.items():\n parser.add_argument('--' + arg, **properties)\n verbosity = parser.add_mutually_exclusive_group()\n for arg, properties in AgentArgs.EXCLUSIVE_OPTIONS_1.items():\n verbosity.add_argument('--' + arg, **properties)\n progress_reporting = parser.add_mutually_exclusive_group()\n for arg, properties in AgentArgs.EXCLUSIVE_OPTIONS_2.items():\n progress_reporting.add_argument('--' + arg, **properties)", "def _AddCommonFlags(self, resource):\n self.flags['format'] = 'json'\n self.additional_flags.extend(FLAGS.openstack_additional_flags or ())", "def try_add_flag(args, compiler, flag, ext=None):\n if try_compile(compiler, flags=args+[flag], ext=ext):\n args.append(flag)", "def override_if_not_in_args(flag, argument, args):\r\n if flag not in args:\r\n args.extend([flag, argument])", "def add_arguments(parser):\n parser.add_argument('-e', '--environment', help='Environment name', required=True)\n parser.add_argument('-w', '--dont-wait', help='Skip waiting for the init to finish', action='store_true')\n parser.add_argument('-l', '--version-label', help='Version label', required=False)", "def get_flags(args):\r\n\r\n flags = 0\r\n\r\n if args.regexfilepattern is not None:\r\n flags |= pygrep.FILE_REGEX_MATCH\r\n\r\n if not args.regexp:\r\n flags |= pygrep.LITERAL\r\n elif args.dotall:\r\n flags |= pygrep.DOTALL\r\n\r\n if args.ignore_case:\r\n flags |= pygrep.IGNORECASE\r\n\r\n if args.recursive:\r\n flags |= pygrep.RECURSIVE\r\n\r\n if args.regexdirpattern:\r\n flags |= pygrep.DIR_REGEX_MATCH\r\n\r\n return flags", "def add_cli_args(parser):\n parser.add_argument(\n '--raw_path',\n help='Source path where audio data files are stored',\n default=RAW_DATA_PATH\n )\n parser.add_argument(\n '--features_path',\n help='Output path where exported data will be placed',\n default=FEATURES_DATA_PATH\n )\n parser.add_argument(\n '--feature',\n help='name of the feature to be extracted (options: mfsc, leglaive)',\n default=VoiceActivationFrameSelectionFeatureExtractor.feature_name\n )", "def add_args(parser):\n parser.add_argument(\n \"--share-encoder-embeddings\",\n action=\"store_true\",\n help=\"share encoder embeddings across languages\",\n )\n parser.add_argument(\n \"--share-decoder-embeddings\",\n action=\"store_true\",\n help=\"share decoder embeddings across languages\",\n )\n parser.add_argument(\n \"--share-encoders\",\n action=\"store_true\",\n help=\"share encoders across languages\",\n )\n parser.add_argument(\n \"--share-decoders\",\n action=\"store_true\",\n help=\"share decoders across languages\",\n )", "def _add_to_cli(self, parser, group=None):\n super(BoolOpt, self)._add_to_cli(parser, group)\n self._add_inverse_to_argparse(parser, group)", "def set_c_flags_hook(build_ext, ext):\n std_flag = get_c_std_flag(build_ext.compiler)\n if std_flag is not None:\n ext.extra_compile_args.append(std_flag)", "def read_flags():\n return flag_args", "def add_args(parser):\r\n # fmt: off\r\n parser.add_argument('--momentum', default=0.0, type=float, metavar='M',\r\n help='momentum factor')\r\n parser.add_argument('--weight-decay', '--wd', 
default=0.0, type=float, metavar='WD',\r\n help='weight decay')\r\n # fmt: on\r", "def add_logging_flags(parser):\n parser.add_argument('--log-file',\n help='Write the log to file. By default, write to stdout.')\n parser.add_argument('--log-level',\n choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],\n default='INFO',\n help='Set log level.')\n parser.add_argument('--verbose', '-v',\n help='Set the verbosity level. (Only partially supported for now.)')\n parser.add_argument('--quiet',\n help='Alias for setting log level to CRITICAL')\n parser.add_argument('--debug',\n help='Alias for setting log level to DEBUG')", "def set_custom_options(self, *args, use_long=True, silent=True, **kwargs):\n # TODO: need to automatically decide whether long or short format\n # keyworded arguments\n if use_long:\n self.custom_options.extend([\"{0}={1}\".format(k, v) for k, v in\n kwargs.items()])\n else:\n self.custom_options.extend([\"{0} {1}\".format(k, v) for k, v in\n kwargs.items()])\n # flags\n self.custom_options.extend(args)\n\n if not silent:\n print(\"-- Custom options specified: \", \" \".join(self.custom_options))", "def add_all_args(parser, process_args, *args):\n parser = add_args(process_args, parser)\n cli_args = parser.parse_args()\n for arg in ['model_cfg', 'classes', *args]:\n assert eval(f'cli_args.{arg}'), f'{arg} is required'\n return cli_args", "def add_arguments(self, parser):\n super(Command, self).add_arguments(parser)\n parser.add_argument(\n \"--skip-download\",\n action=\"store_false\",\n dest=\"download\",\n default=True,\n help=\"Skip downloading of the ZIP archive\"\n )\n parser.add_argument(\n \"--skip-clean\",\n action=\"store_false\",\n dest=\"clean\",\n default=True,\n help=\"Skip cleaning up the raw data files\"\n )\n parser.add_argument(\n \"--skip-load\",\n action=\"store_false\",\n dest=\"load\",\n default=True,\n help=\"Skip loading up the raw data files\"\n )\n parser.add_argument(\n \"--keep-files\",\n action=\"store_true\",\n dest=\"keep_files\",\n default=False,\n help=\"Keep zip, unzipped, TSV and CSV files\"\n )\n parser.add_argument(\n \"--no-archive\",\n action=\"store_true\",\n dest=\"no_archive\",\n default=False,\n help=\"Store an archive the downloaded zip file on the version model\"\n )\n parser.add_argument(\n \"--noinput\",\n action=\"store_true\",\n dest=\"noinput\",\n default=False,\n help=\"Download the ZIP archive without asking permission\"\n )\n parser.add_argument(\n \"--test\",\n \"--use-test-data\",\n action=\"store_true\",\n dest=\"test_data\",\n default=False,\n help=\"Use sampled test data (skips download, clean a load)\"\n )\n parser.add_argument(\n \"-a\",\n \"--app-name\",\n dest=\"app_name\",\n default=\"calaccess_raw\",\n help=\"Name of Django app with models into which data will \"\n \"be imported (if other not calaccess_raw)\"\n )", "def cli(*args, **kwargs):\n logger.debug('Global options: %s %s', args, kwargs)", "def _parse_flags(argv: List[str]) -> argparse.Namespace:\n argv = flag_utils.normalize_flags(argv) # See b/174043007 for context.\n\n parser = argparse_flags.ArgumentParser(\n description='Tensorflow Datasets CLI tool',\n allow_abbrev=False,\n )\n parser.add_argument(\n '--version',\n action='version',\n version='TensorFlow Datasets: ' + tfds.__version__,\n )\n parser.set_defaults(subparser_fn=lambda _: parser.print_help())\n # Register sub-commands\n subparser = parser.add_subparsers(title='command')\n build.register_subparser(subparser)\n new.register_subparser(subparser)\n return parser.parse_args(argv[1:])", 
"def add_args(parser):\n # fmt: off\n parser.add_argument('--max-lr', required=True, type=float, metavar='LR',\n help='max learning rate, must be more than args.lr')\n parser.add_argument('--lr-period-updates', default=5000, type=float, metavar='LR',\n help='initial number of updates per period (cycle length)')\n parser.add_argument('--lr-shrink', default=0.1, type=float, metavar='LS',\n help='shrink factor for annealing')\n parser.add_argument('--shrink-min', action='store_true',\n help='if set, also shrinks min lr')\n # fmt: on", "def add_cmdline_args(argparser):\n DictionaryAgent.add_cmdline_args(argparser)\n agent = argparser.add_argument_group('Fairseq Arguments')\n agent.add_argument(\n '-tr', '--truncate',\n type=int, default=-1,\n help='truncate input & output lengths to speed up training (may '\n 'reduce accuracy). This fixes all input and output to have a '\n 'maximum length. This reduces the total amount of padding in '\n 'the batches.')\n agent.add_argument(\n '--max-positions',\n default=1024,\n type=int,\n metavar='N',\n help='max number of tokens in the sequence')\n agent.add_argument(\n '--seed',\n default=1,\n type=int,\n metavar='N',\n help='pseudo random number generator seed')\n options.add_optimization_args(argparser)\n options.add_generation_args(argparser)\n options.add_model_args(argparser)", "def _set_cflags(target, cc=\"gcc\", argv=True, osname=None, verbose=False):\n cflags = None\n\n if cc is not None:\n cflags = []\n # get lower case OS string\n if osname is None:\n osname = _get_osname()\n\n # remove target .exe extension, if necessary\n target = _get_base_app_name(target)\n\n # remove .exe extension of necessary\n cc = _get_base_app_name(cc)\n\n if target == \"triangle\":\n if osname in [\"linux\", \"darwin\"]:\n if cc.startswith(\"g\"):\n cflags += [\"-lm\"]\n else:\n cflags += [\"-DNO_TIMER\"]\n elif target == \"gsflow\":\n if cc in [\"icc\", \"icpl\", \"icl\"]:\n if osname == \"win32\":\n cflags += [\"-D_CRT_SECURE_NO_WARNINGS\"]\n else:\n cflags += [\"-D_UF\"]\n elif cc == \"gcc\":\n cflags += [\"-O1\"]\n\n # add additional cflags from the command line\n if argv:\n for idx, arg in enumerate(sys.argv):\n if \"--cflags\" in arg.lower():\n s = sys.argv[idx + 1]\n delim = \" -\"\n if \" /\" in s:\n delim = \" /\"\n cflags += s.split(delim)\n\n # write c/c++ flags\n if len(cflags) < 1:\n cflags = None\n else:\n if verbose:\n msg = (\n \"{} c/c++ code \".format(target)\n + \"will be built with the following predefined flags:\\n\"\n )\n msg += \" {}\\n\".format(\" \".join(cflags))\n print(msg)\n\n return cflags", "def get_additional_args(self):\n additional = \"\"\n if not self.workflow.cleanup_scripts:\n additional += \" --skip-script-cleanup \"\n if self.workflow.shadow_prefix:\n additional += \" --shadow-prefix {} \".format(self.workflow.shadow_prefix)\n if self.workflow.use_conda:\n additional += \" --use-conda \"\n if self.workflow.conda_prefix:\n additional += \" --conda-prefix {} \".format(self.workflow.conda_prefix)\n if self.workflow.use_singularity:\n additional += \" --use-singularity \"\n if self.workflow.singularity_prefix:\n additional += \" --singularity-prefix {} \".format(\n self.workflow.singularity_prefix\n )\n if self.workflow.singularity_args:\n additional += ' --singularity-args \"{}\"'.format(\n self.workflow.singularity_args\n )\n\n if self.workflow.use_env_modules:\n additional += \" --use-envmodules\"\n\n return additional", "def test_addFlagsSilently(self):\n self._flagsSilentlyTest('addFlags', b'+FLAGS.SILENT')", "def 
AddDatabaseFlags(parser, update=False):\n help_ = (\n 'Comma-separated list of database flags to set on the '\n 'instance. Use an equals sign to separate flag name and value. '\n 'Flags without values, like skip_grant_tables, can be written '\n 'out without a value after, e.g., `skip_grant_tables=`. Use '\n 'on/off for booleans. View the Instance Resource API for allowed '\n 'flags. (e.g., `--database-flags max_allowed_packet=55555,'\n 'skip_grant_tables=,log_output=1`)'\n )\n if update:\n help_ += (\n '\\n\\nThe value given for this argument *replaces* the existing list.'\n )\n parser.add_argument(\n '--database-flags',\n type=arg_parsers.ArgDict(min_length=1),\n metavar='FLAG=VALUE',\n required=False,\n help=help_,\n )", "def add_additional_args(cls, parser: argparse.ArgumentParser):\n pass", "def add_extra_args(self):\n self.parser.add_argument('--device', dest='device', type=str, help='Device ID, e.g. d--0001')", "def add_args(self): \n self.parser.add_argument('-u', '--username',\n default=None,\n help='the username for mongoDB (Default: None)')\n\n self.parser.add_argument('-p', '--password',\n default=None,\n help='the password for mongoDB (Default: None)')\n\n self.parser.add_argument('-d', '--database',\n default='grits',\n help='the database for mongoDB (Default: grits)')\n\n self.parser.add_argument('-m', '--mongohost',\n default='localhost',\n help='the hostname for mongoDB (Default: localhost)')\n\n self.parser.add_argument('-f', '--force', \n action='store_true',\n help='do not require confirmation to create indexes (Default: False)')", "def flags(self, query):\n flag_list = []\n\n # add the help flag if it is requested\n if '--help' in query:\n flag_list.append('help')\n query = query.replace('--help', '').replace('-h', '')\n\n return query, flag_list", "def add_extra_arguments(self, parser):\n pass", "def add_args(parser):\r\n parser.add_argument(\"data\", help=\"path to data directory\")\r\n parser.add_argument(\r\n \"--silence-token\", default=\"\\u2581\", help=\"token for silence (used by w2l)\"\r\n )\r\n parser.add_argument(\r\n \"--max-source-positions\",\r\n default=sys.maxsize,\r\n type=int,\r\n metavar=\"N\",\r\n help=\"max number of frames in the source sequence\",\r\n )\r\n parser.add_argument(\r\n \"--max-target-positions\",\r\n default=1024,\r\n type=int,\r\n metavar=\"N\",\r\n help=\"max number of tokens in the target sequence\",\r\n )", "def setupOptions(self):\n add = command_line.CommandLineParser.add_option\n add(\"-S\", \"--startup\", action=\"store\", type=\"str\", default=None,\n dest=\"dotVistrails\",\n help=\"Set startup file (default is ~/.vistrails/startup.py)\")\n add(\"-?\", action=\"help\",\n help=\"show this help message and exit\")\n add(\"-v\", \"--version\", action=\"callback\",\n callback=lambda option, opt, value, parser: self.printVersion(),\n help=\"print version information and quit\")\n add(\"-V\", \"--verbose\", action=\"store\", type=\"int\", default=None,\n dest=\"verbose\", help=\"set verboseness level (0--2, \"\n \"default=0, higher means more verbose)\")\n add(\"-n\", \"--nosplash\", action=\"store_false\",\n default = None,\n help=\"don't display splash on startup\")\n add(\"-c\", \"--cache\", action=\"store\", type=\"int\", default=None,\n dest=\"cache\", help=\"enable/disable caching\")\n add(\"-m\", \"--movies\", action=\"store\", type=\"int\", default=None,\n dest=\"movies\", help=\"set automatic movie creation on spreadsheet \"\n \"(0 or 1, default=1. 
Set this to zero to work around vtk bug with \"\n \"offscreen renderer and opengl texture3d mappers)\")\n add(\"-s\", \"--multiheads\", action=\"store_true\",\n default = None,\n help=\"display the builder and spreadsheet on different screens \"\n \"(if available)\")\n add(\"-x\", \"--maximized\", action=\"store_true\",\n default = None,\n help=\"Maximize VisTrails windows at startup\")\n add(\"-b\", \"--noninteractive\", action=\"store_true\",\n default = None,\n help=\"run in non-interactive mode\")\n add(\"-e\", \"--dumpcells\", action=\"store\", dest=\"dumpcells\",\n default = None,\n help=\"when running in non-interactive mode, directory to dump \"\n \"spreadsheet cells before exiting\")\n add(\"-p\", \"--pdf\", action=\"store_true\",\n default = None,\n help=\"dump files in pdf format (only valid in console mode)\")\n add(\"-l\", \"--nologger\", action=\"store_true\",\n default = None,\n help=\"disable the logging\")\n add(\"-d\", \"--debugsignals\", action=\"store_true\",\n default = None,\n help=\"debug Qt Signals\")\n add(\"-a\", \"--parameters\", action=\"store\", dest=\"parameters\",\n help=\"workflow parameter settings (non-interactive mode only)\")\n add(\"-t\", \"--host\", action=\"store\", dest=\"host\",\n help=\"hostname or ip address of database server\")\n add(\"-r\", \"--port\", action=\"store\", type=\"int\", default=3306,\n dest=\"port\", help=\"database port\")\n add(\"-f\", \"--db\", action=\"store\", dest=\"db\",\n help=\"database name\")\n add(\"-u\", \"--user\", action=\"store\", dest=\"user\",\n help=\"database username\")\n add(\"-i\", \"--showspreadsheetonly\", action=\"store_true\",\n default = None,\n help=\"only the spreadsheet will be shown. This implies -w was given.\\\nThe builder window can be accessed by a spreadsheet menu option.\")\n add(\"-w\", \"--executeworkflows\", action=\"store_true\",\n default = None,\n help=\"The workflows will be executed\")\n add(\"-I\", \"--workflowinfo\", action=\"store\",\n default = None,\n help=(\"Save workflow graph and spec in specified directory \"\n \"(only valid in console mode).\"))\n add(\"-E\", \"--reviewmode\", action=\"store_true\",\n default = None,\n help=\"Show the spreadsheet in the reviewing mode\")\n add(\"-q\", \"--quickstart\", action=\"store\",\n help=\"Start VisTrails using the specified static registry\")\n add(\"-D\", \"--detachHistoryView\", action=\"store_true\",\n help=\"Detach the history view from the builder windows\")\n add(\"-G\", \"--workflowgraph\", action=\"store\",\n default = None,\n help=(\"Save workflow graph in specified directory without running \"\n \"the workflow (only valid in console mode).\"))\n add(\"-U\", \"--evolutiongraph\", action=\"store\",\n default = None,\n help=(\"Save evolution graph in specified directory without running \"\n \"any workflow (only valid in console mode).\"))\n command_line.CommandLineParser.parse_options()", "def add_arguments(self, parser):", "def _build_flags(deploy):\n\n flags = ['R' if deploy['running'] else 'S',\n 'E' if deploy['enabled'] else 'D']\n\n if 'running' in deploy['container_status']:\n flags.append('r' if deploy['container_status']['running'] else 's')\n else:\n flags.append('-')\n\n flags.append('C' if 'callback_uri' in deploy else '-')\n\n return \"\".join(flags)", "def add_args(parser):\n rescore_add_args(parser)\n parser.add_argument(\n \"--rl-weight\",\n type=float,\n default=0.1,\n help=\"trade-off coefficient of rl loss\",\n )\n parser.add_argument(\n \"--rl-num-trajectory\",\n type=int,\n default=3,\n help=\"num trajectory 
in rl training\",\n )", "def add_args(parser: argparse.ArgumentParser):\n pass", "def cflags_other(self):\n\n status, stdout, stderr = self.__xcall__(['--cflags-only-other'])\n\n if status != 0:\n raise RuntimeError(\"error querying --cflags-only-other for package `%s': %s\" % (self.name, stderr))\n\n flag_map = {\n '-D': 'define_macros',\n }\n\n kw = {}\n\n for token in stdout.split():\n if token[:2] in flag_map:\n kw.setdefault(flag_map.get(token[:2]), []).append(token[2:])\n\n else: # throw others to extra_link_args\n kw.setdefault('extra_compile_args', []).append(token)\n\n # make it uniq\n for k, v in kw.items(): kw[k] = uniq(v)\n\n # for macros, separate them so they can be plugged on C/C++ extensions\n if 'define_macros' in kw:\n for k, string in enumerate(kw['define_macros']):\n if string.find('=') != -1:\n kw['define_macros'][k] = string.split('=', 2)\n else:\n kw['define_macros'][k] = (string, None)\n\n return kw", "def generate_options(self):\n super(MachineLookup, self).generate_options()\n options = [\"generic_filters\", \"meta_filters\"]\n for option in self.command_options:\n if option['dest'] in options:\n option['action'] = \"append\"", "def add_extra_args(self):\n pass", "def define_flags():\n flags.DEFINE_string(\n \"project_id\",\n help=\"GCP project ID\",\n required=True,\n default=None)\n flags.DEFINE_string(\n \"endpoint_id\",\n help=\"Vertex AI endpoint ID number\",\n required=True,\n default=None)\n flags.DEFINE_string(\n \"region\",\n help=\"GCP region\",\n required=True,\n default=None)\n flags.DEFINE_enum(\n \"scenario\",\n enum_values=[\"single_stream\", \"multi_stream\", \"server\"],\n help=\"The MLPerf scenario. Possible values: \"\n \"single_stream | multi_stream | server.\",\n default=\"server\")\n flags.DEFINE_enum(\n \"dataset\",\n enum_values=[\"criteo\", \"squad_bert\", \"sentiment_bert\", \"generic_jsonl\"],\n help=\"The dataset to use. Possible values: \"\n \"criteo | squad_bert | sentiment_bert.\",\n default=None)\n flags.DEFINE_string(\n \"data_file\",\n help=\"Path to the file containing the requests data. Can be a local file\"\n \"or a GCS path. Required for criteo and sentiment_bert datasets.\",\n default=None)\n flags.DEFINE_integer(\n \"performance_sample_count\",\n help=\"Number of samples used in perfomance test. If not set defaults to\"\n \"total_sample_count.\",\n default=None)\n flags.DEFINE_integer(\n \"total_sample_count\",\n help=\"Total number of samples available. Should only be set for\"\n \"synthetic, generated datasets.\",\n default=None)\n flags.DEFINE_float(\n \"target_latency_percentile\",\n help=\"The target latency percentile.\",\n default=0.99)\n flags.DEFINE_integer(\n \"target_latency_ns\",\n help=\"The target latency in nanoseconds. If achieved latency exceeds\"\n \"the target, the perfomance constraint of the run will not be satisfied.\",\n default=130 * int(1e6))\n flags.DEFINE_integer(\n \"min_query_count\",\n help=\"The minimum number of queries used in the run.\",\n default=1)\n flags.DEFINE_integer(\n \"min_duration_ms\",\n help=\"The minimum duration of the run in milliseconds.\",\n default=10000)\n flags.DEFINE_multi_float(\n \"qps\",\n help=\"The QPS values to run each test at. Specify multiple values \"\n \"with multiple flags. i.e. --qps=10 --qps=12.5.\",\n default=[])\n flags.DEFINE_string(\n \"cache\",\n help=\"Path to the cached dataset file. 
Used in squad_bert benchmark.\",\n default=None)\n flags.DEFINE_enum(\n \"api_type\",\n enum_values=[\"rest\", \"gapic\", \"grpc\"],\n help=\"API over which requests will be send. Possible values: \"\n \"rest | gapic | grpc.\",\n default=None)\n flags.DEFINE_string(\n \"csv_report_filename\",\n help=\"Optional filename to generate report.\",\n default=\"\")", "def _add_standard_args(parser: ArgumentParser) -> None:\r\n parser.add_argument(\r\n '--username',\r\n required=True,\r\n action=EnvDefault,\r\n envvar='ZFR_USERNAME',\r\n help='Username used to login to Zephyr Scale.'\r\n )\r\n parser.add_argument(\r\n '--password',\r\n required=True,\r\n action=EnvDefault,\r\n envvar='ZFR_PASSWORD',\r\n help='Password used to login to Zephyr Scale.'\r\n )\r\n parser.add_argument(\r\n '--url',\r\n required=True,\r\n action=EnvDefault,\r\n envvar='ZFR_URL',\r\n help='Jira url used to interace with the Zephyr API.'\r\n )\r\n parser.set_defaults(cmd=FolderCommand(parser))", "def write_flags(self):\n\n self.cmake.write(\n '################# Flags ################\\n'\n '# Defines Flags for Windows and Linux. #\\n'\n '########################################\\n\\n'\n )\n\n self.define_group_properties()\n self.define_windows_flags()\n self.define_linux_flags()", "def _add_argument(self, args=''):\n\n sys.argv += args.split(' ')", "def _AddShellOptions(spec):\n # type: (_FlagSpecAndMore) -> None\n spec.InitOptions()\n spec.InitShopt()\n\n for opt in option_def.All():\n if opt.builtin == 'set':\n spec.Option(opt.short_flag, opt.name)\n # Notes:\n # - shopt option don't need to be registered; we validate elsewhere\n # - 'interactive' Has a cell for internal use, but isn't allowed to be\n # modified.", "def add_arguments(parser):\n add_token_flags(parser)\n parser.add_argument('token', nargs='?')\n if is_admin_enabled():\n parser.add_argument('--admin', '-a', help='run command in admin mode', action='store_true')\n format_group = parser.add_mutually_exclusive_group()\n format_group.add_argument('--json', help='provide the data in a JSON file', dest='json')\n format_group.add_argument('--yaml', help='provide the data in a YAML file', dest='yaml')\n format_group.add_argument('--input', help='provide the data in a JSON/YAML file', dest='input')\n parser.add_argument('--output', help='outputs the computed token configuration in a JSON/YAML file (or to stdout using -)'\n 'without performing any token edit operations')\n parser.add_argument('--context', dest='context',\n help='can be used only when a data file has been provided via --input, --json, or --yaml; '\n 'this JSON/YAML file provides the context variables used '\n 'to render the data file as a template')\n add_override_flags(parser)", "def __add_common_args(parser: argparse.ArgumentParser):\n parser.add_argument(\"--model\", help=\"name of the model to use. Use query --get-models to get a list of valid names.\")\n parser.add_argument(\"--grid-type\", help=\"type of the grid to use.\")\n parser.add_argument(\"--level-type\", help=\"type of the vertical level to use.\")\n parser.add_argument(\"--init-time\", help=f\"initialization time to use. \"\n \"Integers are interpreted as hours since model start, dates formatted as \"\n f\"{__DATE_FORMAT.replace('%Y', 'YYYY').replace('%m', 'MM').replace('%d', 'DD').replace('%H', 'HH').replace('%M', 'MM')} are interpreted as absolute start dates.\")\n parser.add_argument(\"--variable\", nargs=\"+\", help=\"name of the variable to use. 
Use query --get-vars to get a list of valid names.\")\n parser.add_argument(\"--levels\", nargs=\"+\", type=int, help=\"levels to use.\")\n parser.add_argument(\"--lead-time\", nargs=\"+\", type=int, help=\"lead times to use in hours.\")", "def _options_commandline_overrides(options):\n cmdline_values = {\n 'run_storage_base': options.run_storage_base,\n 'watch': options.watch,\n 'verbose': options.verbose,\n 'uploader_config': options.uploader_config,\n 'logging_config': options.logging_config,\n }\n\n # Commandline options override any value in the config file.\n for k, v in cmdline_values.items():\n if v is not None:\n options[k] = v\n\n return options", "def add_custom_argument(self, *name_or_flags, **options):\n self._specific_args_group.add_argument(*name_or_flags, **options)", "def AddOptionsToArgSpec(spec):\n for short_flag, opt_name in state.SET_OPTIONS:\n spec.Option(short_flag, opt_name)\n\n for shopt_name in state.ALL_SHOPT_OPTIONS + state.META_OPTIONS:\n spec.ShoptOption(shopt_name)", "def add_override_flags(parser):\n override_group = parser.add_mutually_exclusive_group(required=False)\n override_group.add_argument('--override', action='store_true', dest='override',\n help='Allow overriding values in input file with values from CLI arguments. '\n 'Overriding values is disallowed by default. '\n 'Adding the --no-override flag explicitly disallows overriding values.')\n override_group.add_argument('--no-override', action='store_false', dest='override', help=argparse.SUPPRESS)", "def get_options():\n\n global args\n\n options = parser.add_argument_group(\"flags\")\n options.add_argument(\n \"-t\",\n \"--hash-type\",\n help=\"type of hash from the following: lm, ntlm, md4, md5, sha1, sha256, sha512\",\n metavar=\"\",\n required=True,\n choices=[\"lm\", \"ntlm\", \"md4\", \"md5\", \"sha1\", \"sha256\", \"sha512\"],\n )\n options.add_argument(\n \"-w\",\n \"--wordlist\",\n help=\"file path to wordlist\",\n metavar=\"\",\n type=argparse.FileType(\"r\"),\n required=True,\n )\n\n hash_group = options.add_mutually_exclusive_group(required=True)\n hash_group.add_argument(\n \"-s\", \"--hash-string\", help=\"hash string to crack\", metavar=\"\"\n )\n hash_group.add_argument(\n \"-l\",\n \"--hash-list\",\n help=\"file path to the list of hashes\",\n metavar=\"\",\n type=argparse.FileType(\"r\"),\n )\n\n args = parser.parse_args()", "def _add_arguments(self):\r\n self._parser.add_argument(\r\n '-s', '--server',\r\n required=True,\r\n help=\"enter server name\")\r\n self._parser.add_argument(\r\n '-db', '--database',\r\n required=True,\r\n help='enter database name')\r\n self._parser.add_argument(\r\n '-u', '--username',\r\n help='enter username')\r\n self._parser.add_argument(\r\n '-p', '--password',\r\n help='enter password')\r\n #self._parser.add_argument(\r\n # '-h', '--help',\r\n # help='show this help message and exit')\r", "def add_args(parser):\n # fmt: off\n parser.add_argument('--pixel-loss-type', type=str, default='l1')\n # fmt: on", "def add_args(parser):\n # fmt: off\n FConvModel.add_args(parser)\n parser.add_argument('--encoder-conv-channels', type=str, metavar='EXPR',\n help='list of encoder convolution\\'s out channels')\n parser.add_argument('--encoder-conv-kernel-sizes', type=str, metavar='EXPR',\n help='list of encoder convolution\\'s kernel sizes')\n parser.add_argument('--encoder-conv-strides', type=str, metavar='EXPR',\n help='list of encoder convolution\\'s strides')\n parser.add_argument('--decoder-positional-embed', action='store_true',\n help='use decoder 
positional embeddings')\n # fmt: on", "def _add_compiler_args(parser):\n ap = parser\n ap.add_argument('--full-event-pattern',\n help=\"If set, use the 'full' format \"\n \"(TYPE, (CLK, DST, SRC), MSG) for event patterns;\"\n \"otherwise, use 'short' format (MSG, SRC)\",\n action='store_true')\n ap.add_argument('--enable-object-pattern',\n help=\"Enable the use of object-style tuple pattern syntax:\"\n \" Object(ARGS...); which is equivalent to \"\n \"('Object', ARGS...)\",\n action='store_true')\n ap.add_argument('--enable-membertest-pattern',\n help=\"Overloads the Python 'in' operator to support using \"\n \"tuple patterns, e.g.: '(_a, 1, _) in S', which is \"\n \"equivalent to 'some((_a, 1, _) in S)'\",\n action='store_true')\n ap.add_argument('--enable-iterator-pattern',\n help=\"Overloads the Python 'for ... in ...' keywords to \"\n \"support using tuple patterns in the target, \"\n \"e.g.: '[b for (_a, 1, b) in S]', which is equivalent to \"\n \"'[b for (var1, var2, b) in S if var1 == a if var2 == b]'\",\n action='store_true')\n ap.add_argument('--use-top-semantic',\n help=\"Use 'top' semantics for query variable and \"\n \"parameter resolution. Under 'top' semantics, only \"\n \"parameters to the top-level query are marked.\",\n action='store_true')\n ap.add_argument('--no-table1',\n help=\"Disable table 1 quantification transformations. \"\n \"Only used when '-i' is enabled.\",\n action='store_true')\n ap.add_argument('--no-table2',\n help=\"Disable table 2 quantification transformations. \"\n \"Only used when '-i' is enabled.\",\n action='store_true')\n ap.add_argument('--no-table3',\n help=\"Disable table 3 quantification transformations. \"\n \"Only used when '-i' is enabled.\",\n action='store_true')\n ap.add_argument('--no-table4',\n help=\"Disable table 4 quantification transformations. \"\n \"Only used when '-i' is enabled.\",\n action='store_true')\n ap.add_argument('--jb-style',\n help=\"Generate Jon-friendly quantification transformations. \"\n \"Only useful with '-i'.\",\n action='store_true')\n ap.add_argument('--no-all-tables',\n help=\"Disable all quantification transformations. 
\"\n \"Only useful with '-i'.\",\n action='store_true')\n ap.add_argument('--module-name', type=str, default='__main__',\n help=\"Name of the module to be compiled.\")", "def define_mnist_flags():\n flags_core.define_base(\n clean=True,\n num_gpu=True,\n train_epochs=True,\n epochs_between_evals=True,\n distribution_strategy=True)\n\n flags_core.define_device()\n flags_core.define_distribution()\n\n # flags.DEFINE_string('data_dir', r'D:\\tf2_official_data_model_res\\data\\my_recommand_data','null')\n # flags.DEFINE_string('model_dir', r'D:\\tf2_official_data_model_res\\model\\my_recommand_model', 'null')\n # flags.DEFINE_string('distribution_strategy', r'one_device', 'null')\n FLAGS.set_default('data_dir', r'D:\\tf2_official_data_model_res\\data\\my_recommand_data')\n FLAGS.set_default('model_dir', r'D:\\tf2_official_data_model_res\\model\\my_recommand_model')\n FLAGS.set_default('distribution_strategy', r'one_device')\n FLAGS.set_default('num_gpus',0)\n FLAGS.set_default('train_epochs', 100)\n flags.DEFINE_bool('download', True,'Whether to download data to `--data_dir`.')", "def AddRegisterFlagsToParser(parser):\n _AddDNSSettingsFlagsToParser(parser, mutation_op=MutationOp.REGISTER)\n _AddContactSettingsFlagsToParser(parser, mutation_op=MutationOp.REGISTER)\n _AddPriceFlagsToParser(parser, MutationOp.REGISTER)\n\n messages = apis.GetMessagesModule('domains', API_VERSION_FOR_FLAGS)\n notice_choices = ContactNoticeEnumMapper(messages).choices.copy()\n notice_choices.update({\n 'hsts-preloaded':\n ('By sending this notice you acknowledge that the domain is '\n 'preloaded on the HTTP Strict Transport Security list in browsers. '\n 'Serving a website on such domain will require an SSL certificate. '\n 'See https://support.google.com/domains/answer/7638036 for details.')\n })\n base.Argument( # This is not a go/gcloud-style#commonly-used-flags.\n '--notices',\n help='Notices about special properties of certain domains or contacts.',\n metavar='NOTICE',\n type=arg_parsers.ArgList(element_type=str,\n choices=notice_choices)).AddToParser(parser)", "def cmdline(self, executable, options, task, rlimits):\n data_model_param = get_data_model_from_task(task, {ILP32: \"-m32\", LP64: \"-m64\"})\n print(options)\n if data_model_param and not any(\n option.startswith(\"--clang-options=\") for option in options\n ):\n options += [\"--clang-options=\" + data_model_param]\n\n if task.property_file:\n options += [\"--svcomp-property\", task.property_file]\n else:\n raise UnsupportedFeatureException(\n \"SMACK can't execute without a property file.\"\n )\n\n options += [task.single_input_file]\n\n return [executable] + options", "def processCmdlineOpts(cmdOpts):\n global opts\n opts = {}\n for i in range(1,len(cmdOpts)):\n if re.match('-i', cmdOpts[i]):\n opts['i'] = cmdOpts[i+1]\n if i not in opts: \n opts['i']='awn.xml'\n return opts", "def add_argparse_args(parser, prefix=None):\n\n if prefix is None:\n p1 = '--'\n p2 = ''\n else:\n p1 = '--' + prefix + '-'\n p2 = prefix + '_'\n\n parser.add_argument(p1+'no-norm-mean', dest=(p2+'no_norm_mean'), \n default=False, action='store_true',\n help='don\\'t center the features')\n\n parser.add_argument(p1+'norm-var', dest=(p2+'norm_var'), \n default=False, action='store_true',\n help='normalize the variance of the features')\n\n \n parser.add_argument(p1+'left-context', dest=(p2+'left_context'), type=int,\n default=300,\n help='past context in number of frames')\n\n parser.add_argument(p1+'right-context', dest=(p2+'right_context'), type=int,\n default=300,\n 
help='future context in number of frames')", "def cli(arg_dict):\n\n keys = list(arg_dict.keys())\n for key in keys:\n v = arg_dict[key]\n usr_args_ls = sys.argv\n for ind in range(len(usr_args_ls)):\n val = usr_args_ls[ind]\n if val == \"-\" + key[0] or val == \"--\" + key:\n if type(v).__name__ == \"bool\":\n v = not v\n else:\n v = usr_args_ls[ind + 1]\n\n arg_dict[key] = v", "def _add_arguments(parser):\n parser.add_argument(\n \"command\",\n help='The plugin to run. e.g. \"shell\".',\n choices=sorted(registry.get_command_keys()),\n )\n\n parser.add_argument(\n \"-x\",\n \"--maximum-repositories\",\n default=sys.maxsize,\n type=int,\n help='If a value of `2` is used, it means \"Only search 2 repositories '\n 'for Rez packages to run on, at most\".',\n )\n\n parser.add_argument(\n \"-z\",\n \"--maximum-rez-packages\",\n default=sys.maxsize,\n type=int,\n help='If a value of `2` is used, it means \"Only search for 2 Rez packages '\n 'to run some comm on, at most\".',\n )\n\n parser.add_argument(\n \"-p\",\n \"--packages-path\",\n default=[config.release_packages_path], # pylint: disable=no-member\n help=\"A `{os.pathsep}` separated list of paths that report/run will be run on. \"\n \"If not defined, `rez.config.config.release_packages_path` is used, instead.\".format(\n os=os\n ),\n )\n\n parser.add_argument(\n \"-s\",\n \"--search-packages-path\",\n default=[config.release_packages_path], # pylint: disable=no-member\n help=\"A `{os.pathsep}` separated list of paths to search for Rez package dependencies. \"\n \"If not defined, `rez.config.config.release_packages_path` is used, instead.\".format(\n os=os\n ),\n )\n\n parser.add_argument(\n \"-i\",\n \"--ignore-patterns\",\n default=[],\n nargs=\"*\",\n help=\"A set of glob expressions or a file to a set of glob expressions. \"\n \"If a Rez package name matches one of \"\n \"these, it will not be run on.\",\n )\n\n parser.add_argument(\n \"-k\",\n \"--keep-temporary-files\",\n action=\"store_true\",\n help=\"If added, do not delete any temporary files that are generated during this run.\",\n )\n\n parser.add_argument(\n \"-r\",\n \"--rez-packages\",\n default=set(),\n nargs=\"+\",\n help=\"The names of Rez packages to process. 
If no names are given, \"\n \"every Rez package that is found will be processed.\",\n )\n\n parser.add_argument(\n \"-t\",\n \"--temporary-directory\",\n help=\"A folder on-disk that will be used to clone git repositories.\",\n )", "def env_callback(args):\n return {\n \"MY_CUSTOM_FLAG\": args.my_custom_flag\n }", "def test_setFlags(self):\n self._flagsTest('setFlags', b'FLAGS')", "def build_arguments(self, *cmd_args, **cmd_kwargs):\n args = []\n args.extend(cmd_args)\n\n for raw_key, value in cmd_kwargs.items():\n if len(raw_key) == 1:\n args.append('-{}'.format(raw_key))\n else:\n key = raw_key.replace('_', '-')\n args.append('--{}'.format(key))\n\n if value is True:\n # If True, it is enough.\n # e.g.: system=True translates to --system\n continue\n\n args.append(str(value))\n\n return args", "def parse_xcodebuild_flags(args):\n result = {}\n key = None\n for arg in args:\n if arg.startswith('-'):\n if arg in INTERESTING_FLAGS:\n key = arg\n elif key is not None:\n result[key] = arg\n key = None\n\n return result", "def parse_xcodebuild_flags(args):\n result = {}\n key = None\n for arg in args:\n if arg.startswith('-'):\n if arg in INTERESTING_FLAGS:\n key = arg\n elif key is not None:\n result[key] = arg\n key = None\n\n return result", "def _parse_flags(argv: List[str]) -> argparse.Namespace:\n argv = flag_utils.normalize_flags(argv) # See b/174043007 for context.\n\n parser = argparse_flags.ArgumentParser(\n description='Tensorflow Datasets CLI tool',\n )\n parser.add_argument(\n '--data_dir',\n type=tfds.core.Path,\n help='Path to the dataset files.',\n )\n parser.add_argument(\n '--out_dir',\n type=tfds.core.Path,\n help='Computed metadata will be written here.',\n )\n return parser.parse_args(argv[1:])", "def add_args(parser):\n # fmt: off\n parser.add_argument(\"--hidden-size\", type=int, default=512)\n parser.add_argument(\"--max-epochs\", type=int, default=1000)\n parser.add_argument(\"--sample-size\", type=int, default=500)\n parser.add_argument(\"--batch-size\", type=int, default=4)\n # fmt: on", "def _add_sample_specific_arguments(self, is_required: bool, *args):\n for arg in args:\n name_or_flags = arg[\"name_or_flags\"]\n options = arg[\"options\"]\n options[\"required\"] = is_required\n self._specific_args_group.add_argument(*name_or_flags, **options)", "def addArguments(self, parser):\r\n self.argparseHelper.addArg(parser)", "def define_flags():\n define_flag = {\n 'boolean': flags.DEFINE_boolean,\n 'float': flags.DEFINE_float,\n 'integer': flags.DEFINE_integer,\n 'string': flags.DEFINE_string,\n }\n for name, param_spec in six.iteritems(proparams._DEFAULT_PARAMS):\n define_flag[param_spec.flag_type](name, param_spec.default_value, param_spec.description)\n flags.declare_key_flag(name)", "def _add_default_options(self, argv):\r\n rc_filename = self._rc_filename()\r\n\r\n options = argv\r\n\r\n if self.IGNORE_RC_FLAG not in argv and os.path.exists(rc_filename):\r\n command = self._command or self.NO_COMMAND\r\n rc_config = ConfigParser.SafeConfigParser()\r\n rc_config.read(rc_filename)\r\n\r\n if rc_config.has_option(command, self.OPTIONS):\r\n default_options_str = rc_config.get(command, self.OPTIONS)\r\n default_options = shlex.split(default_options_str, True)\r\n options = default_options + options\r\n\r\n return options", "def add_supported_cxxflags(self, cxxflags):\n self.start_msg('Checking allowed flags for c++ compiler')\n\n supportedFlags = []\n for flag in cxxflags:\n if self.check_cxx (cxxflags=[flag], mandatory=False):\n supportedFlags += [flag]\n\n 
self.end_msg (' '.join (supportedFlags))\n self.env.CXXFLAGS += supportedFlags", "def add_args(cls, parser: argparse.ArgumentParser ):\n try:\n parser.add_argument('--wallet.name',required=False, default=bittensor.defaults.wallet.name, help='''The name of the wallet to unlock for running bittensor''')\n parser.add_argument('--wallet.hotkey', required=False, default=bittensor.defaults.wallet.hotkey, help='''The name of wallet's hotkey.''')\n parser.add_argument('--wallet.path',required=False, default=bittensor.defaults.wallet.path, help='''The path to your bittensor wallets''')\n except argparse.ArgumentError:\n # re-parsing arguments.\n pass", "def or_cpp_flags(self, flags):\n flags_dict = {\"deprecated\": \"vtable::common_::deprecated\",\n \"hidden\": \"vtable::common_::hidden\",\n \"unprivileged\": \"vtable::common_::unprivileged\",\n \"no_reply\": \"vtable::method_::no_reply\"}\n\n cpp_flags = []\n for flag in flags:\n try:\n cpp_flags.append(flags_dict[flag])\n except KeyError:\n raise ValueError(\"Invalid flag \\\"{}\\\"\".format(flag))\n\n return \" | \".join(cpp_flags)", "def take_action_on_flags(self, *args, **kwargs):\r\n pass", "def add_new_model_args(old_args, new_args):\r\n global ADVANCED_OPTIONS\r\n old_args, new_args = vars(old_args), vars(new_args)\r\n for k in new_args.keys():\r\n if k not in old_args:\r\n if (k in ADVANCED_OPTIONS):\r\n logger.info('Adding arg %s: %s' % (k, new_args[k]))\r\n old_args[k] = new_args[k]\r\n\r\n return argparse.Namespace(**old_args)", "def add_args_to_subparser(the_parser, subcommand_name):\n\n the_parser.add_argument(CmdArgs.verbose_optional, help=CmdArgs.verbose_help,\n action='store_true',\n )\n\n if subcommand_name in DCA_VISUALIZATION_SUBCOMMANDS:\n the_parser.add_argument(CmdArgs.biomolecule, help=CmdArgs.biomolecule_help)\n the_parser.add_argument(CmdArgs.pdb_chain_id, help=CmdArgs.pdb_chain_id_help)\n the_parser.add_argument(CmdArgs.pdb_file, help=CmdArgs.pdb_file_help)\n the_parser.add_argument(CmdArgs.refseq_file, help=CmdArgs.refseq_file_help)\n the_parser.add_argument(CmdArgs.dca_file, help=CmdArgs.dca_file_help)\n the_parser.add_argument(CmdArgs.rna_secstruct_file_optional,\n help=CmdArgs.rna_secstruct_file_help,\n )\n the_parser.add_argument(CmdArgs.linear_dist_optional,\n help=CmdArgs.linear_dist_help, type = int,\n )\n the_parser.add_argument(CmdArgs.contact_dist_optional,\n help=CmdArgs.contact_dist_help, type = float,\n )\n the_parser.add_argument(CmdArgs.num_dca_contacts_optional,\n help = CmdArgs.num_dca_contacts_help, type = int,\n )\n the_parser.add_argument(CmdArgs.wc_neighbor_dist_optional, type= int,\n help = CmdArgs.wc_neighbor_dist_help,\n )\n the_parser.add_argument(CmdArgs.pdb_id_optional, help = CmdArgs.pdb_id_help)\n\n if subcommand_name in FILE_CONTENT_SUBCOMMANDS:\n if subcommand_name == 'pdb_content':\n the_parser.add_argument(CmdArgs.pdb_file, help = CmdArgs.pdb_file_help)\n if subcommand_name in MSA_TRIMMING_SUBCOMMANDS:\n the_parser.add_argument(CmdArgs.max_gap_optional,\n type = float, help = CmdArgs.max_gap_help,\n )\n if subcommand_name == 'trim_by_refseq':\n the_parser.add_argument(CmdArgs.biomolecule, help=CmdArgs.biomolecule_help)\n the_parser.add_argument(CmdArgs.msa_file, help=CmdArgs.msa_file_help)\n the_parser.add_argument(CmdArgs.refseq_file, help=CmdArgs.refseq_file_help)\n the_parser.add_argument(CmdArgs.remove_all_gaps_optional,\n help= CmdArgs.remove_all_gaps_help, action='store_true',\n )\n if subcommand_name == 'trim_by_gap_size':\n the_parser.add_argument(CmdArgs.msa_file, 
help=CmdArgs.msa_file_help)\n return None", "def get_flags():\n flags.DEFINE_string(\n 'model_name',\n help='MobileNet version name: mobilenet_v1, mobilenet_v2, '\n 'mobilenet_v3_small and mobilenet_v3_large',\n default='mobilenet_v1'\n )\n flags.DEFINE_string(\n 'dataset_name',\n help='Dataset name from TDFS to train on: imagenette, imagenet2012',\n default='imagenette'\n )\n flags.DEFINE_string(\n 'model_dir',\n help='Working directory.',\n default='./tmp'\n )\n flags.DEFINE_string(\n 'data_dir',\n help='Directory for training data.',\n default=None\n )\n flags.DEFINE_bool(\n 'resume_checkpoint',\n help='Whether resume training from previous checkpoint.',\n default=False\n )\n flags.DEFINE_string(\n 'optimizer_name',\n help='Name of optimizer.',\n default='rmsprop'\n )\n flags.DEFINE_string(\n 'learning_scheduler_name',\n help='Name of learning rate scheduler.',\n default='exponential'\n )\n # for hyperparameter tuning\n flags.DEFINE_float(\n 'op_momentum',\n help='Optimizer momentum.',\n default=0.9\n )\n flags.DEFINE_float(\n 'op_decay_rate',\n help='Optimizer discounting factor for gradient.',\n default=0.9\n )\n flags.DEFINE_float(\n 'lr',\n help='Base learning rate.',\n default=0.008\n )\n flags.DEFINE_float(\n 'lr_decay_rate',\n help='Magnitude of learning rate decay.',\n default=0.97\n )\n flags.DEFINE_float(\n 'lr_decay_epochs',\n help='Frequency of learning rate decay.',\n default=2.4\n )\n flags.DEFINE_float(\n 'label_smoothing',\n help='The amount of label smoothing.',\n default=0.0,\n )\n flags.DEFINE_float(\n 'ma_decay_rate',\n help='Exponential moving average decay rate.',\n default=None\n )\n flags.DEFINE_float(\n 'dropout_rate',\n help='Dropout rate.',\n default=0.2\n )\n flags.DEFINE_float(\n 'std_weight_decay',\n help='Standard weight decay.',\n default=0.00004\n )\n flags.DEFINE_float(\n 'truncated_normal_stddev',\n help='The standard deviation of the truncated normal weight initializer.',\n default=0.09\n )\n flags.DEFINE_float(\n 'batch_norm_decay',\n help='Batch norm decay.',\n default=0.9997\n )\n flags.DEFINE_integer(\n 'batch_size',\n help='Training batch size.',\n default=4 # for testing purpose\n )\n flags.DEFINE_integer(\n 'epochs',\n help='Number of epochs.',\n default=5\n )", "def _test_argv(self, verbose, extra_argv):\r\n #self.package_path = os.path.abspath(self.package_path)\r\n argv = [__file__, self.package_path]\r\n argv += ['--verbosity', str(verbose)]\r\n if extra_argv:\r\n argv += extra_argv\r\n return argv", "def add_args(parser):\n add_encoder_args(parser)\n add_decoder_args(parser)", "def add_arguments(cls, arg_parser: ArgParser) -> None:", "def modify_commandline_options(parser, is_train=True):\n parser.set_defaults(norm='batch', netG='unet_256', dataset_mode='aligned')\n if is_train:\n parser.set_defaults(pool_size=0, gan_mode='vanilla')\n parser.add_argument('--lambda_feat', type=float, default=10.0, help='weight for feature-matching loss')\n\n return parser" ]
[ "0.6911403", "0.6741382", "0.6687998", "0.65631276", "0.6552243", "0.65478307", "0.6540075", "0.65375626", "0.6467081", "0.64280957", "0.6374949", "0.6354658", "0.6335308", "0.633125", "0.6321641", "0.6318687", "0.63147134", "0.6310211", "0.6308138", "0.62982756", "0.625911", "0.62488616", "0.62343836", "0.6222942", "0.62212014", "0.62191844", "0.62091166", "0.617911", "0.616631", "0.616278", "0.61568624", "0.6155294", "0.6151615", "0.61248887", "0.6109403", "0.61082137", "0.6075339", "0.60735816", "0.6058404", "0.6045002", "0.60436237", "0.60383666", "0.60369974", "0.60184354", "0.6016844", "0.59888464", "0.596016", "0.5952723", "0.5946546", "0.5933531", "0.59125966", "0.58978033", "0.58862144", "0.5875658", "0.5872678", "0.58705676", "0.5859737", "0.585494", "0.5841913", "0.58355665", "0.58248556", "0.580437", "0.58041656", "0.5803761", "0.579661", "0.57846254", "0.5776232", "0.57742643", "0.5771766", "0.5761647", "0.575803", "0.5756836", "0.5755022", "0.57544357", "0.5753255", "0.5747576", "0.5747271", "0.574301", "0.57374763", "0.573691", "0.5730335", "0.5725943", "0.5725943", "0.5724583", "0.5711847", "0.57046676", "0.57040566", "0.57040197", "0.57008755", "0.56999034", "0.5697225", "0.56962824", "0.5696026", "0.5690913", "0.5684683", "0.5677328", "0.5676746", "0.5675047", "0.567419", "0.5671209" ]
0.5957892
47
Touch (and truncate) |path|.
def touch(path): open(path, 'wb').close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def touch(path: str) -> None:\n Stat.forget(path)\n os.utime(path)", "def touch(path):\n fd = open(path, 'a')\n fd.close()", "def Touch(path, makedirs=False):\n if makedirs:\n SafeMakedirs(os.path.dirname(path))\n\n # Create the file if nonexistant.\n open(path, 'a').close()\n # Update timestamp to right now.\n os.utime(path, None)", "def touch(path):\n if not os.path.isfile(path):\n with open(path, 'a'):\n os.utime(path, None)", "def _touch(path):\n open(path, \"w\").close()", "def touch(path):\n with open(path, 'wt') as f:\n pass", "def touch(file_path: str) -> None:\n try:\n os.utime(file_path, None)\n except Exception:\n open(file_path, 'a').close()", "def do_touch ( self, fspath ):\n return", "def touch(path, times=None):\n if os.path.isdir(path):\n os.utime(path, times)\n else:\n with open(path, \"ab\"):\n os.utime(path, times)", "def touch(path, atime=None, mtime=None):\n assert ((atime is None) == (mtime is None)), 'atime and mtime are exclusive'\n if atime is None:\n times = None\n else:\n times = (atime, mtime)\n with open(path, 'ab+'):\n # Note: there is a race condition here.\n os.utime(path, times=times)", "def touch(path, mtime, test=False):\n if test: return\n os.utime(path, (mtime, mtime))", "def touch(path, mtime, test=False):\n if test: return\n os.utime(path, (mtime, mtime), follow_symlinks=False)", "def truncate(self, name, path, size_mb=100):\n self._assert_absolute_path_or_placeholder(path)\n self._run(name, ['truncate', path, size_mb])", "def touch(filepath, times=None, mkdir=False):\n filepath = expand_path(filepath)\n if mkdir:\n mkdir_p(os.path.dirname(filepath))\n with open(filepath, 'a'):\n if times or times is None:\n os.utime(filepath, times)\n return filepath", "def touch_p(filepath, times=None, mkdir=True):\n return touch(filepath=filepath, times=times, mkdir=mkdir)", "def touch(name):\r\n with file(name, 'a'):\r\n os.utime(name, None)", "def touch_file(file_name):\n os.utime(file_name, None)", "def touch(path, content=\"\", encoding=\"utf-8\", overwrite=False):\n path = os.path.abspath(path)\n if not overwrite and os.path.exists(path):\n logger.warning('touch: \"%s\" already exists', path)\n return False\n try:\n logger.info(\"touch: %s\", path)\n with io.open(path, \"wb\") as f:\n if not isinstance(content, bytes):\n content = content.encode(encoding)\n f.write(content)\n return True\n except Exception as e:\n logger.error(\"touch: %s failed. 
Error: %s\", path, e)\n return False", "def touch(fname, times=None):\n with open(fname, 'a'):\n os.utime(fname, times)", "def touch(*paths):\n\n for path in paths:\n if os.path.exists(path):\n os.utime(path, None)\n else:\n open(path, 'a').close()\n LOG.debug('touch {!r}'.format(path))", "def _touch(fname, times=None):\n with file(fname, 'a'):\n os.utime(fname, times)", "def _touch_file(self, fname):\n if os.path.exists(fname):\n os.utime(fname, None)\n else:\n open(fname, 'a').close()", "def msize(path):\n with open(path, \"w\") as w:\n w.write(\"\")\n os.utime(path, (0, 0))\n time.sleep(0.4)\n with open(path, \"w\") as w:\n w.write(\"0\")\n os.utime(path, (0, 0))", "def touch(file, times=None):\r\n if times:\r\n if len(times) > 2:\r\n raise ValueError('times must either be a tuple of (atime, mtime) or else a single time value '\r\n 'to use for both.')\r\n\r\n if len(times) == 1:\r\n times = (times, times)\r\n\r\n with safe_open(file, 'a'):\r\n os.utime(file, times)", "def touch(file_name):\n open(file_name, 'a').close()", "def test_set_path_1(self):\n self.file.touch()\n # Since using tempfile, there is an added quirk.\n # the tempfile path may be a symlink, so passing it through set path\n # will resolve the symlink, changing the path, and breaking the test.\n self.file = self.file.resolve()\n output = basic.set_path(self.file, kind=\"file\", expect=True)\n with self.subTest():\n self.assertIsInstance(output, Path)\n with self.subTest():\n self.assertEqual(str(self.file), str(output))", "def update_path():\n #TODO update path information\n pass", "def write_to_path(self, path):\n assert not path.exists()\n fout = path.open(\"wb\")\n fout.write(self.to_string())\n assert not fout.close()\n path.setdata()", "def touch(*parts):\n path = os.path.join(*parts)\n dirpath = os.path.dirname(path)\n if not os.path.isdir(dirpath):\n os.makedirs(dirpath)\n open(path, \"w\", encoding=\"utf-8\").close()", "def unlink(self, path: PathLike):", "def touch(self, fname, mode=0o644, dir_fd=None, **kwargs):\n flags = os.O_CREAT | os.O_APPEND\n with os.fdopen(os.open(fname, flags=flags, mode=mode, dir_fd=dir_fd)) as f:\n os.utime(\n f.fileno() if os.utime in os.supports_fd else fname,\n dir_fd=None if os.supports_fd else dir_fd, **kwargs\n )\n with open(fname, 'a'):\n os.utime(fname, None)", "def create_file(path: Path, content: str) -> None:\n path.touch()\n with path.open(\"w\") as f:\n f.write(content)", "def writable(path):", "def touch(filename):\n try:\n if os.path.exists(filename):\n os.utime(filename, None)\n else:\n open(filename, \"w\").close()\n except IOError as e:\n if e.errno != 13:\n raise\n else:\n return False\n except OSError as e:\n if e.errno != 13:\n raise\n else:\n return False\n return True", "def __touch_file(self, filename):\n with open(filename, \"w\") as fd:\n fd.write(\"\")", "def setPath(self, path):\n if self._path != path:\n self._path = path\n self.__update_preview()", "def test_reopen_changed_inode(tmp_path):\n\n path1 = tmp_path / \"file\"\n path2 = tmp_path / \"changed_file\"\n\n with open(path1, \"w\") as f:\n for i in range(1000):\n print(f\"{i}\", file=f)\n\n with open(path2, \"w\") as f:\n for i in range(2000):\n print(f\"{i}\", file=f)\n\n file_info = LogFileInfo(\n filename=path1,\n size_when_last_opened=0,\n file_position=0,\n file_handle=None,\n is_err_file=False,\n job_id=None,\n worker_pid=None,\n )\n\n file_info.reopen_if_necessary()\n for _ in range(1000):\n file_info.file_handle.readline()\n\n orig_file_pos = file_info.file_handle.tell()\n 
file_info.file_position = orig_file_pos\n\n # NOTE: On windows, an open file can't be deleted.\n file_info.file_handle.close()\n os.remove(path1)\n os.rename(path2, path1)\n\n file_info.reopen_if_necessary()\n\n assert file_info.file_position == orig_file_pos\n assert file_info.file_handle.tell() == orig_file_pos", "def move_to(self, path: str) -> None:\n self._new_path = os.path.join(path, self.annot_type, os.path.basename(self._file_path))\n os.rename(self._file_path, self._new_path)\n self._file_was_moved = True", "def timestamp_one(self, path):\n stat = path.stat()\n sde = self.manager.source_date_epoch\n if stat.st_mtime > sde:\n cls = self.__class__.__name__\n self.log.debug(\n f\"[lite][base] <{cls}> set time to source_date_epoch {sde} on {path}\"\n )\n os.utime(path, (sde, sde))\n return\n return", "def timestamp_one(self, path):\n stat = path.stat()\n sde = self.manager.source_date_epoch\n if stat.st_mtime > sde:\n cls = self.__class__.__name__\n self.log.debug(\n f\"[lite][base] <{cls}> set time to source_date_epoch {sde} on {path}\"\n )\n os.utime(path, (sde, sde))\n return\n return", "def touch(cls, upload, location=None, bytes_downloaded=0):\n path = \"uploader/touch/%s\" % upload[\"id\"]\n kwargs = {\"bytes_transferred\": bytes_downloaded, \"location\": location}\n try:\n return Backend.put(path, kwargs, headers=Backend.headers())\n except requests.HTTPError as err:\n if err.response.status_code == 410:\n LOGGER.warning(\"Cannot Touch file %s. Already finished \\\n (not active) (410)\", upload[\"id\"])\n raise err\n except:\n raise", "def change_time(path,time_adj):\n try:\n stat = os.stat(path)\n except OSError as E:\n print('path {:s} does not exist'.format(E))\n\n os.utime(path,(stat.st_atime+time_adj,stat.st_mtime+time_adj))", "def release(self, unlink=True, touch=False):\n fobj = self.fobj\n if not fobj:\n return\n if touch:\n fobj.write('\\n')\n if unlink:\n os.unlink(self.path)\n self.fobj = None\n fobj.close()", "def mtime(path):", "def touch(self, dst, label=None):\r\n self.write('', dst, label, mode='a')", "def touch(self, dst, label=None):\r\n self.write('', dst, label, mode='a')", "def refresh(path):\n if os.path.exists(path):\n os.remove(path)\n return path", "def remove(path):", "def touch(self, dst, label=None):\n self.write('', dst, label, mode='a')", "def create_file(path):\n command = ['touch', TEST_FILE]\n file_operation(path, command)", "def mount(\n self, path, mode, *, persistent=True, text_only=False\n ): # pylint: disable=arguments-differ\n if mode not in (\"r\", \"w\"):\n raise ValueError('Mode must be \"r\" or \"w\"')\n if mode == \"r\":\n hcell = self._get_hcell2()\n hcell.pop(\"checksum\", None)\n super().mount(path, mode, \"cell\", persistent=persistent)\n if text_only:\n hcell = self._get_hcell2()\n hcell[\"mount\"][\"directory_text_only\"] = True\n return self", "def new_path(path):\n return Path(path[0], path[1], path[2])", "def make_hidden_file(file_path: pathlib.Path) -> None:\n file_path.touch()\n make_file_hidden(file_path)", "def WriteFile(path, content, mode='w', atomic=False, makedirs=False):\n write_path = path\n if atomic:\n write_path = path + '.tmp'\n\n if makedirs:\n SafeMakedirs(os.path.dirname(path))\n\n with open(write_path, mode) as f:\n f.writelines(cros_build_lib.iflatten_instance(content))\n\n if not atomic:\n return\n\n try:\n os.rename(write_path, path)\n except EnvironmentError:\n SafeUnlink(write_path)\n raise", "def _delete_file(self, path):\n if not self.mount():\n return False\n uri = self.path_to_uri(path)\n return 
self.gvfs.delete_file(uri)", "def touch(self, node):\n pass", "def remove_file(self, path):\n pass", "def path_and_rename(path):\n def wrapper(instance, filename):\n ext = filename.split('.')[-1]\n # get filename\n if instance.pk:\n filename = '{}.{}'.format(instance.pk, ext)\n else:\n # set filename as random string\n filename = '{}.{}'.format(uuid4().hex, ext)\n # return the whole path to the file\n return os.path.join(path, filename)\n return wrapper", "def touch(op):\n if not os.path.exists(op):\n if is_verbose():\n print(\"Creating nonexistent file '%s'.\" % (op))\n fd = open(op, \"w\")\n fd.close()\n elif not os.path.isfile(op):\n raise RuntimeError(\"'%s' exists but is not a normal file\" % (op))", "def lease_path(self, filesystem_identifier, path, headers=None, **kwargs):\n logger.debug('Leasing path %s/%s...', filesystem_identifier, path)\n params = get_params(parameters=locals(), exclusions=['self', 'filesystem_identifier', 'path', 'headers'])\n response = self._post(endpoint='{}/{}'.format(filesystem_identifier, path),\n params=params,\n headers=headers)\n return Command(self, response)", "def write(self, path, buf, offset, fh):\n\n if offset + len(buf) > self.max_size:\n raise FuseOSError(errno.EFBIG)\n\n result = super(CurrentView, self).write(path, buf, offset, fh)\n self.dirty[fh] = {\"message\": \"Update {}\".format(path), \"stage\": True}\n\n log.debug(\"CurrentView: Wrote %s to %s\", len(buf), path)\n return result", "def mount(self, name, path):\n if not os.path.exists(path):\n raise OSError('no mount path: '+ path)\n if name.startswith('/'):\n name = name[1:]\n self._top_paths[name] = path", "def save(self, **kwargs):\n if len(self.path) > 0:\n self.path = self.path.strip().rstrip()\n super(TargetPath, self).save(**kwargs)", "def edit_file(path, editor=None):\n\n # Find the editor to use\n editor = find_editor(editor)\n\n # Create temporary directory and copy the file\n tmpdir = tempfile.mkdtemp()\n tmpfile = os.path.join(tmpdir, os.path.basename(path))\n shutil.copy2(path, tmpfile)\n\n # Execute the editor\n subprocess.call([editor, tmpfile])\n\n # Copy the temporary file back and cleanup\n shutil.copy2(tmpfile, path)\n shutil.rmtree(tmpdir)", "def __init__(self, path):\r\n self.path = path\r\n \"\"\" If the file doesn't exist, touch it \"\"\"\r\n open(self.path, 'w').close()", "def _create_sparsed_file(self, nms, path, size):\n nms.appliance.execute(\n 'truncate --size %(size)dG %(path)s' % {\n 'path': path,\n 'size': size\n }\n )", "def move_and_symlink_file(file_path: Path):\n assert (\n file_path.is_file()\n and not file_path.is_symlink()\n and user_home in file_path.parents\n )\n\n original_path = file_path\n new_path = translate_home_path(original_path)\n\n print(f\"Moving: {original_path} -> {new_path}\")\n if not new_path.parent.exists():\n new_path.parent.mkdir(mode=HOME_DIRECTORY_MODE, parents=True)\n move(str(original_path), str(new_path))\n\n print(f\"Creating Symlink: {original_path} -> {new_path}\")\n original_path.symlink_to(new_path)", "def reopen(self):\n self.close()\n self._fileobj = os.fdopen(\n os.open(str(self.path), os.O_CREAT | os.O_RDWR, 384), \"r+b\", 0\n )", "def upload_path(self, **kwargs):\n\n # Files not uploaded , skip\n if not (path := kwargs.get('path')):\n return\n\n if not os.path.exists(path):\n return\n\n with open(path, 'rb') as file:\n self.system()\n\n # Remove content type for files\n self.headers.pop('content-type', None)\n files = {'file': file}\n params = {'url': self.URL_POST_FILE, 'files': files}\n response = 
self.make_request(method='post', **params)\n response.update({'type': kwargs.get('type', AttachmentHelper.TYPE_OTHER)})\n\n # Remove local\n if kwargs.get('delete', True, ):\n os.remove(path)\n\n return self._save(response=response)", "def set_new_path(self, path):\r\n path = Path(path)\r\n self.tree.setRootIndex(self.model.index(str(path)))\r\n # to display correcly / on windows and \\ everywhere else\r\n if platform.system() == \"windows\":\r\n self.path_viewer.setText(path.as_posix())\r\n else:\r\n self.path_viewer.setText(str(path))", "def set_file_immutable_unlink(path):\n\n return vserver.set_file_attr(path, {'immutable':True, 'iunlink':True})", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def touch(self, wsgi_file=None):\n if wsgi_file is None:\n wsgi_file = self.get_wsgi_full_file_name()\n run('touch ' + wsgi_file)", "def path(self, path):\n self._path = path", "def move_file(path):\n new_path = os.path.join(TEST_DIR, TEST_FILE)\n command = ['mv', TEST_FILE, new_path]\n file_operation(path, command)", "def clean_path(file_path):\n\n pass", "def touch_file(name):\n if not os.path.exists(name):\n print(f'File \\'{name}\\' not found. Creating it.')\n with open(name, 'w'):\n pass\n return False\n return True", "def release(self, path, fh):\n\n if fh in self.dirty:\n message = self.dirty[fh][\"message\"]\n should_stage = self.dirty[fh].get(\"stage\", False)\n del self.dirty[fh]\n\n global writers\n writers -= 1\n if should_stage:\n log.debug(\"CurrentView: Staged %s for commit\", path)\n self._stage(add=path, message=message)\n\n log.debug(\"CurrentView: Release %s\", path)\n return os.close(fh)", "def touch(self, filename):\n call(['touch', os.path.join(SAMPLE_PROJECT, filename)])", "def write(self, path):\n try:\n contents = self.file_contents()\n except Exception as e:\n raise e\n\n tmp_hosts_file_path = \"{0}.tmp\".format(path) # Write atomically\n with open(tmp_hosts_file_path, 'w') as tmp_hosts_file:\n tmp_hosts_file.write(contents)\n\n os.rename(tmp_hosts_file_path, path)", "def reten_log(path):\n try:\n file = open(path, 'r+')\n lines = file.readlines()\n if lines > 200:\n file.truncate()\n file.close()\n else:\n file.close()\n except:\n pass", "def reopen(self):\n self.close()\n self._fileobj = os.fdopen(os.open(str(self.path), os.O_CREAT | os.O_RDWR, 384), \"r+b\", 0)", "def delete_tempfile(path):\n try:\n unlink(path)\n except:\n pass", "def heartbeat(self):\n timeout_at = time.time() + self.timeout\n\n try:\n os.utime(self.filename, (timeout_at, timeout_at))\n\n except OSError as err:\n if err.errno == errno.ENOENT:\n _LOGGER.warning('Lost lease file: %r', self.filename)\n self._write(timeout_at)\n else:\n raise", "def move(self, path):\n self.current_location = (path[1][1], path[1][0])", "def open_file(self, now):\n path = now.strftime(self.template)\n if path != self.path:\n if self.file is not None:\n self.file.close()\n self.path = path\n try:\n self.file = open(path, 'ab', 0)\n except IOError as e:\n if e.errno == errno.ENOENT:\n os.makedirs(os.path.dirname(path))\n self.file = open(path, 'ab', 0)\n\n self.update_link()", "def _RemountRootAsReadWrite(self):\n self.RunCmdOnDevice(['mount', '-o', 'remount,rw', '/'])", "def touch(self):\n full_destination_path = os.path.join(\n os.path.expandvars(self.path_destination), self.name\n )\n\n try:\n with open(full_destination_path, \"w\", 
encoding=\"utf-8\") as _file:\n _file.write(self.text)\n message.info(f\"Created file: '{self.name}' at '{self.path_destination}'\")\n except OSError:\n message.error(\n f\"There was a problem creating the file '{self.name}' at '{self.path_destination}'\"\n )\n\n if config.fail_fast:\n sys.exit(1)\n\n message.info(\"Stopping execution temporarily for your evaluation.\")\n\n for i in range(3, 0, -1):\n message.info(f\"Program will continue in {i * 10} seconds...\")\n sleep(config.seconds_to_wait_on_fail)", "def remove_file(path: str) -> None:\n\tremove(path)", "def write(self,path,content):\n file_path = os.path.join( self.directory, path)\n with open(file_path, \"w\") as file:\n file.write( content )", "def delete_path():\n #TODO delete path from database\n pass", "def remove_path_from_disk(path:str):\n try:\n shutil.rmtree(path)\n except Exception as err:\n print(err)", "def touch():\n run('touch %s' % PATH_SEP.join((env.code_root, 'mwana', 'malawi', 'apache',\n 'project.wsgi')))", "def ChangeDir(self, path: str) -> None:\n ...", "def truncate(self):\n for file_name in os.listdir(self.path):\n if file_name[0:4] == 'data':\n os.remove(self.path + '/' + file_name)\n self.current_row = 0", "def rm(self, path: str) -> None:\n self.fs.rm(self._full_path(path))" ]
[ "0.7507206", "0.7477994", "0.7407964", "0.7295061", "0.7234373", "0.7213623", "0.7138664", "0.7133505", "0.6832273", "0.6770861", "0.6743686", "0.6636576", "0.6419507", "0.6384915", "0.6220418", "0.601276", "0.5957383", "0.5900866", "0.5870614", "0.5842674", "0.5808397", "0.5742977", "0.57257974", "0.5664707", "0.56186527", "0.5505547", "0.5490555", "0.5476924", "0.5466743", "0.5458615", "0.54355335", "0.5431119", "0.5418105", "0.54103833", "0.5382122", "0.5318964", "0.53133935", "0.52959126", "0.5271339", "0.5271339", "0.5257574", "0.5249157", "0.52301264", "0.52003264", "0.5172308", "0.5172308", "0.516108", "0.51531583", "0.5146903", "0.5102319", "0.5078358", "0.5076565", "0.50706816", "0.506719", "0.5055321", "0.50332993", "0.50221133", "0.5005294", "0.50049376", "0.49828768", "0.49645007", "0.49346632", "0.49296173", "0.49284893", "0.49278238", "0.49266136", "0.49181408", "0.49164674", "0.49156103", "0.4915367", "0.4911193", "0.49080345", "0.49080345", "0.49080345", "0.49080345", "0.49080345", "0.49046773", "0.49011105", "0.48991466", "0.48937002", "0.4892311", "0.48911503", "0.4869706", "0.48681375", "0.48671466", "0.48660576", "0.48623747", "0.48607403", "0.48595637", "0.48550504", "0.4854527", "0.4850106", "0.48454273", "0.48368806", "0.48289993", "0.48161858", "0.48154712", "0.4807086", "0.48039365", "0.47940457" ]
0.7130963
8
Remove |path| and ignore errors if it doesn't exist.
def unlink(path): try: os.unlink(path) except FileNotFoundError: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove(path):\n try:\n os.remove(path)\n except FileNotFoundError:\n _logger.error('file does not exist %s; stack: %s', path, stack_trace())", "def remove(path):", "def remove(path: str) -> None:\n if Stat.isfile(path):\n Stat.forget(path)\n os.remove(path)\n elif Stat.exists(path):\n Stat.forget(path)\n shutil.rmtree(path)", "def removeIfExists(path):\n try:\n os.remove(path)\n except OSError as e: \n if e.errno != errno.ENOENT: # Continue if the error is \"no such file or directory\"\n raise # Re-raise the exception if a different error occured", "def remove(path):\r\n os.remove(path)", "def remove(self, path):\n os.remove(path)", "def remove(path: str):\n _fs().remove(path)", "def remove(path):\n if os.path.isfile(path):\n try:\n os.remove(path)\n except OSError as e:\n if e.errno != errno.ENOENT:\n raise\n elif os.path.isdir(path):\n try:\n shutil.rmtree(path)\n except FileNotFoundError:\n return", "def RemovePath(path):\n if os.path.isdir(path):\n shutil.rmtree(path, ignore_errors=True)\n return\n try:\n os.remove(path)\n except OSError:\n pass", "def CleanUp(self, path):\n try:\n if os.path.exists(path):\n os.remove(path)\n except (OSError, IOError) as e:\n logging.info(\"Failed to remove temporary file %s. Err: %s\", path, e)", "def remove_file(path: str) -> None:\n\tremove(path)", "def remove(path):\n if os.path.isfile(path):\n os.remove(path) # remove the file\n elif os.path.isdir(path):\n shutil.rmtree(path) # remove dir and all contains\n else:\n raise ValueError(\"file {} is not a file or dir.\".format(path))", "def remove(path):\n if os.path.isfile(path):\n os.remove(path) # remove the file\n elif os.path.isdir(path):\n shutil.rmtree(path) # remove dir and all contains\n else:\n raise ValueError(\"file {} is not a file or dir.\".format(path))", "def remove(path):\n # thanks https://stackoverflow.com/a/41789397\n if os.path.isfile(path) or os.path.islink(path):\n os.remove(path) # remove the file\n elif os.path.isdir(path):\n shutil.rmtree(path) # remove dir and all contains\n else:\n raise ValueError(\"file {} is not a file or dir.\".format(path))", "def remove(path):\n if os.path.isfile(path) or os.path.islink(path):\n os.remove(path) # remove the file\n elif os.path.isdir(path):\n shutil.rmtree(path) # remove dir and all contains\n else:\n raise ValueError(\"file {} is not a file or dir.\".format(path))", "def remove(path):\n if os.path.isfile(path) or os.path.islink(path):\n os.remove(path) # remove the file\n elif os.path.isdir(path):\n shutil.rmtree(path) # remove dir and all contains\n else:\n raise ValueError(\"file {} is not a file or dir.\".format(path))", "def delete(self, path):\n full_path = self._get_full_path(path)\n if os.path.exists(full_path):\n os.remove(full_path)", "def remove(self, *path):\n path = self.localpath(*path)\n if os.path.exists(path + \".info\"):\n try:\n if os.path.isdir(path):\n shutil.rmtree(path)\n elif os.path.isfile(path):\n os.remove(path)\n os.remove(path + \".info\")\n except OSError as ex:\n print(\"Failed to delete\", path, \"due to:\", ex)\n else:\n raise FileNotFoundError", "def rm(self, path: str) -> None:\n self.fs.rm(self._full_path(path))", "def clean_path(path):\n return resolved_path(path)", "def remove(path):\n if os.path.isfile(path):\n os.remove(path) # remove the file\n elif os.path.isdir(path):\n shutil.rmtree(path) # remove dir and all contains\n else:\n print(\" - file {} is not a file or dir.\".format(path))", "def remove(path):\n if os.path.isdir(path):\n return __rmtree(path)\n else:\n return __rmfile(path)", 
"def remove_path_from_disk(path:str):\n try:\n shutil.rmtree(path)\n except Exception as err:\n print(err)", "def rm_f(path):\n try:\n os.unlink(path)\n except OSError as e:\n if e.errno != errno.ENOENT:\n raise", "def remove(self, path):\r\n return self.paths.remove(path)", "def remove(path):\n try:\n os.remove(path)\n return True\n except FileNotFoundError:\n return False", "def removePath(self, path):\n self.pushMode(CLI_MODES.shell)\n output = self.sendCmd(\"rm -rf %s\" % path)\n self.popMode()\n return output", "def delete_file(path):\n if os.path.isfile(path):\n os.remove(path)", "def rm(path):\n try:\n shutil.rmtree(path)\n except Exception as e:\n print(\"* [Error] occured: {}\\n\".format(e))\n else:\n print(\"* Done.\\n\")", "def RemovePath(*path):\n file_path = os.path.join(*path)\n if os.path.exists(file_path):\n if os.path.isdir(file_path):\n RemoveDirectory(file_path)\n else:\n RemoveFile(file_path)", "def _delete_file(path):\n if os.path.isfile(path):\n os.remove(path)", "def _delete_file(path):\n if os.path.isfile(path):\n os.remove(path)", "def remove(self,path):\n path = os.path.join(self.testpath,path)\n if os.path.isfile(path):\n os.remove(path)\n if os.path.isdir(path):\n shutil.rmtree(path)", "def _delete_file(path):\n if os.path.isfile(path):\n os.remove(path)", "def delete_if_present(path, verbose):\n if os.path.isfile(path):\n if verbose:\n print(\"removing\", path)\n os.unlink(path)", "def delete_file(path):\n if os.path.isfile(path):\n os.remove(path)", "def _unlink(path):\n if os.path.isdir(path):\n os.rmdir(path)\n else:\n os.remove(path)", "def unlink(self, path: PathLike):", "def remove_dir(path):\n if os.path.exists(path):\n shutil.rmtree(path)", "def silent_remove(file_path: str):\n try:\n os.remove(file_path)\n except OSError as e:\n if e.errno != errno.ENOENT: # errno.ENOENT = no such file or directory\n raise", "def clean_path(file_path):\n\n pass", "def delete_file(self, path):\n if not path_exists(path, self._store_folder):\n raise NotFoundException(\"\")\n os.remove(path)", "def remove_file(self, path):\n pass", "def remove_path(self, path, quiet=True):\r\n mount_path = self._mount_for_path(path)\r\n try:\r\n del self._mounts[mount_path]\r\n except KeyError, e:\r\n if(quiet):\r\n self.log.warn('%s not in reservation list.' % mount_path)\r\n else:\r\n raise e", "def rm(self, path):\n try:\n basedir, item = os.path.split(path)\n postdata = codecs.encode(json.dumps({ 'baseDir': basedir, 'items': [ item ] }), 'utf-8')\n self._urlopen('/api/fileops/delete', postdata).read()\n except HTTPError as err:\n raise RuntimeError(\"Unable to delete '{}'\".format(path))", "def RemoveFile(*path):\n file_path = os.path.join(*path)\n try:\n os.remove(file_path)\n except OSError, e:\n if e.errno != errno.ENOENT:\n raise", "def _remove(path, force):\n if not os.path.exists(path):\n return\n elif os.path.isfile(path) and force:\n os.remove(path) # remove the file\n elif os.path.isdir(path) and force:\n import shutil\n shutil.rmtree(path) # remove dir and all contains\n else:\n print('Logdir contains data. 
Please, set `force` flag to overwrite it.')\n import sys\n sys.exit(0)", "def delete_path():\n #TODO delete path from database\n pass", "def remove_output(path: str) -> None:\n try:\n Stat.remove(path)\n global remove_empty_directories # pylint: disable=invalid-name\n while remove_empty_directories.value:\n path = os.path.dirname(path)\n Stat.rmdir(path)\n Logger.file(f\"Remove the empty directory: {path}\")\n except OSError:\n pass", "def del_files_from_disk(path):\n\n shutil.rmtree(path) #,ignore_errors=True)", "def rm(self, path):\n self._log_command(['rm', '-rf', path])\n if not self.dryrun:\n try:\n shutil.rmtree(path)\n except Exception as e:\n print(\"Failed to delete dir {}: {}\".format(path, e))", "def rm_with_error(a_path):\r\n e = \"\"\r\n if os.path.isfile(a_path):\r\n try:\r\n os.remove(a_path)\r\n except Exception as e:\r\n pass\r\n elif os.path.isdir(a_path):\r\n try:\r\n shutil.rmtree(a_path)\r\n except Exception as e:\r\n pass\r\n if e:\r\n say_it(e)\r\n return 1", "def rm_with_error(a_path):\r\n e = \"\"\r\n if os.path.isfile(a_path):\r\n try:\r\n os.remove(a_path)\r\n except Exception as e:\r\n pass\r\n elif os.path.isdir(a_path):\r\n try:\r\n shutil.rmtree(a_path)\r\n except Exception as e:\r\n pass\r\n if e:\r\n say_it(e)\r\n return 1", "def remove(self, path):\n path = path.decode('utf8')\n cursor = self._dbcon.cursor()\n filename = os.path.basename(path)\n dirname = os.path.dirname(path)\n t = (dirname, filename)\n sql = u\"delete from books where path = ? and filename = ?\"\n cursor.execute(sql, t)\n self._dbcon.commit()\n cursor.close()", "def rmdir(path: str) -> None:\n Stat.forget(path)\n os.rmdir(path)", "def rmrf(path: str):\n if os.path.isdir(path) and not os.path.islink(path):\n shutil.rmtree(path)\n else:\n try:\n os.remove(path)\n except OSError:\n pass", "def delete_file_if_exists(self, file_path):\r\n # noinspection PyBroadException,PyPep8\r\n try:\r\n os.remove(file_path)\r\n except:\r\n pass", "def _clean_directory(path):\n\n if os.path.exists(path):\n shutil.rmtree(os.path.join(path))\n os.mkdir(path)", "def rm_rf(path):\n try:\n if islink(path) or isfile(path):\n # Note that we have to check if the destination is a link because\n # exists('/path/to/dead-link') will return False, although\n # islink('/path/to/dead-link') is True.\n os.unlink(path)\n elif isdir(path):\n shutil.rmtree(path)\n except (OSError, IOError):\n pass", "def forget(path: str) -> None:\n path = clean_path(path)\n index = Stat._cache.bisect_left(path)\n while index < len(Stat._cache):\n index_path = Stat._cache.keys()[index]\n if os.path.commonpath([path, index_path]) != path:\n return\n Stat._cache.popitem(index)", "def delete(self, path):\n path = path.strip(\"/\")\n if not path:\n raise HTTPError(400, \"Can't delete root\")\n self.delete_file(path)\n self.checkpoints.delete_all_checkpoints(path)", "def rmdir(self, path):\n os.rmdir(path)", "def uninstall(path):\n try:\n path = os.path.join(install_dir, path)\n if os.path.isfile(path) or os.path.islink(path):\n os.remove(path)\n elif os.path.isdir(path):\n shutil.rmtree(path)\n else:\n return\n print 'Removed', path\n except Exception:\n print 'Could not remove', path", "def delete_tempfile(path):\n try:\n unlink(path)\n except:\n pass", "def remove_dir_without_error(a_path):\r\n if not os.path.isdir(a_path):\r\n return\r\n for foo in os.listdir(a_path):\r\n abs_foo = os.path.join(a_path, foo)\r\n if os.path.isfile(abs_foo):\r\n try:\r\n os.remove(abs_foo)\r\n except Exception:\r\n continue\r\n else:\r\n 
remove_dir_without_error(abs_foo)\r\n try:\r\n shutil.rmtree(a_path)\r\n except Exception:\r\n return", "def remove_dir_without_error(a_path):\r\n if not os.path.isdir(a_path):\r\n return\r\n for foo in os.listdir(a_path):\r\n abs_foo = os.path.join(a_path, foo)\r\n if os.path.isfile(abs_foo):\r\n try:\r\n os.remove(abs_foo)\r\n except Exception:\r\n continue\r\n else:\r\n remove_dir_without_error(abs_foo)\r\n try:\r\n shutil.rmtree(a_path)\r\n except Exception:\r\n return", "def rm(path):\n abs_path = navigate.get_abs_path(path)\n parent, name = navigate.split_path(abs_path)\n access_token = db.get_access_to_file(parent, name)\n if access_token is not None:\n dbox_path = '/' + name\n client = dropbox.client.DropboxClient(access_token)\n client.file_delete(dbox_path)\n db.remove_file(access_token, parent, name)", "def removeFile( self, path ):\n res = self.__checkArgumentFormat( path )\n if not res['OK']:\n return res\n urls = res['Value']\n if not len( urls ) > 0:\n return S_ERROR( \"DIPStorage.removeFile: No surls supplied.\" )\n successful = {}\n failed = {}\n serviceClient = RPCClient( self.url )\n for url in urls:\n gLogger.debug( \"DIPStorage.removeFile: Attempting to remove %s.\" % url )\n res = serviceClient.remove( url, '' )\n if res['OK']:\n successful[url] = True\n else:\n failed[url] = res['Message']\n resDict = {'Failed':failed, 'Successful':successful}\n return S_OK( resDict )", "def unwatch(self, path):\n path_obj = Path(path)\n if not path_obj.exists():\n raise FileObserverException(\"Can not unwatch non exist path\")\n parent_path = str(path_obj.parent)\n child_paths = self._watch_dog_observed_paths.get(parent_path, [])\n if path in child_paths:\n child_paths.remove(path)\n self._observed_paths.pop(path, None)\n if not child_paths:\n self._watch_dog_observed_paths.pop(parent_path, None)\n if self._observed_watches[parent_path]:\n self._observer.unschedule(self._observed_watches[parent_path])\n self._observed_watches.pop(parent_path, None)", "def remove(self, path):\n self.__remove.append(path)\n return self", "def _delete_data (self, path):\n head, tail = os.path.split(path)\n for subdir, dirs, files in os.walk(head):\n for file in files:\n if tail in file:\n os.remove(os.path.join(subdir, file))", "def _cleanup_path(path):\n return string.join(filter(None, string.split(path, '/')), '/')", "def rm_path():\n shutil.rmtree(options.input_path)", "def delete_file(path):\n return files.delete_file(path)", "def remove(path: Path, verbose: int = 0, interactive: bool = False, dry_run: bool = False, check: bool = False) -> Path:\n verbose = max(int(verbose or 0), 0)\n check = False if check is None else check\n path = path.expanduser().resolve().absolute()\n if not validate_environment(path) and check is True:\n raise InvalidEnvironmentError(path=path)\n run_command = click.confirm(f'Remove {terminal.yellow(str(path))}?') == 'y' if interactive else True\n if run_command and not dry_run:\n if path.exists():\n shutil.rmtree(path)\n remove_venv_config(name=path.name)\n elif check is True:\n raise PathNotFoundError(path=path)\n terminal.echo(f'{terminal.blue(\"Removed\")}: {terminal.green(path)}', verbose=verbose)\n return path", "def deleteDir(self, path):\n\n # remove directory even if it has files\n shutil.rmtree(path, ignore_errors=True)", "def remove(self):\n path = os.path.abspath(path)\n if path in self.files:\n del self.files[path]\n return True\n return False", "def clean_folder(path):\n if not os.path.exists(path):\n os.makedirs(path)\n else:\n shutil.rmtree(path)\n 
os.makedirs(path)", "def remove_device(self, path):\n pass", "def remove_by_path(self, path):\n if path.startswith(collection.Collection.CONTENT_PATH):\n if path.endswith(\n '/{}'.format(collection.Collection.BLUEPRINT_PATH)):\n # If this is a blueprint then remove the entire collection.\n col_path = path[len(collection.Collection.CONTENT_PATH):]\n # Get just the directory.\n col_path = os.path.split(col_path)[0]\n collection_path = col_path[1:] # Remove /\n with self._lock:\n if collection_path in self._cache:\n del self._cache[collection_path]\n else:\n # Search for an existing collection path.\n col_path = path[len(collection.Collection.CONTENT_PATH):]\n col_path = os.path.split(col_path)[0]\n while col_path != os.sep:\n collection_path = col_path[1:]\n with self._lock:\n if collection_path in self._cache:\n # Do a 'wildcard' match on the path to remove all\n # locales.\n generic_key = CollectionCache.generate_cache_key(\n path, '')\n for key in self._cache[collection_path]['docs'].keys():\n if key.startswith(generic_key):\n del self._cache[\n collection_path]['docs'][key]\n return\n col_path = os.path.split(col_path)[0]", "def purge_workflow_file(path):\n logger = fsurfer.log.get_logger()\n if not os.path.exists(path):\n return True\n try:\n if os.path.isfile(path):\n os.unlink(path)\n elif os.path.isdir(path):\n os.rmdir(path)\n return True\n except OSError as e:\n logger.exception(\"Exception: {0}\".format(str(e)))\n return False", "def delete_folder(self, path):\n if not path_exists(path, self._store_folder):\n raise NotFoundException(\"\")\n rmdir(path)", "def _remove_temp_path():\n if os.path.exists(_temp_path):\n if os.path.isdir(_temp_path):\n def onerror(function, path, excinfo):\n persist.printf(\"{}: Unable to delete '{}' while cleaning up temporary directory\"\n .format(p_name, path))\n import traceback\n traceback.print_exc(*excinfo)\n import shutil\n shutil.rmtree(_temp_path, onerror=onerror)\n else:\n persist.printf(\"{}: For some reason, '{}' is a file. 
Removing...\"\n .format(p_name, _temp_path))\n os.remove(_temp_path)", "def rmdir(self, path):\n self.log.debug(\"Local rmdir: %s\", path)\n shutil.rmtree(path)", "def delete_local_file(file_path):\r\n try:\r\n os.remove(file_path)\r\n except OSError as e:\r\n print(f\"Error deleting file {file_path}: {e}\")", "def refresh(path):\n if os.path.exists(path):\n os.remove(path)\n return path", "def delete(self, path):\n if path in self.sorted_checkpoints():\n os.remove(os.path.join(self.root, path))\n else:\n log.warning(\"Trying to delete a checkpoint that does not exists.\")", "def clean(path=None):\n conf.load(path)\n logger.info('cleaning output...')\n helpers.rmdir(conf.get('build_path'))\n logger.info('done')", "def delete(path, recursive=False):\n fs.delete(path, recursive)", "def removeDirectory(path, ignore_errors):\n\n def onError(func, path, exc_info):\n # Try again immediately, ignore what happened, pylint: disable=unused-argument\n try:\n func(path)\n except OSError:\n time.sleep(0.1)\n\n func(path)\n\n with withFileLock(\"removing directory %s\" % path):\n if os.path.exists(path):\n try:\n shutil.rmtree(path, ignore_errors=False, onerror=onError)\n except OSError:\n if ignore_errors:\n shutil.rmtree(path, ignore_errors=ignore_errors)\n else:\n raise", "def safe_rm(path_to_rm):\n # just return if path doesn't exist\n if not os.path.exists(path_to_rm):\n return\n # handle directory\n if os.path.isdir(path_to_rm):\n files_to_rm = [f'{path_to_rm}/{fname}' for fname in os.listdir(path_to_rm)]\n dir_to_rm = path_to_rm\n else:\n files_to_rm = [path_to_rm]\n dir_to_rm = None\n # clear out files\n for file_to_rm in files_to_rm:\n if os.path.isfile(file_to_rm) and os.path.basename(file_to_rm) in REMOVABLE_PATHS:\n os.remove(file_to_rm)\n assert not os.path.exists(file_to_rm), f'Error removing: {file_to_rm}'\n # clear out directory\n if dir_to_rm is not None and os.path.isdir(dir_to_rm):\n os.rmdir(dir_to_rm)\n assert not os.path.exists(dir_to_rm), f'Error removing: {dir_to_rm}'", "async def rm(path: str):\n _ = path.strip('/').split('/')\n bucket = _[0]\n key = '/'.join(_[1:])\n if path.endswith('/'):\n key += '/'\n async with _create_client() as client:\n try:\n await client.delete_object(Bucket=bucket, Key=key)\n logger.info(f'Delete file \"{path}\" from bucket.')\n except ClientError:\n pass", "def delete(self, path):\n \n try:\n self._client.remove(self._getEncodedUri(path), force=True)\n except ClientError, error:\n raise SubversionError(error)\n else:\n self._sharedState.removeFromCache(path)", "def _delete_path_unsafe(target_path: str):\n if os.path.exists(target_path):\n if os.path.isdir(target_path):\n shutil.rmtree(target_path)\n else:\n os.remove(target_path)\n return True\n return False", "def rm_rf(path, dry_run=False):\n log.info(\"removing %s\" % path)\n if dry_run:\n return\n try:\n if os.path.isdir(path) and not os.path.islink(path):\n shutil.rmtree(path)\n else:\n os.remove(path)\n except OSError:\n pass", "def remove_log_path(self, monitor_name, log_path):\n pass", "def rmdir(self, path: PathLike):", "def _delete_file(self, path):\n if not self.mount():\n return False\n uri = self.path_to_uri(path)\n return self.gvfs.delete_file(uri)", "def clean_file_path(path):\r\n\r\n return path.split(\"/\")[-1]", "def remove_file(path):\n pyCMD('hdfs', ['dfs', '-rm', '-skipTrash', path]).execute()" ]
[ "0.80879766", "0.79950017", "0.79609793", "0.7946199", "0.7846539", "0.77772456", "0.77729934", "0.7742355", "0.7642514", "0.76163965", "0.7585357", "0.74620676", "0.74620676", "0.74493706", "0.7417251", "0.7417251", "0.7414736", "0.7359514", "0.7351339", "0.7341862", "0.7340705", "0.7316281", "0.73135793", "0.7274795", "0.72694635", "0.72654384", "0.7259596", "0.72441435", "0.7223323", "0.72218955", "0.72002107", "0.72002107", "0.71792907", "0.7178913", "0.71263975", "0.70768577", "0.7069611", "0.7053394", "0.7046411", "0.704176", "0.70031965", "0.697801", "0.69651395", "0.69599223", "0.6939021", "0.6923394", "0.69118065", "0.68973917", "0.6857027", "0.68295455", "0.682762", "0.68067366", "0.68067366", "0.6799098", "0.6790489", "0.67849165", "0.6780096", "0.67597777", "0.67287266", "0.6696001", "0.66943663", "0.6675265", "0.6674626", "0.66586846", "0.665861", "0.665861", "0.66366476", "0.66344875", "0.6626144", "0.6620977", "0.66189575", "0.6606266", "0.6602773", "0.6597676", "0.65877765", "0.6585945", "0.65635145", "0.65260804", "0.6521451", "0.6452375", "0.64347947", "0.6427298", "0.64238113", "0.64208376", "0.6419642", "0.64105153", "0.64037824", "0.63911366", "0.6374228", "0.6356771", "0.63464206", "0.6337191", "0.6332578", "0.63192326", "0.63007444", "0.6297847", "0.6291233", "0.6271002", "0.62690544", "0.6265751" ]
0.7559851
11
Always symlink |path| to a relativized |target|.
def symlink(target, path): unlink(path) path = os.path.realpath(path) target = os.path.relpath(os.path.realpath(target), os.path.dirname(path)) logging.info('Symlinking %s -> %s', path, target) os.symlink(target, path)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def symlink(path, v=False):\r\n if not os.path.exists(path):\r\n err(path + ' : no such file or directory')\r\n elif not os.path.isdir(path):\r\n err(path + ' : not a directory')\r\n else:\r\n theme_name = os.path.basename(os.path.normpath(path))\r\n theme_path = os.path.join(_THEMES_PATH, theme_name)\r\n if os.path.exists(theme_path):\r\n err(path + ' : already exists')\r\n else:\r\n if v:\r\n print(\"Linking `{p}' to `{t}' ...\".format(p=path, t=theme_path))\r\n try:\r\n os.symlink(path, theme_path)\r\n except Exception as e:\r\n err(\"Cannot link `{p}' to `{t}':\\n{e}\".format(p=path, t=theme_path, e=str(e)))", "def force_symlink(target_path, link_location):\n\n pardir = os.path.dirname(link_location)\n if not os.path.exists(pardir):\n os.makedirs(pardir)\n\n if os.path.lexists(link_location):\n assert os.path.islink(link_location), \\\n \"The path {} exists but is not a symlink\".format(link_location)\n if os.readlink(link_location) != target_path:\n os.remove(link_location)\n os.symlink(target_path, link_location)\n else:\n os.symlink(target_path, link_location)", "def symlink(source, target):\n source, target = map(os.path.expanduser, (source, target))\n print(\"Will symlink %s to %s\" % (source, target))\n\n if os.path.exists(target):\n if os.path.islink(target) and os.path.realpath(target) == source:\n logging.info(\"%s exists\" % target)\n return\n\n backup = target + \".old\"\n\n if os.path.exists(backup):\n raise Exception(\"Can't backup to %s: file already exists.\" % backup)\n\n shutil.move(target, backup)\n\n else:\n os.symlink(source, target)\n logging.info(\"%s symlinked to %s\" % (source, target))", "def symlink(source, target, use_sudo=True):\n\n # Some older versions of Fabric do not have the is_link method \n try:\n from fabric.contrib.files import is_link\n is_a_link = is_link(target)\n except ImportError:\n with settings(hide(\"everything\"), warn_only=True):\n if run(\"test -L \"+target).failed:\n\t is_a_link = False\n else:\n is_a_link = True\n\n if not is_a_link:\n cmd = \"ln -s \"+source+\" \"+target\n if use_sudo:\n sudo(cmd)\n else:\n run(cmd)", "def force_symlink(target, name):\n makedirs(os.path.dirname(name))\n try:\n os.symlink(target, name)\n except OSError as e:\n if e.errno == errno.EEXIST:\n os.remove(name)\n os.symlink(target, name)", "def symlink(origin, target):\n # Skip anything in the home directory if the user is admin\n if user_is_admin() and not args.root and check_contain_home_dir(target):\n print(highlight_colour(\"'%s'\") % str(target) +\n warning_colour(\" is inside of home folder. 
Skipping...\"))\n raise StopTraversing(\"Skipping.\")\n\n # Check for a broken symlink, if true: prompt for replacement.\n # This is done to avoid having any broken symlinks lingering.\n if is_broken_symlink(target):\n if args.yes or prompt(origin, target, \"remove\"):\n target.unlink()\n else:\n return\n\n if args.replace:\n replace_symlink(origin, target)\n elif args.remove:\n remove_symlink(origin, target)\n else:\n create_symlink(origin, target)", "def ln(src, dst):\n os.symlink(src, dst)", "def attempt_symlink_to(path: str, to_path: str) -> None:\n try:\n Path(path).symlink_to(Path(to_path))\n except OSError:\n pytest.skip(\"could not create symbolic link\")", "def symlink_target(pth):\n\n if os.path.islink(pth):\n return os.readlink(pth)\n return pth", "def absolute_symlink(\n source_path: str,\n dest_path: str\n):\n os.symlink(os.path.abspath(source_path), dest_path)", "def symlink(self, filen, link):\n src = os.path.abspath(filen)\n cwd = self.getWorkingDirectory()\n dest = os.path.join(cwd, link)\n os.symlink(os.path.relpath(src, cwd), dest)", "def symlink(self, filen, link):\n src = os.path.abspath(filen)\n cwd = self.getWorkingDirectory()\n dest = os.path.join(cwd, link)\n os.symlink(os.path.relpath(src, cwd), dest)", "def link(target, link_name):\n src = os.path.abspath(target)\n dst = os.path.abspath(link_name)\n os.symlink(src, dst)", "def symlink(self, src, dst):\n return os.symlink(src, dst)", "def create_symbolic_link(file, target):\n try:\n os.symlink(file, target)\n except NotImplementedError:\n logger.critical(\"Symbolic links not supported on this platform\")\n raise\n except OSError:\n logger.critical(\"Not sufficient permissions\")\n raise", "def makeLinks(self, source, target):\n\n if os.path.exists(target): os.unlink(target)\n os.symlink(source, target)", "def _makeSymlink ( target, source, env ) :\n if len(target) != 1 :\n fail ( \"unexpected number of targets for symlink: \"+str(target) )\n if len(source) != 1 :\n fail ( \"unexpected number of sources for symlink: \"+str(source) )\n\n target = str(target[0])\n source = str(source[0].abspath)\n trace ( \"Executing symlink `%s' -> `%s'\" % ( target, source ), \"makeSymlink\", 3 )\n\n os.symlink ( source, target )", "def link(self):\n\n if self.path_source is not None:\n full_source_path = os.path.join(\n os.path.expandvars(self.path_source), self.name\n )\n full_destination_path = os.path.join(\n os.path.expandvars(self.path_destination), self.name\n )\n\n try:\n if self.sudo:\n spawn.process(\n f'ln -sfv \"{full_source_path}\" \"{full_destination_path}\"',\n sudo=True,\n )\n else:\n os.symlink(full_source_path, full_destination_path)\n except FileExistsError:\n message.error(\n \"Can't symlink, file already exists at destination. 
Attempting fix.\"\n )\n os.remove(full_destination_path)\n message.info(f\"Removed: '{full_destination_path}'\")\n os.symlink(full_source_path, full_destination_path)\n finally:\n message.info(\n f\"Symlink created: '{full_source_path}' <--> '{full_destination_path}'\"\n )\n else:\n message.error(\n f\"'{self.name}' has no source from which to create a link from.\"\n )", "def update_link(self):\n try:\n relpath = os.path.relpath(self.path, os.path.dirname(self.link_path))\n os.symlink(relpath, self.link_path)\n except OSError as e:\n if e.errno == errno.EEXIST:\n os.unlink(self.link_path)\n os.symlink(self.path, self.link_path)", "def _symlink(conf, devname, label, remove=False):\n return\n\n linkpath = conf.get('symlink')\n if linkpath:\n linkpath = expanduser(linkpath)\n if lexists(linkpath):\n os.unlink(linkpath)\n if not remove:\n # TODO: handle path errors\n os.symlink(get_mount_target(devname, label), linkpath)", "def _symlink(source, link_name):\n flags = 0\n\n if source is not None and os.path.isdir(source):\n flags = 1\n\n CreateSymbolicLinkW(link_name, source, flags)", "def force_symlink(src, dst):\n try:\n os.unlink(dst)\n os.symlink(src, dst)\n except OSError:\n os.symlink(src, dst)", "def create_symlink(src, dest):\n sudo('ln -s {} {}'.format(src, dest))", "def fix_link(hook, target_link):\n if os.path.exists(hook):\n os.unlink(hook)\n os.symlink(target_link, hook)", "def relink(path, Arg = (None, True, False)):\n if not os.path.islink(path): return\n\n exps = Arg[0]\n debuginfo = Arg[1]\n v = Arg[2]\n\n path = os.path.normpath(path)\n s = os.readlink(path)\n snorm = os.path.normpath(s)\n p = os.path.join(PROJ_SRC, path)\n hatpath = os.path.join(PROJ_HAT, path)\n\n if snorm.startswith(PROJ_SRC + os.sep):\n srcpath = snorm[len(PROJ_SRC + os.sep):]\n\n pathl = path.split(os.sep)\n srcpathl = srcpath.split(os.sep)\n head = commonhead(pathl, srcpathl)\n\n if len(pathl) > len(head) + 1 or \\\n len(pathl) == len(head) + 1 and len(srcpathl) > len(head):\n # pathl: o o o a b # pathl: o o o a\n # srcpathl: o o o c d e # srcpathl: o o o c d e\n # head: o o o or # head: o o o\n # --------------------- # ---------------------\n # src: ../c/d/e # src: c/d/e\n srcl = [os.pardir for i in xrange(len(pathl) - 1 - len(head))] + srcpathl[len(head):]\n src = os.path.join(*srcl)\n elif len(pathl) == len(head) + 1 and len(srcpathl) == len(head):\n # pathl: o o o a\n # srcpathl: o o o\n # head: o o o\n # ---------------------\n # src: .\n src = os.curdir\n if v: print >> sys.stderr, 'detected symlink to current directory', `hatpath`, '->', `src`\n elif len(pathl) == len(head):\n src = os.path.join(*srcpathl[len(head) - 1:])\n if len(srcpathl) == len(head):\n # pathl: o o a\n # srcpathl: o o a\n # ---------------------\n # src: a\n if v: print >> sys.stderr, 'detected symlink to itself', `hatpath`, '->', `src`\n else:\n # pathl: o o a\n # srcpathl: o o a c\n # ---------------------\n # src: a/c\n if v: print >> sys.stderr, 'detected too many levels of symlinks', `hatpath`, '->', `src`\n else:\n print >> sys.stderr, 'detected UNFORESEEN', `path`, '->', `srcpath`\n return\n\n _srcpath = os.path.normpath(os.path.join(os.path.dirname(path), src))\n assert srcpath == _srcpath, '%s:\\n%s not equal to %s' % (path, `srcpath`, `_srcpath`)\n\n os.remove(path)\n if os.path.isfile(srcpath) or os.path.isdir(srcpath):\n try:\n os.symlink(src, path)\n except (IOError, os.error), why:\n print >> sys.stderr, 'Cannot symlink %s -> %s: %s' % (`hatpath`, `src`, str(why))\n else:\n if v: print 'symlinked', `hatpath`, 
'->', `src`\n else:\n if os.path.isfile(s):\n print >> sys.stderr, 'missing:', hatpath, '->', src\n try:\n shutil.copy2(s, path)\n except (IOError, os.error), why:\n print >> sys.stderr, 'Cannot copy %s -> %s: %s' % (`s`, `hatpath`, str(why))\n else:\n if v: print >> sys.stderr, 'copied', `s`, '->', `hatpath`\n elif os.path.isdir(s):\n print >> sys.stderr, 'missing:', hatpath, '->', src\n try:\n os.makedirs(srcpath)\n except (IOError, os.error), why:\n print >> sys.stderr, 'Cannot create directory %s: %s' % (`os.path.join(PROJ_HAT, srcpath)`, str(why))\n else:\n if v: print >> sys.stderr, 'created directory', `os.path.join(PROJ_HAT, srcpath)`\n try:\n os.symlink(src, path)\n except (IOError, os.error), why:\n print >> sys.stderr, 'Cannot symlink %s -> %s: %s' % (`hatpath`, `src`, str(why))\n else:\n if v: print 'symlinked', `hatpath`, '->', `src`\n else:\n print >> sys.stderr, 'dangling:', p, '->', s\n if v: print >> sys.stderr, 'removed', `hatpath`\n# elif os.path.normpath(os.path.join(os.path.dirname(p), s)).startswith(PROJ_SRC + os.sep):\n else:\n srcpath = os.path.normpath(os.path.join(os.path.dirname(p), s))\n# os.path.normpath(os.path.join(os.path.dirname(p), s)).startswith(PROJ_SRC + os.sep):\n if srcpath.startswith(PROJ_SRC + os.sep):\n if os.path.isfile(path) or os.path.isdir(path):\n if v: print 'relative:', hatpath, '->', s\n else:\n if os.path.isfile(p) or os.path.isdir(p):\n print >> sys.stderr, 'missing:', hatpath, '->', s\n else:\n print >> sys.stderr, 'dangling:', p, '->', s\n os.remove(path);\n if v: print >> sys.stderr, 'removed', `hatpath`\n else:\n if os.path.isfile(p) or os.path.isdir(p):\n if exps:\n dst = exps.destination(srcpath)\n if dst:\n os.remove(path)\n if not dst[1] or debuginfo:\n # if not dst[1] or DEBUGINFO == 'yes' or MODE == 'dbg':\n upl = [os.pardir for i in xrange(len(hatpath.split(os.sep)) - 1)]\n src = os.path.join(os.path.join(*upl), dst[0])\n try:\n os.symlink(src, path)\n except (IOError, os.error), why:\n print >> sys.stderr, 'Cannot symlink %s -> %s: %s' % (`hatpath`, `src`, str(why))\n else:\n if v: print 'symlinked', `hatpath`, '->', `src`\n else:\n print 'debuginfo:', hatpath, '->', s\n if v: print 'removed', `hatpath`\n else:\n print >> sys.stderr, 'not_exported:', srcpath\n os.remove(path);\n if v: print >> sys.stderr, 'removed', `hatpath`, '->', `s`\n else:\n print >> sys.stderr, 'external:', hatpath, '->', s\n os.remove(path);\n if v: print >> sys.stderr, 'removed', `hatpath`\n else:\n print >> sys.stderr, 'dangling:', p, '->', s\n os.remove(path);\n if v: print >> sys.stderr, 'removed', `hatpath`", "def symlink_p(src, dst):\n try:\n os.symlink(src, dst)\n except OSError as exc: # Python >2.5\n if exc.errno == errno.EEXIST and os.path.islink(dst):\n if os.path.realpath(dst) == os.path.realpath(src):\n pass\n else:\n print('%s is a link already pointing to %s' % (dst, os.path.realpath(dst)), file=sys.stderr)\n else:\n raise", "def symlink(self, name, source, linkname):\n self._assert_absolute_path_or_placeholder(source)\n self._assert_absolute_path_or_placeholder(linkname)\n self._run(name, ['symlink', source, linkname])\n self.m.path.mock_copy_paths(source, linkname)", "def symlink_force(target: str, link_name: str):\n\n # os.replace() may fail if files are on different filesystems\n link_dir = os.path.dirname(link_name)\n\n while True:\n temp_link_name = tempfile.mktemp(dir=link_dir)\n try:\n os.symlink(target, temp_link_name)\n break\n except FileExistsError:\n pass\n try:\n os.replace(temp_link_name, link_name)\n except OSError: # e.g. 
permission denied\n os.remove(temp_link_name)\n raise", "def symlink_force(source, link_name):\n try:\n os.symlink(source, link_name)\n except OSError as e:\n if e.errno == errno.EEXIST:\n os.remove(link_name)\n os.symlink(source, link_name)", "def _follow_symlinks(filepath):\n filepath = os.path.abspath(filepath)\n while os.path.islink(filepath):\n filepath = os.path.normpath(\n os.path.join(os.path.dirname(filepath), os.readlink(filepath)))\n return filepath", "def make_symlink(dbconfig, targ):\n if \"latest\" in dbconfig and not dbconfig[\"latest\"]:\n return\n link = re.sub(r'[0-9]+', 'latest', targ)\n try:\n os.symlink(targ, link)\n info(\"create link \" + link + \" --> \" + targ)\n except OSError as e:\n if e.errno == errno.EEXIST:\n os.remove(link)\n os.symlink(targ, link)\n info(\"move link \" + link + \" --> \" + targ)", "def update_link(self, target, dest):\n if not target:\n self.remove_link(dest)\n return\n\n reltarget = os.path.relpath(\n target, os.path.join(self.dirname, os.path.dirname(dest)))\n\n for link in self.runscript.links:\n if link[1] == dest:\n link[0] = reltarget\n break\n else:\n self.runscript.add_link(reltarget, dest)", "def relink(f):\n if os.path.islink(f):\n linkto = os.path.join(NEW_LINK_BASE, os.path.basename(os.readlink(f)))\n #print 'Relinking %s-> %s from \\n %s' % (f, linkto, os.readlink(f))\n #print 'removing %s' % f\n os.remove(f)\n os.symlink(linkto, f)", "def move_and_symlink_file(file_path: Path):\n assert (\n file_path.is_file()\n and not file_path.is_symlink()\n and user_home in file_path.parents\n )\n\n original_path = file_path\n new_path = translate_home_path(original_path)\n\n print(f\"Moving: {original_path} -> {new_path}\")\n if not new_path.parent.exists():\n new_path.parent.mkdir(mode=HOME_DIRECTORY_MODE, parents=True)\n move(str(original_path), str(new_path))\n\n print(f\"Creating Symlink: {original_path} -> {new_path}\")\n original_path.symlink_to(new_path)", "def mksymlinkto(self, value, absolute=1):\n if absolute:\n error.checked_call(os.symlink, str(value), self.strpath)\n else:\n base = self.common(value)\n # with posix local paths '/' is always a common base\n relsource = self.__class__(value).relto(base)\n reldest = self.relto(base)\n n = reldest.count(self.sep)\n target = self.sep.join((\"..\",) * n + (relsource,))\n error.checked_call(os.symlink, target, self.strpath)", "def symlink(source, link_name):\n os_symlink = getattr(os, \"symlink\", None)\n if callable(os_symlink):\n os_symlink(source, link_name)\n else:\n import ctypes\n csl = ctypes.windll.kernel32.CreateSymbolicLinkW\n csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)\n csl.restype = ctypes.c_ubyte\n flags = 1 if os.path.isdir(source) else 0\n if csl(link_name, source, flags) == 0:\n raise ctypes.WinError()", "def link(from_dir,to_dir,report=False):\n # The paths in to_dir that we will be symlinking to.\n to_paths = get_target_paths(to_dir,report)\n \n # Dictionary of symlinks we will be creating, from_path->to_path\n symlinks = {}\n for to_path in to_paths:\n to_directory, to_filename = os.path.split(to_path)\n # Change leading underscores to leading dots. \n if to_filename.startswith('_'):\n from_filename = '.' 
+ to_filename[1:]\n else:\n from_filename = to_filename\n # Remove hostname specifiers.\n parts = from_filename.split(HOSTNAME_SEPARATOR)\n assert len(parts) == 1 or len(parts) == 2\n from_filename = parts[0]\n from_path = os.path.join(from_dir,from_filename) \n symlinks[from_path] = to_path\n\n # Attempt to create the symlinks that don't already exist.\n for from_path,to_path in symlinks.items(): \n # Check that nothing already exists at from_path.\n if os.path.islink(from_path):\n # A link already exists.\n existing_to_path = os.readlink(from_path)\n existing_to_path = os.path.abspath(os.path.expanduser(existing_to_path))\n if existing_to_path == to_path:\n # It's already a link to the intended target. All is\n # well.\n continue\n else:\n # It's a link to somewhere else.\n print from_path+\" => is already symlinked to \"+existing_to_path\n elif os.path.isfile(from_path):\n print \"There's a file in the way at \"+from_path\n elif os.path.isdir(from_path):\n print \"There's a directory in the way at \"+from_path\n elif os.path.ismount(from_path):\n print \"There's a mount point in the way at \"+from_path\n else:\n # The path is clear, make the symlink.\n if report:\n print 'link would make symlink: %s->%s' % (from_path,to_path)\n else:\n print 'Making symlink %s->%s' % (from_path,to_path)\n os.symlink(to_path,from_path)", "def symlink(self, req, link, parent, name):\r\n self.reply_err(req, EROFS)", "def symlink():\n releases()\n env.current_path = '/root/your_project/current'\n run('rm %(current_path)s' % env)\n run('ln -s %(current_release)s %(current_path)s' % env)", "def symlink(timestamp):\n if exists(env.current_dir):\n run('rm -r %(current_dir)s' % env)\n run('ln -s %s %s' % (os.path.join(env.releases_dir, timestamp), env.current_dir))", "def _sync_symlink(self, binary_name, link_to):\n\n # The symlink we are creating:\n link_path = os.path.join(self.bin_dir, binary_name)\n\n # The expected file we should be linking to:\n link_dest = os.path.join(self.bin_dir, link_to)\n\n if not os.path.exists(link_path) or \\\n not os.path.islink(link_path) or \\\n os.path.realpath(link_path) != os.path.realpath(link_dest):\n if os.path.exists(link_path):\n os.remove(link_path)\n os.symlink(link_to, os.path.join(self.bin_dir, binary_name))\n self.output.append(\"Symlinked %s to %s.\" % (link_path, link_dest))\n self.changed = True", "def ln_overwrite(src, dest):\n if exists(dest, use_sudo=True):\n sudo(\"rm %s && ln -s %s %s\" % (dest, src, dest))\n else:\n sudo(\"ln -s %s %s\" % (src, dest))", "def _set_target_symlinks(self):\n try:\n job_id = self.active_queue[0][\"id\"]\n tgt_dir = self.config[\"target\"][\"directory\"]\n for t in [\"objects\", \"config\"]:\n src = self.active_queue[0][t + \"_filename\"]\n tgt = os.path.join(tgt_dir, self.config[\"target\"][t])\n # force symlink creation\n add_symlink(src, tgt, True)\n self.logger.debug(\"[active/%s] set symlink: '%s' -> '%s'\"\n % (job_id, tgt, src))\n\n except KeyError:\n raise ProfileKeyError(\"no value for target.directory\")", "def force_link(src, dst):\n try:\n os.unlink(dst)\n os.link(src, dst)\n except OSError:\n os.link(src, dst)", "def create_symlink(source_file, dest_file, sudo=True):\n LOG.info(\"Creating symlink to {} called {}\".format(source_file, dest_file))\n cmd = \"ln -sf {} {}\".format(source_file, dest_file)\n _exec_cmd(cmd=cmd, sudo=sudo, fail_ok=False)", "def link(self, src, dst, label=None):\n self._tag(dst, label)\n self._mkdir_for(dst)\n abs_src = self._rootjoin(src)\n abs_dst = os.path.join(self.chroot, dst)\n try:\n 
os.link(abs_src, abs_dst)\n except OSError as e:\n if e.errno == errno.EEXIST:\n # File already exists, skip\n pass\n elif e.errno == errno.EXDEV:\n # Hard link across devices, fall back on copying\n shutil.copyfile(abs_src, abs_dst)\n else:\n raise", "def link(self, src, dst, label=None):\r\n self._tag(dst, label)\r\n self._mkdir_for(dst)\r\n abs_src = self._rootjoin(src)\r\n abs_dst = os.path.join(self.chroot, dst)\r\n try:\r\n os.link(abs_src, abs_dst)\r\n except OSError as e:\r\n if e.errno == errno.EEXIST:\r\n # File already exists, skip\r\n pass\r\n elif e.errno == errno.EXDEV:\r\n # Hard link across devices, fall back on copying\r\n shutil.copyfile(abs_src, abs_dst)\r\n else:\r\n raise", "def link(self, src, dst, label=None):\r\n self._tag(dst, label)\r\n self._mkdir_for(dst)\r\n abs_src = self._rootjoin(src)\r\n abs_dst = os.path.join(self.chroot, dst)\r\n try:\r\n os.link(abs_src, abs_dst)\r\n except OSError as e:\r\n if e.errno == errno.EEXIST:\r\n # File already exists, skip\r\n pass\r\n elif e.errno == errno.EXDEV:\r\n # Hard link across devices, fall back on copying\r\n shutil.copyfile(abs_src, abs_dst)\r\n else:\r\n raise", "def copy_and_link(file_name):\n if os.path.normpath(output_path) != os.getcwd():\n write_to_runner(f\"mv {file_name} {output_path} \\n\")\n write_to_runner(f\"ln -s {output_path}/{file_name} . \\n\")", "def move_link(symlink_file: Path, link_target: Path) -> None:\n if symlink_file.is_symlink():\n symlink_file.unlink()\n elif symlink_file.is_dir(): # We have set this up be a directory at the start.\n symlink_file.rmdir()\n elif symlink_file.exists(): # A file exists but isn't a symlink or a directory\n raise ValueError(f\"{str(symlink_file)} is not a symlink or a directory\")\n symlink_file.symlink_to(link_target, target_is_directory=True)", "def link(path_origin: str, *paths: str, use_relative_path=True):\n for item in paths:\n if os.path.exists(item):\n os.remove(item)\n if use_relative_path:\n src_path = os.path.relpath(path_origin, start=os.path.dirname(item))\n else:\n src_path = path_origin\n os.symlink(src_path, item)", "def test_symlink(self, mock_request):\n self.server.hook = UrlRequestHook('test_url', request_method='GET')\n linkpath = b'ugly'\n targetpath = b'ugliest'\n self.server.input_queue = sftpcmd(\n SSH2_FXP_SYMLINK, sftpstring(linkpath), sftpstring(targetpath),\n sftpint(0))\n self.server.process()\n mock_request.assert_called_once_with(\n 'GET', 'test_url/symlink', auth=None,\n data={\n 'method': 'symlink', 'linkpath': linkpath,\n 'targetpath': targetpath})", "def createLink(self):\n \n if( self.useLink ):\n trymakedir( self.parent.installPath + \"/\" + self.alias )\n\n os.chdir( self.parent.installPath + \"/\" + self.alias )\n \n # check for already existing symlinks or dirs \n if( os.path.islink( self.version )):\n os.unlink( self.version )\n elif( os.path.isdir( self.version )):\n self.abort( \"could not create link to [ \" + self.linkPath + \" ]\\nin [ \" \\\n + os.path.basename( self.installPath ) + \" ]!!!\" )\n\n os.symlink( self.linkPath , self.version )\n print \"+ Linking \" + self.parent.installPath + \"/\" + self.alias + \"/\" + self.version \\\n + \" -> \" + self.linkPath", "def ln(self, object_path, link_path):\n return self.put_snaplink(link_path, object_path)", "def _so_symlinks(path):\n if not os.path.isdir(path):\n assert AssertionError(\"Failed to make so symlinks: path '%s' is not a directory.\", path)\n for dirent in os.listdir(path):\n fname = os.path.join(path, dirent)\n if os.path.isdir(fname) or 
os.path.islink(fname):\n continue\n m = re.match(r'(.+\\.so)\\.(\\d+)\\.(\\d+)\\.(\\d+)$', fname)\n if m:\n so,x,y,z = m.groups()\n symlink(fname, \"%s.%s.%s\" % (so, x, y))\n symlink(fname, \"%s.%s\" % (so, x))\n symlink(fname, so)", "def mklinkto(self, oldname):\n error.checked_call(os.link, str(oldname), str(self))", "def _symlink_file_on_disk(source, link_name):\n link_dir = os.path.dirname(link_name)\n\n # create intermediate dirs if they do not already exist\n if not os.path.isdir(link_dir):\n try:\n os.makedirs(link_dir)\n except OSError as exc:\n logger.error(\"Error creating directory '%s': %s\", link_dir, exc)\n return False\n\n # create symbolic link\n try:\n os.symlink(source, link_name)\n except OSError as exc:\n logger.error(\"Error creating symlink '%s': %s\", link_name, exc)\n return False\n\n logger.debug(\"Created symlink '%s' to '%s'\", link_name, source)\n return True", "def _safe_setup_link(link_filename, real_filename):\r\n real_filename = os.path.relpath(real_filename, os.path.dirname(link_filename))\r\n\r\n if os.path.exists(link_filename):\r\n try:\r\n os.unlink(link_filename)\r\n except OSError:\r\n pass\r\n try:\r\n os.symlink(real_filename, link_filename)\r\n except OSError as e:\r\n # Typically permission denied.\r\n pass", "def add(name, target):\n if target is None:\n target = getcwd()\n target = path.abspath(target)\n\n if not path.exists(target):\n raise FileNotFoundError(target)\n\n set_alias(name, target)", "def make_link(self, filepath):\n # Check file exists. It may have been deleted but still in manifest\n if not os.path.exists(self.fullpath(filepath)):\n print('File not found: {filepath}'.format(\n filepath=self.fullpath(filepath)))\n if self.contains(filepath):\n print('removing from manifest')\n self.delete(filepath)\n self.needsync = True\n self.existing_filepaths.discard(filepath)\n else:\n try:\n destdir = os.path.dirname(filepath)\n # Make destination directory if not already exists\n # Necessary because sometimes this is called before\n # individual model setup\n if not os.path.exists(destdir):\n os.makedirs(destdir)\n if self.copy_file(filepath):\n shutil.copy(self.fullpath(filepath), filepath)\n perm = (stat.S_IRUSR | stat.S_IRGRP\n | stat.S_IROTH | stat.S_IWUSR)\n os.chmod(filepath, perm)\n else:\n make_symlink(self.fullpath(filepath), filepath)\n except Exception:\n action = 'copying' if self.copy_file else 'linking'\n print('payu: error: {action} orig: {orig} '\n 'local: {local}'.format(action=action,\n orig=self.fullpath(filepath),\n local=filepath))\n raise\n finally:\n self.existing_filepaths.discard(filepath)", "def symlink_or_copy(self, src, dst, relative_symlinks_ok=False):\n bad_src = os.path.lexists(src) and not os.path.exists(src)\n if self.symlinks and not bad_src and not os.path.islink(dst):\n try:\n if relative_symlinks_ok:\n assert os.path.dirname(src) == os.path.dirname(dst)\n os.symlink(os.path.basename(src), dst)\n else:\n os.symlink(src, dst)\n return\n except Exception: # may need to use a more specific exception\n logger.warning('Unable to symlink %r to %r', src, dst)\n\n # On Windows, we rewrite symlinks to our base python.exe into\n # copies of venvlauncher.exe\n basename, ext = os.path.splitext(os.path.basename(src))\n srcfn = os.path.join(os.path.dirname(__file__),\n \"scripts\",\n \"nt\",\n basename + ext)\n # Builds or venv's from builds need to remap source file\n # locations, as we do not put them into Lib/venv/scripts\n if sysconfig.is_python_build(True) or not os.path.isfile(srcfn):\n if 
basename.endswith('_d'):\n ext = '_d' + ext\n basename = basename[:-2]\n if basename == 'python':\n basename = 'venvlauncher'\n elif basename == 'pythonw':\n basename = 'venvwlauncher'\n src = os.path.join(os.path.dirname(src), basename + ext)\n else:\n src = srcfn\n if not os.path.exists(src):\n if not bad_src:\n logger.warning('Unable to copy %r', src)\n return\n\n shutil.copyfile(src, dst)", "def link_to_blob(self, path, csum):\n new_link = self.csum_to_path(csum)\n ensure_symlink(path, new_link)\n ensure_readonly(path)", "def create_softlink(self, path, target_path):\n if self.options['storage_method'] == 'hdf5':\n # execute h5py command\n self.file_pointer[path] = h5py.SoftLink(target_path)\n elif self.options['storage_method'] == 'none':\n # save command for later processing\n self.h5commands.append((\"create_softlink\", path, target_path))\n else:\n raise Exception('Invalid option value for storage_method (%s)' % storage_method)", "def create_external_link(self, path, target_file, target_path):\n if self.options['storage_method'] == 'hdf5':\n # execute h5py command\n self.file.file_pointer[self.full_path] = h5py.ExternalLink(file,path)\n elif self.options['storage_method'] == 'none':\n # save command for later processing\n self.h5commands.append((\"create_external_link\", path, target_file, target_path))\n else:\n raise Exception('Invalid option value for storage_method (%s)' % storage_method)", "def register_link(self, target, linkname):\n assert (isinstance(target, config_types.Path) and\n isinstance(linkname, config_types.Path))\n assert linkname not in self._link_map.get(target, ()), (\n '%s is already linked' % linkname)\n assert self.root.is_parent_of(linkname), (\n '%s is not within the root directory %s' % (linkname, self.root))\n self._link_map.setdefault(target, []).append(linkname)", "def _link_files(self, source, target):\n try:\n logging.debug(\"Linking %s and %s\" % (source, target))\n os.link(os.path.realpath(source), target)\n except PermissionError as e:\n msg = \"ERROR: Insufficient rights on {}! \" \\\n \"Possible cause; source file need to be writable/appendable when fs.protect_hardlinks is enabled. 
\" \\\n \"Permissions: {}\"\n logging.error(msg.format(e.filename, str(AccessControlList.from_file(source))))\n except FileExistsError as e:\n logging.debug(\"File %s already exists!\" % e.filename)", "def test_create_symlink_file(self):\n pass", "def symlink_or_copy(self, src, dst, relative_symlinks_ok=False):\n force_copy = not self.symlinks\n if not force_copy:\n try:\n if not os.path.islink(dst): # can't link to itself!\n if relative_symlinks_ok:\n assert os.path.dirname(src) == os.path.dirname(dst)\n os.symlink(os.path.basename(src), dst)\n else:\n os.symlink(src, dst)\n except Exception: # may need to use a more specific exception\n logger.warning('Unable to symlink %r to %r', src, dst)\n force_copy = True\n if force_copy:\n shutil.copyfile(src, dst)", "def create_symlinks(target_dir: os.PathLike, symlinks_to_create: List[os.PathLike]):\n for src_path in symlinks_to_create:\n trg_path = os.path.join(target_dir, os.path.basename(src_path))\n\n if os.path.islink(src_path):\n # Let's not create symlinks to symlinks\n # Since dropping the current symlink will break the experiment\n os.symlink(os.readlink(src_path), trg_path)\n else:\n print(f'Creating a symlink to {src_path}, so try not to delete it occasionally!')\n os.symlink(src_path, trg_path)", "def testIsSymlink(self):\r\n P=lambda p:ufsi.NativeUnixPath(p)\r\n existingValidSymlinkPath=P(self.existingValidSymlinkFilePathStr)\r\n existingInvalidSymlinkPath=P(self.existingInvalidSymlinkFilePathStr)\r\n nonExistingSymlinkPath=P(self.nonExistingSymlinkPathStr)\r\n\r\n # 1\r\n self.assertEquals(existingValidSymlinkPath.isSymlink(),True,\r\n 'Symlink %r exists'\r\n %str(existingValidSymlinkPath))\r\n\r\n # 2\r\n self.assertEquals(existingInvalidSymlinkPath.isSymlink(),True,\r\n 'Symlink %r exists'\r\n %str(existingInvalidSymlinkPath))\r\n\r\n # 3\r\n self.assertEquals(nonExistingSymlinkPath.isSymlink(),False,\r\n 'Symlink %r does not exist'\r\n %str(nonExistingSymlinkPath))", "def attach_to(self, content):\n\n # Determine our file's new output path relative to the linking\n # document. If it currently lives beneath the linking\n # document's source directory, preserve that relationship on output.\n # Otherwise, make it a sibling.\n\n linking_source_dir = os.path.dirname(content.source_path)\n tail_path = os.path.relpath(self.source_path, linking_source_dir)\n if tail_path.startswith(os.pardir + os.sep):\n tail_path = os.path.basename(tail_path)\n new_save_as = os.path.join(\n os.path.dirname(content.save_as), tail_path)\n\n # We do not build our new url by joining tail_path with the linking\n # document's url, because we cannot know just by looking at the latter\n # whether it points to the document itself or to its parent directory.\n # (An url like 'some/content' might mean a directory named 'some'\n # with a file named 'content', or it might mean a directory named\n # 'some/content' with a file named 'index.html'.) Rather than trying\n # to figure it out by comparing the linking document's url and save_as\n # path, we simply build our new url from our new save_as path.\n\n new_url = path_to_url(new_save_as)\n\n def _log_reason(reason):\n logger.warning(\n \"The {attach} link in %s cannot relocate \"\n \"%s because %s. 
Falling back to \"\n \"{filename} link behavior instead.\",\n content.get_relative_source_path(),\n self.get_relative_source_path(), reason,\n extra={'limit_msg': \"More {attach} warnings silenced.\"})\n\n # We never override an override, because we don't want to interfere\n # with user-defined overrides that might be in EXTRA_PATH_METADATA.\n if hasattr(self, 'override_save_as') or hasattr(self, 'override_url'):\n if new_save_as != self.save_as or new_url != self.url:\n _log_reason(\"its output location was already overridden\")\n return\n\n # We never change an output path that has already been referenced,\n # because we don't want to break links that depend on that path.\n if self._output_location_referenced:\n if new_save_as != self.save_as or new_url != self.url:\n _log_reason(\"another link already referenced its location\")\n return\n\n self.override_save_as = new_save_as\n self.override_url = new_url", "def test_readlink(self, mock_request):\n self.server.hook = UrlRequestHook(\n 'test_url',\n urls_mapping={\n 'readlink': ['test_url_1', 'test_url_2']},\n paths_mapping={\n 'readlink': ['test_path_1', 'test_path_2']})\n linkpath = b'ugly'\n targetpath = b'ugliest'\n os.symlink(linkpath, targetpath)\n self.server.input_queue = sftpcmd(\n SSH2_FXP_READLINK, sftpstring(targetpath), sftpint(0))\n self.server.process()\n mock_request.assert_has_calls([\n mock.call(\n 'POST', 'test_url_1/test_path_1', auth=None,\n data={'method': 'readlink', 'filename': targetpath}),\n mock.call(\n 'POST', 'test_url_1/test_path_2', auth=None,\n data={'method': 'readlink', 'filename': targetpath}),\n mock.call(\n 'POST', 'test_url_2/test_path_1', auth=None,\n data={'method': 'readlink', 'filename': targetpath}),\n mock.call(\n 'POST', 'test_url_2/test_path_2', auth=None,\n data={'method': 'readlink', 'filename': targetpath}),\n ])", "def _create_symlink(self, source_path, main):\n main_file = os.path.realpath(os.path.join(source_path, main))\n if not os.path.isfile(main_file):\n main_file += '.js'\n if not os.path.isfile(main_file):\n print('\\tWARNING: Could not create symlink for {}, no such file.'.format(main_file))\n return\n main_file_name = os.path.basename(main_file)\n with change_working_directory(os.path.realpath(self.symlink_dir)) as cd:\n file_path = os.path.join(cd, main_file_name)\n self.created(file_path)\n if os.path.islink(file_path):\n os.remove(file_path)\n symlink(main_file, main_file_name)", "def create_symlink(src: str, dst: str) -> bool:\n if exists(src):\n with suppress(Exception):\n if isfile(dst):\n remove(dst)\n else:\n rmtree(dst)\n\n try:\n\n symlink(src, dst)\n return True\n\n except PermissionError as err:\n printer(\n \"User without permission to create the symbolic link.\",\n str(err),\n foreground=FG().ERROR,\n )\n return False\n\n except FileExistsError:\n remove(dst)\n symlink(src, dst)\n return False", "def ensure_symlink_exists(symlink_path, file_path):\n\n if not (os.path.islink(symlink_path) or (os.path.realpath(symlink_path) != os.path.realpath(file_path))):\n # This is bad.\n raise CronException(\"Path {0} is not a symlink or does not point where expected.\".format(symlink_path))", "def copy_or_link(src, dest):\n if os.name == 'nt':\n qisys.sh.install(src, dest)\n else:\n qisys.sh.rm(dest)\n os.symlink(src, dest)", "def _symlink_or_copy(src, dst):\n # try to symlink file\n try:\n os.symlink(src, dst)\n print('Creating symlink \"%s\" pointing to \"%s\"' % (dst, src))\n except Exception as ex_symlink:\n # try to copy file\n try:\n shutil.copyfile(src, dst)\n 
print('Copying file from \"%s\" to \"%s\"' % (src, dst))\n except Exception as ex_copy:\n raise RuntimeError('Could neither symlink nor copy file \"%s\" to \"%s\":\\n- %s\\n- %s' % (src, dst, str(ex_symlink), str(ex_copy)))", "def move(self, target):\n if target.relto(self):\n raise error.EINVAL(target, \"cannot move path into a subdirectory of itself\")\n try:\n self.rename(target)\n except error.EXDEV: # invalid cross-device link\n self.copy(target)\n self.remove()", "def symlink_cachepath(ivy_home, inpath, symlink_dir, outpath):\r\n safe_mkdir(symlink_dir)\r\n with safe_open(inpath, 'r') as infile:\r\n paths = filter(None, infile.read().strip().split(os.pathsep))\r\n new_paths = []\r\n for path in paths:\r\n if not path.startswith(ivy_home):\r\n new_paths.append(path)\r\n continue\r\n symlink = os.path.join(symlink_dir, os.path.relpath(path, ivy_home))\r\n try:\r\n os.makedirs(os.path.dirname(symlink))\r\n except OSError as e:\r\n if e.errno != errno.EEXIST:\r\n raise\r\n # Note: The try blocks cannot be combined. It may be that the dir exists but the link doesn't.\r\n try:\r\n os.symlink(path, symlink)\r\n except OSError as e:\r\n # We don't delete and recreate the symlink, as this may break concurrently executing code.\r\n if e.errno != errno.EEXIST:\r\n raise\r\n new_paths.append(symlink)\r\n with safe_open(outpath, 'w') as outfile:\r\n outfile.write(':'.join(new_paths))\r\n symlink_map = dict(zip(paths, new_paths))\r\n return symlink_map", "def _symlink_datafile(self):\n logger.debug(\"Symlinking datafile to '%s'\", self.source)\n\n if os.path.isfile(self.source):\n # construct symlink target path based on source file name\n rel_dst_path = self.datafile.storage.get_available_name(\n file_path(self, os.path.basename(self.source))\n )\n abs_dst_path = os.path.join(settings.FILE_STORE_BASE_DIR,\n rel_dst_path)\n # create symlink\n if _symlink_file_on_disk(self.source, abs_dst_path):\n # update the model with the symlink path\n self.datafile.name = rel_dst_path\n logger.debug(\"Datafile symlinked\")\n return True\n else:\n logger.error(\"Symlinking failed\")\n return False\n else:\n logger.error(\"Symlinking failed: source is not a file\")\n return False", "def lnh(src, dst):\n os.link(src, dst)", "def _link(filename, existing_filename):\n CreateHardLinkW(filename, existing_filename, 0)", "def link(self, dst):\n assert isinstance(dst, Path)\n link(dst._path, self._path)", "def create_soft_link():\n vlogger_path = os.path.join(vlogger_dir, \"vlogger.py\")\n dir_path = os.path.expanduser(\"~\")\n bin_dir = os.path.join(dir_path, \"bin\")\n if not os.path.exists(bin_dir):\n os.mkdir(bin_dir)\n\n soft_path = os.path.join(bin_dir, \"vlogger\")\n\n if not os.path.exists(soft_path):\n command = [\"ln\", \"-s\", vlogger_path, soft_path]\n cmd_str = \" \".join(command)\n print(\"Soft link command for easy execution: {}\".format(cmd_str))\n subprocess.call([\"ln\", \"-s\", vlogger_path, soft_path])\n else:\n print(\"Soft link already created: {}\".format(soft_path))", "def link(path, service_name, branch, username):\n slab_logger.log(15, 'Setting the current service to %s' % service_name)\n if service_name == \"current\":\n if os.path.isfile(os.path.join(path, \"current\")):\n currentf = open(os.path.join(path, \"current\"), 'r')\n currentf.seek(0)\n service_name = currentf.readline()\n else:\n slab_logger.error('Unable to determine the current service. 
'\n 'Please enter a service to work on.')\n return 1\n\n returncode = set_current_service(path, service_name)\n if not returncode == 0:\n slab_logger.error('Unable to write to \"current\" file')\n return 1\n\n if not os.path.islink(os.path.join(path, \"current_service\")):\n # Note: What to link is first arg, where to link is second aka src dest\n if os.path.isdir(os.path.join(path, \"services\", service_name)):\n os.symlink(os.path.join(path, \"services\", service_name),\n os.path.join(path, \"current_service\"))\n slab_logger.debug('Made symlink for %s' % service_name)\n return 0\n else:\n slab_logger.debug('Could not find source for symlink. '\n 'Attempting re-clone of %s.' % service_name)\n returncode = sync_service(path, branch, username, service_name)\n if returncode:\n os.symlink(os.path.join(path, \"services\", service_name),\n os.path.join(path, \"current_service\"))\n slab_logger.debug('Made symlink for %s' % service_name)\n return 0\n else:\n slab_logger.error(\"Failed to find source for symlink: \" +\n os.path.join(path, \"services\", service_name))\n return 1\n else:\n slab_logger.debug(\"Link already exists.\")\n return 0", "def islink(self, path):\n return os.path.islink(path)", "def _tryLink(self,src, dst):\n\n hiero.core.log.info(\"Attempting to link %s to %s\" % (src, dst))\n \n try:\n os.link(util.asUnicode(src), util.asUnicode(dst))\n except OSError as err:\n # If the OS returns an ENOTSUP error (45), for example when trying to set\n # flags on an NFS mounted volume that doesn't support them, Python should\n # absorb this. However, a regression in Python 2.7.3 causes this not to\n # be the case, and the error is thrown as an exception. We therefore\n # catch this explicitly as value 45, since errno.ENOTSUP is not defined\n # in Python 2.7.2 (which is part of the problem). 
See the following\n # link for further information: http://bugs.python.org/issue14662\n # See TP 199072.\n if err.errno == 45: # ENOTSUP\n pass\n elif err.errno == 17: # FILE EXISTS\n raise\n else:\n raise", "def canonical_path(path, *paths, **kwargs):\n resolve_link = kwargs.pop('resolve_link', True)\n path = os.path.join(path, *paths)\n path = os.path.expanduser(path)\n if resolve_link:\n path = os.path.realpath(path)\n else:\n path = os.path.abspath(path)\n if os.path.isdir(path):\n path = os.path.join(path, '')\n return path", "def new_realpath(name):\n if name.startswith('link-to-ham'):\n return name[len('link-to-'):]\n else:\n return name", "def fast_copy(src: FilePath, dst: FilePath, **kwargs) -> None:\n real_src_path = os.path.realpath(src)\n try:\n os.link(real_src_path, dst, **kwargs)\n except OSError:\n shutil.copy2(real_src_path, dst, **kwargs)", "def link(path):\n abs_path = navigate.get_abs_path(path)\n parent, name = navigate.split_path(abs_path)\n if not db.file_exists(parent, name):\n print \"Error: '\" + path + \"' does not exist.\"\n else:\n dbox_path = '/' + name\n access_token = db.get_access_to_file(parent, name)\n client = dropbox.client.DropboxClient(access_token)\n short_link = client.share(dbox_path)['url']\n normal_link = client.share(dbox_path, short_url=False)['url']\n dl_link = normal_link.replace('www.dropbox.com',\n 'dl.dropboxusercontent.com', 1)\n print \"short link: \" + short_link\n print \"normal link: \" + normal_link\n print \"download link: \" + dl_link", "def project_linkage():\n current_dir = os.getcwd()\n ve_lib = os.path.join(current_dir, 'fabric_factory', 've', 'lib')\n \n python_version = os.listdir(ve_lib).pop()\n for target_dir in [\"project\", \"worker\", \"factory\"]:\n if not os.path.islink(\n os.path.join(ve_lib, python_version,\n \"site-packages\", target_dir)):\n local('ln -s %s %s' %\n (\n os.path.join(current_dir,\"fabric_factory\", \"src\", target_dir),\n os.path.join(ve_lib, python_version,\n \"site-packages\", target_dir)\n )\n )\n else:\n print 'link to %s already exists' %target_dir", "def touch(path, mtime, test=False):\n if test: return\n os.utime(path, (mtime, mtime), follow_symlinks=False)", "def LinkFiles(self, srcdir, target):\n if '+orig' in target:\n tgt_prefix = target.replace('.BRIK','')\n tgt_prefix = tgt_prefix.replace('.HEAD','')\n linkfiles = ['%s.HEAD'%tgt_prefix, '%s.BRIK' %tgt_prefix]\n else:\n linkfiles = [target]\n for linkfile in linkfiles:\n linkname = '%s/%s' % (srcdir, os.path.basename(linkfile))\n rel_linkdir = abspath_to_relpath(os.path.dirname(target), srcdir)\n rel_linkfile = '%s/%s' % (rel_linkdir, os.path.basename(linkfile))\n if not os.path.exists(linkname) and not os.path.islink(linkname):\n cmd = 'cd %s && ln -s %s %s' % (srcdir, rel_linkfile, linkname)\n self.ExecCmd(cmd)", "def symlink_current_release():\n require(\"release\", provided_by=[deploy])\n with cd(\"%(path)s/releases\" % env):\n sudo(\"ln -s %(release)s current_tmp && mv -Tf current_tmp current\" % env)", "def is_broken_link(path):\r\n path = os.readlink(path)\r\n return not os.path.exists(path)", "def IsSymlink(info):\n return (info.external_attr >> 16) == 0120777", "def rel_resolve(path):\n if os.path.isabs(path):\n return os.path.abspath(path)\n else:\n return os.path.join(SCRIPTDIR, path)", "def test_relativise_dst_under():\n src = pathlib.Path(\"/tmp/foo/src.txt\")\n dst = pathlib.Path(\"/tmp/foo/bar/baz/dst.txt\")\n rel = relativise(src, dst)\n assert rel == pathlib.Path(\"bar/baz/dst.txt\")", "def make_symlink(dst, src, 
silently_move=False):\n dst_dir = os.path.dirname(dst.rstrip(os.path.sep))\n if not os.path.isdir(dst_dir):\n os.makedirs(dst_dir)\n\n # get a temporary directory\n if os.path.exists(dst):\n if silently_move or (((os.path.isfile(dst) or (os.path.isdir(dst)) and\n query_yes_no('Move NSLS-II from userpackages?')))):\n import tempfile\n temp_dir = tempfile.mkdtemp()\n shutil.move(dst, temp_dir)\n print('Previous NSLS-II folder moved to {0}'.format(temp_dir))\n else:\n print('NSLS-II already exists in userpackages. Please move or delete it'\n 'and then re-run setup.py')\n return False\n\n # this symlink does not get removed when pip uninstall vttools is run...\n # todo figure out how to make pip uninstall remove this symlink\n try:\n # symlink the NSLS-II folder into userpackages\n os.symlink(src, dst)\n except AttributeError:\n # you must be on Windows!\n call(['mklink', '/j', dst, src], shell=True)\n\n return True" ]
[ "0.7404254", "0.7402173", "0.7312787", "0.72121716", "0.7204278", "0.71373194", "0.71331364", "0.71290386", "0.71224445", "0.70757365", "0.69786334", "0.69786334", "0.69290304", "0.68298745", "0.6800475", "0.6755648", "0.67269856", "0.67166317", "0.6690445", "0.66393733", "0.65924454", "0.6582094", "0.65082896", "0.6503687", "0.6480789", "0.6456912", "0.6438272", "0.6430119", "0.64076376", "0.6402103", "0.638313", "0.63489354", "0.62894857", "0.62708205", "0.6258779", "0.6231037", "0.62301993", "0.6219114", "0.61856043", "0.61612993", "0.61300355", "0.6090302", "0.60890234", "0.6033687", "0.6002216", "0.5990768", "0.5967704", "0.5967704", "0.5924656", "0.59218454", "0.59131926", "0.5912777", "0.5910438", "0.59024876", "0.5880002", "0.5853524", "0.58450556", "0.5823796", "0.5793048", "0.5790236", "0.5788694", "0.574984", "0.573102", "0.572607", "0.5723075", "0.57179946", "0.5714617", "0.56939834", "0.56938076", "0.5692968", "0.5676366", "0.56750655", "0.5660651", "0.5650113", "0.56206876", "0.5599814", "0.55816436", "0.5578595", "0.5565056", "0.55340505", "0.55105895", "0.54905385", "0.5464047", "0.54510623", "0.5437243", "0.54328763", "0.53762275", "0.5324233", "0.53129137", "0.52753717", "0.52713877", "0.52651733", "0.52611995", "0.52560824", "0.52526647", "0.52477664", "0.52461404", "0.5221211", "0.52015465", "0.5190066" ]
0.8573706
0
Return a string for the |cmd| list w/reasonable quoting.
def cmdstr(cmd):
    if isinstance(cmd, str):
        return cmd

    quoted = []
    for arg in cmd:
        if isinstance(arg, Path):
            arg = str(arg)
        if ' ' in arg:
            arg = '"%s"' % (arg,)
        quoted.append(arg)
    return ' '.join(quoted)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cmdify(self):\n return \" \".join(\n itertools.chain(\n [_quote_if_contains(self.command, r\"[\\s^()]\")],\n (_quote_if_contains(arg, r\"[\\s^]\") for arg in self.args),\n )\n )", "def command_list_to_str(command):\n return \" \".join(pipes.quote(i) for i in command)", "def StringifyCommand(cmd):\n ret = ''\n grouping = 0\n for a in cmd:\n if grouping == 0 and len(ret) > 0:\n ret += \" \\\\\\n \"\n elif grouping > 0:\n ret += \" \"\n if grouping == 0:\n grouping = 1\n if a.startswith('-') and len(a) == 2:\n grouping = 2\n ret += a\n grouping -= 1\n return ret", "def posix_command(command, *args, **kwargs):\n # pylint: disable = redefined-outer-name\n return ' '.join([\n \"'%s'\" % (token.replace(\"'\", \"'\\\\''\")) if needq(token) else token\n for token in map(_make_formatter(*args, **kwargs),\n split_command(command))\n ])", "def _format_command(command: List[str], shell: bool = False) -> Union[Sequence[str], str]:\n return command if not shell else \" \".join(command)", "def formatCommand(command):\n cmdstr=\"\"\n logging.debug(repr(command))\n for arg in command:\n if \" \" in arg:\n cmdstr=cmdstr+\" \\\"\"+arg+\"\\\"\"\n else:\n cmdstr=cmdstr+\" \"+arg\n return cmdstr", "def shell_command(self):\n # TODO: fix this naive version by adding quotes where appropriate\n return \" \".join(self.args)", "def shQuote(text):\n\treturn \"'%s'\" % text.replace(\"'\", r\"'\\''\")", "def win32_command(command, *args, **kwargs):\n # pylint: disable = redefined-outer-name\n return ' '.join([metasub(\n '\"%s\"' % (slashsub(token).replace('\"', '\\\\\"'),)\n if needq(token) else token\n ) for token in map(_make_formatter(*args, **kwargs),\n split_command(command))])", "def _quote(self, arg):\n arg = arg.replace('\\\\', '\\\\\\\\')\n arg = arg.replace('\"', '\\\\\"')\n return '\"%s\"' % arg", "def sh_quote_unsafe_cmdline(args):\n return str.join(' ', (sh_quote_unsafe(arg) for arg in args))", "def shellquote(s):\n return '\"' + s.replace(\"'\", \"'\\\\''\") + '\"'", "def list2cmdline(seq):\n\n result = []\n needquote = False\n for arg in seq:\n bs_buf = []\n\n # Add a space to separate this argument from the others\n if result:\n result.append(' ')\n\n needquote = (\" \" in arg) or (\"\\t\" in arg) or (not arg) or (\"(\" in arg) or (\")\" in arg)\n if needquote:\n result.append('\"')\n\n for c in arg:\n if c == '\\\\':\n # Don't know if we need to double yet.\n bs_buf.append(c)\n elif c == '\"':\n # Double backslashes.\n result.append('\\\\' * len(bs_buf) * 2)\n bs_buf = []\n result.append('\\\\\"')\n else:\n # Normal char\n if bs_buf:\n result.extend(bs_buf)\n bs_buf = []\n result.append(c)\n\n # Add remaining backslashes, if any.\n if bs_buf:\n result.extend(bs_buf)\n\n if needquote:\n result.extend(bs_buf)\n result.append('\"')\n\n return ''.join(result)", "def quote(s):\n # Based on shlex.quote. 
Bun unlike shlex, it quotes every string and\n # not just the ones that contain unsafe characters.\n return \"'\" + s.replace(\"'\", \"'\\\"'\\\"'\") + \"'\"", "def _build_simple_command(self, cmd):\n return cmd+SBE37_NEWLINE", "def embeded_triple_quotes():\n pass", "def _quote(v):\n return '\"' + v + '\"' if ' ' in v else v", "def sh_quote_unsafe(arg):\n return ('\"' + _DQUOTE_RE.sub(r'\\1\\1\\\"', str(arg)) + '\"' )", "def quoted(val: str) -> str:\n return f'\"{val}\"' if ' ' in val else val", "def shquote(arg):\n for c in '\"', \"'\", \"\\\\\", \"#\":\n if c in arg:\n return repr(arg)\n if arg.split() != [arg]:\n return repr(arg)\n return arg", "def sh_quote_safe(arg):\n return (\"'\" + str(arg).replace(\"'\", r\"'\\''\") + \"'\")", "def quote(m):\n return '\"' + m + '\"'", "def quote(*a, **kw):\n return quote(*a, **kw)", "def __str__(self):\n if self.commands:\n if hpccm.config.g_ctype == container_type.DOCKER:\n # Format:\n # RUN cmd1 && \\\n # cmd2 && \\\n # cmd3\n s = ['RUN {}'.format(self.commands[0])]\n s.extend([' {}'.format(x) for x in self.commands[1:]])\n return ' && \\\\\\n'.join(s)\n elif hpccm.config.g_ctype == container_type.SINGULARITY:\n # Format:\n # %post\n # cmd1\n # cmd2\n # cmd3\n s = ['%post']\n s.extend([' {}'.format(x) for x in self.commands])\n return '\\n'.join(s)\n else:\n raise RuntimeError('Unknown container type')\n else:\n return ''", "def _make_posix_command():\n qsearch = _re.compile(r'[^a-zA-Z\\d_./-]').search\n needq = lambda x: not x or qsearch(x)\n\n def posix_command(command, *args, **kwargs):\n \"\"\"\n Return a POSIX shell suitable commandline\n\n Either args or kwargs or neither of them can be set. There cannot be\n set both of them.\n\n :Parameters:\n `command` : ``str``\n Generic commandline, possibly containing substitutions, filled by\n args or kwargs. 
See `split_command` for generic commandline\n syntax.\n\n `args` : ``tuple``\n Substitution tuple\n\n `kwargs` : ``dict``\n Substitution dict\n\n :Return: Strictly quoted shell commandline for POSIX shells\n :Rtype: ``str``\n \"\"\"\n # pylint: disable = redefined-outer-name\n return ' '.join([\n \"'%s'\" % (token.replace(\"'\", \"'\\\\''\")) if needq(token) else token\n for token in map(_make_formatter(*args, **kwargs),\n split_command(command))\n ])\n return posix_command", "def sh_quote_safe_cmdline(args):\n return str.join(' ', (sh_quote_safe(arg) for arg in args))", "def fmt(self, val):\n if type(val) in self.QUOTABLE_TYPES:\n s = decode_string(val)\n return u\"{0}{1}{2}\".format(self.quotechar, s, self.quotechar)\n else:\n return decode_string(str(val))", "def getquoted(self): # real signature unknown; restored from __doc__\n pass", "def SingleQuote(s):\n return pipes.quote(s)", "def cmd(*args):\r\n return \" \".join([str(arg) for arg in args])", "def argument_list_quote(arguments):\n args = []\n for arg in arguments:\n args.append(argument_quote(arg))\n return '\"%s\"' % ' '.join(args)", "def _build_direct_command(self, cmd, arg):\n return \"%s%s\" % (arg, self._newline)", "def shellquote(arg):\n if re.match('^[-_.:/=a-zA-Z0-9]*$', arg):\n return arg\n else:\n return \"'%s'\" % arg.replace(\"'\", r\"'\\''\")", "def commandListToCommandString(cmdlist):\n\n if isinstance(cmdlist, list) and len(cmdlist) > 0:\n cmd = [str(cmdlist[0])]\n\n for arg in cmdlist[1:]:\n argstr = repr(arg)\n\n if isinstance(arg,dict):\n argstr = argstr.replace('{', '[')\n argstr = argstr.replace('}', ']')\n\n cmd.append(argstr)\n\n return NULL.join(cmd)\n else:\n return ''", "def __str__(self):\n if not self._args and not self.subcommand:\n return self.cmd\n elif not self._args and self.subcommand:\n return '{} {}'.format(\n self.cmd, self.subcommand)\n elif self._args and not self.subcommand:\n return '{} {}'.format(\n self.cmd, ' '.join(self._args))\n else:\n return '{} {} {}'.format(\n self.cmd, self.subcommand, ' '.join(self._args))", "def __str__(self):\n\n if self._s == '':\n return ''\n\n if len(self.quote) == 1:\n s = self.to_short()\n else:\n s = self.to_long()\n\n try:\n eval(self.quote + s + self.quote)\n except UnicodeDecodeError:\n if self._safe_mode:\n raise\n\n self._safe_mode = True\n\n assert eval(self.quote + s + self.quote) == self._s\n\n return s", "def quote(value):\n single = value.find(\"'\")\n double = value.find('\"')\n multiline = value.find('\\n') != -1\n if multiline or ((single != -1) and (double != -1)):\n if value.find('\"\"\"') == -1 and value[0] != '\"' and value[-1] != '\"':\n s = '\"\"\"%s\"\"\"' % value\n else:\n s = \"'''%s'''\" % value\n elif (single != -1) and (double == -1):\n s = '\"%s\"' % value\n else:\n s = \"'%s'\" % value\n return s", "def standardise_quotes(self, val):\n if val.startswith(self.altquote) and val.endswith(self.altquote):\n middle = val[1:-1]\n val = \"%s%s%s\" % (self.quote, middle, self.quote)\n\n val = self.escape_quotes(val)\n\n return val", "def shellify(val):\n\n if val==None:\n s=''\n elif not isinstance(val,str):\n s=str(val)\n else:\n return shlex.quote(val)\n return shlex.quote(s)", "def quote(value):\n return DoubleQuotedScalarString(value)", "def _quoter(self, col) :\n\n j = self.cols.index(col)\n if self.types[j] == 'TEXT' :\n return '\"%s\"'\n else :\n return '%s'", "def get_quoted_cs_string(self, key, default=None):\n quoted_list = self.get_quoted_list(key, default)\n quoted_cs_string = ','.join(map(str, quoted_list))\n\n return 
quoted_cs_string", "def get_command() -> str:\n if settings.COMMAND_LINE: return settings.COMMAND_LINE\n # Command not configured; let's use a fallback to alert the user\n if platform.system() == 'Linux': return 'echo \"ALARM ALARM ALARM\"|espeak'\n if platform.system() == 'Windows': return 'PowerShell -Command \"Add-Type –AssemblyName System.Speech; ' \\\n '(New-Object System.Speech.Synthesis.SpeechSynthesizer).Speak(\\'ALARM ALARM ALARM\\');\"'\n if platform.system() == 'Darwin': return 'say \"ALARM ALARM ALARM\"'\n return ''", "def shlex_quote(s):\n if not s:\n return \"''\"\n # PKGW: builtin not available in Python 2\n ###if _find_unsafe(s) is None:\n ### return s\n\n # use single quotes, and put single quotes into double quotes\n # the string $'b is then quoted as '$'\"'\"'b'\n return \"'\" + s.replace(\"'\", \"'\\\"'\\\"'\") + \"'\"", "def _quote_arguments(args):\n return map(lambda x: '\"{}\"'.format(x) if ' ' in x else '{}'.format(x), args)", "def shell_escape(s):\n import io\n\n fp = io.StringIO()\n sq = \"'\" + '\"' + \"'\" + '\"' + \"'\"\n dollar = \"'\" + \"'\" + \"$\" + \"'\" + \"'\"\n print(\"'\", end=\"\", file=fp)\n for c in s:\n if c == \"'\":\n print(sq, end=\"\", file=fp)\n elif c == \"$\":\n print(dollar, end=\"\", file=fp)\n else:\n print(c, end=\"\", file=fp)\n print(\"'\", end=\"\", file=fp)\n return fp.getvalue()", "def add_quote(item):\n if type(item) == str:\n return \"\\'\" + item + \"\\'\"\n else:\n return item", "def command_and_args(self) -> str:\n if self.command and self.args:\n rtn = f'{self.command} {self.args}'\n elif self.command:\n # there were no arguments to the command\n rtn = self.command\n else:\n rtn = ''\n return rtn", "def QuotedEscaped (s):\n return repr(s)", "def esc_quotes(strng):\n\n return strng.replace('\"','\\\\\"').replace(\"'\",\"\\\\'\")", "def _escapeString(self, value):\n if '\"' in value and \"'\" in value:\n substrings = value.split(\"\\\"\")\n result = [\"concat(\"]\n for substring in substrings:\n result.append(\"\\\"%s\\\"\" % substring)\n result.append(\", '\\\"', \")\n result = result[0:-1]\n if value.endswith('\"'):\n result.append(\", '\\\"'\")\n return \"\".join(result) + \")\"\n\n if '\"' in value:\n return \"'%s'\" % value\n return \"\\\"%s\\\"\" % value", "def quote_list(the_list):\n return [\"'%s'\" % element for element in the_list]", "def quote(s):\n if not s:\n return \"''\"\n if _find_unsafe(s) is None:\n return s\n\n # use single quotes, and put single quotes into double quotes\n # the string $'b is then quoted as '$'\"'\"'b'\n return \"'\" + s.replace(\"'\", \"'\\\"'\\\"'\") + \"'\"", "def wrap_with_in_single_quote(s):\n return \"'{}'\".format(s)", "def wrap_with_in_single_quote(s):\n return \"'{}'\".format(s)", "def get_magic_quotes_runtime():\n raise NotImplementedError()", "def elem_quote(member):\n# member = str(member) # since we now stringify everything - this is probably a redundant command\n if member.find(\"'\") == -1:\n outline = \"'\" + member + \"'\"\n elif member.find('\"') == -1:\n outline = '\"' + member + '\"'\n else:\n outline = '\"' + member.replace('\"','&mjf-quot;')+'\"'\n return outline.replace('\\n','&mjf-lf;')", "def shell_command_strings(self, command):\n return (None, \"$(shell \" + command + \")\", None)", "def quot(string):\r\n return string.replace('\"', \"'\")", "def shlex_quote(s):\n if not s:\n return \"''\"\n if _find_unsafe(s) is None:\n return s\n\n # use single quotes, and put single quotes into double quotes\n # the string $'b is then quoted as '$'\"'\"'b'\n return \"'\" + 
s.replace(\"'\", \"'\\\"'\\\"'\") + \"'\"", "def sql(self, quoted=True):\n if quoted:\n return '\"%s\"' % MySQLdb.escape_string(str(self.data))\n else:\n return '%s' % MySQLdb.escape_string(str(self.data))", "def _build_command(self, cmd, unit):\n return '#' + unit + cmd + NEWLINE", "def __repr__(self):\n p = \" if {0!r}\".format(self.precondition) if self.precondition else \"\"\n i = \" ignore-exit-code\" if self.ignore_exit_code else \"\"\n return \"<Command {0!r}{1}{2}>\".format(self.command, p, i)", "def test_symlit_escape():\n return \"\\\"=\\\"\"", "def clean_command_lines(cmd):\r\n cmd = ' '.join(cmd.split())\r\n return cmd", "def __str__(self):\n\n if self._b == b'':\n return ''\n\n if len(self.quote) == 1:\n s = self.to_short()\n else:\n s = self.to_long()\n\n assert eval('b' + self.quote + s + self.quote) == self._b\n\n return s", "def fullCmdStr(self):\n return \"%s %s\" % (self.locCmdID, self.cmdStr)", "def test_commandRepr(self):\n repr(imap4.Command(b\"COMMAND\", [b\"arg\"], (b'extra')))", "def quote(self, expr):\n return \"'\" + self.escape(str(expr)) + \"'\"", "def cmd(self) -> List[str]:\n raise NotImplementedError(\"Must implement in frontend subclass.\")", "def shlex_join(argv):\n def quote(arg):\n if arg.find(\" \") >= 0:\n return '\"%s\"' % arg\n else:\n return arg\n return \" \".join([quote(arg) for arg in argv])", "def getquoted(self):\n if self.is_geometry:\n # Psycopg will figure out whether to use E'\\\\000' or '\\000'.\n return b\"%s(%s)\" % (\n b\"ST_GeogFromWKB\" if self.geography else b\"ST_GeomFromEWKB\",\n sql.quote(self.ewkb).encode(),\n )\n else:\n # For rasters, add explicit type cast to WKB string.\n return b\"'%s'::raster\" % self.ewkb.hex().encode()", "def _buildCmd(self, cmd, cmdArg=0x00):\n res = [cmd, cmdArg]\n if self.USE_SUFFIX:\n return res + [self.CMD_SUFFIX]\n return res", "def split_and_honor_quotation_marks(cmdline):\n\n # See\n # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp\n\n # Step 1: Translate all literal quotes into QUOTE. Justify number\n # of backspaces before quotes.\n tokens = []\n bs_buf = \"\"\n QUOTE = 1 # \\\", literal quote\n for c in cmdline:\n if c == '\\\\':\n bs_buf += c\n elif c == '\"' and bs_buf:\n # A quote preceded by some number of backslashes.\n num_bs = len(bs_buf)\n tokens.extend([\"\\\\\"] * (num_bs//2))\n bs_buf = \"\"\n if num_bs % 2:\n # Odd. Quote should be placed literally in array\n tokens.append(QUOTE)\n else:\n # Even. This quote serves as a string delimiter\n tokens.append('\"')\n\n else:\n # Normal character (or quote without any preceding\n # backslashes)\n if bs_buf:\n # We have backspaces in buffer. Output these.\n tokens.extend(list(bs_buf))\n bs_buf = \"\"\n\n tokens.append(c)\n\n # Step 2: split into arguments\n result = [] # Array of strings\n quoted = False\n arg = [] # Current argument\n tokens.append(\" \")\n for c in tokens:\n if c == '\"':\n # Toggle quote status\n quoted = not quoted\n arg.append('\"')\n elif c == QUOTE:\n arg.append('\"')\n elif c in (' ', '\\t'):\n if quoted:\n arg.append(c)\n else:\n # End of argument. 
Output, if anything.\n if arg:\n result.append(''.join(arg))\n arg = []\n else:\n # Normal character\n arg.append(c)\n \n return result", "def command(cmd):\n\n if isinstance(cmd, list):\n command_output = \" \".join(cmd)\n else:\n command_output = cmd\n\n return colored('$ ' + command_output, attrs=['bold'])", "def qstring(self, s):\n\n if '\"' in s or ' ' in s or '\\\\' in s:\n return '\"' + s.replace('\\\\', '\\\\\\\\').replace('\"', '\\\\\"') + '\"'\n else:\n return s", "def set_magic_quotes_runtime():\n raise NotImplementedError()", "def _sh_quote(s):\n if not s:\n return b\"\"\n if _find_unsafe(s) is None:\n return s\n\n # use single quotes, and put single quotes into double quotes\n # the string $'b is then quoted as '$'\"'\"'b'\n return b\"'\" + s.replace(b\"'\", b\"'\\\"'\\\"'\") + b\"'\"", "def _make_win32_command():\n wsp, meta = r'\\r\\n\\t\\x0b\\x0c\\x08 ', r'()%!^\"<>&|'\n slashsub = _ft.partial(_re.compile(r'(\\\\+)(\"|$)').sub, r'\\1\\1\\2')\n metasub = _ft.partial(_re.compile(r'([%s%s])' % (wsp, meta)).sub, r'^\\1')\n qsearch = _re.compile(r'[%s\"]' % (wsp,)).search\n needq = lambda x: not x or qsearch(x)\n\n def win32_command(command, *args, **kwargs):\n \"\"\"\n Return a win32/cmd.exe suitable commandline\n\n :See: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/\n 2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/\n\n Either args or kwargs or neither of them can be set. There cannot be\n set both of them.\n\n :Parameters:\n `command` : ``str``\n Generic commandline, possibly containing substitutions, filled by\n args or kwargs. See `split_command` for generic commandline\n syntax.\n\n `args` : ``tuple``\n Substitution tuple\n\n `kwargs` : ``dict``\n Substitution dict\n\n :Return: Strictly quoted shell commandline for ``cmd.exe``\n :Rtype: ``str``\n \"\"\"\n # pylint: disable = redefined-outer-name\n return ' '.join([metasub(\n '\"%s\"' % (slashsub(token).replace('\"', '\\\\\"'),)\n if needq(token) else token\n ) for token in map(_make_formatter(*args, **kwargs),\n split_command(command))])\n\n return win32_command", "def _escapeArg(arg):\n #XXX There is a *lot* more that we should escape here.\n return arg.replace('\"', r'\\\"')", "def __str__(self):\n return \"\\\"%s\\\"\" % self.__string", "def _special_cmd_strs(feature, memory, module, module_root, conda_env):\n feature_str = ''\n if feature is not None:\n feature_str = '#SBATCH {} # extra feature\\n'.format(feature)\n\n mem_str = ''\n if memory is not None:\n mem_str = ('#SBATCH --mem={} # node RAM in MB\\n'\n .format(int(memory * 1000)))\n\n env_str = ''\n if module is not None:\n env_str = (\"echo module use {module_root}\\n\"\n \"module use {module_root}\\n\"\n \"echo module load {module}\\n\"\n \"module load {module}\\n\"\n \"echo module load complete!\\n\"\n .format(module_root=module_root, module=module))\n elif conda_env is not None:\n env_str = (\"echo source activate {conda_env}\\n\"\n \"source activate {conda_env}\\n\"\n \"echo conda env activate complete!\\n\"\n .format(conda_env=conda_env))\n\n return feature_str, mem_str, env_str", "def _build_send_optode_command(self, cmd, command):\n return \"%s=%s%s\" % (cmd, command, self._newline)", "def __joinCmdStringWithExtras (self,cmdString,extras):\n if (extras != \"\"):\n self._log(\"joining-extras\").debug4(\"joining cmd '%s' with extra params '%s'\",cmdString,extras)\n cmdString += \" \" + extras\n return cmdString", "def get_magic_quotes_gpc():\n raise NotImplementedError()", "def command_string(func, targets, sources, kwds):\n 
args= [repr(targets[0])] if len(targets) == 1 \\\n else [] if not targets else [repr(targets)]\n if sources:\n args.append(repr(sources[0]) if len(sources) == 1\n else repr(sources))\n if kwds:\n args.append(', '.join(['{}={}'.format(k, repr(v))\n for k, v in kwds.items()]))\n return '{}({})'.format(func.__name__, ', '.join(args))", "def _commandline_join(self, tokens):\r\n commands = filter(None, map(str, tokens))\r\n return self._command_delimiter.join(commands).strip()", "def _commandline_join(self, tokens):\r\n commands = filter(None, map(str, tokens))\r\n return self._command_delimiter.join(commands).strip()", "def _commandline_join(self, tokens):\r\n commands = filter(None, map(str, tokens))\r\n return self._command_delimiter.join(commands).strip()", "def optstr(self) -> str:\n if self.is_help:\n return self.namestr()\n typestr: str = (\n self.typestr().upper() if self.type_frozen else self.typestr()\n )\n\n if not self.ns_param or not self.argname_shorten:\n # * makes sure it's not wrapped'\n return f\"{self.namestr()}*[{typestr}]\"\n\n ret: List[str] = []\n for term in sorted(self.terminals, key=len):\n ret.append(f\"~<ns>.{term}\")\n return \", \".join(ret) + f\"*[{typestr}]\"", "def daqStringMod(self, arg):\n\t\tself.stuff = []\n\t\tfor i in arg:\n\t\t\tself.stuff.append(\"\\'\" + i + \"\\'\")\n\t\treturn self.stuff", "def _get_choices_str(self):\n return ', '.join(\n '\"%s\"' % choice\n for choice in self.choices\n )", "def escape_quotes(self, val):\n if val.startswith(self.quote) and val.endswith(self.quote):\n # make sure any previously escaped quotes are not re-escaped\n middle = val[1:-1].replace(\"\\\\\" + self.quote, self.quote)\n middle = middle.replace(self.quote, \"\\\\\" + self.quote)\n val = \"%s%s%s\" % (self.quote, middle, self.quote)\n\n return val", "def __cmd_builder(self):\n self.cmd = 'python -m lizard \"%s\" ' % self.get_proj_path()\n args = \"\"\n if self.get_cyclo_args():\n args = self.get_cyclo_args()\n exclude = \",\".join(str(x) for x in self.get_cyclo_exclude() if x is not None)\n if exclude:\n exclude = ','.join(' -x \"{0}\"'.format(w) for w in exclude.rstrip().split(','))\n self.cmd = self.cmd + args + \" \" + exclude + \" --csv\"\n print(self.cmd) # pragma: no mutate", "def _escapePaths(self, paths):\n cmd = \"\"\n for p in paths:\n if ' ' in p:\n cmd += ' \"{0}\"'.format(p)\n else:\n cmd += ' ' + p\n return cmd", "def DoubleQuote(s):\n if not s:\n return '\"\"'\n elif all(c in _SafeShellChars for c in s):\n return s\n else:\n return '\"' + s.replace('\"', '\\\\\"') + '\"'", "def shlex_join(split_command) -> str:\n return \" \".join(shlex.quote(str(arg)) for arg in split_command)", "def pddl_rep(self):\n rep = '(' + self.obj_list[0] + \" (\"\n for argument in self.obj_list[1:-1]:\n rep += argument + \" \"\n rep = rep[:-1]\n rep += \") \" + self.obj_list[-1] + \") \"\n return rep", "def pddl_rep(self):\n rep = ''\n if self.is_negated:\n rep += \"(not \"\n if self.name != \"\":\n rep += \"(\" + self.name + \" \"\n else:\n rep += \"(\"\n for argument in self.args:\n if self.is_typed:\n rep += argument[0] + \" - \" + argument[1] + \" \"\n else:\n rep += argument + \" \"\n rep = rep[:-1]\n rep += \")\"\n if self.is_negated:\n rep += \")\"\n return rep", "def buildCmd( tcmpCmd, cmd, target, sequence, fieldList):\n cmdList = [tcmpCmd, cmd, target, sequence, fieldList]\n\n return \"<{cmd}>\".format(cmd=\":\".join(cmdList))" ]
[ "0.7206458", "0.7059188", "0.6501883", "0.6480349", "0.6454796", "0.6273626", "0.6229917", "0.620037", "0.61765164", "0.6134786", "0.610084", "0.6098873", "0.60828114", "0.6048522", "0.6045765", "0.602606", "0.6023066", "0.6018371", "0.5999344", "0.59954375", "0.59915906", "0.59812146", "0.5945601", "0.59439075", "0.5922113", "0.59001267", "0.5874881", "0.5854723", "0.58499295", "0.58299446", "0.58200806", "0.58080333", "0.58004284", "0.5787458", "0.57666177", "0.5726046", "0.56890875", "0.5669257", "0.5666388", "0.5649754", "0.5618534", "0.5608566", "0.5607447", "0.56027275", "0.56019115", "0.5597243", "0.55862916", "0.5581915", "0.55613554", "0.55612975", "0.5560865", "0.55602235", "0.555233", "0.5551645", "0.5551645", "0.5533439", "0.5527414", "0.55052924", "0.5500004", "0.54824436", "0.5475174", "0.5471038", "0.5467072", "0.5456249", "0.54510635", "0.54476094", "0.54331297", "0.5432648", "0.54291654", "0.54215765", "0.5414184", "0.5397864", "0.5392333", "0.5382186", "0.53747135", "0.53745073", "0.5372513", "0.5371547", "0.5369326", "0.5365213", "0.53599876", "0.53572667", "0.5353723", "0.5345106", "0.5333559", "0.5332316", "0.5330273", "0.5330273", "0.5330273", "0.533004", "0.53072685", "0.5294325", "0.5291168", "0.5290272", "0.5290021", "0.52874416", "0.52747345", "0.52623427", "0.5247143", "0.52429575" ]
0.67177594
2
Run |cmd| inside of |cwd| and exit if it fails.
def run(cmd: List[str],
        cmd_prefix: List[str] = None,
        log_prefix: List[str] = None,
        check: bool = True,
        cwd: str = None,
        extra_env: Dict[str, str] = None,
        **kwargs):
    # Python 3.6 doesn't support capture_output.
    if sys.version_info < (3, 7):
        capture_output = kwargs.pop('capture_output', None)
        if capture_output:
            assert 'stdout' not in kwargs and 'stderr' not in kwargs
            kwargs['stdout'] = subprocess.PIPE
            kwargs['stderr'] = subprocess.PIPE

    # The |env| setting specifies the entire environment, so we need to manually
    # merge our |extra_env| settings into it before passing it along.
    if extra_env is not None:
        env = kwargs.pop('env', os.environ)
        env = env.copy()
        env.update(extra_env)
        kwargs['env'] = env

    if not log_prefix:
        log_prefix = []
    log_cmd = log_prefix + cmd
    if not cmd_prefix:
        cmd_prefix = []
    real_cmd = cmd_prefix + cmd

    if cwd is None:
        cwd = os.getcwd()
    logging.info('Running: %s\n (cwd = %s)', cmdstr(log_cmd), cwd)
    if cmd_prefix:
        logging.debug('Real full command: %s', cmdstr(real_cmd))

    result = subprocess.run(real_cmd, cwd=cwd, check=False, **kwargs)
    if check and result.returncode:
        logging.error('Running %s failed!', log_cmd[0])
        if result.stdout is not None:
            logging.error('stdout:\n%s', result.stdout)
        if result.stderr is not None:
            logging.error('stderr:\n%s', result.stderr)
        sys.exit(result.returncode)
    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_cmd(self, cmd, cwd=None):\n logging.debug('Running %s', cmd)\n proc = subprocess.Popen(\n cmd,\n cwd=cwd or self._app_dir,\n stdout=subprocess.PIPE)\n output, _ = proc.communicate()\n if proc.returncode:\n sys.stderr.write('\\n' + output + '\\n')\n raise subprocess.CalledProcessError(proc.returncode, cmd, output)\n return output", "def _run_cmd(self, cmd, args=[], allow_fail=True, cwd=None):\n cwd = cwd or self.package_dir\n result = _run_cmd([cmd] + list(args), cwd=cwd)\n if result[2] and not allow_fail:\n raise Exception(\"Command failed retcode=%s\" % result[2])\n return result", "def run_cmd(cmd, ignore_return_code=False, no_shell=False, cwd=None):\n return run_cmd_sync(\n cmd=cmd, ignore_return_code=ignore_return_code, no_shell=no_shell, cwd=cwd\n )", "def run(cmd, directory, fail_ok=False, verbose=False):\n if verbose:\n print(cmd)\n p = subprocess.Popen(cmd,\n cwd=directory,\n stdout=subprocess.PIPE)\n (stdout, _) = p.communicate()\n if p.returncode != 0 and not fail_ok:\n raise RuntimeError('Failed to run {} in {}'.format(cmd, directory))\n return stdout", "def run(cmd, cmd_input=None, cwd=None):\n\n with Popen(\n \" \".join(cmd) if cwd else cmd,\n stdin=PIPE,\n stdout=PIPE,\n stderr=PIPE,\n cwd=cwd,\n shell=True,\n env={\"PATH\": cwd} if cwd else None,\n ) as proc:\n out, err = proc.communicate(\n input=cmd_input.encode(\"utf-8\") if cmd_input else None\n )\n rcode = proc.returncode\n\n return out.decode(\"utf-8\"), err.decode(\"utf-8\"), rcode", "def run(cmd):\n print ' '.join(cmd)\n try:\n check_call(cmd)\n except CalledProcessError as cpe:\n print \"Error: return code: \" + str(cpe.returncode)\n sys.exit(cpe.returncode)", "def run(cmd):\n print(cmd)\n r = os.system(cmd)\n if r:\n print(\"ERROR: command returned {0}\".format(r))\n sys.exit(r)", "def execute(cmd, path):\n oldPath = os.getcwd()\n os.chdir(path)\n\n exitcode, output = subprocess.getstatusoutput(cmd)\n\n os.chdir(oldPath)\n\n ok = not exitcode\n\n return ok, output", "def _invoke_cmd(args, **kwargs):\n\n cwd = kwargs.get('working_directory')\n\n exit_code = call(args, cwd=cwd)\n\n if exit_code != 0:\n print('External command failed.')", "def run_inside_dir(command, dirpath):\n # import shlex\n with inside_dir(dirpath):\n # return subprocess.check_call(shlex.split(command))\n return subprocess.run(\n command, shell=True, capture_output=True, universal_newlines=True\n ).returncode", "def run(cmd: str) -> None:\n subprocess.run(cmd, shell=True, check=True)", "def run_cmd(cmd, workdir=None):\n p = subprocess.Popen(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=workdir,\n )\n out, err = p.communicate()\n _log_cmd(cmd, workdir, out, err, p.returncode)\n return (out.decode('utf-8'), err.decode('utf-8'), p.returncode)", "def run(cmd, shell=False, cwd=None):\n try:\n out = check_output(cmd, shell=shell, cwd=cwd, stderr=STDOUT)\n except CalledProcessError as ex:\n return ex.returncode, ex.output\n else:\n return 0, out", "def RunCmd(args, cwd=None, quiet=False):\n return cmd_util.RunCmd(args, cwd, quiet)", "def run_cmd(\n cmd: Union[List[str], str],\n cwd: Optional[Union[str, Path]] = None,\n env: Optional[Dict[str, str]] = None,\n timeout: Optional[int] = None,\n redirect: Optional[Union[str, Path, TextIOWrapper]] = None,\n) -> str:\n if isinstance(cmd, str):\n args = shlex.split(cmd)\n else:\n args = cmd\n cwd = cwd or Path.cwd()\n env = env or os.environ.copy()\n log.info(\"Calling: %s\", \" \".join(args))\n try:\n output = subprocess.run(\n args,\n shell=is_windows(),\n cwd=cwd,\n env=env,\n 
timeout=timeout,\n universal_newlines=True,\n check=True,\n stdout=PIPE,\n stderr=STDOUT, # combine stdout,stderr streams\n ).stdout\n except subprocess.CalledProcessError as err:\n handle_output(err.stdout, redirect)\n raise\n return handle_output(output, redirect)", "def run_cmd(cmd):\n print 'running: %s' % cmd\n return subprocess.call(cmd.split(), env=os.environ, shell=False)", "def RunCmd(args, cwd=None):\n logger.debug(str(args) + ' ' + (cwd or ''))\n return Call(args, cwd=cwd)", "def run(cmd):\n print('running', cmd)\n proc = sp.Popen([cmd], shell=True)\n proc.wait()\n assert proc.poll() == 0", "def run_command(cmd, debug=False):\n if debug:\n msg = ' PWD: {}'.format(os.getcwd())\n print_warn(msg)\n msg = ' COMMAND: {}'.format(cmd)\n print_warn(msg)\n cmd()", "def run_command_sync(cmd, allow_fail=False):\n logging.debug('Running %s', scrub(cmd))\n p = subprocess.Popen(cmd)\n p.wait()\n\n if p.returncode != 0 and not allow_fail:\n raise NonZeroReturnCode\n\n return p.returncode", "def try_cmd(cmd, stdout=None, stderr=None):\n print \"\\n\\n %s \\n\\n\" %cmd\n try:\n retval = sp.check_call(cmd, shell=True, stdout=stdout, stderr=stderr)\n except sp.CalledProcessError:\n print(\"The command:\\n %s \\ndid not work, quitting...\" %cmd)\n sys.exit(0)", "def run_command(cmd, cmd_input=None, ok_exit_codes=None):\n proc = make_subprocess(cmd, stdout=True, stderr=True, stdin=True,\n close_fds=True)\n return finish_subprocess(proc, cmd, cmd_input=cmd_input,\n ok_exit_codes=ok_exit_codes)", "def execute_cmd(cmd, cwd=None, timeout=5):\n p = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n try:\n p.wait(timeout=timeout)\n except subprocess.TimeoutExpired:\n return None\n else:\n stdout, stderr = p.stdout.read(), p.stderr.read()\n stdout, stderr = stdout.decode('utf-8', errors='ignore'), stderr.decode('utf-8', errors='ignore')\n if p.returncode:\n raise ExecuteError('Error running command {}: The error code {} has returned. 
Stderr: {}'.format(\n ' '.join(cmd), p.returncode, stderr\n ))\n else:\n return stdout, stderr", "def exec_cmd(cmd):\n\targs = shlex.split(cmd)\n\tverbose = True\n\n\ttry:\n\t\tif verbose == True:\n\t\t\tsubprocess.check_call(args)\n\t\telse:\n\t\t\tsubprocess.check_call(args,\n\t\t\t\t\t\t\t\t stdout=subprocess.STDOUT,\n\t\t\t\t\t\t\t\t stderr=subprocess.STDOUT)\n\t# Exception\n\texcept subprocess.CalledProcessError as e:\n\t\tprint \"Command\t :: \", e.cmd\n\t\tprint \"Return Code :: \", e.returncode\n\t\tprint \"Output\t :: \", e.output", "def _execute_cmd(cmd, outdir: str = DEFAULT_OUTDIR, stdout_file=None, stderr_file=None):\n if cmd is None:\n raise Exception(\"cmd was not specified\")\n\n logging.info(f\"Will attempt to execute '{cmd}'\")\n\n if outdir is None:\n outdir = '/tmp'\n logging.info(f\"outdir was not defined and therefore was set to default '{outdir}'\")\n\n if stdout_file is None:\n stdout_file = os.path.join(outdir, os.path.basename(__file__) + '.stdout')\n logging.info(f\"stdout_file was not specified and therefore was set to '{stdout_file}'\")\n\n if stderr_file is None:\n stderr_file = os.path.join(outdir, os.path.basename(__file__) + '.stderr')\n logging.info(f\"stderr_file was not specified and therefore was set to '{stderr_file}'\")\n\n if os.path.exists(stdout_file):\n logging.info(f\"STDOUT file '{stdout_file}' already exists so will delete it now\")\n os.remove(stdout_file)\n\n if os.path.exists(stderr_file):\n logging.info(f\"STDERR file '{stderr_file}' already exists so will delete it now\")\n os.remove(stderr_file)\n\n p = subprocess.Popen(cmd, shell=True)\n\n (stdout, stderr) = p.communicate()\n\n pid = p.pid\n\n logging.info(f\"The child process ID is '{pid}'\")\n\n p_status = p.wait()\n\n p_returncode = p.returncode\n\n if p_returncode is not None:\n logging.info(f\"The return code was '{p_returncode}'\")\n else:\n logging.info(\"There was no return code\")\n\n if p_status == 0:\n logging.info(f\"Execution of cmd '{cmd}' has completed\")\n else:\n raise Exception(f\"Received status '{p_status}'\")\n\n if stdout is not None:\n logging.info(\"stdout is: \" + stdout)\n\n if stderr is not None:\n logging.info(\"stderr is: \" + stderr)\n\n return stdout_file", "def run(cmd: List[str]) -> int:\n logger.debug('cmd: %s', ' '.join(cmd))\n child = Popen(cmd, stdout=PIPE, stderr=PIPE)\n stdoutdata, stderrdata = child.communicate()\n\n if stdoutdata.strip():\n log_std('stdout', stdoutdata.decode(),\n logging.DEBUG if child.returncode == 0 else logging.ERROR)\n\n if stderrdata.strip():\n log_std('stderr', stderrdata.decode(), logging.ERROR)\n\n logger.debug(\"returncode %s\", child.returncode)\n return child.returncode", "def run(path, cmd):\n logging.info('Processing %s', path)\n logging.debug('Running: %s', ' '.join(cmd))\n subprocess.call(cmd)", "def run(self, cmd):\n log = logging.getLogger(self.name)\n try:\n retcode = subprocess.call(cmd, shell=True, env=self.augmented_environment())\n\n if retcode < 0:\n log.error('Command received signal %s: %s' % (-retcode, cmd))\n raise zc.buildout.UserError('System error')\n elif retcode > 0:\n log.error('Command failed with exit code %s: %s' % (retcode, cmd))\n raise zc.buildout.UserError('System error')\n except OSError as e:\n log.error('Command failed: %s: %s' % (e, cmd))\n raise zc.buildout.UserError('System error')", "def exec_cmd(cmd):\n args = shlex.split(cmd)\n verbose = True\n\n # TRY\n FNULL = open(os.devnull, 'w')\n try:\n if verbose == True:\n subprocess.check_call(args, env=my_env)\n else:\n 
subprocess.check_call(args, stdout=FNULL, stderr=subprocess.STDOUT, env=my_env)\n # Exception\n except subprocess.CalledProcessError as e:\n print \"Command :: \", e.cmd\n print \"Return Code :: \", e.returncode\n print \"Output :: \", e.output\n # Finally\n finally:\n FNULL.close()", "def run(cmd, dieOnError=True):\n\n\tps = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)\n\texitcode = ps.returncode\n\tstdout,stderr = ps.communicate()\n\treturn exitcode, stdout, stderr", "def run_command(cmd):\n if cmdl_args.debug:\n print(f'==DEBUG== Executing {cmd}')\n # In debug mode, keep the output. Otherwise, redirect it to devnull.\n out = tempfile.NamedTemporaryFile(suffix='.out', prefix=f'{cmd[0]}_', dir='./', delete=False)\n err = tempfile.NamedTemporaryFile(suffix='.err', prefix=f'{cmd[0]}_', dir='./', delete=False)\n else:\n out = open(os.devnull, 'w')\n err = open(os.devnull, 'w')\n\n return_value = subprocess.call(cmd, stdout=out, stderr=err)\n\n out.close()\n err.close()\n\n if return_value == 0:\n None\n else:\n print(f'==ERROR== {cmd} failed with return value {return_value}')\n\n if cmdl_args.debug:\n print(f'==DEBUG== See {out.name} and {err.name} for more details about the command execution.')\n\n return return_value", "def run_cmd(cmd, comment=\"\", check_status=True):\n verbose_msg(\"Running\", f\"'{cmd}'\", bcolors.BOKBLUE + comment)\n try:\n to_run = cmd\n if check_status:\n to_run = f\"{cmd} && echo OK\"\n content = os.popen(to_run).read()\n if content:\n content = content.strip()\n for i in content.strip().split(\"\\n\"):\n verbose_msg(\"++\", i)\n if \"Encountered error\" in content:\n msg(\"[WARNING] Error encountered runtime in\",\n cmd, color=bcolors.BWARNING)\n if check_status:\n if \"OK\" not in content and \"root\" not in cmd:\n msg(\"Error:\\n\",\n content, color=bcolors.FAIL)\n raise RuntimeError(\n \"Command\", cmd, \"does not have the OK tag\", content)\n except:\n fatal_msg(\"Error while running\", f\"'{cmd}'\")", "def os_call( self, cmd_arg, ):\n while True: # will exit when it works or run out of editors\n a_command = self.working_command\n if a_command is None:\n a_command = self.get_next_command( )\n\n if a_command is None: # no commands left to try\n msg = \"Run out of editors to try\"\n# AppGlobal.__logger.error( msg )\n raise RuntimeError( msg ) # or fail in some other where\n break # we are aread done\n try:\n if cmd_arg is None:\n proc = Popen( [ a_command, ] )\n else:\n proc = Popen( [ a_command, cmd_arg ] )\n self.working_command = a_command\n break # do not get here if exception so command \"worked \"\n except Exception as excpt: # this should let us loop ignoring exception\n pass\n msg = ( f\"os_call exception trying to use >{a_command}< with cmd_arg >{cmd_arg}< exception: {excpt}\" )\n # if exception proc not returned f\"\\npopen returned {proc}\" )\n AppGlobal.logger.debug( msg )", "def _run(self, run_cmd):\n cmd = f'cd {self.save_dir};{run_cmd} {self.save_dir.name}'\n command = subprocess.Popen(cmd, shell=True, stdout=PIPE, )\n std_out = str(command.stdout.read())\n\n if 'Error Message' in std_out:\n self.delete_from_cash()\n assert False, f'Error when executing {run_cmd}:\\n{std_out}'", "def call(self, cmd):\n exitcode, _stdout, _stderr = self.run(cmd, nonzero_e = None)\n return exitcode", "def run_cmd(cmd, chdir=None, env=None):\n l = logging.getLogger('screpper.util')\n cmd_l = shlex.split(cmd)\n l.debug('exec command %s' % (cmd))\n l.debug('as list %s' % (cmd_l))\n\n # set directory to current if not defined\n if not chdir:\n chdir = 
os.getcwd()\n\n # prepare environemtn\n if env:\n new_env = dict(os.environ.items() + env.items())\n else:\n new_env = os.environ\n p = subprocess.Popen(cmd_l, cwd=chdir, stdout=subprocess.PIPE, \n stderr=subprocess.PIPE, env=new_env)\n \n try:\n out, err = p.communicate()\n l.debug('stdout: ' + str(out))\n l.debug('stderr: ' + str(err))\n except Exception, e:\n l.error('failed to run process: %s' % (str(e)))\n pass\n\n l.debug('process finished, retcode %d' % (p.returncode))\n return p.returncode, out, err", "def run_command(cmd):\n return subprocess.call(cmd, shell=True)", "def run(cmd):\n \n proc = subprocess.Popen (cmd, \n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=True\n )\n stdout_value, stderr_value = proc.communicate()\n print stdout_value\n print stderr_value\n\n if proc.poll() > 0:\n sys.stderr.write ( \"\\nError\\n\" )\n print '\\tstderr:', repr(stderr_value.rstrip())\n return False\n else:\n return True", "def execute(parent, cmd, *args, **kwargs):\n\n with xtrace(parent, flatten(cmd)) as h:\n try:\n code = subprocess.call(cmd, *args, **kwargs)\n except:\n sys.exit(\n DiagnosticReporter.fatal(EXCEPTION_EXECUTING_PROCESS, cmd[0]))\n finally:\n h.report(code)\n return code", "def call_and_exit(self, cmd, shell=True):\n sys.exit(subprocess.call(cmd, shell=shell))", "def shell(cmd):\n print('Running \"{}\"...'.format(cmd))\n subprocess.check_call(cmd, shell=True)", "def run_check_errors(cmd):\n if type(cmd) == str:\n cmd = cmd.split()\n output = subprocess.run(cmd, capture_output=True, text=True)\n if output.stderr != \"\":\n print_cmd = \" \".join(map(str, cmd))\n sys.exit(\n f\"The error {output.stderr} was generated when running {print_cmd}. Exiting.\"\n )\n return", "def execute(cmd) :\n return os.system( cmd )", "def run_cmd(cmd, verbose=1, target=None):\n cmd = \"set -u pipefail; \" + cmd\n if verbose == 2:\n sys.stderr.write(\"\\nRunning command:\\n%s\\n\" % cmd)\n stdout = open(\"/dev/stdout\", \"w\")\n stderr = open(\"/dev/stderr\", \"w\")\n elif verbose == 1:\n sys.stderr.write(\"\\nRunning command:\\n%s\\n\" % cmd)\n stdout = open(\"/dev/null\", \"w\")\n stderr = open(\"/dev/null\", \"w\")\n else:\n stdout = open(\"/dev/null\", \"w\")\n stderr = open(\"/dev/null\", \"w\")\n\n res = subprocess.call(cmd, shell=True, stderr=stderr, stdout=stdout)\n stderr.close()\n if res != 0:\n print(\"Command Failed! Please Check!\")\n exit(1)", "def run_executable(cmd, build_dir, env):\n # First look if we are inside a display.\n if env.get('_CHROMIUM_INSIDE_XVFB') == '1':\n # No need to recurse.\n return test_env.run_executable(cmd, env)\n\n pid = None\n xvfb = 'Xvfb'\n try:\n if sys.platform == 'linux2':\n # Defaults to X display 9.\n display = ':9'\n pid = start_xvfb(xvfb, display)\n if not pid:\n return 1\n env['DISPLAY'] = display\n if not wait_for_xvfb(os.path.join(build_dir, 'xdisplaycheck'), env):\n return 3\n # Inhibit recursion.\n env['_CHROMIUM_INSIDE_XVFB'] = '1'\n # Some ChromeOS tests need a window manager. 
Technically, it could be\n # another script but that would be overkill.\n try:\n ice_cmd = ['icewm']\n subprocess.Popen(\n ice_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env)\n except OSError:\n print >> sys.stderr, 'Failed to run %s' % ' '.join(ice_cmd)\n return 1\n return test_env.run_executable(cmd, env)\n finally:\n if pid:\n kill(pid)", "def shell_command(context, cmd, err_msg=\"Shell command error\"):\n try:\n\n context.last_cmd = cmd\n output = check_output(cmd, shell=True, cwd=os.getcwd())\n context.output = output\n\n except:\n raise Exception(err_msg)", "def run(self, cmd, code):\n files = [f for f in listdir(dirname(self.filename)) if f[-3:] == '.go']\n return self.tmpdir(cmd, files, code)", "def exec_command(cmd):\n with subprocess.Popen(\n cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n shell=True) as p:\n stdout, _ = p.communicate()\n if p.returncode != 0:\n logger.error(stdout)\n return None\n\n return stdout", "def run_cmd_silent(\n cmd: Union[List[str], str],\n cwd: Optional[Union[str, Path]] = None,\n env: Optional[Dict[str, str]] = None,\n timeout: Optional[int] = None,\n) -> bool:\n try:\n run_cmd(cmd=cmd, cwd=cwd, env=env, timeout=timeout, redirect=None)\n return True\n except Exception: # Do not raise here to prevent leaking sensitive data such as the cmd args\n return False", "def run_with_subprocess(cmd):\n new_env = dict(os.environ, LC_ALL='C')\n try:\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=new_env)\n output, error = proc.communicate()\n returncode = proc.returncode\n except OSError, (errno, strerror):\n output, error = \"\", \"Could not execute %s: %s\" % (cmd[0], strerror)\n returncode = 1\n\n return (output, error, returncode)", "def run_cmd(cmd, call=True, echo=False, fail_silently=False):\n if sys.version_info < (2, 7):\n alt_retcode = True\n check_output = subprocess.check_call\n else:\n alt_retcode = False\n check_output = subprocess.check_output\n\n cmd_str = ' '.join(cmd)\n kwargs = {}\n method = subprocess.call if call else check_output\n stdout = sys.stdout if echo else subprocess.PIPE\n\n if echo:\n print('$ {0}'.format(cmd_str))\n\n if call:\n kwargs.update({'stdout': stdout})\n\n try:\n retcode = method(cmd, **kwargs)\n except subprocess.CalledProcessError as err:\n if fail_silently:\n return False\n error(str(err) if IS_PY3 else unicode(err))\n\n if call and retcode and not fail_silently:\n error('Command {0!r} returned non-zero exit status {1}'.\n format(cmd_str, retcode))\n\n return not retcode if alt_retcode else retcode", "def runin(cmd, stdin):\n result = subprocess.Popen(cmd,stdin=subprocess.PIPE)\n result.wait()\n return result.returncode", "def run_and_handle_error(cmd, print_cmd=True):\n stdout, stderr = run_subprocess_cmd(cmd, print_cmd=print_cmd, print_stdout_stderr=False)\n if stderr:\n import sys; sys.exit('Standard Errors:\\n%s\\n' % stderr)\n return stdout, stderr", "def _run_cmd(*args):\n proc = Popen(\n args, stdin=PIPE, stdout=PIPE, stderr=PIPE,\n cwd=os.path.dirname(__file__))\n output, _ = proc.communicate()\n code = proc.returncode\n return code, output", "def execcmd(cmd, debug=False):\n\n success = True\n\n try:\n\n if debug:\n subprocess.check_call(cmd)\n\n else:\n with open(os.devnull, 'w') as devnull:\n subprocess.check_call(cmd, stdout=devnull, stderr=subprocess.STDOUT)\n\n success = True\n\n except subprocess.CalledProcessError:\n success = False\n\n return success", "def check(*cmd):\n print >>sys.stderr, 'Run:', cmd\n subprocess.check_call(cmd)", 
"async def checked_run(cmd, env=None):\n\n # Start the subprocess.\n logging.info('Running: %s', await expand_cmd_str(cmd))\n with logged_timer('{} finished'.format(get_cmd_name(cmd))):\n p = await asyncio.create_subprocess_exec(\n *cmd, env=env,\n stdout=asyncio.subprocess.PIPE,\n stderr=asyncio.subprocess.STDOUT)\n\n # Stream output from the process stdout.\n lines = []\n while True:\n line = await p.stdout.readline()\n if not line:\n break\n line = line.decode()[:-1]\n lines.append(line)\n logging.info(line)\n\n # Wait for the process to finish, check it was successful & build stdout.\n await p.wait()\n output = '\\n'.join(lines)[:-1]\n if p.returncode:\n raise RuntimeError('Return code {} from process: {}\\n{}'.format(\n p.returncode, await expand_cmd_str(cmd), output))\n\n return output", "def RunCommand(cmd, always_dump_stdout_stderr):\n if VERBOSE:\n Print(str(cmd))\n Print(\" \".join(cmd))\n start = time.time()\n p = subprocess.Popen(cmd,\n bufsize=1000*1000,\n stderr=subprocess.PIPE,\n stdout=subprocess.PIPE)\n while p.poll() is None:\n time.sleep(0.1)\n now = time.time()\n if now - start > TIMEOUT:\n Print('Error: timeout')\n Print('Killing pid %d' % p.pid)\n os.waitpid(-1, os.WNOHANG)\n return -1\n stdout = p.stdout.read()\n stderr = p.stderr.read()\n retcode = p.wait()\n\n if retcode != 0:\n Print('Error: command failed %d %s' % (retcode, ' '.join(cmd)))\n always_dump_stdout_stderr = True\n\n if always_dump_stdout_stderr:\n Print(stderr)\n Print(stdout)\n return retcode", "def _subexec(command):\n lcwd = fabric.state.env.get('lcwd', None) or None #sets lcwd to None if it bools to false as well\n process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=lcwd)\n out, err = process.communicate()\n print \"command : %s \" % command\n print \"out: %s\" % out\n print \"err: %s\" % err", "def call(*args, **kargs):\n os.putenv('PWD', os.getcwd())\n return subprocess.call(*args, **kargs)", "def run_command(cmd):\n p = subprocess.Popen(cmd.split(' '), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)\n data = p.communicate()\n return p.returncode == 0", "def run_cmd_and_log(cmd, log_msg, log_file_path, err_on_fail=True):\n logger.info(log_msg)\n logger.info(f\"log file can be found at {log_file_path}\")\n logger.info(cmd.replace(\"\\\\\", \"\\\\\\\\\"))\n with open(log_file_path, \"w\") as log_file:\n process = subprocess.Popen(cmd,\n stdout=log_file,\n stderr=log_file,\n shell=True,\n universal_newlines=True)\n\n return_code = process.wait()\n if return_code != 0 and err_on_fail:\n raise ChildProcessError(\n f\"The following command failed {cmd} \\n The log file can be found at {log_file_path}\")", "def execute(self, cmd, cwd=None, capture_output=False, env=None, raise_errors=True):\n logging.info('Executing command: {cmd}'.format(cmd=str(cmd)))\n stdout = subprocess.PIPE if capture_output else None\n process = subprocess.Popen(cmd, cwd=cwd, env=env, stdout=stdout)\n output = process.communicate()[0]\n returncode = process.returncode\n if returncode:\n # Error\n if raise_errors:\n raise subprocess.CalledProcessError(returncode, cmd)\n else:\n logging.info('Command returned error status %s', returncode)\n if output:\n logging.info(output)\n return returncode, output", "def StartCmd(args, cwd=None, quiet=False, stdout=None, stderr=None, env=None):\n return cmd_util.StartCmd(args, cwd=cwd, quiet=quiet, stdout=stdout,\n stderr=stderr, env=env)", "def run(cmd):\n cmd = str(cmd)\n\n if env['verbose']:\n sys.stdout.write('--> %s\\n' % 
cmd)\n\n cmd_list = shlex.split(cmd)\n\n p = subprocess.Popen(\n cmd_list,\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE\n )\n\n return p.communicate()", "def run_cmd(cmd):\n logging.debug('Run command \"'+cmd+'\"')\n try:\n process = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\n process.check_returncode()\n\n except Exception as e:\n logging.exception(str(e) +\"\\nCMD_SHELL : \"+cmd+\"\\nSTDOUT : \"+process.stdout.decode()+\"\\nSTDERR : \"+process.stderr.decode(), exc_info=True)\n #logging.critical(\"{CDM : \"+cmd+\", \"} : \"+cmd)\n #logging.critical(\"STDOUT : \"+process.stdout.decode())\n #logging.critical(\"STDERR : \"+process.stderr.decode())\n #raise e\n\n return process.stdout.decode()", "def chroot(cmd, dest_dir, stdin=None, stdout=None):\n run = ['chroot', dest_dir]\n\n for element in cmd:\n run.append(element)\n\n try:\n proc = subprocess.Popen(run,\n stdin=stdin,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n out = proc.communicate()[0]\n logging.debug(out.decode())\n except OSError as err:\n logging.error(\"Error running command: %s\", err.strerror)", "def run_cmd(self, cmd):\n command = \" \".join(cmd)\n print(command)\n logging.info(\"Running command \" + command)\n cmdProcess = subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n shell=True)\n for line in cmdProcess.stdout:\n logging.info(line.decode(\"utf-8\").rstrip())\n cmdProcess.wait()\n logging.info('return code: ' + str(cmdProcess.returncode))\n if cmdProcess.returncode != 0:\n raise ValueError('Error in running command with return code: '\n + command\n + str(cmdProcess.returncode) + '\\n')\n logging.info(\"command \" + command + \" ran successfully\")\n return \"success\"", "def run(cmd, comment):\n print(\"―\" * 80)\n if comment:\n print(f\"💬 {comment}\")\n print(f\"➤ {cmd}\")\n proc = subprocess.run(cmd, shell=True) # nosec\n if proc.returncode == 0:\n print(\"✅ success\")\n else:\n print(f\"❌ ERROR! 
return code: {proc.returncode}\")\n sys.exit(proc.returncode)", "async def _run_cmd(self, cmd, timeout=5):\n try:\n self._flush_buffer()\n self.pexpect_child.sendline(cmd)\n ret = self.pexpect_child.expect_exact(\n [self.cmd_prompt, pexpect.TIMEOUT], timeout=timeout\n )\n stdout = self.parse_cmd_output(self.pexpect_child.before) if ret == 0 else \"\"\n self.pexpect_child.sendline(\"echo $?\")\n ret = self.pexpect_child.expect_exact(\n [self.cmd_prompt, pexpect.TIMEOUT], timeout=timeout\n )\n exit_status = self.parse_cmd_output(self.pexpect_child.before) if ret == 0 else -1\n try:\n exit_status = int(exit_status)\n except ValueError:\n exit_status = -1\n return exit_status, stdout\n except Exception as e:\n self.applog.exception(\"Exception occured --> _run_command\", exc_info=e)\n raise", "def run(cmd, proc_stdout = sys.stdout, proc_stderr = sys.stderr,\n check = True):\n print cmd\n proc = subprocess.Popen(cmd, shell=True, bufsize=-1,\n stdout=proc_stdout, stderr=proc_stderr)\n output, errors = proc.communicate()\n sts = proc.wait()\n if check is True and sts != 0:\n raise RuntimeError(\"Command: %s exited with non-zero status %i\" % (cmd, sts))\n return output, errors", "def run(self):\n\n pwd = self.chdir()\n if pwd is None: return -1\n res = mkstuff.run_cmd(self.bindir + '/' + self.func + ' ' + self.args)\n os.chdir(pwd)\n return res", "def subprocess_run(cmd):\n print(shlex.join(cmd))\n try:\n ret = subprocess.run(cmd, capture_output=True,\n text=True, env=os.environ.copy(), check=True)\n if (ret.stdout):\n print(ret.stdout)\n return ret\n except subprocess.CalledProcessError as e:\n if (e.stderr):\n print(e.stderr)\n raise e", "def shell_command(split_cmd, cwd=HERE):\n print(colored(f'Kör \"{\" \".join(split_cmd)}\"', 'blue', attrs=['bold']))\n try:\n sp.run(split_cmd, cwd=cwd, check=True)\n return True\n except sp.CalledProcessError:\n return False", "def _exec_cmd(cmd, stdout=None, stderr=None):\n rc = 0\n kwargs = {}\n if stdout is not None:\n kwargs[\"stdout\"] = stdout\n if stderr is not None:\n kwargs[\"stderr\"] = stderr\n try:\n subprocess.check_call(cmd, **kwargs)\n except CalledProcessError as e:\n LOG.error(\"[return code: %s] %s\", e.returncode, e)\n rc = e.returncode\n return rc", "def run_command(command, cwd=None):\n return subprocess.run(command, shell=True, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)", "def run_command(command, root_dir):\n print highlight('\\nRunning tests: %s' % command)\n\n start_time = time.time()\n proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, cwd=root_dir)\n stdout_value, stderr_value = proc.communicate()\n end_time = time.time()\n\n if 0 < proc.returncode > 1:\n print error('Error trying to run the tests')\n print error(stderr_value)\n stdout_value = stderr_value\n else:\n print highlight('-------------------------------')\n print highlight('Finished tests in %.2f seconds\\n' % (end_time - start_time))\n\n return (proc.returncode, stdout_value)", "def _subprocess(cmd):\n\n log.debug('Running: \"%s\"', \" \".join(cmd))\n try:\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)\n ret = salt.utils.stringutils.to_unicode(proc.communicate()[0]).strip()\n retcode = proc.wait()\n\n if ret:\n return ret\n elif retcode != 1:\n return True\n else:\n return False\n except OSError as err:\n log.error(err)\n return False", "def do_run_script(self, arg):\n try:\n with open(os.path.join(os.getcwd(), arg), 'r') as fin:\n script = fin.readlines()\n for line in script:\n 
self.onecmd(line)\n except (FileNotFoundError) as exc:\n print(exc)", "def ExecCmd(self, cmd, halt_on_error=True):\n self.f_bash.write(\"%s\\n\"%cmd)\n self.f_bash.flush()\n if not self.dry_run:\n try:\n execCmd(cmd, self.f_log, self.f_crash, self.verbose)\n self.f_log.flush()\n except RuntimeError, errstr:\n if halt_on_error:\n raise RuntimeError(errstr)\n else:\n self.LogErrors('%s' % errstr)\n return True\n else:\n return False", "def run_cmd(cmd, **kwargs):\n log.info(f\"Executing command: {cmd}\")\n if isinstance(cmd, str):\n cmd = shlex.split(cmd)\n r = subprocess.run(\n cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdin=subprocess.PIPE,\n **kwargs\n )\n log.debug(f\"CMD output: {r.stdout.decode()}\")\n if r.stderr:\n log.error(f\"CMD error:: {r.stderr.decode()}\")\n if r.returncode:\n raise CommandFailed(\n f\"Error during execution of command: {cmd}.\"\n f\"\\nError is {r.stderr.decode()}\"\n )\n return r.stdout.decode()", "def RunCommand(cmd):\n logging.debug(\"Running cmd %s\", cmd)\n\n p = subprocess.Popen(cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=True)\n o, e = p.communicate()\n s = p.returncode\n\n if s != 0:\n return (s, e)\n\n return (s, o)", "def run_command(cmd):\n proc = subprocess.Popen(\n cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n )\n stdout, stderr = proc.communicate()\n return proc.returncode, stdout, stderr", "def run_cmd_sync(cmd, ignore_return_code=False, no_shell=False, cwd=None):\n if isinstance(cmd, list) or no_shell:\n # Create the async process\n proc = subprocess.Popen(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd\n )\n else:\n proc = subprocess.Popen(\n cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd\n )\n\n # Capture stdout/stderr\n stdout, stderr = proc.communicate()\n\n output_message = f\"\\n[{proc.pid}] Command:\\n{cmd}\"\n # Append stdout/stderr to the output message\n if stdout != \"\":\n output_message += f\"\\n[{proc.pid}] stdout:\\n{stdout.decode()}\"\n if stderr != \"\":\n output_message += f\"\\n[{proc.pid}] stderr:\\n{stderr.decode()}\"\n\n # If a non-zero return code was thrown, raise an exception\n if not ignore_return_code and proc.returncode != 0:\n output_message += f\"\\nReturned error code: {proc.returncode}\"\n\n if stderr != \"\":\n output_message += f\"\\nstderr:\\n{stderr.decode()}\"\n raise ChildProcessError(output_message)\n\n # Log the message with one call so that multiple statuses\n # don't get mixed up\n CMDLOG.debug(output_message)\n\n return CommandReturn(proc.returncode, stdout.decode(), stderr.decode())", "async def checked_run(*cmd):\n\n # Start the subprocess.\n logging.info('Running: %s', expand_cmd_str(cmd))\n with logged_timer('{} finished'.format(get_cmd_name(cmd))):\n p = await asyncio.create_subprocess_exec(\n *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)\n\n # Stream output from the process stdout.\n chunks = []\n while True:\n chunk = await p.stdout.read(16 * 1024)\n if not chunk:\n break\n chunks.append(chunk)\n\n # Wait for the process to finish, check it was successful & build stdout.\n await p.wait()\n stdout = b''.join(chunks).decode()[:-1]\n if p.returncode:\n raise RuntimeError('Return code {} from process: {}\\n{}'.format(\n p.returncode, expand_cmd_str(cmd), stdout))\n\n return stdout", "def run_cmd(cmd, args, path=None, raise_error=True):\n\n if path is not None:\n # Transparently support py.path objects\n path = str(path)\n\n p = sp.Popen([cmd] + list(args), stdout=sp.PIPE, 
stderr=sp.PIPE,\n cwd=path)\n streams = tuple(s.decode('latin1').strip() for s in p.communicate())\n return_code = p.returncode\n\n if raise_error and return_code != 0:\n raise RuntimeError(\n \"The command `{0}` with args {1!r} exited with code {2}.\\n\"\n \"Stdout:\\n\\n{3}\\n\\nStderr:\\n\\n{4}\".format(\n cmd, list(args), return_code, streams[0], streams[1]))\n\n return streams + (return_code,)", "def exec_cmd(cmd):\n print(' '.join(str(e) for e in cmd))\n try:\n res = subprocess.run(cmd, capture_output=True, check=True)\n print(res.stdout.decode(\"utf8\"))\n return res\n except subprocess.CalledProcessError as err:\n logging.error(err.stderr)\n raise err", "def run_cmd(command, work_dir=None):\n if work_dir is not None:\n os.chdir(work_dir) # Change to working directory\n\n # Run Command\n ps = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)\n\n # Read + yield stdout until process ends\n while ps.poll() is None:\n line = ps.stdout.readline()\n if line != \"\":\n yield line\n\n return_code = ps.returncode\n # Throw exception if return code is not 0\n if return_code:\n exc = \"\\nCOMMAND:%s\\nRET_CODE:%i\" % (command, return_code)\n raise ReturnCodeError(exc, return_code)", "def call(cmd):\n with open(os.devnull, \"w+\") as w:\n try:\n check_call(cmd, stdout = w, stderr = w, shell = True)\n except CalledProcessError as err:\n print(err)", "def run_subprocess(cmd):\n subprocess.Popen(cmd, stdin =subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdout=subprocess.PIPE,\n shell=True,)", "def execute(cmd, abort = True):\n print \"Executing '{0}'\".format(cmd)\n status = os.system(cmd)\n if not status == 0:\n if abort:\n message = 'Execution of command {0} failed, return code = {1}'.format(cmd, status)\n raise Exception(message)\n else:\n message = 'WARNING: Execution of command {0} failed, return code = {1}; ignore and proceed'.format(cmd, status)\n print message\n return status", "def execCMD(self, cmd, arg):\n result = subprocess.check_output([cmd, arg])\n return result", "def execute(self, cwd=None, **kwargs):\n args = [self.executable, \"<\", self.stdin_fname, \">\", self.stdout_fname, \"2>\", self.stderr_fname]\n\n self.cmd_str = \" \".join(args)\n\n p = Popen(self.cmd_str, shell=True, stdout=PIPE, stderr=PIPE, cwd=cwd)\n\n (self.stdout_data, self.stderr_data) = p.communicate()\n\n self.returncode = p.returncode\n\n if self.returncode != 0:\n with open(self.stdout_fname, \"r\") as out, open(self.stderr_fname, \"r\") as err:\n self.stdout_data = out.read()\n self.stderr_data = err.read()\n\n if self.verbose:\n print(\"*** stdout: ***\\n\", self.stdout_data)\n print(\"*** stderr ***\\n\", self.stderr_data)\n\n raise self.Error(\"%s returned %s\\n cmd_str: %s\" % (self, self.returncode, self.cmd_str))", "def file_operation(path, command):\n with ChDir(path):\n subprocess.check_call(command)", "def test_chdir(self):\n new_executor = self.executor.chdir('foo/bar')\n output, _err = self.executor.command(['pwd']).batch()\n self.assertEqual(output, '')\n output, _err = new_executor.command(['pwd']).batch()\n self.assertEqual(output, 'foo/bar')", "def runCommand(self, cmd, stdin=None, env=None):\n\n\t mycmd=subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\n\t output, error=mycmd.communicate()\n\t while not mycmd.wait():\n\t \t# do stuff\n\t \treturn 0\n\n\n\n\t #if not isList(cmd):\n\t #cmd = shlex.split(cmd)\n\t #opts = dict(stderr=subprocess.PIPE, stdout=subprocess.PIPE)\n\t #if env:\n\t # opts.update(env=env)\n\t #if stdin:\n\t # 
opts.update(stdin=subprocess.PIPE)\n\t # stdout, stderr=subprocess.Popen(cmd, **opts).communicate(stdin)\n\t #else :\n\t # stdout, stderr=subprocess.Popen(cmd, **opts).communicate()\n\t #return stdout, stderr", "def run_cmd(context, exec_cmd, pty=True, hide=False, error_message=\"An unknown error has occurred!\"):\n print(f\"LOCAL - Running command {exec_cmd}\")\n result = context.run(exec_cmd, pty=pty, hide=hide, warn=True)\n if not result:\n print(f\"ERROR - {error_message}\\n{result.stdout if pty else result.stderr}\")\n raise invoke.exceptions.UnexpectedExit(result)\n\n return result", "def call(self):\n\n process = subprocess.Popen(self._cmd, stdout=self._stdout, stderr=self._stderr,\n shell=isinstance(self._cmd, basestring), env=self._env, cwd=self._cwd)\n returnData = process.communicate()\n\n return ProcessResult(process.returncode, returnData[0], returnData[1])", "def execute_cmd(cmd, verb=False):\n if verb:\n print(\"Executing: {}\".format(cmd))\n\n p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)\n out, err = p.communicate()\n code = p.returncode\n if code:\n sys.exit(\"Error {}: {}\".format(code, err))\n return out, err", "def run_cmd(cmd):\n cmdl = cmd.split(\" \")\n try:\n p = subprocess.Popen(cmdl, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n so, se = p.communicate()\n except subprocess.CalledProcessError, e:\n sys.stderr.write(\"Error encountered in running '\" + cmd +\n \"'. Return status is '\" + str(e.returncode) + \"'\\n\")\n sys.exit(1)\n except:\n sys.stderr.write(\"Unknown error encountered in running 'qhost -j -xml'.\\n\")\n sys.exit(1)\n return so" ]
[ "0.72233146", "0.714203", "0.7020301", "0.6959047", "0.6955779", "0.69045275", "0.6902057", "0.67949283", "0.67763764", "0.6725198", "0.6682631", "0.6600782", "0.6563511", "0.6562186", "0.6558308", "0.65555507", "0.65034527", "0.649164", "0.6484016", "0.6430076", "0.6378891", "0.6373561", "0.63681567", "0.6344992", "0.6336546", "0.63171834", "0.6301428", "0.62969524", "0.6294258", "0.6287321", "0.6259106", "0.6214222", "0.6212777", "0.6199826", "0.6197715", "0.6185217", "0.617875", "0.6178688", "0.6117498", "0.61074865", "0.61057025", "0.6099104", "0.60834", "0.6056196", "0.605584", "0.6051519", "0.6043435", "0.6019465", "0.6014725", "0.6012331", "0.5994678", "0.59898275", "0.598523", "0.5984842", "0.5981733", "0.5973611", "0.5963523", "0.59446764", "0.59400725", "0.5937373", "0.5934128", "0.5933399", "0.59270227", "0.5910384", "0.59057593", "0.589836", "0.5883102", "0.5876024", "0.5875906", "0.58748186", "0.5870155", "0.58616096", "0.5858797", "0.5840246", "0.5838962", "0.5836417", "0.58342016", "0.58301306", "0.5828254", "0.5821139", "0.5820329", "0.5815817", "0.5810534", "0.5806741", "0.580613", "0.58052015", "0.5803906", "0.5786384", "0.57838005", "0.5777681", "0.5763167", "0.57524323", "0.57507634", "0.5749838", "0.5748864", "0.5747551", "0.57379884", "0.5735745", "0.57298183", "0.5723618" ]
0.6026382
47
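The negatives directly above all wrap shell-command execution with subprocess. For reference, a minimal, self-contained sketch of the common pattern they share (run a command, capture its output, raise on a non-zero exit code); this sketch and its example command are illustrative assumptions, not part of any record in the corpus.

import subprocess

def run_checked(cmd: list) -> str:
    # check=True raises CalledProcessError on a non-zero exit code;
    # capture_output=True and text=True collect stdout/stderr as strings.
    result = subprocess.run(cmd, capture_output=True, text=True, check=True)
    return result.stdout

# Example: print(run_checked(['echo', 'hello']))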
Return sha256 hex digest of |path|.
def sha256(path: Union[Path, str]) -> str:
    # The file shouldn't be too big to load into memory, so be lazy.
    with open(path, 'rb') as fp:
        data = fp.read()
    m = hashlib.sha256()
    m.update(data)
    return m.hexdigest()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_hash(path: Path) -> str:\n m = hashlib.sha256()\n m.update(path.read_bytes())\n return m.hexdigest()", "def _get_hash(self, path):\n with open(path, \"r\") as fp:\n content = fp.read()\n\n return sha256(content).hexdigest()", "def hash_of_file(path):\n with open(path, 'rb') as archive:\n sha = sha256()\n while True:\n data = archive.read(2 ** 20)\n if not data:\n break\n sha.update(data)\n return encoded_hash(sha)", "def hash(path):\n\n with open(path, 'r') as file:\n return hashlib.sha1(file.read()).hexdigest()", "def GetFileSha256(file_path):\n return base64.b64encode(GetFileHashes(file_path, do_sha256=True)['sha256'])", "def _get_file_sha256_hash(file_path):\n sha256hash = hashlib.sha256()\n chunk_size = 8192\n with open(file_path, \"rb\") as f:\n while True:\n buffer = f.read(chunk_size)\n if not buffer:\n break\n sha256hash.update(buffer)\n return sha256hash.hexdigest()", "def compute_digest(path):\n hash = hashlib.sha512()\n for part in DiskCrawler.partial_reader(path, 4 * 1024 * 1024):\n hash.update(part)\n return hash.digest()", "def sha256(self):\n return sha256file(self.abspath)", "def hash_file(path: str) -> str:\n return _hash_file(path, hashlib.md5()).hexdigest()", "def checksum(path):\n with open(path, 'r') as f:\n return md5(f.read()).digest()", "def sha256(value):\n return hashlib.sha256(value).hexdigest()", "def file_digest(path, algo=hashlib.md5):\n checksum = algo()\n with open(path, 'rb') as f:\n for chunk in iter(lambda: f.read(4096), b\"\"):\n checksum.update(chunk)\n return checksum.hexdigest()", "def hash_file_sha256(file_path, binary=False, buffer_size=65536):\n return hash_file(file_path, hash_type=hashlib.sha256, binary=binary, buffer_size=buffer_size)", "def sha256(content):\n content = content.encode('utf-8')\n return hashlib.sha256(content).hexdigest()", "def sha256(f: IO[str]) -> str:\n pos = f.tell()\n f.seek(0)\n digest = hashlib.sha256(f.read().encode()).hexdigest()\n f.seek(pos)\n\n return digest", "def hashFile(path: str) -> str:\n\tif not os.path.exists(path):\n\t\traise FileNotFoundError\n\n\thasher = hashlib.sha1()\n\tblock_sz = 8192\n\twith open(path, 'rb') as f:\n\t\tbuf = f.read(block_sz)\n\t\twhile len(buf) > 0:\n\t\t\thasher.update(buf)\n\t\t\tbuf = f.read(block_sz)\n\treturn str(hasher.hexdigest())", "def hash_file(path, digest=None):\r\n digest = digest or hashlib.sha1()\r\n with open(path, 'rb') as fd:\r\n s = fd.read(8192)\r\n while s:\r\n digest.update(s)\r\n s = fd.read(8192)\r\n return digest.hexdigest()", "def _hash_file_content(self, path):\n hasher = hashlib.sha1()\n with open(path, 'rb') as file:\n buffer = file.read(self.hash_block_size)\n while len(buffer) > 0:\n hasher.update(buffer)\n buffer = file.read(self.hash_block_size)\n return hasher.hexdigest()", "def sha256(cls, value):\n assert type(value) is str\n return int(sha256(value.encode()).hexdigest(), 16)", "def symlink_hash(path):\n hasher = sha1()\n data = path_to_bytes(os.readlink(path))\n hasher.update(('blob %u\\0' % len(data)).encode('ascii'))\n hasher.update(data)\n return hasher", "def file_hash(filepath: Path):\n hsh = hashlib.sha256()\n b = bytearray(128 * 1024)\n mv = memoryview(b)\n with Path(filepath).open(\"rb\", buffering=0) as f:\n for n in iter(lambda: f.readinto(mv), 0):\n hsh.update(mv[:n])\n return hsh.hexdigest()", "def hash_bytes_256(b: bytes) -> str:\n return hashlib.sha256(b).hexdigest()", "def get_checksum(file_path: str) -> str:\n\n # Open the file in binary mode\n with open(file_path, \"rb\") as file:\n # Create a SHA-256 hash object\n 
hash_object = hashlib.sha256()\n\n # Iterate over the file in chunks\n for chunk in iter(lambda: file.read(4096), b\"\"):\n # Feed the chunk to the hash object\n hash_object.update(chunk)\n\n # Obtain the checksum in hexadecimal format\n checksum = hash_object.hexdigest()\n\n return checksum", "def _Hash(content: bytes) -> str:\n return hashlib.sha256(content).hexdigest()", "def sha256_hexoutput(in_str):\r\n return sha256(in_str.encode('ascii')).hexdigest()", "def _asset_hash(path: str) -> str:\n full_path = THEME_PATH / \"static\" / path\n digest = hashlib.sha1(full_path.read_bytes()).hexdigest()\n\n return f\"_static/{path}?digest={digest}\"", "def hash_of(self, arcpath) -> str:\n return self._records[arcpath].hash", "def get_file_sha256(fname):\n with open(fname, 'rb') as afile:\n return base64.b64encode(get_file_hash(afile, hashlib.sha256()))", "def generate_sha256_hash(fpath, sig_key=None):\n return run(fpath, sig_key)", "def sha256(s: str) -> str:\n return hashlib.sha256(s.encode()).hexdigest()", "def _calc_sha1(path):\n calc = hashlib.sha1()\n with open(path, 'r') as f:\n calc.update(f.read())\n return calc.hexdigest()", "def hash(self):\n block = 1024 * 1024 * 4 # 4 MB.\n hasher = hashlib.sha256()\n\n with open(self.path, \"rb\") as f:\n while True:\n chunk = f.read(block)\n if not chunk:\n break\n hasher.update(hashlib.sha256(chunk).digest())\n\n digest = hasher.hexdigest()\n pdbox.debug(\"Hash for %s: %s\" % (self.path, digest))\n return digest", "def SHA256(self) -> _n_0_t_3[_n_0_t_9]:", "def get_hash(link):\n return hashlib.sha256(link.encode('utf-8')).hexdigest()", "def hash_file(path):\n if not os.path.isfile(path):\n raise ValueError(\"The given path `{}` is not a file.\".format(path))\n\n md5 = hashlib.md5()\n\n with open(path, 'rb') as file_:\n while True:\n data = file_.read(65536)\n if not data:\n break\n md5.update(data)\n\n return \"{}\".format(md5.hexdigest())", "def get_file_hash(file_path):\n with open(file_path, 'rb') as f:\n file_name = os.path.basename(file_path)\n to_hash = f.read() + file_name.encode('utf-8')\n new_hash = hashlib.md5(to_hash).hexdigest()\n return new_hash", "def sha3_256(x):\n return hashlib.sha3_256(x).digest()", "def hash(cls, path, digest=None, hasher=sha1):\r\n if digest is None:\r\n digest = hasher()\r\n with open(path, 'rb') as fh:\r\n cls.update_hash(fh, digest)\r\n return digest.hexdigest()", "def fetch_local_hashcode(self, path):\n\t\treturn hashlib.sha256(open(self.config[\"daemon\"][\"rootdir\"] + path, \"rb\").read()).hexdigest()", "def get_sha256_file(filename):\n BLOCKSIZE = 65536\n hasher = hashlib.sha256()\n with open(filename, 'rb') as afile:\n buf = afile.read(BLOCKSIZE)\n while len(buf) > 0:\n hasher.update(buf)\n buf = afile.read(BLOCKSIZE)\n return hasher.hexdigest()", "def get_file_sha(full_path):\n in_file = open(full_path, 'rb')\n try:\n # Bug: why doesn't this use sha_func?\n sha_value = sha1()\n while True:\n bytes = in_file.read(READ_CHUNK_LEN)\n if bytes == \"\":\n break\n sha_value.update(bytes)\n return sha_value.digest()\n finally:\n in_file.close()", "def sha256sum(filename):\n content = open(filename, 'rb').read()\n sha256_obj = hashlib.sha256(content)\n return sha256_obj.hexdigest()", "def convert_to_SHA256(x):\n result = hashlib.sha256(x.encode())\n result = result.hexdigest()\n return result", "def convert_to_SHA256(x):\n result = hashlib.sha256(x.encode())\n result = result.hexdigest()\n return result", "def calchash(filename):\n sha = hashlib.sha1()\n with open(filename, 'rb') as f:\n sha.update(f.read())\n 
return sha", "def file_sha256(file_path, chunk_size=10240):\n\n sha256 = hashlib.sha256()\n if os.path.exists(file_path):\n with open(file_path, \"rb\") as f:\n while True:\n data = f.read(chunk_size)\n if not data:\n break\n else:\n sha256.update(data)\n\n return sha256.hexdigest()", "def hexdigest(self):\r\n return ''.join(['%02x' % ord(c) for c in self.digest()])", "def _sha256(sha256):\n if not sha256:\n sha256 = \"0\" * 64\n\n return sha256", "def get_partial_sha256(self, nbytes):\n return sha256file(abspath=self.abspath, nbytes=nbytes)", "def file_hash(file_to_hash: Path) -> str:\n sha256_hash = hashlib.sha256()\n with file_to_hash.open(\"rb\") as f:\n for block in iter(lambda: f.read(4096), b\"\"):\n sha256_hash.update(block)\n return sha256_hash.hexdigest()", "def hash_file(method, path):\n f = open(path, \"rb\")\n h = method()\n while True:\n buf = f.read(BUFSIZE)\n if not buf:\n break\n h.update(buf)\n return h.hexdigest()", "def h(x):\n\n hasher = hashlib.sha256()\n hasher.update(x)\n return hasher.digest()", "def generate_content_hash(source_path):\n\n sha256 = hashlib.sha256()\n\n if os.path.isdir(source_path):\n source_dir = source_path\n for source_file in list_files(source_dir):\n update_hash(sha256, source_dir, source_file)\n else:\n source_dir = os.path.dirname(source_path)\n source_file = source_path\n update_hash(sha256, source_dir, source_file)\n\n return sha256", "def sha256(self):\n return self._sha256", "def checksum(file):\n\n cksm = hashlib.sha256()\n f = open(file, 'rb')\n try:\n cksm.update(f.read())\n finally:\n f.close()\n return cksm.hexdigest()", "def hash_file(filename):\n\n # make a hash object\n h = hashlib.sha256()\n\n # open file for reading in binary mode\n with open(filename,'rb') as file:\n\n # loop till the end of the file\n chunk = 0\n while chunk != b'':\n # read only 1024 bytes at a time\n chunk = file.read(1024)\n h.update(chunk)\n\n # return the hex representation of digest\n return h.hexdigest()", "def sha256(self):\n return self.sha256checksums()", "def sha256sum(filename):\n if not os.path.isfile(filename):\n return ''\n hasher = hashlib.sha256()\n with open(filename, 'rb') as hash_file:\n buf = hash_file.read(HASH_BLOCK_SIZE)\n while len(buf) > 0:\n hasher.update(buf)\n buf = hash_file.read(HASH_BLOCK_SIZE)\n return hasher.hexdigest()", "def sha256Sum(self, data):\n data = str(data)\n m = hashlib.sha256()\n if os.path.isfile(data):\n try:\n f = file(data, 'rb')\n except:\n return 'ERROR: unable to open %s' % data\n while True:\n d = f.read(8096)\n if not d:\n break\n m.update(d)\n f.close()\n # Otherwise it could be either 1) a directory 2) miscellaneous data (like json)\n else:\n m.update(data)\n return m.hexdigest()", "def sha256(data):\n\n d = rpki.POW.Digest(rpki.POW.SHA256_DIGEST)\n d.update(data)\n return d.digest()", "def get_256_hash_from_string(string):\n\n sha256 = hashlib.sha256()\n sha256.update(string.encode('utf-8'))\n\n return sha256.hexdigest()", "def _hash_file(fpath, algorithm='sha256', chunk_size=65535):\n if (algorithm == 'sha256') or (algorithm == 'auto' and len(hash) == 64):\n hasher = hashlib.sha256()\n else:\n hasher = hashlib.md5()\n\n with open(fpath, 'rb') as fpath_file:\n for chunk in iter(lambda: fpath_file.read(chunk_size), b''):\n hasher.update(chunk)\n\n return hasher.hexdigest()", "def hash_value(self, value):\n h = hashlib.sha256()\n h.update(str(value))\n return h.hexdigest()", "def hash(password):\n return sha256_crypt.encrypt(password)", "def get_hash(content):\n return 
hashlib.sha1(content).hexdigest()", "def hashfile(file):\n\n hasher = hashlib.sha256()\n\n with open(file, 'rb') as afile:\n buf = afile.read(BLOCKSIZE)\n hasher.update(buf)\n\n return(hasher.hexdigest())", "def get_file_checksum(file_path):\n with open(file_path) as f:\n content = f.read()\n return md5(content.encode()).hexdigest()", "def hash_file_at_path(file_path, algorithm=\"sha1\"):\n block_size = 64 * 1024\n hasher = getattr(hashlib, algorithm)()\n with open(file_path, \"rb\") as file_handler:\n while True:\n data = file_handler.read(block_size)\n if not data:\n break\n hasher.update(data)\n return hasher.hexdigest()", "def _hash_file(fpath, algorithm='sha256', chunk_size=65535):\n if (algorithm is 'sha256') or (algorithm is 'auto' and len(hash) is 64):\n hasher = hashlib.sha256()\n else:\n hasher = hashlib.md5()\n\n with open(fpath, 'rb') as fpath_file:\n for chunk in iter(lambda: fpath_file.read(chunk_size), b''):\n hasher.update(chunk)\n\n return hasher.hexdigest()", "def compute_sha256_for_file(file, as_base64, blocksize=65536):\n # type: (pathlib.Path, bool, int) -> str\n hasher = hashlib.sha256()\n if isinstance(file, pathlib.Path):\n file = str(file)\n with open(file, 'rb') as filedesc:\n while True:\n buf = filedesc.read(blocksize)\n if not buf:\n break\n hasher.update(buf)\n if as_base64:\n return base64_encode_string(hasher.digest())\n else:\n return hasher.hexdigest()", "def calculate_hash(self):\n return sha256_2_string(str(self.header()))", "def calculate_hash(self):\n return sha256_2_string(str(self.header()))", "def GetFileSha1(file_path):\n return base64.b64encode(GetFileHashes(file_path, do_sha1=True)['sha1'])", "def hash_file_native(file_path, tool=\"sha256sum\"):\n output = subprocess.check_output([tool, file_path], shell=False)\n return output.decode(\"utf-8\").partition(\" \")[0].strip()", "def get_sha256(src: str) -> str:\n if not isinstance(src, str) or src == \"\":\n raise Exception(\"Invalid src str\")\n i = io.BytesIO(bytearray(src, encoding='utf-8'))\n return get_sha256_from_stream(i)", "def _get_signature(value):\n mySha = hashlib.sha256()\n mySha.update(value)\n # print mySha.hexdigest()\n return mySha.hexdigest()", "def get_hash(file_buffer):\n data = file_buffer.read()\n hasher = sha1()\n hasher.update(data)\n return hasher.hexdigest()", "def md5_hash(file_path):\n with open(file_path, 'rb') as fp:\n return md5(fp.read()).hexdigest()", "def digest(self):\n d = MegaCrypto.str_to_a32(self.hash)\n return (d[0] ^ d[1], d[2] ^ d[3])", "def add_hash(path):\n if re.search(r\"^/.+\", path):\n path = path[1:]\n\n # If a story, fix the path.\n is_story = False\n original_path = path\n if re.search(r\"^\\d{4}\\-\\d{2}\\-\\d{2}\", path):\n path = \"static/stories/%s.json\" % path\n is_story = True\n\n blocksize = 32768\n file_hash = hashlib.sha256()\n file_path = \"dist/%s\" % path\n\n with open(file_path) as file_to_hash:\n file_buffer = file_to_hash.read(blocksize)\n while (len(file_buffer) > 0):\n file_hash.update(file_buffer)\n file_buffer = file_to_hash.read(blocksize)\n\n if is_story:\n hashed_path = \"%s.%s\" % (original_path, file_hash.hexdigest())\n else:\n hashed_path = re.sub(r'(.*?)\\.(.*)$', (\"\\\\1.%s.\\\\2\" % file_hash.hexdigest()), path)\n\n return hashed_path", "def hexdigest(self):\n return \"\".join(\"%02x\" % ord(x)\n for x in MegaCrypto.a32_to_str(self.digest()))", "def get_file_hash(fname, hash_length):\n hash_sha = hashlib.sha256()\n with open(fname, 'rb') as infile:\n for chunk in iter(lambda: infile.read(4096), b''):\n 
hash_sha.update(chunk)\n hash_sha = hash_sha.hexdigest()\n hash_sha = int(hash_sha, 16) % (2 ** (4 * hash_length))\n return hex_encode(hash_sha, hash_length)", "def hashhex(s):\n h = hashlib.sha1()\n h.update(s.encode('utf-8'))\n return h.hexdigest()", "def calc_file_hash(filepath):\n with open(filepath, 'rb') as f:\n return md5(f.read()).hexdigest()", "def computeHash(infile):\n f = open(infile, 'rb')\n buffer = f.read()\n f.close()\n return hashlib.sha1(buffer).hexdigest()", "def hashhex(s):\n h = hashlib.sha1()\n h.update(s.encode('utf-8'))\n return h.hexdigest()", "def get_sha256_from_stream(src: io.IOBase) -> str:\n if not isinstance(src, io.IOBase) or not src.readable():\n raise Exception(\"src is not stream or unreadable\")\n m: hashlib._hashlib.HASH = hashlib.sha256()\n return calc_hash(src, m)", "def sha256(ctx, salt=\"\"):\n if ctx.data:\n salted_input_value = salt + \":\" + ctx.data\n ctx.data = hashlib.sha256(salted_input_value.encode()).hexdigest()\n else:\n raise RefError(\n \"Ref error: eval_func: nothing to sha256 hash; try \" \"something like '|random:str|sha256'\"\n )", "def HexDigest(self, name, truncation_length=None):\n\n if truncation_length is None:\n truncation_length = 64\n name_bytes = name.encode('UTF-8')\n return hashlib.sha256(name_bytes).hexdigest()[:truncation_length]", "def fingerprint(self) -> str:\n fp = self.sha256.hex()\n return fp", "def sigfile(fpath):\n sigsha = hashlib.sha1()\n fbj = open(fpath, 'rb')\n try:\n sigsha.update(fbj.read()) # pylint: disable-msg=E1101\n finally:\n fbj.close()\n return sigsha.hexdigest()", "def hash_sbox(f):\n hf = sha256()\n for x in f:\n hf.update(hex(x).encode('utf-8'))\n return hf.hexdigest()", "def hashhex(s):\n h = hashlib.sha1()\n h.update(s)\n return h.hexdigest()", "def hashhex(s):\n h = hashlib.sha1()\n h.update(s.encode())\n return h.hexdigest()", "def computeHash(filename):\n fileHash = hashlib.sha256()\n with open(filename, \"rb\") as f:\n for chunk in iter(lambda: f.read(4096), b\"\"):\n fileHash.update(chunk)\n return fileHash.hexdigest()", "def hash_from_file(file_path):\r\n return hash_from_code(open(file_path, 'rb').read())", "def hashing(word) :\r\n ans = hashlib.sha256(word.encode())\r\n return ans.hexdigest()", "def file_digest(file):\n # 'rb' file mode reads the file as bytes\n input_file = open(file, 'rb')\n data = input_file.read()\n # getting the digest\n digest = hash_comparing(data).hexdigest()\n input_file.close()\n return digest", "def get_checksum(path: Union[Path, str]) -> str:\n path = Path(path)\n if not (path.is_file() or path.is_dir()):\n msg.fail(f\"Can't get checksum for {path}: not a file or directory\", exits=1)\n if path.is_file():\n return hashlib.md5(Path(path).read_bytes()).hexdigest()\n else:\n # TODO: this is currently pretty slow\n dir_checksum = hashlib.md5()\n for sub_file in sorted(fp for fp in path.rglob(\"*\") if fp.is_file()):\n dir_checksum.update(sub_file.read_bytes())\n return dir_checksum.hexdigest()", "def sha_hash(file_name: str):\n BLOCKSIZE = 65536\n line = '' # format one line for hash\n with open(file_name, 'rb') as afile:\n buf = afile.read(BLOCKSIZE) # read each line of doc\n while len(buf) > 0:\n line += buf.decode('utf-8')\n buf = afile.read(BLOCKSIZE)\n\n hex = \"0x\" + sha1(line.encode()) # create sha1 hash\n return int(hex, 0)" ]
[ "0.84448147", "0.7994854", "0.7628011", "0.7436823", "0.73758745", "0.72930205", "0.72381604", "0.7233479", "0.72159195", "0.70954305", "0.70228964", "0.69657105", "0.695293", "0.6945403", "0.69420445", "0.6941568", "0.6924218", "0.68792206", "0.68724394", "0.68499297", "0.68373364", "0.6779685", "0.6776504", "0.6775331", "0.6764068", "0.6713469", "0.6618318", "0.66041803", "0.6601849", "0.6572953", "0.65561384", "0.6545195", "0.653763", "0.652317", "0.65152943", "0.650471", "0.6504436", "0.64979255", "0.6487399", "0.6482584", "0.6467815", "0.64678085", "0.6401437", "0.6401437", "0.6385896", "0.6376744", "0.63654596", "0.63537323", "0.63503486", "0.6347823", "0.63474894", "0.6340303", "0.633341", "0.631223", "0.62880313", "0.62845474", "0.6278515", "0.6272023", "0.62715954", "0.62653893", "0.62576973", "0.6245622", "0.61999106", "0.6196438", "0.6191889", "0.6191473", "0.6190666", "0.61839736", "0.61797667", "0.61795753", "0.6178214", "0.6178214", "0.6177924", "0.61755955", "0.61687565", "0.61654186", "0.6156914", "0.61493737", "0.61462736", "0.61434054", "0.6137521", "0.6127559", "0.61186254", "0.6117214", "0.6108033", "0.6105395", "0.6098895", "0.6093349", "0.60894173", "0.6086875", "0.60842043", "0.6078656", "0.60733104", "0.6069315", "0.6067553", "0.6064376", "0.60639995", "0.6062925", "0.60615104", "0.60593843" ]
0.83290803
1
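For comparison with the document above, a minimal sketch of a file-hashing helper that streams the file in blocks instead of loading it whole; the chunked read, the block size, and the example path are assumptions added for illustration and are not part of the record.

import hashlib
from pathlib import Path

def sha256_chunked(path: Path, block_size: int = 1 << 20) -> str:
    # Stream the file in fixed-size blocks so large files need not fit in memory.
    digest = hashlib.sha256()
    with open(path, 'rb') as fp:
        for chunk in iter(lambda: fp.read(block_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

# Example: print(sha256_chunked(Path('archive.tar.gz')))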
Unpack |archive| into |cwd|.
def unpack(archive: Union[Path, str], cwd: Optional[Path] = None, files: Optional[List[Union[Path, str]]] = ()):
    archive = Path(archive)
    if cwd is None:
        cwd = Path.cwd()
    if files:
        files = ['--'] + list(files)
    else:
        files = []
    # Try to make symlink usage easier in Windows.
    extra_env = {
        'MSYS': 'winsymlinks:nativestrict',
    }
    logging.info('Unpacking %s', archive.name)
    # We use relpath here to help out tar on platforms where it doesn't like
    # paths with colons in them (e.g. Windows). We have to construct the full
    # path before running through relpath, as relative archives will implicitly
    # be checked against os.getcwd rather than the explicit cwd.
    src = os.path.relpath(cwd / archive, cwd)
    run(['tar', '--no-same-owner', '-xf', src] + files, cwd=cwd, extra_env=extra_env)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _unpack_archive(self):\n with zipfile.ZipFile(self._archive_full_path, 'r') as zip_ref:\n zip_ref.extractall(self._storage_path)\n\n _logger.debug('Archive has been unpacked.')", "def unpack_dir(indir, outdir, bands=None, clouds=None):\r\n archives = glob.glob(indir + '*.tar.gz')\r\n count = len(archives)\r\n for idx, archive in enumerate(archives):\r\n # Determine the outpath directory name for the unpacked landsat archive\r\n unpackDir = outdir + os.path.splitext(os.path.split(\r\n os.path.splitext(archive)[0])[1])[0]\r\n\r\n # Check if the directory already exists and make it if it doesn't\r\n if not os.path.exists(unpackDir):\r\n os.makedirs(unpackDir)\r\n\r\n # Unpack the current archive.\r\n unpack_landsat(archive, unpackDir, bands=bands,clouds=clouds)\r\n\r\n # Let the user know how progress is going.\r\n print(archive + ' unpacked (' + str(idx + 1) + ' of ' + str(count) + ')')", "def unpack_archive(self, archive_name):\n archive = zipfile.ZipFile(\n os.path.join(\n self.current_path,\n os.path.split(self.exe_file)[0],\n archive_name\n )\n )\n\n self.extraction_path = os.getcwd()\n\n archive.extractall(self.extraction_path)\n\n self.rename_main_script()\n\n archive_pyc_files = []\n\n for path, dirs, files in os.walk(self.extraction_path):\n for f in files:\n archive_pyc_files.append(os.path.join(path, f))\n\n return archive_pyc_files", "def _unzip_archive(archive_path, target_directory, source_path=None, **_):\n\n # Create a temporary directory.\n # Create a zip archive object.\n # Extract the object.\n ctx.logger.debug('Unzipping {src} to {dst}.'.format(\n src=archive_path, dst=target_directory))\n\n src = unzip_archive(archive_path, skip_parent_directory=False)\n copy_directory(src, target_directory)\n remove_dir(src)\n return target_directory", "def unpack(backend_name, archive_id):\n backend = get_backend(backend_name)\n click.echo(f\"Retrieving archive {archive_id}\")\n backend.archive_retrieve(config.root_path, archive_id)", "def _unpack_archive(self, dir, filters):\n ext = os.path.splitext(self.path)[1]\n if ext in [\".zip\", \".xpi\"]:\n if filters:\n raise GbpError(\"Can only filter tar archives: %s\", (ext, self.path))\n self._unpack_zip(dir)\n else:\n self._unpack_tar(dir, filters)", "def restore(self, archive):\n logger.info(\"Restoring an old archive run from {}\".format(archive))\n if os.path.isabs(archive):\n restorefile = archive\n else:\n restorefile = os.path.join(self.containerpath, const.ARCHIVEDIR, archive)\n with ignored(OSError):\n shutil.rmtree(os.path.join(self.rundir))\n with tarfile.open(restorefile, \"r:gz\") as f:\n def is_within_directory(directory, target):\n \n abs_directory = os.path.abspath(directory)\n abs_target = os.path.abspath(target)\n \n prefix = os.path.commonprefix([abs_directory, abs_target])\n \n return prefix == abs_directory\n \n def safe_extract(tar, path=\".\", members=None, *, numeric_owner=False):\n \n for member in tar.getmembers():\n member_path = os.path.join(path, member.name)\n if not is_within_directory(path, member_path):\n raise Exception(\"Attempted Path Traversal in Tar File\")\n \n tar.extractall(path, members, numeric_owner=numeric_owner) \n \n \n safe_extract(f, self.rundir)\n self._refreshconfig()", "def extract_one(self, archive: Path, dest: Path):\n if dest.exists():\n shutil.rmtree(dest)\n\n dest.mkdir(parents=True)\n\n if self.should_use_libarchive_c:\n import libarchive\n\n old_cwd = os.getcwd()\n os.chdir(str(dest))\n try:\n libarchive.extract_file(str(archive))\n finally:\n os.chdir(old_cwd)\n return\n\n 
if archive.name.endswith(EXTENSION_ZIP):\n with zipfile.ZipFile(archive) as zf:\n zf.extractall(dest)\n elif archive.name.endswith(EXTENSION_TAR):\n mode = \"r:bz2\" if archive.name.endswith(\".bz2\") else \"r:gz\"\n with tarfile.open(archive, mode) as tf:\n self.safe_extract_all(tf, dest)\n else:\n raise ValueError(f\"Unrecognized archive format {archive.name}\")\n\n for path in [dest, *dest.rglob(\"*\")]:\n path.chmod(MOD_DIRECTORY if path.is_dir() else MOD_FILE)", "def unpackToProject(self,archive,project,progress=None):\n progress = progress or bolt.Progress()\n files = self.sortFiles([x[0] for x in self.fileSizeCrcs])\n if not files: return 0\n #--Clear Project\n destDir = dirs['installers'].join(project)\n if destDir.exists(): destDir.rmtree(safety='Installers')\n #--Extract\n progress(0,project.s+_(\"\\nExtracting files...\"))\n self.unpackToTemp(archive,files,SubProgress(progress,0,0.9))\n #--Move\n progress(0.9,project.s+_(\"\\nMoving files...\"))\n count = 0\n tempDir = self.tempDir\n for file in files:\n srcFull = tempDir.join(file)\n destFull = destDir.join(file)\n if srcFull.exists():\n srcFull.moveTo(destFull)\n count += 1\n self.clearTemp()\n return count", "def unpack_archive(\n filepath: types.PathLike, *, extract_dir: Optional[types.PathLike] = None\n) -> types.PathLike:\n filepath = utils.to_path(filepath).resolve()\n if not extract_dir:\n extract_dir = str(filepath.parent)\n filepath = str(filepath)\n os.makedirs(extract_dir, exist_ok=True)\n is_zipfile = zipfile.is_zipfile(filepath)\n is_tarfile = tarfile.is_tarfile(filepath)\n if not is_zipfile and not is_tarfile:\n LOGGER.debug(\"'%s' is not an archive\", filepath)\n return extract_dir\n else:\n LOGGER.info(\"extracting data from archive file '%s'\", filepath)\n shutil.unpack_archive(filepath, extract_dir=extract_dir, format=None)\n # we want to rename the unpacked directory to a consistent value\n # unfortunately, shutil doesn't pass this back to us\n # so, we get the root path of all the constituent members\n if is_zipfile:\n with zipfile.ZipFile(filepath, mode=\"r\") as zf:\n members = zf.namelist()\n else:\n with tarfile.open(filepath, mode=\"r\") as tf:\n members = tf.getnames()\n src_basename = os.path.commonpath(members)\n dest_basename = os.path.basename(filepath)\n if src_basename:\n while True:\n tmp, _ = os.path.splitext(dest_basename)\n if tmp == dest_basename:\n break\n else:\n dest_basename = tmp\n if src_basename != dest_basename:\n return shutil.move(\n os.path.join(extract_dir, src_basename),\n os.path.join(extract_dir, dest_basename),\n )\n else:\n return os.path.join(extract_dir, src_basename)\n else:\n return extract_dir", "def unzip_archive(archive):\n tmpdir = os.path.join(tempfile.gettempdir(),\n os.path.basename(archive))\n assert tmpdir != archive # That wouldn't work out\n\n if os.path.exists(tmpdir):\n # files are already extracted\n pass\n else:\n if tarfile.is_tarfile(archive):\n print 'Extracting tarfile ...'\n with tarfile.open(archive) as tf:\n tf.extractall(path=tmpdir)\n elif zipfile.is_zipfile(archive):\n print 'Extracting zipfile ...'\n with zipfile.ZipFile(archive) as zf:\n zf.extractall(path=tmpdir)\n else:\n raise ValueError('Unknown file type for %s' % os.path.basename(archive))\n return tmpdir", "def untar(archive):\n log.info('Unpacking archive \"%s\".' 
% archive)\n tar = module.params['tar']\n tar_extra_options = shlex.split(module.params['tar_extra_options'])\n if not tar:\n tar = module.get_bin_path('tar', required=True)\n if archive.endswith('.gz'):\n uncompress = 'z'\n elif archive.endswith('.bz2'):\n uncompress = 'j'\n else:\n raise ValueError('Unsupported compression type: %s' % archive)\n options = ''.join(['x', uncompress, 'f'])\n args = [tar, options] + tar_extra_options + [archive]\n rc, out, err = module.run_command(args)\n log.info('untar: rc=%d out=%s err=%s', rc, out, err)\n if rc != 0:\n raise ValueError('tar command failed: %d' % rc)", "def unpack(input_filename, extract_dir):\n if not is_archive_file(input_filename):\n raise AttributeError(\"Input_filename must be an archive (ex: .tar.gz, .zip)\")\n if zipfile.is_zipfile(input_filename):\n unzip(input_filename, extract_dir)\n else:\n untar(input_filename, extract_dir)", "def unpack(tarball, dst, verbose=False, match=None):\n print(\"extracting\", tarball)\n fname = os.path.basename(tarball).replace(\".tar.gz\", \"\")\n with contextlib.closing(tarfile.open(tarball)) as tar:\n for member in tar.getnames():\n if \"/\" not in member:\n continue\n name = member.replace(fname + \"/\", \"\", 1)\n if match is not None and not name.startswith(match):\n continue\n name = name[len(match) + 1:]\n\n dst_path = os.path.join(dst, name)\n if verbose:\n print(\" extracting\", member)\n tar.extract(member, dst)\n src_path = os.path.join(dst, member)\n if os.path.isdir(src_path) and os.path.exists(dst_path):\n continue\n shutil.move(src_path, dst_path)\n shutil.rmtree(os.path.join(dst, fname))", "def unzip_and_untar(item):\n print(\"Unpacking %s\" % item)\n\n f = tarfile.open(item, mode=\"r\")\n f.extractall(path=\"working\")\n f.close()", "def extract_file(self):\n# path_destination = os.path.join(\n# self.root, self.resources.replace(\".zip\", \"\"))\n# os.makedirs(path_destination, exist_ok=True)\n shutil.unpack_archive(os.path.join(\n self.root, self.resources), self.root)\n os.remove(os.path.join(self.root, self.resources))", "def _unpack_from_actor(pack_actor: ray.ActorID, target_dir: str) -> None:\n stream = io.BytesIO()\n for buffer in _iter_remote(pack_actor):\n stream.write(buffer)\n _unpack_dir(stream, target_dir=target_dir)", "async def unarchive_dir(\n archive_to_extract: Path,\n destination_folder: Path,\n *,\n max_workers: int = _MAX_UNARCHIVING_WORKER_COUNT,\n progress_bar: ProgressBarData | None = None,\n log_cb: Callable[[str], Awaitable[None]] | None = None,\n) -> set[Path]:\n if not progress_bar:\n progress_bar = ProgressBarData(steps=1)\n async with AsyncExitStack() as zip_stack:\n zip_file_handler = zip_stack.enter_context(\n zipfile.ZipFile( # pylint: disable=consider-using-with\n archive_to_extract,\n mode=\"r\",\n )\n )\n zip_stack.enter_context(logging_redirect_tqdm())\n process_pool = zip_stack.enter_context(\n non_blocking_process_pool_executor(max_workers=max_workers)\n )\n\n # running in process poll is not ideal for concurrency issues\n # to avoid race conditions all subdirectories where files will be extracted need to exist\n # creating them before the extraction is under way avoids the issue\n # the following avoids race conditions while unzippin in parallel\n _ensure_destination_subdirectories_exist(\n zip_file_handler=zip_file_handler,\n destination_folder=destination_folder,\n )\n\n futures: list[asyncio.Future] = [\n asyncio.get_event_loop().run_in_executor(\n process_pool,\n # ---------\n _zipfile_single_file_extract_worker,\n archive_to_extract,\n 
zip_entry,\n destination_folder,\n zip_entry.is_dir(),\n )\n for zip_entry in zip_file_handler.infolist()\n ]\n\n try:\n extracted_paths: list[Path] = []\n total_file_size = sum(\n zip_entry.file_size for zip_entry in zip_file_handler.infolist()\n )\n async with AsyncExitStack() as progress_stack:\n sub_prog = await progress_stack.enter_async_context(\n progress_bar.sub_progress(steps=total_file_size)\n )\n tqdm_progress = progress_stack.enter_context(\n tqdm.tqdm(\n desc=f\"decompressing {archive_to_extract} -> {destination_folder} [{len(futures)} file{'s' if len(futures) > 1 else ''}\"\n f\"/{_human_readable_size(archive_to_extract.stat().st_size)}]\\n\",\n total=total_file_size,\n **_TQDM_MULTI_FILES_OPTIONS,\n )\n )\n for future in asyncio.as_completed(futures):\n extracted_path = await future\n extracted_file_size = extracted_path.stat().st_size\n if tqdm_progress.update(extracted_file_size) and log_cb:\n with log_catch(log, reraise=False):\n await log_cb(f\"{tqdm_progress}\")\n await sub_prog.update(extracted_file_size)\n extracted_paths.append(extracted_path)\n\n except Exception as err:\n for f in futures:\n f.cancel()\n\n # wait until all tasks are cancelled\n await asyncio.wait(\n futures, timeout=2 * _MIN, return_when=asyncio.ALL_COMPLETED\n )\n\n # now we can cleanup\n if destination_folder.exists() and destination_folder.is_dir():\n await remove_directory(destination_folder, ignore_errors=True)\n\n raise ArchiveError(\n f\"Failed unarchiving {archive_to_extract} -> {destination_folder} due to {type(err)}.\"\n f\"Details: {err}\"\n ) from err\n\n # NOTE: extracted_paths includes all tree leafs, which might include files and empty folders\n return {\n p\n for p in extracted_paths\n if p.is_file() or (p.is_dir() and not any(p.glob(\"*\")))\n }", "def archive(ctx, config):\n log.info('Creating archive directory...')\n archive_dir = misc.get_archive_dir(ctx)\n run.wait(\n ctx.cluster.run(\n args=[\n 'install', '-d', '-m0755', '--', archive_dir,\n ],\n wait=False,\n )\n )\n\n try:\n yield\n except Exception:\n # we need to know this below\n set_status(ctx.summary, 'fail')\n raise\n finally:\n passed = get_status(ctx.summary) == 'pass'\n if ctx.archive is not None and \\\n not (ctx.config.get('archive-on-error') and passed):\n log.info('Transferring archived files...')\n logdir = os.path.join(ctx.archive, 'remote')\n if (not os.path.exists(logdir)):\n os.mkdir(logdir)\n for rem in ctx.cluster.remotes.iterkeys():\n path = os.path.join(logdir, rem.shortname)\n misc.pull_directory(rem, archive_dir, path)\n # Check for coredumps and pull binaries\n fetch_binaries_for_coredumps(path, rem)\n\n log.info('Removing archive directory...')\n run.wait(\n ctx.cluster.run(\n args=[\n 'rm',\n '-rf',\n '--',\n archive_dir,\n ],\n wait=False,\n ),\n )", "def untar(conn, tarball, path):\n conn.run(f\"tar xf {tarball} -C {path}\")", "def unpack(filepath, target_dir, rm_tar=False):\n print(\"Unpacking %s ...\" % filepath)\n tar = tarfile.open(filepath)\n tar.extractall(target_dir)\n tar.close()\n if rm_tar == True:\n os.remove(filepath)", "def _uncompress(fname, outdir, msg=msg):\n import os\n assert os.access(fname, os.R_OK), \"could not access [%s]\" % fname\n fname = os.path.abspath(os.path.realpath(fname))\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n orig_dir = os.getcwd()\n try:\n os.chdir(outdir)\n ext = os.path.splitext(fname)[1][1:] # drop the dot\n if ext in ('gz', 'bz2'):\n import tarfile\n f = tarfile.open(fname, 'r:%s'%ext)\n f.extractall()\n else:\n err = 'extension [%s] not 
handled (yet?)' % ext\n msg.error(err)\n raise ValueError(err)\n finally:\n os.chdir(orig_dir)", "def _copy_binaries_to_archive(archive: PyfmuArchive) -> PyfmuArchive:\n\n binaries_path = Resources.get().binaries_dir\n\n\n archive_binaries_path = archive.root / 'binaries'\n\n copytree(binaries_path,archive_binaries_path)\n\n # paths\n archive.binaries_dir = archive_binaries_path\n archive.wrapper_win64 = archive.binaries_dir / 'win64' / 'pyfmu.dll'\n archive.wrapper_linux64 = archive.binaries_dir / 'linux64' / 'pyfmu.so'\n\n return archive", "def unpackArchiveToFiles(source, target = None, filter = None):\n if target is None:\n target, _ = os.path.split(source)\n \n for fileRecord, fileData in unpackArchive(source, filter):\n path, offset, size, compressedSize, archiveFileIndex = fileRecord\n \n outPath = os.path.join(target, path)\n outHead, outTail = os.path.split(outPath)\n os.makedirs(outHead, exist_ok = True)\n outFile = open(outPath, \"wb\")\n outFile.write(fileData)\n outFile.close()", "def extract_source(source_archive, target):\r\n with tarfile.open(source_archive) as tar_file:\r\n safetar_extractall(tar_file, target)", "def extract(self, archive_path: str, extracted_path: str) -> None:\n if not os.listdir(archive_path):\n self.log.warning(\n \"No files found in directory: {}\".format(archive_path))\n return\n\n for root, _, archive_files in os.walk(archive_path):\n if not archive_files:\n continue\n\n extract_to = os.path.normpath(os.path.join(\n extracted_path,\n os.path.relpath(root, archive_path)\n ))\n if not os.path.isdir(extract_to):\n os.makedirs(extract_to)\n\n for zfile in archive_files:\n zfile = os.path.join(root, zfile)\n filename, ext = os.path.splitext(os.path.basename(zfile))\n # unzip (tree) each archive file in archive_path\n if ext in self.zip_ext:\n # double splitext for .tar.gz\n fname, ext = os.path.splitext(os.path.basename(filename))\n if ext == '.tar':\n filename = fname\n self.log.info(\"Extracting from: {}\".format(zfile))\n self.log.info(\" Extracting to: {}\".format(\n os.path.join(extract_to, filename)))\n unzip(\n zfile,\n extract_to,\n zip_ext=self.zip_ext,\n create_own_folder=True,\n tree=True\n )\n\n # move each non-archive file in archive_path\n else:\n dest = os.path.join(extract_to, os.path.basename(zfile))\n self.log.info(\"Copying from: {}\".format(zfile))\n self.log.info(\" Copying to: {}\".format(dest))\n shutil.copy(zfile, dest)", "def unzip(source_archive_path, target_path):\n assert zipfile.is_zipfile(source_archive_path), 'Not a valid ZIP archive'\n print('Decompressing archive {} into {}'.format(source_archive_path, target_path))\n with zipfile.ZipFile(source_archive_path) as zf:\n zf.extractall(target_path)\n print('Done')", "def _decompress_tarball(*, in_fileobj, out_fileobj):\n with tarfile.open(fileobj=in_fileobj, mode=\"r\") as it, tarfile.open(\n fileobj=out_fileobj, mode=\"w|\"\n ) as ot:\n for member in it.getmembers():\n extracted = it.extractfile(member)\n ot.addfile(member, extracted)", "def unpack(file_path, extraction_path, remove):\n print(file_path)\n Archive(file_path).extractall(extraction_path, auto_create_dir=True)\n # remove original compressed file???\n if remove is True:\n os.remove(file_path)", "def unpack_package(package, dest):\n members = []\n for member in package.getmembers():\n # this is the equivalent of `--strip-components 1` when using tar CLI\n split_res = member.path.split('/', 1)\n if len(split_res) == 1:\n continue\n stripped_path = split_res[1]\n if not stripped_path:\n continue\n # set the name to 
the stripped path to take effect when extracting\n member.name = stripped_path\n members.append(member)\n package.extractall(dest, members=members)", "def __extract_tgz(self):\n tar_file = tarfile.open(self.archive)\n try:\n extract_dir = tempfile.mkdtemp()\n archive_binaries_dir = self.__create_extraction_dir(\n tar_file.getnames(), extract_dir, tar_file.extract)\n finally:\n tar_file.close()\n return archive_binaries_dir, extract_dir", "def _extract_file(dest_path, root_dir):\n logger.info(\"Unzipping the dataset file.\")\n with zipfile.ZipFile(dest_path, \"r\") as zip_dir:\n zip_dir.extractall(root_dir)", "def _extract_archive(path: str, extracted_dir_path: str) -> str:\n logging.info('extracting %s to %s', path, extracted_dir_path)\n with tarfile.open(path) as tar:\n tar.extractall(path=extracted_dir_path)\n extracted_items = os.listdir(extracted_dir_path)\n if len(extracted_items) != 1:\n raise ValueError(\n 'archive at {} did not contain a single directory'.format(path))\n return os.path.join(extracted_dir_path, extracted_items[0])", "def unzip(checkpoint_path: pathlib.Path, archive_path: pathlib.Path) -> None:\n checkpoint_path.mkdir(parents=True, exist_ok=True)\n with ZipFile(archive_path, \"r\") as zf:\n zf.extractall(path=checkpoint_path)\n archive_path.unlink()", "def fetch(self) -> None:\n archive_path = os.path.join(self._output_dir, self._archive_name)\n self._download_file(self._parsed_url.original_url, archive_path)\n try:\n with zipfile.ZipFile(archive_path, \"r\") as zip_file:\n zip_file.extractall(path=self._output_dir)\n except zipfile.BadZipfile:\n raise REANAFetcherError(\"The provided zip file is not valid\")\n\n os.remove(archive_path)\n\n if not self._discover_workflow_specs():\n top_level_entries = [\n os.path.join(self._output_dir, entry)\n for entry in os.listdir(self._output_dir)\n ]\n # Some zip archives contain a single directory with all the files.\n if len(top_level_entries) == 1 and os.path.isdir(top_level_entries[0]):\n top_level_dir = top_level_entries[0]\n # Move all entries inside the top level directory\n # to the output directory.\n for entry in os.listdir(top_level_dir):\n shutil.move(os.path.join(top_level_dir, entry), self._output_dir)\n os.rmdir(top_level_dir)", "def __extract_zip(self):\n archive_binaries_dir = None\n zip_file = zipfile.ZipFile(self.archive)\n try:\n extract_dir = tempfile.mkdtemp()\n archive_binaries_dir = self.__create_extraction_dir(\n zip_file.namelist(), extract_dir, zip_file.extract)\n finally:\n zip_file.close()\n return archive_binaries_dir, extract_dir", "def _unzip(save_path, _, database_name, data_path):\r\n print('Extracting {}...'.format(database_name))\r\n with zipfile.ZipFile(save_path) as zf:\r\n zf.extractall(data_path)", "def unpack(self):\n CraftCore.log.debug(\"ArchiveSource.unpack called\")\n\n filenames = self.localFileNames()\n\n # TODO: this might delete generated patches\n utils.cleanDirectory(self.workDir())\n\n if not self.checkDigest(3):\n return False\n\n for filename in filenames:\n if not filename:\n continue\n ext = Path(filename).suffix\n if not ext or ext in {\".exe\", \".bat\", \".msi\", \".AppImage\"}:\n filePath = os.path.abspath(os.path.join(self.__downloadDir, filename))\n if self.subinfo.options.unpack.runInstaller:\n if ext == \".exe\":\n return utils.system(\"%s %s\" % (filePath, self.subinfo.options.configure.args))\n elif ext == \".msi\":\n return utils.system(\"msiexec /package %s %s\" % (filePath, self.subinfo.options.configure.args))\n if not utils.copyFile(filePath, 
os.path.join(self.workDir(), filename)):\n return False\n else:\n if not utils.unpackFile(self.__downloadDir, filename, self.workDir()):\n return False\n\n ret = self.applyPatches()\n if CraftCore.settings.getboolean(\"General\", \"EMERGE_HOLD_ON_PATCH_FAIL\", False):\n return ret\n return True", "def _unpack_dir(stream: io.BytesIO, target_dir: str, *, _retry: bool = True) -> None:\n stream.seek(0)\n target_dir = os.path.normpath(target_dir)\n try:\n # Timeout 0 means there will be only one attempt to acquire\n # the file lock. If it cannot be aquired, a TimeoutError\n # will be thrown.\n with TempFileLock(f\"{target_dir}.lock\", timeout=0):\n with tarfile.open(fileobj=stream) as tar:\n tar.extractall(target_dir)\n except TimeoutError:\n # wait, but do not do anything\n with TempFileLock(f\"{target_dir}.lock\"):\n pass\n # if the dir was locked due to being deleted,\n # recreate\n if not os.path.exists(target_dir):\n if _retry:\n _unpack_dir(stream, target_dir, _retry=False)\n else:\n raise RuntimeError(\n f\"Target directory {target_dir} does not exist \"\n \"and couldn't be recreated. \"\n \"Please raise an issue on GitHub: \"\n \"https://github.com/ray-project/ray/issues\"\n )", "def unpackToTemp(self,archive,fileNames,progress=None):\n if not fileNames: raise ArgumentError(_(\"No files to extract for %s.\") % archive.s)\n progress = progress or bolt.Progress()\n progress.state,progress.full = 0,len(fileNames)\n #--Dump file list\n out = self.tempList.open('w')\n out.write('\\n'.join(fileNames).encode('utf8'))\n out.close()\n #--Extract files\n self.clearTemp()\n apath = dirs['installers'].join(archive)\n command = '7z.exe x \"%s\" -y -o%s @%s' % (apath.s, self.tempDir.s, self.tempList.s)\n ins = os.popen(command,'r')\n reExtracting = re.compile('Extracting\\s+(.+)')\n extracted = []\n for line in ins:\n maExtracting = reExtracting.match(line)\n if maExtracting: \n extracted.append(maExtracting.group(1).strip())\n progress.plus()\n result = ins.close()\n if result:\n raise StateError(_(\"Extraction failed.\"))\n #ensure that no file is read only\n for thedir, subdirs, files in os.walk(self.tempDir.s):\n for f in files:\n os.chmod(os.path.join(thedir, f),stat.S_IWRITE)\n #--Done\n self.tempList.remove()", "def unzipper(data_address, target_directory):\n import zipfile\n data = \"/home/sharoonsaxena/Datasets/dogs-vs-cats.zip\"\n zip_ref = zipfile.ZipFile(data, \"r\")\n zip_ref.extractall(\"/home/sharoonsaxena/Datasets/extracted/\")\n zip_ref.close()", "def _expand_archive(self, name):\r\n target = path(self.temp_dir) / uuid.uuid4().hex\r\n os.mkdir(target)\r\n with tarfile.open(self.data_dir / name) as tar_file:\r\n tar_file.extractall(path=target)\r\n\r\n return target", "def unpack_or_cp():\n if args.input_type == \"zip\":\n zip_out, zip_error = Popen([\"unzip\", args.input, \"-d\", args.out_folder.strip() + \"/fasta\"], stdout=PIPE,stderr=PIPE).communicate()\n admin_log(zip_out, zip_error)\n else:\n cp_out, cp_error = Popen([\"cp\", args.input, args.out_folder.strip() + \"/fasta\"], stdout=PIPE,stderr=PIPE).communicate()\n admin_log(cp_out, cp_error)", "def extract_tars(file_pattern, path_in, path_out):\n for f in glob.glob(os.path.join(path_in, file_pattern)):\n shutil.unpack_archive(f, path_out)", "def pack(archive: Union[Path, str],\n paths: List[Union[Path, str]],\n cwd: Optional[Path] = None,\n exclude: Optional[List[Union[Path, str]]] = ()):\n archive = Path(archive)\n if cwd is None:\n cwd = Path.cwd()\n if archive.suffix == '.xz':\n archive = archive.with_suffix('')\n\n # Make 
sure all the paths have sane permissions.\n def walk(path):\n if path.is_symlink():\n return\n elif path.is_dir():\n # All dirs should be 755.\n mode = path.stat().st_mode & 0o777\n if mode != 0o755:\n path.chmod(0o755)\n\n for subpath in path.glob('*'):\n walk(subpath)\n elif path.is_file():\n # All scripts should be 755 while other files should be 644.\n mode = path.stat().st_mode & 0o777\n if mode in (0o755, 0o644):\n return\n if mode & 0o111:\n path.chmod(0o755)\n else:\n path.chmod(0o644)\n else:\n raise ValueError(f'{path}: unknown file type')\n\n logging.info('Forcing sane permissions on inputs')\n for path in paths:\n walk(cwd / path)\n\n logging.info('Creating %s tarball', archive.name)\n # We use relpath here to help out tar on platforms where it doesn't like\n # paths with colons in them (e.g. Windows). We have to construct the full\n # before running through relpath as relative archives will implicitly be\n # checked against os.getcwd rather than the explicit cwd.\n tar = os.path.relpath(cwd / archive, cwd)\n run(['tar', '--owner=0', '--group=0', '-cf', tar] +\n [f'--exclude={x}' for x in exclude] + ['--'] + paths, cwd=cwd)\n\n logging.info('Compressing tarball')\n run(['xz', '-f', '-T0', '-9', tar], cwd=cwd)", "def _unpack_stdlib(self):\n output_dir = self.manager.output_dir\n\n with tempfile.TemporaryDirectory() as td:\n tdp = Path(td)\n self.extract_one(self.app_archive, tdp)\n self.copy_one(tdp / \"package\", output_dir)\n\n self.maybe_timestamp(output_dir)", "def test_unarchive_run(self):\n pass", "def unpack_archive(\r\n file_path: str, unpack_path: str = None, remove_if_exists: bool = False\r\n) -> Optional[str]:\r\n import tarfile\r\n import zipfile\r\n\r\n if not os.path.isfile(file_path):\r\n log.warning(\"File does not exist: \" + file_path)\r\n return None\r\n\r\n if zipfile.is_zipfile(file_path):\r\n unpack_path = extract_zip(file_path, unpack_path, remove_if_exists)\r\n elif tarfile.is_tarfile(file_path):\r\n unpack_path = extract_tar(file_path, unpack_path, remove_if_exists)\r\n else:\r\n unpack_path = extract_via_patoolib(file_path, unpack_path, remove_if_exists)\r\n\r\n if unpack_path and os.path.isdir(unpack_path):\r\n unpack_folder_name = os.path.basename(unpack_path)\r\n if len(os.listdir(unpack_path)) == 1 and unpack_folder_name in os.listdir(\r\n unpack_path\r\n ):\r\n # unpacked folder contains one folder with same name -> move content to higher up folder\r\n folder_to_move = os.path.join(unpack_path, unpack_folder_name)\r\n files = os.listdir(folder_to_move)\r\n for f in files:\r\n shutil.move(os.path.join(folder_to_move, f), unpack_path)\r\n\r\n # Remove empty folder\r\n if len(os.listdir(folder_to_move)) == 0:\r\n os.rmdir(folder_to_move)\r\n else:\r\n log.info(\"Folder content was moved but folder is not empty.\")\r\n\r\n return unpack_path", "def unzip_item(source_path, destination_path, password):\n\n if not destination_path:\n destination_path = source_path.replace(\".zip\", \"\")\n if not os.path.isdir(destination_path):\n os.makedirs(destination_path)\n else:\n destination_path += \"_unzipped\"\n if not os.path.isdir(destination_path):\n os.makedirs(destination_path)\n\n try:\n with pyzipper.AESZipFile(source_path) as z:\n members = z.infolist()\n for i, member in enumerate(members):\n z.extract(member, destination_path, pwd=password)\n print(f\"Unpacked {member.filename} from archive.\")\n print(f\"{source_path} unpacked successfully to {destination_path}.\")\n except Exception:\n tb = traceback.format_exc()\n print(\"Something went wrong\")\n 
print(tb)", "def untar(input_filename, extract_dir):\n try:\n tar_ds = tarfile.open(input_filename)\n except tarfile.TarError:\n raise ValueError(\"%s is not a tar file\" % (input_filename))\n tar_ds.extractall(path=extract_dir)\n tar_ds.close()", "def unzip(f, targetdir):\n import zipfile\n\n with zipfile.ZipFile(f, \"r\") as zip_ref:\n zip_ref.extractall(targetdir)", "def unpackArchive(source, filter = None, encoding = None):\n \n primaryArchiveFile = open(source, \"rb\")\n header = readHeader(primaryArchiveFile)\n version, dataOffset, archiveFileCount, fileTableLength, endianness, fileCount = header\n \n # open all other archive files\n archiveFiles = [primaryArchiveFile]\n sourceRoot, sourceExt = os.path.splitext(source)\n for i in range(1, archiveFileCount):\n path = \"%s_%d%s\" % (sourceRoot, i, sourceExt)\n archiveFiles.append(open(path, \"rb\"))\n \n fileTable = readFileTable(primaryArchiveFile, header)\n for fileRecord in fileTable:\n path, offset, size, compressedSize, archiveFileIndex = fileRecord\n \n if callable(filter) and not filter(fileRecord): continue\n \n if archiveFileIndex == 0:\n offset += dataOffset\n \n archiveFile = archiveFiles[archiveFileIndex]\n archiveFile.seek(offset)\n \n if compressedSize != 0:\n fileData = zlib.decompress(archiveFile.read(compressedSize))\n else:\n fileData = archiveFile.read(size)\n \n if encoding is not None:\n fileData = fileData.decode(encoding)\n \n print('Extracted file \"%s\" (%u bytes)' % (path, size))\n yield fileRecord, fileData\n \n for f in archiveFiles:\n f.close()", "def file_unzipper(directory):\n debug.log(\"Unzipping directory (%s)...\"%directory)\n #FINDING AND UNZIPPING ZIPPED FILES\n for root, dirs, files in os.walk(directory, topdown=False):\n if root != \"\":\n orig_dir = os.getcwd()\n os.chdir(directory)\n Popen('gunzip -q -f *.gz > /dev/null 2>&1', shell=True).wait()\n Popen('unzip -qq -o \"*.zip\" > /dev/null 2>&1', shell=True).wait()\n Popen('rm -f *.zip > /dev/null 2>&1', shell=True).wait()\n os.chdir(orig_dir)", "def _unpack_tar(self, dir, filters):\n try:\n unpackArchive = gbpc.UnpackTarArchive(self.path, dir, filters)\n unpackArchive()\n except gbpc.CommandExecFailed:\n # unpackArchive already printed an error message\n raise GbpError", "def load(self):\n self.cleanup()\n \n \"\"\" create the working directory \"\"\"\n try:\n os.makedirs(self.paths['workspace'])\n except OSError:\n pass\n \n files = [ \"base.tar.gz\" ]\n \n for f in files:\n (dirname, extension) = f.split(\".\", 1)\n \n \"\"\" download tar archive \"\"\"\n self.download(self.sessionurl + \"/\" + f)\n \n \"\"\" create directory for content of the archive \"\"\"\n destdir = os.path.join(self.paths['workspace'], dirname)\n \n try:\n os.makedirs(destdir)\n except OSError:\n pass\n \n if extension == \"tar.gz\":\n \"\"\" extract the tar archive \"\"\"\n tar = tarfile.open(os.path.join(self.paths['workspace'], f))\n tar.extractall(destdir)\n tar.close()\n \n logging.info(self.log_format((\"done\")))", "def _extract_multi_vol_zip(src, dst):\n cat_zip_path = os.path.join(dst, os.path.basename(src))\n _cat_multi_vol_zip(src, cat_zip_path)\n _extract_zip(cat_zip_path, dst)\n os.remove(cat_zip_path)", "def SshExtractZip(host, zipname, dst):\n command = ['ssh', host, 'unzip', '-o', '-d', dst, zipname]\n result = RunCommand(command)\n if result:\n raise ExternalError('Failed to ssh unzip -o -d \"%s\" \"%s\" on \"%s\" (%s)' %\n (dst, zipname, host, result))\n\n # unzip will create directories with access 700, which is not often what we\n # need. 
Fix the permissions for the whole archive.\n command = ['ssh', host, 'chmod', '-R', '755', dst]\n result = RunCommand(command)\n if result:\n raise ExternalError('Failed to ssh chmod -R 755 \"%s\" on \"%s\" (%s)' %\n (dst, host, result))", "def extract(cls, path, outdir):\r\n raise NotImplementedError()", "def extract(self, packages, tracked):\n # Lazy imports in a thread are actively discouraged,\n # see bpo issue 39430.\n # Do the import here first before starting the threads.\n import tarfile\n import zstandard\n from concurrent.futures import ThreadPoolExecutor\n\n def extract_from(pkg):\n with tarfile_open(str(pkg), pkg.suffix[1:]) as tar:\n for tinfo in tar:\n fname = tinfo.name\n if (fname.startswith(ROOT_SUBDIR) and\n (tinfo.isfile() or tinfo.issym() or tinfo.islnk())\n and fname not in self.exclude_files):\n path = EtcPath(self.repodir, tinfo.name)\n # Remember the sha1 of the existing file, if it\n # exists, before extracting it from the tarball\n # (EtcPath.digest is lazily evaluated).\n not_used = path.digest\n extracted[tinfo.name] = path\n\n # The Python tarfile implementation fails to create\n # symlinks, see also issue bpo-10761.\n if tinfo.issym():\n abspath = os.path.join(self.repodir, tinfo.name)\n try:\n if os.path.lexists(abspath):\n os.unlink(abspath)\n except OSError as err:\n warn(err)\n tar.extract(tinfo, self.repodir)\n print(pkg.name)\n\n extracted = {}\n max_workers = len(os.sched_getaffinity(0)) or 4\n # Extracting from tarfiles is not thread safe (see msg315067 in bpo\n # issue https://bugs.python.org/issue23649).\n with threadsafe_makedirs():\n with ThreadPoolExecutor(max_workers=max_workers) as executor:\n futures = [executor.submit(extract_from, pkg) for\n pkg in packages]\n for f in futures:\n exc = f.exception()\n if exc is not None:\n raise exc\n for rpath in extracted:\n if rpath not in tracked:\n # Ensure that the file can be overwritten on a next\n # 'update' command.\n path = os.path.join(self.repodir, rpath)\n mode = os.lstat(path).st_mode\n if mode & RW_ACCESS != RW_ACCESS:\n os.chmod(path, mode | RW_ACCESS)\n return extracted", "def get_archive_async(\n hostname, project, treeish, dir_path=None, **fetch_kwargs):\n _validate_args(hostname, project, treeish, dir_path)\n dir_path = (dir_path or '').strip('/')\n if dir_path:\n dir_path = '/%s' % dir_path\n return gerrit.fetch_async(\n hostname,\n '%s/+archive/%s%s.tar.gz' % _quote_all(project, treeish, dir_path),\n **fetch_kwargs)", "def extract(cls, path, outdir):\r\n with open_zip(path) as zip:\r\n for path in zip.namelist():\r\n # While we're at it, we also perform this safety test.\r\n if path.startswith('/') or path.startswith('..'):\r\n raise ValueError('Zip file contains unsafe path: %s' % path)\r\n # Ignore directories. 
extract() will create parent dirs as needed.\r\n if not path.endswith('/'):\r\n zip.extract(path, outdir)", "def unpackage():\n\n zipfileLoc = hou.ui.selectFile(title=\"please select a zipFile created by the package function\", pattern=\"*.zip\")\n if not zipfileLoc: \n \n return\n \n file_ = zipfile.ZipFile(hou.expandString(zipfileLoc), \"r\")\n\n isOke = False\n \n for name in file_.namelist():\n \n if name.endswith(\".hip\") or name.endswith(\".hipnc\"):\n \n isOke = True\n break\n \n if not isOke: \n \n return\n \n unpackLoc = hou.expandString(hou.ui.selectFile(title=\"please select a directory you wish to use to unpack the files to.\"))\n \n if not unpackLoc or not os.path.isdir(unpackLoc): \n \n return\n \n unzip(file_, unpackLoc)\n unpackageDir = os.path.dirname(file_.namelist()[0])\n otlsfiles = glob.glob(os.path.join(unpackLoc, unpackageDir, \"otls\", \"*\"))\n hipfile = glob.glob(os.path.join(unpackLoc, unpackageDir, \"*.hip*\"))\n \n if len(hipfile) != 1: \n \n return\n \n hou.hipFile.load(hipfile[0])\n \n for otl in otlsfiles:\n\n hou.hda.installFile(otl)", "def extract_to_disk(self):\n archive_name, extension = os.path.splitext(os.path.basename(self.file.name))\n if not os.path.isdir(os.path.join(os.getcwd(), archive_name)):\n os.mkdir(archive_name)\n os.chdir(archive_name)\n for filename, data in self.extract().items():\n f = open(filename, 'wb')\n f.write(data or b'')\n f.close()", "def loadPluginTarget(archive, folder):\n\n # download target repo zip\n req = retryget(archive)\n filename = archive.split(\"/\")[-1]\n zip_file_path = os.path.join(folder, filename)\n try:\n with open(zip_file_path, \"wb\") as output_file:\n output_file.write(req.content)\n except IOError:\n raise RuntimeError(\n \"Could not save the zip file to the working directory {}\".format(folder)\n )\n\n # unzip repo\n plugin_extracted_path = os.path.join(folder, UPDATE_CONFIG_NAME)\n plugin_extracted_path_folder = os.path.join(\n plugin_extracted_path,\n \"{repo_name}-{target}\".format(\n repo_name=REPO_NAME, target=re.sub(r\"^v\", \"\", filename.split(\".zip\")[0])\n ),\n )\n try:\n plugin_zipfile = zipfile.ZipFile(BytesIO(req.content))\n plugin_zipfile.extractall(plugin_extracted_path)\n plugin_zipfile.close()\n except (zipfile.BadZipfile, zipfile.LargeZipFile) as e:\n raise RuntimeError(\"Could not unzip plugin repo - error: {}\".format(e))\n\n # copy new dependencies to working directory\n try:\n shutil.copy2(\n os.path.join(\n plugin_extracted_path_folder, MAIN_SRC_FOLDER_NAME, \"dependencies.txt\"\n ),\n os.path.join(folder, \"dependencies.txt\"),\n )\n except IOError:\n raise RuntimeError(\"Could not copy dependencies to working directory\")\n\n # copy new update script to working directory\n try:\n shutil.copy2(\n os.path.join(\n plugin_extracted_path_folder,\n MAIN_SRC_FOLDER_NAME,\n \"scripts/update_script.py\",\n ),\n os.path.join(folder, \"update_script.py\"),\n )\n except IOError:\n raise RuntimeError(\"Could not copy update_script to working directory\")\n\n return zip_file_path", "def transform_binary_dist(archive_path, prefix='/usr'):\r\n # Copy the tar archive file by file so we can rewrite the pathnames.\r\n logger.debug(\"Transforming binary distribution: %s.\", archive_path)\r\n logger.debug(\"Using environment prefix: %s.\", prefix)\r\n archive = tarfile.open(archive_path, 'r')\r\n for member in archive.getmembers():\r\n # In my testing the `dumb' tar files created with the `python setup.py\r\n # bdist' command contain pathnames that are relative to `/' which is\r\n # kind of 
awkward: I would like to use os.path.relpath() on them but\r\n # that won't give the correct result without some preprocessing...\r\n original_pathname = member.name\r\n absolute_pathname = re.sub(r'^\\./', '/', original_pathname)\r\n if member.isdev():\r\n logger.warn(\"Ignoring device file: %s.\", absolute_pathname)\r\n elif not member.isdir():\r\n modified_pathname = os.path.relpath(absolute_pathname, prefix)\r\n if os.path.isabs(modified_pathname):\r\n logger.warn(\"Failed to transform pathname in binary distribution to relative path! (original: %r, modified: %r)\",\r\n original_pathname, modified_pathname)\r\n else:\r\n # Rewrite /usr/local to /usr (same goes for all prefixes of course).\r\n modified_pathname = re.sub('^local/', '', modified_pathname)\r\n logger.debug(\"Transformed %r -> %r.\", original_pathname, modified_pathname)\r\n # Get the file data from the input archive.\r\n handle = archive.extractfile(original_pathname)\r\n # Yield the pathname, file mode and a handle to the data.\r\n member.name = modified_pathname\r\n yield member, handle\r\n archive.close()", "def test_unpack(self):\n if not os.path.isfile(akrr_tar_gz):\n raise Exception(\"Should do test_packager first\")\n \n if os.path.exists(cfg.akrr_home):\n shutil.rmtree(cfg.akrr_home)\n \n if verbosity>=3: print \"\\n\"+\"~\"*80\n \n #start bash shell\n bash = self.getBash()\n \n output=bash.runcmd('tar -xvf {akrr_tar_gz} -C {above_akrr_home}'.format(akrr_tar_gz=akrr_tar_gz,above_akrr_home=os.path.abspath(os.path.join(cfg.akrr_home, \"..\"))),printOutput=True)\n output=bash.runcmd('export AKRR_HOME={akrr_home}'.format(akrr_home=cfg.akrr_home),printOutput=True)\n output=bash.runcmd('cd $AKRR_HOME',printOutput=True)\n output=bash.runcmd('pwd',printOutput=True)\n \n if verbosity>=3: print \"~\"*80\n #test some files presence\n filesToCheck=['src/akrr.py',\n 'src/akrrscheduler.py']\n for f in filesToCheck:\n self.assertEqual(os.path.isfile(os.path.abspath(os.path.join(cfg.akrr_home, f))), True, \"AKRR distribution archive can not be unpacked\")", "def dir_2_cbz(dir_pth):\r\n shutil.make_archive(dir_pth, 'zip', dir_pth)\r\n shutil.rmtree(dir_pth)\r\n os.rename(dir_pth+'.zip', dir_pth+'.cbz')\r\n pass", "def extract_file(self):\n shutil.unpack_archive(os.path.join(\n self.root, self.resources), f\"{self.root}\")\n os.remove(os.path.join(self.root, self.resources))", "async def extract(self, destination, entries=None, callback=None):\n # :param srcdestpairs: A list of 2-tuples where the first item\n # is the source path within the archive of a file to install,\n # and the second item is the path (relative to the mod\n # installation directory) where the source should be extracted.\n\n # TODO: ignore \"._\"-prefixed mac-cruft files\n loop = asyncio.get_event_loop()\n c=0\n # noinspection PyTypeChecker\n async for extracted in self.archiver.extract(\n archive=self.archive,\n destination=destination,\n specific_entries=entries,\n # callback=callback\n ):\n c+=1\n if callback:\n loop.call_soon_threadsafe(callback, extracted, c)\n self.LOGGER << f\"{c} files extracted\"\n\n # srcdestpairs = srcdestpairs,", "def _extract_archive(file_path, path='.', archive_format='auto'):\n if archive_format is None:\n return False\n if archive_format is 'auto':\n archive_format = ['tar', 'zip']\n if isinstance(archive_format, six.string_types):\n archive_format = [archive_format]\n\n for archive_type in archive_format:\n if archive_type is 'tar':\n open_fn = tarfile.open\n is_match_fn = tarfile.is_tarfile\n if archive_type is 'zip':\n open_fn 
= zipfile.ZipFile\n is_match_fn = zipfile.is_zipfile\n\n if is_match_fn(file_path):\n with open_fn(file_path) as archive:\n # check weather extracted or not\n extracted = True\n for fname in archive.getnames():\n if not os.path.exists(os.path.join(path, fname)):\n extracted = False\n if not extracted:\n try:\n archive.extractall(path)\n print('extracted to', path)\n except (tarfile.TarError, RuntimeError,\n KeyboardInterrupt):\n if os.path.exists(path):\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)\n raise\n return True\n return False", "def extract_tar(tar_path, target_folder):\n with tarfile.open(tar_path, 'r') as archive:\n archive.extractall(target_folder)", "def extract_file(self):\n shutil.unpack_archive(os.path.join(self.root, self.resources), self.root)\n os.remove(os.path.join(self.root, self.resources))", "def _extract_book(local_path, order_hash):\n unzipped_directory = '.'.join((local_path, order_hash, 'uncompressed'))\n\n if not os.path.exists(unzipped_directory):\n os.makedirs(unzipped_directory)\n\n with open(local_path, 'rb') as f:\n zip_file = zipfile.ZipFile(f)\n for name in zip_file.namelist():\n zip_file.extract(name, unzipped_directory)\n\n return unzipped_directory", "def do_pack():\n\n now = datetime.now()\n # format the name of the file with the timestamps\n now_year = now.year\n now_month = now.month\n now_day = now.day\n now_hour = now.hour\n now_minute = now.minute\n now_second = now.second\n # apply the format\n file_name = 'versions/web_static_{}{}{}{}{}{}.tgz'.format(\n now_year, now_month, now_day, now_hour, now_minute, now_second\n )\n # All archives must be stored in the folder versions\n local('mkdir -p versions')\n # execute locally the compression of the folder\n command = local(\"tar -cvzf \" + file_name + \" ./web_static/\")\n # return the archive path if the archive has been correctly generated\n if command.succeeded:\n return file_name\n else:\n return None", "def _unzip_files(self) -> None:\n for file in self.input_path.iterdir():\n if is_zipfile(file):\n with ZipFile(file, mode=\"r\") as archive:\n archive.extractall(path=self.temp_path)", "def _extract_archive(file_path, path='.', archive_format='auto'):\n if archive_format is None:\n return False\n if archive_format == 'auto':\n archive_format = ['tar', 'zip']\n if isinstance(archive_format, str):\n archive_format = [archive_format]\n\n file_path = path_to_string(file_path)\n path = path_to_string(path)\n\n for archive_type in archive_format:\n if archive_type == 'tar':\n open_fn = tarfile.open\n is_match_fn = tarfile.is_tarfile\n if archive_type == 'zip':\n open_fn = zipfile.ZipFile\n is_match_fn = zipfile.is_zipfile\n\n if is_match_fn(file_path):\n with open_fn(file_path) as archive:\n try:\n archive.extractall(path)\n except (tarfile.TarError, RuntimeError, KeyboardInterrupt):\n if os.path.exists(path):\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)\n raise\n return True\n return False", "def pack():\n clean_local()\n build()\n copy_json()\n optimize()\n tarball()", "def recursive_unpack(dir_path):\n exten = ['7z', 'zip', 'rar']\n one_more = False\n for r, d, files in os.walk(dir_path):\n packed = []\n for ext in exten:\n code_files = fnmatch.filter(files, '*.' 
+ ext)\n if len(code_files) > 0:\n tmp_paths = [os.path.join(os.path.abspath(r), f) for f in code_files]\n packed.extend(tmp_paths)\n if not one_more and len(packed) > 0:\n one_more = True\n if len(packed) > 0:\n print(\"unpack list:\", packed)\n for p in packed:\n extract(p, os.path.dirname(p))\n os.remove(p)\n if one_more:\n recursive_unpack(dir_path)", "def unpack(filename: Union[str, Path], extract_to: Union[str, Path]) -> None:\n raise NotImplemented", "def fromZip(self, zip_location,extract_location):\n zip_file = zipfile.ZipFile(zip_location,'r')\n zip_file.extractall(extract_location)", "def unarchive(filename, project_dir, parent_dir=None, frontend=None):\n if frontend is None:\n frontend = _null_frontend()\n return archiver._unarchive_project(filename, project_dir=project_dir, parent_dir=parent_dir, frontend=frontend)", "def _extract_archive(file_path, path=\".\", archive_format=\"auto\"):\n if archive_format is None:\n return False\n if archive_format == \"auto\":\n archive_format = [\"tar\", \"zip\"]\n if isinstance(archive_format, six.string_types):\n archive_format = [archive_format]\n\n for archive_type in archive_format:\n if archive_type == \"tar\":\n open_fn = tarfile.open\n is_match_fn = tarfile.is_tarfile\n if archive_type == \"zip\":\n open_fn = zipfile.ZipFile\n is_match_fn = zipfile.is_zipfile\n\n if is_match_fn(file_path):\n with open_fn(file_path) as archive:\n try:\n archive.extractall(path)\n except (tarfile.TarError, RuntimeError, KeyboardInterrupt):\n if os.path.exists(path):\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)\n raise\n return True\n return False", "def prepare(self, location):\n if os.path.isdir(location):\n return location\n else:\n extracted = 0\n tempdir = tempfile.mkdtemp()\n # Maybe it is file or something on http://...\n if os.path.isfile(location):\n fp = open(location, 'r')\n tar = tarfile.open(fileobj=fp, mode='r:*')\n else:\n fp = requests.get(location)\n tar = tarfile.open(fileobj=io.BytesIO(fp.content), mode='r:*')\n # Unzip only files interesting for use. 
These are markers only now\n for member in tar.getmembers():\n for marker in MARKERS.keys():\n if member.name.endswith(marker):\n tar.extract(member, tempdir)\n extracted += 1\n # If we have extracted succesfully, return directory location\n if extracted > 0:\n return tempdir\n else:\n raise Exception(\"Failed to extract expected files for '%s'\" % location)", "def _extract_archive(file_path, path='.', archive_format='auto'):\n if archive_format is None:\n return False\n if archive_format == 'auto':\n archive_format = ['tar', 'zip']\n if isinstance(archive_format, six.string_types):\n archive_format = [archive_format]\n\n for archive_type in archive_format:\n if archive_type == 'tar':\n open_fn = tarfile.open\n is_match_fn = tarfile.is_tarfile\n if archive_type == 'zip':\n open_fn = zipfile.ZipFile\n is_match_fn = zipfile.is_zipfile\n\n if is_match_fn(file_path):\n with open_fn(file_path) as archive:\n try:\n archive.extractall(path)\n except (tarfile.TarError, RuntimeError, KeyboardInterrupt):\n if os.path.exists(path):\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)\n raise\n return True\n return False", "def unzipdir(path):\n filenames = fullpathlist(path)\n for filename in filenames:\n if filename.endswith(\"bz2\"):\n print \"doing\", filename\n os.system('bunzip2 \"%s\"' % filename)\n else:\n print \"skipping\", filename", "def extract_recursive(curr_apath):\n\n handler = resolve_format(curr_apath)\n unpacker = HandlersFactory.get_handler(handler)\n _files = unpacker.files_list(curr_apath)\n\n for f in _files:\n if is_matched(f, ffilter=ffilter):\n _fpath = unpacker.extract(curr_apath, f)\n files.append(_fpath)\n if is_archive(f):\n _apath = unpacker.extract(curr_apath, f)\n extract_recursive(_apath)", "def extract_to_vtr_flow_dir(args, tar_xz_filename, destination, extract_path=\"\"):\n\n # Reference directories\n dest_dir = os.path.join(args.vtr_flow_dir, destination)\n symbiflow_extract_dir = os.path.join(dest_dir, \"symbiflow\")\n\n if not args.force:\n # Check that all expected directories exist\n expected_dirs = [\n args.vtr_flow_dir,\n symbiflow_extract_dir,\n ]\n for directory in expected_dirs:\n if not os.path.isdir(directory):\n raise ExtractionError(\"{} should be a directory\".format(directory))\n\n # Create a temporary working directory\n tmpdir = tempfile.mkdtemp(suffix=\"download_symbiflow\", dir=\".\")\n\n # Extract matching files into the temporary directory\n subprocess.call(\n \"tar -C {} -xf {} {}\".format(tmpdir, tar_xz_filename, extract_path),\n shell=True,\n )\n\n # Move the extracted files to the relevant directories, SDC files first (since we\n # need to look up the BLIF name to make it match)\n for dirpath, _, filenames in os.walk(tmpdir):\n for filename in filenames:\n src_file_path = os.path.join(dirpath, filename)\n dst_file_path = None\n\n if fnmatch.fnmatch(src_file_path, \"*/xc7a50t_test/arch.timing.xml\"):\n dst_file_path = os.path.join(symbiflow_extract_dir, \"arch.timing.xml\")\n\n elif fnmatch.fnmatch(src_file_path, \"*/xc7a50t_test/*.bin\"):\n dst_file_path = os.path.join(symbiflow_extract_dir, filename)\n\n elif fnmatch.fnmatch(src_file_path, \"**/*.eblif\"):\n dst_file_path = os.path.join(symbiflow_extract_dir, filename)\n\n elif fnmatch.fnmatch(src_file_path, \"**/*.sdc\"):\n dst_file_path = os.path.join(symbiflow_extract_dir, \"sdc\", filename)\n\n elif fnmatch.fnmatch(src_file_path, \"**/*.place\"):\n dst_file_path = os.path.join(symbiflow_extract_dir, \"place_constr\", filename)\n\n if dst_file_path:\n 
shutil.move(src_file_path, dst_file_path)\n\n shutil.rmtree(tmpdir)\n\n print(\"Done\")", "def untar(file_path, target_dir=None, gzipped=True, verbose=False):\n return posix.untar(file_path, target_dir, gzipped, verbose)", "def untar(file_path, extract_folder=None):\n if extract_folder is None:\n extract_folder = os.path.dirname(file_path)\n tar = tarfile.open(file_path)\n tar.extractall(extract_folder)\n tar.close()", "def untar(tar_path, cleanup=False):\n tfile = tarfile.open(tar_path, 'r')\n tfile.extractall(os.path.dirname(tar_path))\n tfile.close()\n if cleanup:\n os.remove(tar_path)", "def _decompress_data():\n\n dest_dir = get_cachedir()\n if dest_dir is None:\n print('No cache dir found, not decompressing anything.')\n return\n\n filename = _data_url.split('/')[-1]\n tarball = dest_dir / filename\n\n print(\"Trying to decompress file {}\".format(tarball))\n with tarfile.open(str(tarball), \"r:bz2\") as tar:\n tar.extractall(str(dest_dir))\n\n data_dir = dest_dir / 'data'\n pickle_files = data_dir.glob('*.pickle')\n print(\"Data directory {} contains {} pickle files\"\n .format(data_dir, len(list(pickle_files))))", "def mass_extract(source_directory, target_directory):\n\n import os\n import ZipFile\n\n source_directory = raw_input(\"Where are the zips? \")\n target_directory = raw_input(\"To where do you want to extract the files? \")\n \n if not os.path.exists(source_directory):\n print \"Sorry, that folder doesn't seem to exist.\"\n source_directory = raw_input(\"Where are the zips? \")\n\n if not os.path.exists(target_directory):\n os.mkdir(target_directory)\n \n for path, directory, filename in os.walk(source_directory):\n zip_file = ZipFile.ZipFile(filenames)\n ZipFile.extract(zip_file, target_directory)\n zip_file.close()\n\n print \"Done.\"", "def load_archive(archive):\n\n\t### THIS VERSION SEEMS TO SEGFAULT!\n\t\"\"\"\n\t# Flush and save old file descriptors\n\tsys.stderr.flush()\n\told_stderr_fd = os.dup(sys.stderr.fileno())\n\told_stderr = sys.stderr\n\t\n\terrf = 'ErRoR.LoG'\n\terr_log = file(errf,'a+',0)\n\tos.dup2(err_log.fileno(),sys.stderr.fileno())\n\n\tarch = p.Archive_load(archive)\n\t\n\t# Now restore original stderr\n\tsys.stderr = old_stderr\n\tos.dup2(old_stderr_fd,sys.stderr.fileno())\n\t#os.unlink(errf)\n\t\n\treturn arch\n\t\"\"\"\n\t\n\t#\"\"\"\n\t### WHILE THIS ONE DOES NOT (UNTIL NOW!)\n\t# Flush and save old file descriptors\n\terrf = 'ErRoR.LoG'\n\terr_log = file(errf,'a+',0)\n\tos.dup2(err_log.fileno(),sys.stderr.fileno())\n\tarch = p.Archive_load(archive)\n\t#os.unlink(errf)\n\t\n\treturn arch\n\t#\"\"\"", "def extract_all(fn,dst=\".\"):\r\n if tarfile.is_tarfile(fn): \r\n with tarfile.open(fn,'r') as tf:\r\n tf.extractall(dst)\r\n tf.close()\r\n elif zipfile.is_zipfile(fn):\r\n with zipfile.ZipFile(fn, 'r') as zf:\r\n zf.extractall(dst)\r\n zf.close()\r\n else:\r\n print( \"Please provide a tar archive file or zip file\" )", "def _extract_tar_dir(tar, dirname, b_dest):\n member_names = [to_native(dirname, errors='surrogate_or_strict')]\n\n # Create list of members with and without trailing separator\n if not member_names[-1].endswith(os.path.sep):\n member_names.append(member_names[-1] + os.path.sep)\n\n # Try all of the member names and stop on the first one that are able to successfully get\n for member in member_names:\n try:\n tar_member = tar.getmember(member)\n except KeyError:\n continue\n break\n else:\n # If we still can't find the member, raise a nice error.\n raise AnsibleError(\"Unable to extract '%s' from collection\" % 
to_native(member, errors='surrogate_or_strict'))\n\n b_dir_path = os.path.join(b_dest, to_bytes(dirname, errors='surrogate_or_strict'))\n\n b_parent_path = os.path.dirname(b_dir_path)\n try:\n os.makedirs(b_parent_path, mode=0o0755)\n except OSError as e:\n if e.errno != errno.EEXIST:\n raise\n\n if tar_member.type == tarfile.SYMTYPE:\n b_link_path = to_bytes(tar_member.linkname, errors='surrogate_or_strict')\n if not _is_child_path(b_link_path, b_dest, link_name=b_dir_path):\n raise AnsibleError(\"Cannot extract symlink '%s' in collection: path points to location outside of \"\n \"collection '%s'\" % (to_native(dirname), b_link_path))\n\n os.symlink(b_link_path, b_dir_path)\n\n else:\n if not os.path.isdir(b_dir_path):\n os.mkdir(b_dir_path, 0o0755)", "def _extract_zip(src, dst):\n # check if src is a valid .zip\n assert zipfile.is_zipfile(src), \"{} is not a valid .zip file.\".format(src)\n\n zip_file = zipfile.ZipFile(src, \"r\")\n for file in zip_file.namelist():\n zip_file.extract(file, dst)", "def process_archive(self, file):\n self.recursive_archive_depth += 1\n # LOG: write_log or somehow log the archive file here\n if self.recursive_archive_depth >= self.max_recursive_depth:\n file.make_dangerous('Archive bomb')\n else:\n tempdir_path = file.make_tempdir()\n # TODO: double check we are properly escaping file.src_path\n # otherwise we are running unvalidated user input directly in the shell\n command_str = '{} -p1 x \"{}\" -o\"{}\" -bd -aoa'\n unpack_command = command_str.format(SEVENZ_PATH,\n file.src_path, tempdir_path)\n self._run_process(unpack_command)\n self.process_dir(tempdir_path, file.dst_path)\n self.safe_rmtree(tempdir_path)\n self.recursive_archive_depth -= 1", "def do_pack():\n date = datetime.datetime.now()\n archive = 'versions/web_static_{}{}{}{}{}{}.tgz'.format(date.year,\n date.month,\n date.day,\n date.hour,\n date.minute,\n date.second)\n local('mkdir -p versions')\n check = local('tar -cvzf {} web_static'.format(archive))\n if check.failed:\n return None\n else:\n return archive", "def main():\r\n parser = CommonArgParser(__file__)\r\n parser.add_argument('src_dir', help='Source directory')\r\n parser.add_argument(\r\n 'out_dir',\r\n default='.',\r\n help=\"\"\"The directory the files to be extracted.\r\n (Default: Current directoty\"\"\")\r\n args = parser.parse_all()\r\n for f in next_file(args.src_dir, ['*.tgz', '*.tar.gz']):\r\n untgz(f, args.out_dir)", "def unpack_source(sdist):\n g = glob.glob('%s/openafs-*-src.tar.bz2' % sdist)\n if len(g) == 0:\n raise ValueError('Source archive not found in path \"%s\".' % sdist)\n if len(g) > 1:\n raise ValueError(\n 'More than one source archive found in path \"%s\".' % sdist)\n archive = g[0]\n untar(archive)\n roots = glob.glob('openafs*')\n if len(roots) != 1:\n raise ValueError('One root directory expected source archive.')\n os.chdir(roots[0])\n version = extract_version_info()\n log.info('Version info %s' % pprint.pformat(version))\n results['version'] = version\n return version" ]
[ "0.72888505", "0.6926044", "0.6809945", "0.6746972", "0.6636526", "0.65584207", "0.6507182", "0.6496216", "0.6464963", "0.6461252", "0.64201564", "0.63850856", "0.6347677", "0.63212836", "0.6308422", "0.6223524", "0.6171321", "0.61497027", "0.61496353", "0.614074", "0.6140402", "0.6132096", "0.6075669", "0.6067505", "0.60018367", "0.5995476", "0.59874576", "0.5977743", "0.59769696", "0.5966177", "0.59524107", "0.59442157", "0.5938585", "0.59330976", "0.5930144", "0.59046715", "0.5883367", "0.58310133", "0.58257025", "0.58101434", "0.57981473", "0.5793493", "0.577577", "0.57740384", "0.5759815", "0.5735824", "0.5731805", "0.5707171", "0.57007194", "0.56962454", "0.56668407", "0.5651196", "0.56367785", "0.56340826", "0.5633216", "0.5632602", "0.56278354", "0.562252", "0.5616833", "0.56133676", "0.5586587", "0.55804265", "0.5568942", "0.5567047", "0.55665666", "0.5565623", "0.5548318", "0.5544427", "0.55377686", "0.5533401", "0.5529591", "0.55208904", "0.5514953", "0.5512306", "0.55012673", "0.5500543", "0.54991746", "0.54950124", "0.5489898", "0.5479198", "0.54611164", "0.5460209", "0.5458907", "0.545805", "0.5453921", "0.54522353", "0.5450674", "0.5434899", "0.5434662", "0.54250216", "0.541442", "0.5408253", "0.53901196", "0.53812945", "0.538021", "0.5377764", "0.53542876", "0.5339567", "0.53335375", "0.53302294" ]
0.7522544
0
Create an |archive| with |paths| in |cwd|. The output will use XZ compression.
def pack(archive: Union[Path, str],
         paths: List[Union[Path, str]],
         cwd: Optional[Path] = None,
         exclude: Optional[List[Union[Path, str]]] = ()):
    archive = Path(archive)
    if cwd is None:
        cwd = Path.cwd()
    if archive.suffix == '.xz':
        archive = archive.with_suffix('')

    # Make sure all the paths have sane permissions.
    def walk(path):
        if path.is_symlink():
            return
        elif path.is_dir():
            # All dirs should be 755.
            mode = path.stat().st_mode & 0o777
            if mode != 0o755:
                path.chmod(0o755)

            for subpath in path.glob('*'):
                walk(subpath)
        elif path.is_file():
            # All scripts should be 755 while other files should be 644.
            mode = path.stat().st_mode & 0o777
            if mode in (0o755, 0o644):
                return
            if mode & 0o111:
                path.chmod(0o755)
            else:
                path.chmod(0o644)
        else:
            raise ValueError(f'{path}: unknown file type')

    logging.info('Forcing sane permissions on inputs')
    for path in paths:
        walk(cwd / path)

    logging.info('Creating %s tarball', archive.name)
    # We use relpath here to help out tar on platforms where it doesn't like
    # paths with colons in them (e.g. Windows). We have to construct the full
    # path before running through relpath as relative archives will implicitly be
    # checked against os.getcwd rather than the explicit cwd.
    tar = os.path.relpath(cwd / archive, cwd)
    run(['tar', '--owner=0', '--group=0', '-cf', tar] +
        [f'--exclude={x}' for x in exclude] + ['--'] + paths, cwd=cwd)

    logging.info('Compressing tarball')
    run(['xz', '-f', '-T0', '-9', tar], cwd=cwd)
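For illustration only (not part of the dataset record): a minimal usage sketch of the pack helper above. It assumes `run` is a thin wrapper around subprocess.run with check=True, and the archive name and input paths are made up for the example.

# Hypothetical usage; `run`, the project layout, and the archive name are assumptions.
from pathlib import Path

pack(Path('release/tools.tar.xz'),     # '.xz' suffix is stripped for tar, re-added by xz
     ['src', 'docs'],                  # paths are taken relative to cwd
     cwd=Path('/work/project'),
     exclude=['src/.cache'])
# Result: /work/project/release/tools.tar.xz, owned by uid/gid 0 inside the tarball.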
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_archive_file(location, paths, environment=None, compression=None, archive_format=None):\n if archive_format == 'zip':\n archive = ZipTarWrapper(location.name, 'w', zipfile.ZIP_DEFLATED)\n else:\n write_type = \"w\"\n if compression:\n write_type = \"w|{0}\".format(compression)\n archive = tarfile.open(location.name, write_type)\n\n # Add all the things to the archive\n for path_spec in paths:\n path_spec.add_to_tar(archive, environment)\n\n # Finish the zip\n archive.close()\n\n return archive", "def _create_zip_file(self, dest, paths):\n with zipfile.ZipFile(dest, 'w') as zip_file:\n for path in paths:\n zip_file.write(path, os.path.basename(path))", "def zip_package(paths: List[Path], fp, compression=zipfile.ZIP_DEFLATED):\n\n with zipfile.ZipFile(\n file=fp, mode=\"w\", compression=compression, compresslevel=9\n ) as z:\n for path in paths:\n (local_path, zip_path) = path\n z.write(filename=str(path[0]), arcname=str(path[1]))", "def archive(self):\n logging.info(_('Creating compressed archive...'))\n\n report_file_ext = 'bz2'\n compressor = 'bzip2'\n caller = Caller({})\n try:\n caller.call('xz --version')\n report_file_ext = 'xz'\n compressor = 'xz'\n except Exception:\n logging.debug('xz compression not available')\n\n if not os.path.exists(self.conf[\"output\"]):\n os.makedirs(self.conf[\"output\"])\n\n self.conf[\"path\"] = os.path.join(\n self.conf[\"output\"],\n \"sosreport-%s-%s.tar.%s\" % (\n 'LogCollector',\n time.strftime(\"%Y%m%d%H%M%S\"),\n report_file_ext\n )\n )\n\n if self.conf[\"ticket_number\"]:\n self.conf[\"path\"] = os.path.join(\n self.conf[\"output\"],\n \"sosreport-%s-%s-%s.tar.%s\" % (\n 'LogCollector',\n self.conf[\"ticket_number\"],\n time.strftime(\"%Y%m%d%H%M%S\"),\n report_file_ext\n )\n )\n\n config = {\n 'report': os.path.splitext(self.conf['path'])[0],\n 'compressed_report': self.conf['path'],\n 'compressor': compressor,\n 'directory': self.conf[\"local_tmp_dir\"],\n 'rname': os.path.basename(self.conf['path']).split('.')[0],\n }\n caller.configuration = config\n shutil.move(\n os.path.join(\n self.conf[\"local_tmp_dir\"],\n 'working'\n ),\n os.path.join(\n self.conf[\"local_tmp_dir\"],\n config[\"rname\"]\n ),\n )\n caller.call(\"tar -cf '%(report)s' -C '%(directory)s' '%(rname)s'\")\n shutil.rmtree(self.conf[\"local_tmp_dir\"])\n caller.call(\"%(compressor)s -1 '%(report)s'\")\n os.chmod(self.conf[\"path\"], stat.S_IRUSR | stat.S_IWUSR)\n sha256_out = caller.call(\"sha256sum '%(compressed_report)s'\")\n checksum = sha256_out.split()[0]\n with open(\"%s.sha256\" % self.conf[\"path\"], 'w') as checksum_file:\n checksum_file.write(sha256_out)\n\n msg = ''\n if os.path.exists(self.conf[\"path\"]):\n archiveSize = float(os.path.getsize(self.conf[\"path\"])) / (1 << 20)\n\n size = '%.1fM' % archiveSize\n\n msg = _(\n 'Log files have been collected and placed in {path}\\n'\n 'The sha256 for this file is {checksum} and its size is {size}'\n ).format(\n path=self.conf[\"path\"],\n size=size,\n checksum=checksum,\n )\n\n if archiveSize >= 1000:\n msg += _(\n '\\nYou can use the following filters -c, -d, -H in the '\n 'next execution to limit the number of Datacenters,\\n'\n 'Clusters or Hosts that are collected in order to '\n 'reduce the archive size.'\n )\n return msg", "def create_zip(\n output_path,\n input_paths,\n ignore_dotfiles,\n ignore_windows_volume_folders,\n put_all_files_in_shared_root_dir,\n path_separator,\n):\n # Hash each file, add hashes to file_hash_dict, then add to zip\n file_hash_dict = {}\n total_file_count = 0\n with 
zipfile.ZipFile(output_path, \"w\", zipfile.ZIP_DEFLATED, allowZip64=True) as zip_handler:\n for path in input_paths:\n if len(input_paths) == 1:\n common_root_directory = os.path.dirname(path)\n else:\n common_root_directory = get_common_root_directory(input_paths, path_separator)\n if os.path.isdir(path):\n file_list, total_size = get_file_paths_and_size(\n [path], ignore_dotfiles, ignore_windows_volume_folders\n )\n printer(\n \"'{}' contains {} files ({}) for compression\".format(\n path, len(file_list), bytes_filesize_to_readable_str(total_size)\n ),\n \"info\",\n )\n total_file_count += len(file_list)\n directory_hash_dict = get_hash_dict(\n file_list,\n common_root_directory,\n put_all_files_in_shared_root_dir,\n )\n for hash_value, relative_paths in directory_hash_dict.items():\n if hash_value not in file_hash_dict:\n file_hash_dict[hash_value] = relative_paths\n else:\n file_hash_dict[hash_value].extend(relative_paths)\n add_files_to_zip(\n file_list,\n common_root_directory,\n zip_handler,\n put_all_files_in_shared_root_dir,\n )\n printer(\"'{}' contents added to zip successfully\".format(path), \"info\")\n else:\n total_file_count += 1\n individual_file_hash_dict = get_hash_dict(\n [path],\n common_root_directory,\n put_all_files_in_shared_root_dir,\n )\n for hash_value, relative_paths in individual_file_hash_dict.items():\n if hash_value not in file_hash_dict:\n file_hash_dict[hash_value] = relative_paths\n else:\n file_hash_dict[hash_value].extend(relative_paths)\n add_files_to_zip(\n [path],\n common_root_directory,\n zip_handler,\n put_all_files_in_shared_root_dir,\n )\n printer(\"'{}' added to zip successfully\".format(path), \"info\")\n return file_hash_dict, total_file_count", "def Zip(args):\n parser = argparse.ArgumentParser(description=Zip.__doc__)\n parser.add_argument(\n '-r', dest='recursive', action='store_true',\n default=False,\n help='recurse into directories')\n parser.add_argument(\n '-q', dest='quiet', action='store_true',\n default=False,\n help='quiet operation')\n parser.add_argument('zipfile')\n parser.add_argument('filenames', nargs='+')\n options = parser.parse_args(args)\n\n src_files = []\n for filename in options.filenames:\n globbed_src_args = glob.glob(filename)\n if not globbed_src_args:\n if not options.quiet:\n print('zip warning: name not matched: %s' % filename)\n\n for src_file in globbed_src_args:\n src_file = os.path.normpath(src_file)\n src_files.append(src_file)\n if options.recursive and os.path.isdir(src_file):\n for root, dirs, files in os.walk(src_file):\n for dirname in dirs:\n src_files.append(os.path.join(root, dirname))\n for filename in files:\n src_files.append(os.path.join(root, filename))\n\n # zip_data represents a list of the data to be written or appended to the\n # zip_stream. It is a list of tuples:\n # (OS file path, zip path/zip file info, and file data)\n # In all cases one of the |os path| or the |file data| will be None.\n # |os path| is None when there is no OS file to write to the archive (i.e.\n # the file data already existed in the archive). 
|file data| is None when the\n # file is new (never existed in the archive) or being updated.\n zip_data = []\n new_files_to_add = [OSMakeZipPath(src_file) for src_file in src_files]\n zip_path_to_os_path_dict = dict((new_files_to_add[i], src_files[i])\n for i in range(len(src_files)))\n write_mode = 'a'\n if os.path.exists(options.zipfile):\n with zipfile.ZipFile(options.zipfile, 'r') as zip_stream:\n try:\n files_to_update = set(new_files_to_add).intersection(\n set(zip_stream.namelist()))\n if files_to_update:\n # As far as I can tell, there is no way to update a zip entry using\n # zipfile; the best you can do is rewrite the archive.\n # Iterate through the zipfile to maintain file order.\n write_mode = 'w'\n for zip_path in zip_stream.namelist():\n if zip_path in files_to_update:\n os_path = zip_path_to_os_path_dict[zip_path]\n zip_data.append((os_path, zip_path, None))\n new_files_to_add.remove(zip_path)\n else:\n file_bytes = zip_stream.read(zip_path)\n file_info = zip_stream.getinfo(zip_path)\n zip_data.append((None, file_info, file_bytes))\n except IOError:\n pass\n\n for zip_path in new_files_to_add:\n zip_data.append((zip_path_to_os_path_dict[zip_path], zip_path, None))\n\n if not zip_data:\n print('zip error: Nothing to do! (%s)' % options.zipfile)\n return 1\n\n with zipfile.ZipFile(options.zipfile, write_mode,\n zipfile.ZIP_DEFLATED) as zip_stream:\n for os_path, file_info_or_zip_path, file_bytes in zip_data:\n if isinstance(file_info_or_zip_path, zipfile.ZipInfo):\n zip_path = file_info_or_zip_path.filename\n else:\n zip_path = file_info_or_zip_path\n\n if os_path:\n st = os.stat(os_path)\n if stat.S_ISDIR(st.st_mode):\n # Python 2.6 on the buildbots doesn't support writing directories to\n # zip files. This was resolved in a later version of Python 2.6.\n # We'll work around it by writing an empty file with the correct\n # path. 
(This is basically what later versions do anyway.)\n zip_info = zipfile.ZipInfo()\n zip_info.filename = zip_path\n zip_info.date_time = time.localtime(st.st_mtime)[0:6]\n zip_info.compress_type = zip_stream.compression\n zip_info.flag_bits = 0x00\n zip_info.external_attr = (st[0] & 0xFFFF) << 16\n zip_info.CRC = 0\n zip_info.compress_size = 0\n zip_info.file_size = 0\n zip_stream.writestr(zip_info, '')\n else:\n zip_stream.write(os_path, zip_path)\n else:\n zip_stream.writestr(file_info_or_zip_path, file_bytes)\n\n if not options.quiet:\n if zip_path in new_files_to_add:\n operation = 'adding'\n else:\n operation = 'updating'\n zip_info = zip_stream.getinfo(zip_path)\n if (zip_info.compress_type == zipfile.ZIP_STORED or\n zip_info.file_size == 0):\n print(' %s: %s (stored 0%%)' % (operation, zip_path))\n elif zip_info.compress_type == zipfile.ZIP_DEFLATED:\n print(' %s: %s (deflated %d%%)' % (operation, zip_path,\n 100 - zip_info.compress_size * 100 / zip_info.file_size))\n\n return 0", "def create_archive(filelist):\n\t\n\n\ttmp = tempfile.NamedTemporaryFile()\n\t# with tempfile.SpooledTemporaryFile() as tmp:\n\twith zipfile.ZipFile(tmp, 'w', zipfile.ZIP_DEFLATED) as archive:\n\t\tarcname = './docs/'\n\t\tfor x in filelist:\n\t\t\tfilename = os.path.basename(x[1])\n\t\t\t_file = x[0]\n\t\t\t# make sure we're at the start...\n\t\t\t_file.seek(0)\n\t\t\tarchive.write(_file.name, arcname=os.path.join(arcname, filename))\n\n\t# Reset file pointer\n\ttmp.seek(0)\n\n\treturn tmp\n\n\t\t# Write file data to response\n\t\t# return HttpResponse(tmp.read(), content_type='application/x-zip-compressed')", "def _make_archive(file_list, archive, root):\n with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as zipf:\n for f in file_list:\n zipf.write(f, os.path.relpath(f, root))", "def pack_zip(output_filename, sources):\n previous_dir = os.getcwd()\n if not isinstance(sources, (list, tuple)) and \\\n isinstance(sources, str):\n sources = [sources]\n zip_ds = zipfile.ZipFile(output_filename, 'w', zipfile.ZIP_DEFLATED)\n for source in sources:\n os.chdir(os.path.dirname(source))\n if os.path.isdir(source):\n for root, dirs, files in os.walk(os.path.basename(source)):\n for file in files:\n zip_ds.write(os.path.join(root, file))\n else:\n zip_ds.write(os.path.basename(source))\n zip_ds.close()\n os.chdir(previous_dir)", "def _zip_files(self):\n\n zip_file = Path(self.build_directory.parent).joinpath(\n self.package_name + '.zip'\n )\n logger.info('Creating zip file: %s', zip_file)\n\n shutil.make_archive(zip_file.with_suffix(''), 'zip', self.build_directory)\n shutil.move(str(zip_file), self.build_directory)", "def add_files(self, *paths, **kw):\n write_p = self._pointer\n\n block_size = ffi.write_get_bytes_per_block(write_p)\n if block_size <= 0:\n block_size = 10240 # pragma: no cover\n\n with new_archive_entry() as entry_p:\n entry = ArchiveEntry(None, entry_p)\n for path in paths:\n with new_archive_read_disk(path, **kw) as read_p:\n while 1:\n r = read_next_header2(read_p, entry_p)\n if r == ARCHIVE_EOF:\n break\n entry.pathname = entry.pathname.lstrip('/')\n read_disk_descend(read_p)\n write_header(write_p, entry_p)\n if entry.isreg:\n with open(entry_sourcepath(entry_p), 'rb') as f:\n while 1:\n data = f.read(block_size)\n if not data:\n break\n write_data(write_p, data, len(data))\n write_finish_entry(write_p)\n entry_clear(entry_p)", "def archive(ctx, config):\n log.info('Creating archive directory...')\n archive_dir = misc.get_archive_dir(ctx)\n run.wait(\n ctx.cluster.run(\n args=[\n 
'install', '-d', '-m0755', '--', archive_dir,\n ],\n wait=False,\n )\n )\n\n try:\n yield\n except Exception:\n # we need to know this below\n set_status(ctx.summary, 'fail')\n raise\n finally:\n passed = get_status(ctx.summary) == 'pass'\n if ctx.archive is not None and \\\n not (ctx.config.get('archive-on-error') and passed):\n log.info('Transferring archived files...')\n logdir = os.path.join(ctx.archive, 'remote')\n if (not os.path.exists(logdir)):\n os.mkdir(logdir)\n for rem in ctx.cluster.remotes.iterkeys():\n path = os.path.join(logdir, rem.shortname)\n misc.pull_directory(rem, archive_dir, path)\n # Check for coredumps and pull binaries\n fetch_binaries_for_coredumps(path, rem)\n\n log.info('Removing archive directory...')\n run.wait(\n ctx.cluster.run(\n args=[\n 'rm',\n '-rf',\n '--',\n archive_dir,\n ],\n wait=False,\n ),\n )", "def archive(filepath,archive_dir='archive'):\n\n # Make sure we have a directory to archive to\n try:\n mkdir(archive_dir)\n except:\n print(\"Error making archive directory\")\n return\n\n try:\n (dir, filename) = os.path.split(filepath)\n outfile = os.path.join(dir,archive_dir,filename)+'.gz'\n with open(filename, 'rb') as f_in, gzip.open(outfile, 'wb') as f_out:\n shutil.copyfileobj(f_in, f_out)\n except Exception as e:\n print(\"Error archiving \",filepath)\n print(e)\n else:\n try:\n os.remove(filepath)\n except:\n print(\"Error removing \",filepath)", "def create_zip_from_files(files: List[Path]) -> Any:\n temp = tempfile.NamedTemporaryFile()\n with zipfile.ZipFile(temp, 'w') as handle:\n for f in files:\n filename = f.name\n handle.write(f, arcname=filename)\n temp.flush()\n return temp", "def archive(project, filename, pack_envs=False):\n return archiver._archive_project(project, filename, pack_envs)", "def writepy(self, paths=[]):\n from vyperlogix import misc\n for top in paths if (misc.isList(paths)) else [paths]:\n try:\n for root, dirs, files in os.walk(top):\n if (self.rx.search(root) == None):\n print '='*80\n print 'files=%s' % files\n py_files = [os.path.join(root,f) for f in files if f.endswith('.py' if not self.isSourceless else '.pyo')]\n print '-'*80\n print 'py_files=%s' % py_files\n util.byte_compile(py_files,optimize=2,force=1)\n for f in py_files:\n print 'ZIP Adding (%s) to (%s)' % (f,self.filename)\n f_base = f.replace('.pyo','.pyc').replace(top,'')\n _f_base = f_base.split(os.sep)[-1]\n self.write(f,f_base)\n print '='*80\n except Exception as details:\n print 'Error in ZIP processing. 
(%s)' % (str(details))", "def main(args):\n\n for dir in args.dirs:\n # prepdir = mdssprep.Directory(dir,exclude=['file_*3*','file_2??'],include=['file_*5*'],maxarchivesize=mdssprep.one_meg*200.,minsize=mdssprep.one_meg*100.)\n prepdir = mdssprep.Directory(dir)\n prepdir.archive(dryrun=False)", "def make_archive(fname_archive: str, \n sim_epoch: rebound.Simulation, \n object_names: List[str],\n epoch: datetime, dt0: datetime, dt1: datetime, \n time_step: int, save_step: int = 1,\n save_elements: bool = False,\n progbar: bool = False) -> rebound.SimulationArchive:\n try:\n # First try to load the named archive\n sa = rebound.SimulationArchive(filename=fname_archive)\n except:\n # If the archive is not on disk, save it to disk\n print(f'Generating archive {fname_archive}\\n'\n f'from {dt0} to {dt1}, time_step={time_step}, save_step={save_step}...')\n make_archive_impl(fname_archive=fname_archive, sim_epoch=sim_epoch, object_names=object_names,\n epoch=epoch, dt0=dt0, dt1=dt1, \n time_step=time_step, save_step=save_step, \n save_elements=save_elements, progbar=progbar)\n # Load the new archive into memory\n sa = rebound.SimulationArchive(filename=fname_archive)\n return sa", "def _zip_archive(extracted_source, exclude_files=None, **_):\n ctx.logger.debug(\"Zipping source {source}\".format(source=extracted_source))\n exclude_files = exclude_files or []\n ctx.logger.debug('Excluding files {l}'.format(l=exclude_files))\n with tempfile.NamedTemporaryFile(suffix=\".zip\",\n delete=False) as updated_zip:\n updated_zip.close()\n with zipfile.ZipFile(updated_zip.name,\n mode='w',\n compression=zipfile.ZIP_DEFLATED) as output_file:\n for dir_name, subdirs, filenames in os.walk(extracted_source):\n # Make sure that the files that we don't want\n # to include (e.g. 
plugins directory) will not be archived.\n exclude_dirs(dir_name, subdirs, exclude_files)\n for filename in filenames:\n # Extra layer of validation on the excluded files.\n if not exclude_file(dir_name, filename, exclude_files):\n # Create the path as we want to archive it to the\n # archivee.\n file_to_add = os.path.join(dir_name, filename)\n # The name of the file in the archive.\n if file_storage_breaker(file_to_add):\n continue\n arc_name = file_to_add[len(extracted_source)+1:]\n output_file.write(file_to_add, arcname=arc_name)\n archive_file_path = updated_zip.name\n return archive_file_path", "def main():\n run_time_str = datetime.datetime.now().strftime(\"%Y-%m-%d_%H-%M-%S\")\n log = _prepare_logging()\n Args = collections.namedtuple(\n \"Args\",\n (\n \"input_paths\",\n \"output_path\",\n \"root_directory\",\n \"ignore_dotfiles\",\n \"ignore_windows_volume_folders\",\n ),\n )\n # If we are running from Mac Automator, take file paths from sys.argv\n if check_running_from_automator():\n # Example sys.argv for two files selected: ['-c', '/absolute/path/1.txt',\n # '/absolute/path/to/2.txt']\n args = Args(\n input_paths=sys.argv[1:],\n output_path=None,\n root_directory=False,\n ignore_dotfiles=False,\n ignore_windows_volume_folders=False,\n )\n # Otherwise, use argparse and allow for some additional options\n else:\n parser = argparse.ArgumentParser()\n parser.add_argument(\"input_paths\", nargs=\"+\", help=\"Items to compress\")\n parser.add_argument(\"-o\", \"--output_path\", \"--output\", help=\"Filename for zip\")\n parser.add_argument(\n \"-d\",\n \"--root-directory\",\n action=\"store_true\",\n help=\"Place all files in zip within a shared parent folder\",\n )\n parser.add_argument(\n \"--ignore-dotfiles\",\n action=\"store_true\",\n help=\"Ignore files and folders beginning with '.' 
(typically these are hidden folders)\",\n )\n parser.add_argument(\n \"--ignore-windows-volume-folders\",\n action=\"store_true\",\n help=(\n \"Ignore folders named 'System Volume Information' and '$RECYCLE.BIN' (typically\"\n \" these contain hidden system information)\"\n ),\n )\n\n parsed_args = parser.parse_args()\n args = Args(**vars(parsed_args))\n\n # Check passed arguments and return if issues\n if get_missing_sources(args.input_paths):\n printer(\n \"Path(s) {} not found\".format(get_list_as_str(get_missing_sources(args.input_paths))),\n \"error\",\n True,\n )\n return\n\n # Set path separator based on OS\n if platform.system() == \"Windows\":\n path_separator = \"\\\\\"\n else:\n path_separator = \"/\"\n\n # Convert input paths into absolute paths\n input_paths = [os.path.abspath(path) for path in args.input_paths]\n\n # Set output path\n if args.output_path is not None:\n output_path = args.output_path\n output_directory = os.path.dirname(output_path)\n else:\n if check_running_from_automator():\n # Last item in the list of arguments will be the last item clicked in Finder\n output_directory = os.path.dirname(input_paths[-1])\n else:\n output_directory = \".\"\n if len(input_paths) == 1:\n output_filename = os.path.basename(\"{}.zip\".format(input_paths[0]))\n else:\n output_filename = \"{}_archive.zip\".format(run_time_str)\n output_path = get_safe_file_path(os.path.join(output_directory, output_filename))\n printer(\"Zip file will be created at path '{}'\".format(output_path), \"info\")\n\n # Create zipfile and get file_hash_dict info for subsequent verification\n try:\n file_hash_dict, total_file_count = create_zip(\n output_path,\n input_paths,\n args.ignore_dotfiles,\n args.ignore_windows_volume_folders,\n args.root_directory,\n path_separator,\n )\n except:\n # Log the exception to a file, so we can view later if running from Automator\n error_log_file_path = os.path.join(\n output_directory, \"{}_verizip_error.txt\".format(run_time_str)\n )\n error_log_handler = logging.FileHandler(error_log_file_path)\n error_log_handler.setLevel(logging.ERROR)\n error_log_handler.setFormatter(\n logging.Formatter(\"%(asctime)s - %(levelname)s - %(message)s\")\n )\n log.addHandler(error_log_handler)\n log.exception(\"Exception occurred during creation of zip file '%s':\", output_path)\n printer(\n \"Error occurred - see '{}'\".format(os.path.abspath(error_log_file_path)), \"error\", True\n )\n if os.path.isfile(output_path):\n os.remove(output_path)\n return\n printer(\"'{}' finalised - will now be verified\".format(output_path), \"info\")\n\n # Get hashes of files within finalised zip\n zip_hash_dict = {}\n with zipfile.ZipFile(output_path, \"r\") as zip_handler:\n zip_file_listing = zip_handler.namelist()\n zip_file_count = 0\n for file_within_zip in zip_file_listing:\n # Todo: confirm no 'file_info.is_dir()' type check needed here - don't believe so, as\n # only files with paths are being added, rather than directories as separate archive\n # items\n zip_file_count += 1\n hash_value = hash_file_in_zip(zip_handler, file_within_zip)\n if hash_value not in zip_hash_dict:\n zip_hash_dict[hash_value] = []\n zip_hash_dict[hash_value].append(file_within_zip)\n\n # Verify that hashes from source files match those for compressed files within newly-created zip\n if file_hash_dict == zip_hash_dict and total_file_count == zip_file_count:\n printer(\"Verification complete; no discrepancies identified\", \"info\")\n printer(\"'{}' created successfully\".format(output_path), \"info\", True)\n 
else:\n error_log_file_path = os.path.join(\n output_directory, \"{}_verizip_error.txt\".format(run_time_str)\n )\n with open(error_log_file_path, \"w\") as error_log_file_handler:\n for hash_value, file_paths in file_hash_dict.items():\n if hash_value not in zip_hash_dict:\n error_log_file_handler.write(\n \"Hash '{}' not present in zip file (with expected files {})\\n\".format(\n hash_value, get_list_as_str(file_paths)\n )\n )\n elif sorted(file_paths) != sorted(zip_hash_dict[hash_value]):\n error_log_file_handler.write(\n \"Files for hash '{}' do not match between source and zip ({} in source - {}\"\n \" in zip)\\n\".format(hash_value, file_paths, zip_hash_dict[hash_value])\n )\n printer(\n \"'{}' failed verification - see error log at '{}'\".format(\n output_path, os.path.abspath(error_log_file_path)\n ),\n \"error\",\n True,\n )\n os.remove(output_path) # Delete the zip that failed verification", "def compress_skim_dir(directory, output=\"zarr\"):\n\n if output not in (\"zarr\", \"zarr.zip\"):\n raise NotImplementedError(output)\n\n if output == \"zarr\":\n if not os.path.exists(directory+\".zarr\"):\n os.makedirs(directory+\".zarr\")\n elif output == \"zarr.zip\":\n if os.path.exists(directory+\".zarr.zip\"):\n raise FileExistsError(directory+\".zarr.zip\")\n\n master = {}\n for f in os.walk(directory):\n for fi in f[2]:\n if \".emx\" in fi:\n arr = np.fromfile(fi, dtype='f4')\n side = int(np.sqrt(arr.size))\n arr = arr.reshape(side, side)\n tazrange = pd.RangeIndex(1, side+1)\n master[fi.replace(\".emx\", \"\")] = xr.DataArray(\n arr,\n dims=['otaz', 'dtaz'],\n coords={'otaz': tazrange, 'dtaz': tazrange}\n )\n\n master = sh.Dataset(master)\n\n if output == \"zarr\":\n master.to_zarr(directory+\".zarr\", mode='a')\n elif output == \"zarr.zip\":\n with zarr.ZipStore(directory+\".zarr.zip\", mode='w') as store:\n master.to_zarr(store)\n return master", "def make_zip(self):\n shutil.make_archive(self.name, 'zip', self.name)", "def archive(self, files, name):\n self.log.debug(\"Putting files into archive: %s\" % \"\\n\".join(files))\n tar_name = \"%s%s\" % (name, self.extension)\n if os.path.exists(tar_name):\n raise RuntimeError (\"Tried to create an archive that already exists: %s\" % tar_name) \n else:\n self.log.info(\"Creating a new archive %s\" % tar_name)\n tar = tarfile.open(tar_name, 'w:gz');\n for name in files:\n tar.add(name)\n print '%s'% (name)\n tar.close()\n return tar_name", "def _archive_project(name, buff, files=None, repo=None, branch='master',\n ignore_deleted=False):\n if repo is None:\n repo = Repoman.open_repo(name)\n now = datetime.now().timetuple()[:6]\n archive = zipfile.ZipFile(buff, \"w\", zipfile.ZIP_DEFLATED)\n files_list = files if files is not None else \\\n repo.list_files_for_branch(branch)\n all_files = files_list if files is None else \\\n repo.list_files_for_branch(branch)\n\n template_paths = defaultdict(list)\n for file_path in all_files:\n split_file_path = file_path.split('/')\n if len(split_file_path) > 2:\n template_paths[split_file_path[1]].append(file_path)\n extractors = json.loads(repo.file_contents_for_branch('extractors.json',\n branch) or '{}')\n\n seen_files = set()\n spiders = set()\n for file_path in files_list:\n if file_path.startswith('spiders'):\n try:\n parts = file_path.split(\"/\")\n if len(parts) >= 2:\n spider_name = parts[1]\n if spider_name.endswith('.json'):\n spider_name = spider_name[:-5]\n if spider_name not in spiders:\n # Load spider if necessary\n if len(parts) > 2:\n file_path = 'spiders/' + spider_name + '.json'\n 
file_contents = repo.file_contents_for_branch(\n file_path, branch)\n as_json = json.loads(file_contents)\n templates = []\n # Load all spider templates\n spider_templates = template_paths.get(spider_name, [])\n for template_path in spider_templates:\n seen_files.add(template_path)\n existing = {}\n # Ignore deleted templates\n try:\n templ_contents = repo.file_contents_for_branch(\n template_path, branch)\n except (TypeError, ValueError):\n continue\n json_template = json.loads(templ_contents)\n # Validate extractors\n template_extractors = json_template.get(\n 'extractors', {})\n for field, eids in template_extractors.items():\n existing[field] = [eid for eid in eids\n if eid in extractors]\n json_template['extractors'] = existing\n spider_name = parts[1]\n templates.append(json_template)\n spiders.add(spider_name)\n as_json.pop('template_names', None)\n as_json['templates'] = templates\n _add_to_archive(archive, file_path,\n json.dumps(as_json), now)\n except TypeError:\n if ignore_deleted:\n continue\n # Handle Deleted Spiders\n file_contents = repo.file_contents_for_branch(file_path,\n 'master')\n file_info = {'deleted': True}\n if file_contents:\n as_json = json.loads(file_contents)\n _add_to_archive(archive, file_path, json.dumps(file_info), now)\n else:\n file_contents = repo.file_contents_for_branch(file_path, branch)\n _add_to_archive(archive, file_path, file_contents, now)\n seen_files.add(file_path)\n\n # Add empty placeholders for missing files required by dash\n for file_path in {'extractors.json', 'items.json'} - seen_files:\n _add_to_archive(archive, file_path, '{}', now)\n archive.close()", "def _archive(self, name, contents, isolate_content):\n # Shared code for all test_isolated_* test cases.\n root = os.path.join(self.tmpdir, name)\n # Refuse reusing the same task name twice, it makes the whole test suite\n # more manageable.\n self.assertFalse(os.path.isdir(root), root)\n os.mkdir(root)\n isolate_path = os.path.join(root, 'i.isolate')\n with open(isolate_path, 'wb') as f:\n f.write(isolate_content)\n for relpath, content in contents.items():\n p = os.path.join(root, relpath)\n d = os.path.dirname(p)\n if not os.path.isdir(d):\n os.makedirs(d)\n with open(p, 'wb') as f:\n f.write(content)\n return self.client.isolate(isolate_path)", "def create_zip_file():\n shutil.make_archive(os.path.join(DIST_DIR, \"build\"), \"zip\", BUILD_DIR)", "def __gitCreateArchive(self):\n self.vcs.gitCreateArchive(self.project.getProjectPath())", "def unpack(archive: Union[Path, str],\n cwd: Optional[Path] = None,\n files: Optional[List[Union[Path, str]]] = ()):\n archive = Path(archive)\n if cwd is None:\n cwd = Path.cwd()\n if files:\n files = ['--'] + list(files)\n else:\n files = []\n\n # Try to make symlink usage easier in Windows.\n extra_env = {\n 'MSYS': 'winsymlinks:nativestrict',\n }\n\n logging.info('Unpacking %s', archive.name)\n # We use relpath here to help out tar on platforms where it doesn't like\n # paths with colons in them (e.g. Windows). 
We have to construct the full\n # before running through relpath as relative archives will implicitly be\n # checked against os.getcwd rather than the explicit cwd.\n src = os.path.relpath(cwd / archive, cwd)\n run(['tar', '--no-same-owner', '-xf', src] + files, cwd=cwd,\n extra_env=extra_env)", "def open(self, *args, **kwargs):\n return ZipFileArchiver(*args,**kwargs)", "def generate_test_dataset_archive(filepath, dataset):\n\n # 'file:///some/path' to '/some/path'\n if filepath[:7] == 'file://':\n filepath = filepath[7:]\n\n # Check if the dataset exists.\n # When not been generate it.\n if not os.path.isfile(filepath):\n\n print(\"Generating\", filepath)\n data = get_test_dataset(dataset)\n \n ensure_dir(os.path.dirname(filepath))\n idxgz.save(filepath, data)", "def archive(po_filename, bl_filename):\n\n # Store archive in same dir as this script\n root = os.path.abspath(os.path.dirname(sys.argv[0]))\n\n po_archive = root + '/po.csv.%s' % datetime.date.today()\n bl_archive = root + '/bl.csv.%s' % datetime.date.today()\n\n shutil.move(po_filename, po_archive)\n shutil.move(bl_filename, bl_archive)\n\n perms = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH\n os.chmod(po_archive, perms)\n os.chmod(bl_archive, perms)", "def zipdir(path, ziph):\n for root, dirs, files in os.walk(path):\n for file in files:\n ziph.write(os.path.join(root, file),\n arcname=os.path.join(os.path.relpath(root, path), file))", "def do_pack():\n\n now = datetime.now()\n # format the name of the file with the timestamps\n now_year = now.year\n now_month = now.month\n now_day = now.day\n now_hour = now.hour\n now_minute = now.minute\n now_second = now.second\n # apply the format\n file_name = 'versions/web_static_{}{}{}{}{}{}.tgz'.format(\n now_year, now_month, now_day, now_hour, now_minute, now_second\n )\n # All archives must be stored in the folder versions\n local('mkdir -p versions')\n # execute locally the compression of the folder\n command = local(\"tar -cvzf \" + file_name + \" ./web_static/\")\n # return the archive path if the archive has been correctly generated\n if command.succeeded:\n return file_name\n else:\n return None", "def make_zipfile(output_filename, source_dir):\n relroot = os.path.abspath(os.path.join(source_dir, os.pardir))\n with zipfile.ZipFile(output_filename, \"w\", zipfile.ZIP_DEFLATED) as zip:\n for root, dirs, files in os.walk(source_dir):\n # add directory (needed for empty dirs)\n zip.write(root, os.path.relpath(root, relroot))\n for file in files:\n filename = os.path.join(root, file)\n if os.path.isfile(filename): # regular files only\n arcname = os.path.join(os.path.relpath(root, relroot), file)\n zip.write(filename, arcname)", "def zip_repo(src_path, dest_path):\n tar = tarfile.open(dest_path, \"w:gz\")\n for file_name in glob.glob(os.path.join(src_path, \"*\")):\n tar.add(file_name, os.path.basename(file_name))\n\n tar.close()", "def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,\n dry_run=0, owner=None, group=None):\n save_cwd = os.getcwd()\n if root_dir is not None:\n log.debug(\"changing into '%s'\", root_dir)\n base_name = os.path.abspath(base_name)\n if not dry_run:\n os.chdir(root_dir)\n\n if base_dir is None:\n base_dir = os.curdir\n\n kwargs = {'dry_run': dry_run}\n\n try:\n format_info = ARCHIVE_FORMATS[format]\n except KeyError:\n raise ValueError(\"unknown archive format '%s'\" % format)\n\n func = format_info[0]\n for arg, val in format_info[1]:\n kwargs[arg] = val\n\n if format != 'zip':\n kwargs['owner'] = owner\n 
kwargs['group'] = group\n\n try:\n filename = func(base_name, base_dir, **kwargs)\n finally:\n if root_dir is not None:\n log.debug(\"changing back to '%s'\", save_cwd)\n os.chdir(save_cwd)\n\n return filename", "def archive(self,prompt=True,dry_run=False):\n\t\t# make sure the project is valid\n\t\tself.validate_project()\n\n\t\t# get the segments\n\t\tself.get_segments()\n\n\t\t# get the header file(s)\n\t\tself.get_headers()\n\n\t\t# determine which pool each file is going to\n\t\tself._set_element_pools()\n\n\t\t# check the files against the pools for duplicates\n\t\tself._get_archive_status()\n\n\t\t# print what we found for archiving\n\t\tself.print_queue()\n\n\t\t# check to see if we have anything to archive\n\t\t# and prompt the user \n\t\tself.ready_check(prompt=prompt)\n\n\t\t# do the archive\n\t\tself._archive(dry_run=dry_run)", "def archive_files(output_dir, output_dir_name):\n\n file_name = output_dir + \".tar.gz\"\n\n logger.info(\"Archiving files into %s\", file_name)\n with tarfile.open(file_name, \"w|gz\") as tar:\n tar.add(output_dir, arcname=output_dir_name)\n logger.info(\"Archived files into %s\", file_name)\n\n try:\n shutil.rmtree(output_dir)\n except OSError as ex:\n logger.warning(\"Failed to delete directory after archiving: %s\", ex)", "def make_zipfile(output_filename, source_dir):\n import zipfile, zlib\n relroot = os.path.abspath(os.path.join(source_dir, os.pardir))\n with zipfile.ZipFile(output_filename, \"w\", zipfile.ZIP_DEFLATED, allowZip64) as zip:\n for root, dirs, files in os.walk(source_dir):\n # add directory (needed for empty dirs)\n zip.write(root, os.path.relpath(root, relroot))\n for file in files:\n filename = os.path.join(root, file)\n if os.path.isfile(filename): # regular files only\n arcname = os.path.join(os.path.relpath(root, relroot), file)\n zip.write(filename, arcname)", "def compress_files(self):\n archive_file_path = tkinter.filedialog.asksaveasfilename(parent=self,\n defaultextension=\".zip\",\n filetypes=[(\"Zip File\", \"*.zip\")])\n treeview_items = self.files_treeview.get_children()\n if archive_file_path and treeview_items:\n with ZipFile(archive_file_path, \"w\", ZIP_DEFLATED) as archive:\n for row in treeview_items:\n file_path = self.files_treeview.item(row, \"values\")[0]\n file_name = os.path.basename(file_path)\n archive.write(file_path, arcname=file_name)", "def dir_2_cbz(dir_pth):\r\n shutil.make_archive(dir_pth, 'zip', dir_pth)\r\n shutil.rmtree(dir_pth)\r\n os.rename(dir_pth+'.zip', dir_pth+'.cbz')\r\n pass", "def git_archive_all(path, archive_file_name):\n import os\n import tarfile\n\n def ls_files(prefix=''):\n \"\"\"\n Does a `git ls-files` on every git repository (eg: submodules)\n found in the working git repository and returns a list with all the\n filenames returned by each `git ls-files`\n\n --full-name Forces paths to be output relative to the project top\n directory\n --exclude-standard adds standard git exclusions\n (.git/info/exclude, .gitignore, ...)\n \"\"\"\n cmd = 'git ls-files --full-name --exclude-standard'\n raw_files = local(cmd, capture=True)\n files = []\n\n for filename in raw_files.split('\\n'):\n if (os.path.isdir(filename) and\n os.path.exists(os.path.join(filename, '.git'))):\n os.chdir(filename)\n files.extend(ls_files(prefix=filename))\n else:\n files.append(os.path.join(prefix, filename))\n\n return files\n\n cwd = os.getcwd()\n os.chdir(path)\n files = ls_files()\n os.chdir(path)\n project_tar = tarfile.open(archive_file_name, 'w:gz')\n\n for filename in files:\n 
project_tar.add(filename)\n\n project_tar.close()\n os.chdir(cwd)\n\n print(green('Archive created at %s/%s' % (path, archive_file_name)))", "def zip_file(backup_objects):\n\n # Get name from date_time\n name_of_zip_file = (get_date(\"%d%m%Y_%H.%S\") + '.zip')\n # put files in zip archiv\n z = zipfile.ZipFile(name_of_zip_file, 'a', zipfile.ZIP_DEFLATED) # create archive\n for i in backup_objects:\n if os.path.isdir(i):\n for root, dirs, files in os.walk(i): # get list of files in folder\n for file in files:\n z.write(os.path.join(root, file)) # Создание относительных путей и запись файлов в архив\n else:\n z.write(i)\n z.close()\n if zipfile.is_zipfile(name_of_zip_file):\n notest_file(\"arckhiving is conplite! Created file\" + name_of_zip_file)\n return name_of_zip_file", "def create_tar(self):\n with tarfile.open(self.tgzfile, \"w:gz\") as tar_handle:\n for root, _, files in os.walk(self.dirname):\n for file in files:\n tar_handle.add(os.path.join(root, file))", "def zipfiles (downloadable, name):\n\n print \"compressing files. almost done.\"\n import zipfile\n for book in downloadable:\n if (os.path.exists(os.path.join(name, book[1]))):\n files = os.listdir(os.path.join(name, book[1]))\n cbz = zipfile.ZipFile(os.path.join(name, name + '-' + book[1] + '.cbz'), 'w')\n for file in files:\n cbz.write(os.path.join(name, book[1],file))\n cbz.close()", "def archive(self, virtual_path_to_tar_files, root, target_name):\n\n\n # TODO: RSYNC and do a diff. if there are no changes, we can just skip this part of the dockerfile to maximize layering\n for x in virtual_path_to_tar_files:\n assert os.path.isabs(x)\n\n rel_to_root = [os.path.relpath(x, '/') for x in virtual_path_to_tar_files]\n real_path = [os.path.join(root, x) for x in rel_to_root ]\n\n tup = zip(virtual_path_to_tar_files, real_path)\n\n tar = tarfile.open(os.path.join(self.dir, target_name), 'w')\n\n for vp, rp in tup:\n tar.add(rp, arcname=vp)\n\n tar.close()\n\n self.df.add_docker_cmd('ADD %s /' % target_name)", "async def mkarchivefs(self):\n # create an empty archivefs--just has a root.\n modfs : arcfs.ArchiveFS = arcfs.ArchiveFS()\n\n # add path of each file in the archive; since intermediate\n # directories are created automatically, there's no need to do\n # mkdir--although this method DOES mean that, if the archive contains\n # any empty directories, they will not be present in the fs. 
Not sure\n # yet if this is going to be an issue.\n async for arc_entry in self.archive_contents(dirs=False):\n # add root anchor to all entries\n modfs.touch(\"/\"+arc_entry)\n\n return modfs", "def _pack_ex(file, names, cwd, implementor=None):\n assert isdir(cwd)\n if exists(file):\n console.rm(file)\n if not implementor: implementor = GzipTarredFile\n \n with console.cd(cwd):\n relnames = [relpath(name, cwd) for name in names]\n implementor.pack(relnames, file)\n return file", "def zip_file(src_dir):\n zip_name = slugify(src_dir) + '.zip'\n z = zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED)\n for dirpath, dirnames, filenames in os.walk(src_dir):\n fpath = dirpath.replace(src_dir, '')\n fpath = fpath and fpath + os.sep or ''\n for filename in filenames:\n z.write(os.path.join(dirpath, filename), fpath + filename)\n z.close()", "def create_zip(file_dir):\n curr_path = os.getcwd()\n os.chdir(file_dir)\n zip_name = 'files_archive_{}.zip'.format(\n str(datetime.datetime.now())[5:16].replace(' ', \"_\"))\n files = os.listdir()\n print(\"Creating zipfile from files in...\", file_dir)\n with zipfile.ZipFile(zip_name, 'w') as zip:\n for f in files:\n zip.write(f)\n print(\"Added file: \", f)\n\n zip_path = file_dir + \"/\" + zip_name\n os.chdir(curr_path)\n # double check if path is absolute\n if os.path.isabs(zip_path):\n return zip_path\n else:\n return os.getcwd() + \"/\" + zip_name", "def zip_all(search_dir, extensions, output_path):\r\n with ZipFile(output_path, 'w') as zip_object:\r\n for folder, _, filenames in os.walk(search_dir):\r\n rel_path = os.path.relpath(folder, search_dir)\r\n for filename in filenames:\r\n if pathlib.Path(filename).suffix in extensions:\r\n zip_object.write(\r\n os.path.join(folder, filename),\r\n os.path.join(rel_path, filename)\r\n )", "def get_pseudo_archive(filepath_pseudos, request):\n archive_type, exception, message = request.param\n suffix = '.tar.gz'\n\n with tempfile.TemporaryDirectory() as dirpath:\n\n distutils.dir_util.copy_tree(filepath_pseudos, dirpath)\n\n if archive_type == ArchiveType.INVALID_ARCHIVE_FORMAT:\n suffix = '.txt'\n\n if archive_type == ArchiveType.INVALID_ARCHIVE_SUBFOLDER:\n os.makedirs(os.path.join(dirpath, 'subfolder'))\n\n if archive_type == ArchiveType.INVALID_UPF_FILE:\n open(os.path.join(dirpath, 'corrupt.upf'), 'a').close()\n\n with tempfile.NamedTemporaryFile(suffix=suffix) as filepath_archive:\n\n with tarfile.open(filepath_archive.name, 'w:gz') as tar:\n tar.add(dirpath, arcname='.')\n\n yield filepath_archive.name, exception, message", "def zip_files():\n zipper = ZipFile(\"Moritz_Bunse_ML_project.zip\", \"w\")\n files_to_write = [\"poi_id.py\",\n \"my_classifier.pkl\",\n \"my_dataset.pkl\",\n \"my_feature_list.pkl\",\n \"tester.py\",\n \"Look+At+Enron+data+set.html\",\n \"Look At Enron data set.ipynb\",\n \"data_dict.pkl\",\n \"final_project_dataset.pkl\",\n \"img/Flow chart feature selection.png\"\n ]\n for filename in files_to_write:\n zipper.write(filename)\n\n zipper.close()", "def _archive_logs(self, logdir, files):\n cwd = os.getcwd()\n archive_wd = os.path.dirname(logdir)\n archive_file = os.path.basename(logdir) + \".tgz\"\n\n # move files into logdir for archive\n for f in files:\n self.logger.info(\"moving '%s' to archive folder\" % f)\n shutil.move(f, logdir)\n\n # move to logdir parent folder\n self.logger.info(\"archiving profile logs into '%s'\" % archive_file)\n os.chdir(archive_wd)\n archive = tarfile.open(archive_file, \"w:gz\")\n archive.add(os.path.basename(logdir))\n archive.close()\n\n # go 
back to current working dir and remove logdir\n os.chdir(cwd)\n shutil.rmtree(logdir)", "def init_archive(init_path=os.getcwd()):\n base_path = os.path.abspath(os.path.join(init_path, 'archive'))\n if not os.path.exists(base_path):\n try:\n # Create top direcoty\n os.mkdir(base_path)\n ACTUAL_CONFIG = default_cfg.CONFIG_TEMPLATE.format(\n archive_path=base_path,\n filter_path=config.get_default('general.filterpath'),\n depth=config.get_default('crawler.depth'),\n interval_in_min=config.get_default('crawler.interval'),\n max_inst=config.get_default('crawler.maxInst'),\n user_agent=config.get_default('crawler.userAgent'),\n temp_dir=config.get_default('crawler.tempRoot'),\n robots=config.get_default('crawler.ignoreRobots'),\n url_path=config.get_default('crawler.urllistpath'),\n custom_wget=config.get_default('crawler.customWgetParms'),\n db_file=config.get_default('db.path'),\n sql_source=config.get_default('db.sqlSource'),\n server_port=config.get_default('server.port'),\n notify_in_min=config.get_default('server.notify.interval'),\n javadapter_port=config.get_default('javadapter.port'))\n # Create base structure\n for folder in ['content', 'tmp', 'filter', 'logs', 'pickle_cache', 'xml']:\n os.mkdir(os.path.join(base_path, folder))\n\n # Default url.txt\n files = [('url.txt', DEFAULT_URLS),\n (os.path.join('xml', 'file.xsd'), xsd.XSD_TEMPLATE),\n ('webarchive.conf.xml', ACTUAL_CONFIG)]\n\n for item in files:\n with open(os.path.join(base_path, item[0]), 'w') as item_handle:\n item_handle.write(item[1])\n\n print('Initialized new archive at', base_path)\n except OSError as err:\n print(err)\n else:\n print('Archive-Root', base_path, 'already exists.')", "def _zip_dir(path):\n file_path = '/tmp/iapydependencies-' + uuid.uuid1().hex + \".zip\"\n _make_archive(_get_dir_entries(path, True), file_path, path[0:path.rfind('/')])\n return file_path", "def compress_wrapper(args: Namespace) -> None:\n directory_path = os.path.join(DATASETS_DIR, args.directory)\n compress_datasets(directory_path, args.holdout)", "def pack():\n clean_local()\n build()\n copy_json()\n optimize()\n tarball()", "def create_files(paths, chroot):\n dirs, files = set(), set()\n for path in paths:\n path = osp.join(chroot, path)\n filename = osp.basename(path)\n # path is a directory path\n if filename == '':\n dirs.add(path)\n # path is a filename path\n else:\n dirs.add(osp.dirname(path))\n files.add(path)\n for dirpath in dirs:\n if not osp.isdir(dirpath):\n os.makedirs(dirpath)\n for filepath in files:\n open(filepath, 'w').close()", "def compress(src,dstfile):\n\tafile = zipfile.ZipFile(dstfile,\"w\",zipfile.ZIP_DEFLATED)\n\tfor root,dirs,files in os.walk(src):\n\t\tfor filename in files:\n\t\t\tabspath = osp.join(root,filename)\n\t\t\trelpath = osp.relpath(abspath,src)\n\t\t\tafile.write(abspath, relpath)\n\tafile.close();", "def test_archive() -> None:\n\n rule_runner = PythonRuleRunner(\n rules=[\n *target_type_rules(),\n *pex_from_targets.rules(),\n *package_pex_binary.rules(),\n *python_target_type_rules.rules(),\n QueryRule(BuiltPackage, [ArchiveFieldSet]),\n ],\n target_types=[ArchiveTarget, FilesGeneratorTarget, RelocatedFiles, PexBinary],\n )\n rule_runner.set_options([], env_inherit={\"PATH\", \"PYENV_ROOT\", \"HOME\"})\n\n rule_runner.write_files(\n {\n \"resources/d1.json\": \"{'k': 1}\",\n \"resources/d2.json\": \"{'k': 2}\",\n \"resources/BUILD\": dedent(\n \"\"\"\\\n files(name='original_files', sources=['*.json'])\n\n relocated_files(\n name='relocated_files',\n files_targets=[':original_files'],\n 
src=\"resources\",\n dest=\"data\",\n )\n \"\"\"\n ),\n \"project/app.py\": \"print('hello world!')\",\n \"project/BUILD\": \"pex_binary(entry_point='app.py')\",\n \"BUILD\": dedent(\n \"\"\"\\\n archive(\n name=\"archive1\",\n packages=[\"project\"],\n files=[\"resources:original_files\"],\n format=\"zip\",\n )\n\n archive(\n name=\"archive2\",\n packages=[\":archive1\"],\n files=[\"resources:relocated_files\"],\n format=\"tar\",\n output_path=\"output/archive2.tar\",\n )\n \"\"\"\n ),\n }\n )\n\n def get_archive(target_name: str) -> FileContent:\n tgt = rule_runner.get_target(Address(\"\", target_name=target_name))\n built_package = rule_runner.request(BuiltPackage, [ArchiveFieldSet.create(tgt)])\n digest_contents = rule_runner.request(DigestContents, [built_package.digest])\n assert len(digest_contents) == 1\n return digest_contents[0]\n\n def assert_archive1_is_valid(zip_bytes: bytes) -> None:\n io = BytesIO()\n io.write(zip_bytes)\n with zipfile.ZipFile(io) as zf:\n assert set(zf.namelist()) == {\n \"resources/d1.json\",\n \"resources/d2.json\",\n \"project/project.pex\",\n }\n with zf.open(\"resources/d1.json\", \"r\") as f:\n assert f.read() == b\"{'k': 1}\"\n with zf.open(\"resources/d2.json\", \"r\") as f:\n assert f.read() == b\"{'k': 2}\"\n\n archive1 = get_archive(\"archive1\")\n assert_archive1_is_valid(archive1.content)\n\n archive2 = get_archive(\"archive2\")\n assert archive2.path == \"output/archive2.tar\"\n io = BytesIO()\n io.write(archive2.content)\n io.seek(0)\n with tarfile.open(fileobj=io, mode=\"r:\") as tf:\n assert set(tf.getnames()) == {\"data/d1.json\", \"data/d2.json\", \"archive1.zip\"}\n\n def get_file(fp: str) -> bytes:\n reader = tf.extractfile(fp)\n assert reader is not None\n return reader.read()\n\n assert get_file(\"data/d1.json\") == b\"{'k': 1}\"\n assert get_file(\"data/d2.json\") == b\"{'k': 2}\"\n assert_archive1_is_valid(get_file(\"archive1.zip\"))", "def _archive(self,pools=['backup','archive'],verbose=True,dry_run=False):\n\t\tif type(pools) is not list:\n\t\t\tpools = [pools]\n\n\t\t_start = datetime.today()\n\t\tself.archive_bytes = 0\n\t\tfor pool in pools:\n\t\t\tqueue = self.generate_queue(pool)\n\t\t\tlog.info('%s: %s' % (pool.upper(),queue))\n\t\t\tif len(queue) == 0:\n\t\t\t\tmessage = \"%s Warning: '%s' pool: Nothing to %s.\" % (pool.title(),pool,pool)\n\t\t\t\tlog.info(message)\n\t\t\t\tif verbose:\n\t\t\t\t\tprint \" %s\" % message\n\t\t\t\tcontinue\n\n\t\t\tif verbose:\n\t\t\t\tprint \"\\n ++ %s POOL ++\" % (pool.upper())\n\t\t\t\tprint \" Creating %s of the following files:\" % (pool)\n\n\t\t\t# create a filelist and calculate the size\n\t\t\tfilelist = []\n\t\t\tfor ele in queue:\n\t\t\t\tfilelist.append(ele.abs_path)\n\t\t\t\tself.archive_bytes+=ele.st_size\n\t\t\t\tif verbose:\n\t\t\t\t\tprint \" %s\" % ele.abs_path\n\t\t\n\t\t\t# determine which strategy \n\t\t\t# we're using\n\t\t\tif pool == 'archive':\n\t\t\t\tstrat = 'A'\n\t\t\telif pool == 'backup':\n\t\t\t\tstrat = 'B'\n\t\t\tpath = ' '.join(filelist)\n\n####################### TESTING ###########################3\n#\t\t\tTina.backup(path=path,application='fake_application',strat=strat,dry_run=dry_run)\n####################### TESTING ###########################3\n\t\t\tTina.backup(path=path,application='flame_archive',strat=strat,dry_run=dry_run)\n\t\t_stop = datetime.today()\n\t\tself.archive_delta = (_stop-_start)\n\t\tself.archive_seconds = (_stop-_start).seconds\n\t\tself.archive_size = numberutil.humanize(self.archive_bytes,scale='bytes')\n\t\ttry:\n\t\t\trph = 
(self.archive_bytes/self.archive_seconds)*3600\n\t\texcept:\n\t\t\trph = 0\n\t\tself.archive_rate = numberutil.humanize(rph,scale='bytes')", "def pack(backend_name, patterns, size, minimum, yes):\n # Load the backend\n backend = get_backend(backend_name)\n # Find the paths\n click.echo(\"Scanning files... \", nl=False)\n paths, size_used = Scanner(config.root_path, patterns).unstored_paths(\n config.index, size * (1024 ** 3)\n )\n click.secho(\"Done\", fg=\"green\")\n if not paths:\n click.secho(\"No files found to add.\", fg=\"yellow\")\n return\n # Print what we found\n for path in paths:\n click.echo(\"> \" + click.style(path, fg=\"blue\"))\n click.echo(\"%s files, %s\" % (len(paths), human_size(size_used)))\n # Prompt to continue\n if not yes:\n if not click.confirm(\"Proceed with build?\"):\n return\n click.echo()\n # Select an unused archive ID\n archive_id = config.index.new_archive_id()\n # Pack the volume\n archive = Archive.from_files(archive_id, paths, config.root_path)\n click.echo(f\"Archive is {archive.id}, size {human_size(archive.size)}\")\n if archive.size < minimum * (1024 ** 3):\n click.echo(\"Archive too small, quitting\")\n sys.exit(1)\n backend.archive_store(config.root_path, archive)\n click.echo(\"Archive stored\")\n config.index.add_archive(archive, backend_name)\n click.echo(\"Archive indexed\")", "def create_temp_archive(case_dict):\n # ---------------------------------------------------------------------\n archive_temp_dir = \"{0}/archive_temp_dir\".format(case_dict[\"workdir\"])\n logger.debug(\"create_temp_archive %s\", archive_temp_dir)\n\n if not os.path.exists(archive_temp_dir):\n os.makedirs(archive_temp_dir)\n else:\n logger.info(\n \"ERROR archive_metadata archive_temp_dir already exists. exiting...\"\n )\n sys.exit(1)\n\n return archive_temp_dir", "def _make_tar_gz_file(output_filename, source_dir):\n with tarfile.open(output_filename, \"w:gz\") as tar:\n for f in os.listdir(source_dir):\n tar.add(os.path.join(source_dir, f), arcname=f)", "def archive_logs():\n logging.info('Archive start...')\n\n for log_dir in filter(dir_filter, os.listdir('logs')):\n path = 'logs/{}'.format(log_dir)\n archive_files = filter(lambda x: '.log.' 
in x, os.listdir(path))\n zip_file_name = '{}/{}.zip'.format(\n path,\n str(datetime.now())\n .replace(' ', '_').replace('.', '_').replace(':', '_'))\n zip_file = zipfile.ZipFile(\n zip_file_name, mode='w', compression=zipfile.ZIP_DEFLATED)\n for f in archive_files:\n log_file = '{}/{}'.format(path, f)\n zip_file.write(log_file)\n os.remove(log_file)\n\n logging.info('Archive end.')", "def pack(output_filename, sources):\n dirname = os.path.dirname(output_filename)\n if not os.path.exists(dirname):\n os.makedirs(dirname)\n ext = os.path.splitext(output_filename)[1][1:]\n if ext == 'zip':\n pack_zip(output_filename, sources)\n elif ext in ('gz', 'tgz', 'bz2', 'tar'):\n pack_tar(output_filename, sources, ext)\n else:\n raise AttributeError('Output_filename must be an archive (ex: .tar.gz, .zip)')", "def cleanup(self, archive, files):\n mtime = self.test(archive, files)\n backup_home = os.path.join(self.download_dir, '-')\n if not os.path.exists(backup_home):\n os.makedirs(backup_home)\n backup_dir = tempfile.mkdtemp('', datetime.utcnow().strftime(\"%Y-%m-%d_\"), backup_home)\n for file in files:\n os.makedirs(os.path.join(backup_dir, file))\n if os.path.getmtime(file) != mtime[file]:\n raise RuntimeError(\"Failed to cleanup archived data: %s has been modified.\" % file)\n os.rename(file, os.path.join(backup_dir, file))\n self.log.debug(\"Moved %s to %s\" % (file, os.path.join(backup_dir, file)))\n return", "def construct_zip(src_dir, base_dir, base_name=\"vimwiki_diff_backup\", excluded_ending=None,\n dbg=False):\n dt_str, time_str = calc_date_time()\n base_name = \"_\".join([base_name, dt_str, time_str])\n zipname = None\n\n if excluded_ending is None:\n excluded_final = set([\".swo\", \".swp\", \".pyc\", \".o\", \".gz\"])\n else:\n excluded_final = set(excluded_ending)\n\n\n try:\n zipname = \"\".join([src_dir, os.sep, base_name, \".zip\"])\n zip_count = 0\n with zp.ZipFile(zipname, mode='w') as zp_ptr:\n dw = dwa.diskwalk(os.sep.join([src_dir, base_dir]))\n for itm in dw.enumeratePaths():\n _, init_splt = os.path.splitext(itm)\n\n # print(filename + \" \" + str(init_splt) + \" \" + str(not_empty) + \" \" + cur_dir)\n if init_splt != '' and init_splt in excluded_final:\n base_str = \": \".join([\"Excluding\", itm])\n dbc.print_helper(base_str, dbg=dbg)\n else:\n itm_loc = str(itm).find(base_dir)\n base_str = \"--\".join([\"adding\", itm[itm_loc:]])\n zp_ptr.write(itm[itm_loc:])\n if not itm.endswith(base_dir):\n zip_count = zip_count + 1\n\n zp_ptr.close()\n\n if zip_count < 2:\n dbc.print_helper(\"Warning construct_zip -- likely empty zip\", dbg=dbg)\n except OSError as err:\n if zp_ptr is not None:\n zp_ptr.close()\n dbc.error_helper((\"OSError: Zip\" + err.strerror), stderr=None, post=zipname, dbg=dbg)\n except:\n if zp_ptr is not None:\n zp_ptr.close()\n dbc.error_helper((\"Error: Zip\" + str(sys.exc_info()[0])), stderr=None, post=None, dbg=dbg)\n\n return zipname", "def extract_to_disk(self):\n archive_name, extension = os.path.splitext(os.path.basename(self.file.name))\n if not os.path.isdir(os.path.join(os.getcwd(), archive_name)):\n os.mkdir(archive_name)\n os.chdir(archive_name)\n for filename, data in self.extract().items():\n f = open(filename, 'wb')\n f.write(data or b'')\n f.close()", "def gzip_assets():\n run('cd %(repo_path)s; python gzip_assets.py' % env)", "def compress_files(time_stamp, files_to_be_appended):\n process = subprocess.run([\"tar\", \"-czf\", f\"connect-log.{time_stamp}.tar.gz\", files_to_be_appended[0],\n files_to_be_appended[1],\n files_to_be_appended[2],\n 
files_to_be_appended[3], files_to_be_appended[4]], capture_output=True)", "def process_and_zip(import_path, export_path, params=None):\n # If the path does not exist, create it\n export_dirpath = os.path.dirname(export_path)\n print(export_dirpath)\n if not os.path.exists(export_dirpath):\n os.makedirs(export_dirpath)\n\n if params is not None:\n # print('get pipeline with params {}'.format(params))\n pipeline = get_default_pipeline_parameterized(\n import_path, export_path, params)\n else:\n pipeline = get_default_pipeline(import_path, export_path)\n\n pipeline.execute()\n\n return export_path", "def testArchiveExport(self):\n\n archive = alembic.Abc.OArchive(\"iterator.abc\")\n for i in range(3):\n child = alembic.Abc.OObject(archive.getTop(), \"childObj\" + str(i))\n for j in range(3):\n gchild = alembic.Abc.OObject(child, \"grandChild\" + str(j))\n for k in range(3):\n cp = alembic.Abc.OCompoundProperty(gchild.getProperties(), \"prop\" + str(k))\n sp = alembic.Abc.OStringProperty(cp, \"scalar\")\n sp.setValue(\"a\")\n sp.setValue(\"b\")\n sp.setValue(\"c\")\n ap = alembic.Abc.OStringArrayProperty(cp, \"array\")\n stra = imath.StringArray(3)\n stra[0] = 'a'\n stra[1] = 'b'\n stra[2] = 'c'\n ap.setValue(stra)\n strb = imath.StringArray(2)\n strb[0] = 'd'\n strb[1] = 'e'\n ap.setValue(strb)\n strc = imath.StringArray(1)\n strc[0] = 'f'\n ap.setValue(strc)", "def main (options, args):\n if len (args) == 0:\n args = [os.path.join (os.environ ['OWN'], '...')]\n p4zip (options, string.join (args))\n if options.verbose:\n print 'created', os.path.abspath (options.outfile)", "def archive_log(self, f_in, filename):\n if not os.path.isdir('archived'):\n os.makedirs('archived')\n f_out = gzip.open('archived/'+filename+'.gz', 'wb')\n f_out.writelines(f_in)\n f_out.close()\n f_in.close()", "def archive(self, filepath=None, compressiontype='xz', overwrite=False):\n if filepath is None:\n filepath = f'{self.path}.tar.{compressiontype}'\n if overwrite:\n filemode = 'w'\n else:\n filemode = 'x'\n supported_compressiontypes = ('xz', 'gz', 'bz2')\n if compressiontype not in supported_compressiontypes:\n raise ValueError(f'\"{compressiontype}\" is not a valid '\n f'compressiontype, use one of '\n f'{supported_compressiontypes}.')\n with tarfile.open(filepath, f\"{filemode}:{compressiontype}\") as tf:\n tf.add(self.path, arcname=self.path.name)\n return Path(filepath)", "def generate(number, output):\n output = os.path.abspath(output)\n\n if os.path.exists(output):\n if len(os.listdir(output)) > 0:\n raise click.FileError(\n output, hint='folder exists and is not empty.')\n else:\n os.makedirs(output)\n\n padding = len(str(number))\n template = '{i:0%dd}.zip' % padding\n for i in range(number):\n archive_name = template.format(i=i)\n click.echo(f'Generating archive: {archive_name}')\n filename = join(output, archive_name)\n archive = RandomArchive(filename)\n try:\n archive.build()\n except FileExistsError:\n click.echo(f'Warning! 
Archive already exists: {filename}')\n except Exception as e:\n click.echo(f'Unexpected error: {str(e)}')\n raise click.Abort(1)\n\n click.echo(f'Archives generated: {output}')", "def compress_experiment(self, exp_id):\n exp_folder = self.um.experiment_path(str(exp_id))[:-1]\n exp_folder = os.path.join(os.path.dirname(\n os.path.realpath(__file__)), exp_folder)\n archive_name = os.path.join(os.path.dirname(os.path.realpath(__file__)),\n \"backup\", str(exp_id)+\".zip\")\n\n print exp_folder, archive_name\n retval = self.um.compress_folder_zip(exp_folder, archive_name)\n if retval:\n return \"Success\"\n else:\n return \"Failure\"", "def zip_files(files, empty_files, output):\n with zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED) as ziph:\n for dest in empty_files:\n info = zipfile.ZipInfo(filename=dest, date_time=(1980, 1, 1, 0, 0, 0))\n info.external_attr = 0777 << 16L # give full access to included file\n ziph.writestr(info, '')\n for (src, dest) in files:\n info = zipfile.ZipInfo(filename=dest, date_time=(1980, 1, 1, 0, 0, 0))\n info.external_attr = 0777 << 16L # give full access to included file\n with open(src, 'r') as fh:\n ziph.writestr(info, fh.read())", "def dump_cworld_tar(\n cooler_paths,\n out_path,\n):\n\n dataset_name = os.path.splitext(os.path.split(out_path)[1])[0]\n\n with tempfile.TemporaryDirectory() as cworld_tmp_path:\n for cooler_path in cooler_paths:\n res = cooler.Cooler(cooler_path).info[\"bin-size\"]\n os.mkdir(os.path.join(cworld_tmp_path, \"C-\" + str(res)))\n for iced, iced_label in [(True, \"iced\"), (False, \"raw\")]:\n folder_path = os.path.join(cworld_tmp_path, \"C-\" + str(res), iced_label)\n os.mkdir(folder_path)\n\n mat_path = os.path.join(\n folder_path,\n \"{}__C-{}-{}.matrix.gz\".format(dataset_name, res, iced_label),\n )\n\n dump_cworld(\n in_cooler=cooler_path, out=mat_path, iced=iced, iced_unity=False\n )\n\n with tarfile.open(out_path, mode=\"w\") as archive:\n archive.add(cworld_tmp_path, arcname=dataset_name, recursive=True)", "def _expand_archive(self, name):\r\n target = path(self.temp_dir) / uuid.uuid4().hex\r\n os.mkdir(target)\r\n with tarfile.open(self.data_dir / name) as tar_file:\r\n tar_file.extractall(path=target)\r\n\r\n return target", "def default_archiver(random, population, archive, args):\r\n return archive", "def setup(zip_path, dest_path):\n\n #makes folder for zip files\n make_directory(zip_path)\n\n #makes folder for processed data\n make_directory(dest_path)", "def tar_cz_relative(*path):\n file_out = io_BytesIO()\n tar = tarfile_open(mode = \"w:gz\", fileobj = file_out)\n for p in path:\n tar.add(p, arcname='./')\n tar.close()\n return file_out.getvalue()", "def MakeZip(output_dir, archive_name, file_list, file_relative_dir,\n raise_error=True, remove_archive_directory=True, strip_files=None):\n if not strip_files:\n strip_files = []\n start_time = time.clock()\n # Collect files into the archive directory.\n archive_dir = os.path.join(output_dir, archive_name)\n print 'output_dir: %s, archive_name: %s' % (output_dir, archive_name)\n print 'archive_dir: %s, remove_archive_directory: %s, exists: %s' % (\n archive_dir, remove_archive_directory, os.path.exists(archive_dir))\n if remove_archive_directory and os.path.exists(archive_dir):\n # Move it even if it's not a directory as expected. 
This can happen with\n # FILES.cfg archive creation where we create an archive staging directory\n # that is the same name as the ultimate archive name.\n if not os.path.isdir(archive_dir):\n print 'Moving old \"%s\" file to create same name directory.' % archive_dir\n previous_archive_file = '%s.old' % archive_dir\n MoveFile(archive_dir, previous_archive_file)\n else:\n print 'Removing %s' % archive_dir\n RemoveDirectory(archive_dir)\n print 'Now, os.path.exists(%s): %s' % (\n archive_dir, os.path.exists(archive_dir))\n MaybeMakeDirectory(archive_dir)\n for needed_file in file_list:\n needed_file = needed_file.rstrip()\n # These paths are relative to the file_relative_dir. We need to copy\n # them over maintaining the relative directories, where applicable.\n src_path = os.path.join(file_relative_dir, needed_file)\n dirname, basename = os.path.split(needed_file)\n dest_dir = os.path.join(archive_dir, dirname)\n if dest_dir != archive_dir:\n MaybeMakeDirectory(dest_dir)\n try:\n if os.path.isdir(src_path):\n dst_path = os.path.join(archive_dir, needed_file)\n if WIN_LINK_FUNC:\n WIN_LINK_FUNC(src_path, dst_path)\n else:\n if os.path.islink(src_path):\n # Need to re-create symlink at dst_path to preserve build structure.\n # Otherwise shutil.copytree copies whole dir (crbug.com/693624#c35).\n os.symlink(os.readlink(src_path), dst_path)\n else:\n shutil.copytree(src_path, dst_path, symlinks=True)\n else:\n CopyFileToDir(src_path, dest_dir, basename, link_ok=True)\n if not IsWindows() and basename in strip_files:\n cmd = ['strip', os.path.join(dest_dir, basename)]\n RunCommand(cmd)\n except PathNotFound:\n if raise_error:\n raise\n end_time = time.clock()\n print 'Took %f seconds to create archive directory.' % (end_time - start_time)\n\n # Pack the zip file.\n output_file = '%s.zip' % archive_dir\n previous_file = '%s_old.zip' % archive_dir\n MoveFile(output_file, previous_file)\n\n # If we have 7z, use that as it's much faster. See http://crbug.com/418702.\n windows_zip_cmd = None\n if os.path.exists('C:\\\\Program Files\\\\7-Zip\\\\7z.exe'):\n windows_zip_cmd = ['C:\\\\Program Files\\\\7-Zip\\\\7z.exe', 'a', '-y', '-mx1']\n\n # On Windows we use the python zip module; on Linux and Mac, we use the zip\n # command as it will handle links and file bits (executable). 
Which is much\n # easier then trying to do that with ZipInfo options.\n start_time = time.clock()\n if IsWindows() and not windows_zip_cmd:\n print 'Creating %s' % output_file\n\n def _Addfiles(to_zip_file, dirname, files_to_add):\n for this_file in files_to_add:\n archive_name = this_file\n this_path = os.path.join(dirname, this_file)\n if os.path.isfile(this_path):\n # Store files named relative to the outer output_dir.\n archive_name = this_path.replace(output_dir + os.sep, '')\n if os.path.getsize(this_path) == 0:\n compress_method = zipfile.ZIP_STORED\n else:\n compress_method = zipfile.ZIP_DEFLATED\n to_zip_file.write(this_path, archive_name, compress_method)\n print 'Adding %s' % archive_name\n zip_file = zipfile.ZipFile(output_file, 'w', zipfile.ZIP_DEFLATED,\n allowZip64=True)\n try:\n os.path.walk(archive_dir, _Addfiles, zip_file)\n finally:\n zip_file.close()\n else:\n if IsMac() or IsLinux():\n zip_cmd = ['zip', '-yr1']\n else:\n zip_cmd = windows_zip_cmd\n saved_dir = os.getcwd()\n os.chdir(os.path.dirname(archive_dir))\n command = zip_cmd + [output_file, os.path.basename(archive_dir)]\n result = RunCommand(command)\n os.chdir(saved_dir)\n if result and raise_error:\n raise ExternalError('zip failed: %s => %s' %\n (str(command), result))\n end_time = time.clock()\n print 'Took %f seconds to create zip.' % (end_time - start_time)\n return (archive_dir, output_file)", "def tar_dir(output_path, source_dir):\n with tarfile.open(output_path, \"w:gz\") as tar:\n tar.add(source_dir, arcname=os.path.basename(source_dir))", "def archive_files(archive_fileprefix, flist, zip_type, reldir, prefix=\"\"):\n def archive_filter(tinfo):\n fdir, fbase = os.path.split(tinfo.name)\n archpath = os.path.join(prefix, os.path.relpath(tinfo.name, reldir))\n tinfo.name = archpath\n return tinfo\n write_type = 'w:'+zip_type\n\n if zip_type:\n archive_filename = '{}.tar.{}'.format(archive_fileprefix, zip_type)\n else:\n archive_filename = '{}.tar'.format(archive_fileprefix)\n\n with tarfile.open(archive_filename, write_type) as out_file:\n for f in flist:\n out_file.add(f, filter=archive_filter)", "def zipdir(path, ziph):\n zf = zipfile.ZipFile(ziph, \"w\")\n for root, dirs, files in os.walk(path):\n for file in files:\n zf.write(os.path.join(root, file))\n zf.close()\n return ziph", "def zip_folder(source_path, destination_path, password):\n\n source_path = os.path.abspath(source_path)\n\n if not destination_path:\n destination_path = source_path + \".zip\"\n\n if not destination_path.endswith(\".zip\"):\n destination_path += \".zip\"\n\n try:\n parent_folder = os.path.dirname(source_path)\n contents = os.walk(source_path)\n\n if password:\n z = pyzipper.AESZipFile(destination_path + \"\\\\\", 'w', compression=pyzipper.ZIP_LZMA, encryption=pyzipper.WZ_AES)\n z.setpassword(password)\n else:\n z = pyzipper.ZipFile(destination_path + \"\\\\\", 'w', compression=pyzipper.ZIP_LZMA)\n\n try:\n for root, folders, files in contents:\n # Include all subfolders, including empty ones.\n for folder_name in folders:\n absolute_path = os.path.join(root, folder_name)\n relative_path = absolute_path.replace(parent_folder + '\\\\', '')\n print(f\"Adding {absolute_path} to archive.\")\n z.write(absolute_path, relative_path)\n for file_name in files:\n absolute_path = os.path.join(root, file_name)\n relative_path = absolute_path.replace(parent_folder + '\\\\', '')\n print(f\"Adding {absolute_path} to archive.\")\n z.write(absolute_path, relative_path)\n print(f\"{destination_path} created successfully.\")\n\n except 
Exception:\n tb = traceback.format_exc()\n print(\"Something went wrong\")\n print(tb)\n\n finally:\n z.close()\n\n except Exception:\n tb = traceback.format_exc()\n print(\"Something went wrong\")\n print(tb)", "def archive_logs(self):\n source = GAConfig[\"log_file_location\"]\n destination = source + \"Archive/\"\n\n if not os.path.exists(source):\n os.makedirs(source)\n if not os.path.exists(destination):\n os.makedirs(destination)\n\n if len(os.listdir(source)) > 1:\n specific_folder = destination + str(\n len(os.listdir(destination))) + '/'\n os.makedirs(specific_folder)\n for f in os.listdir(source):\n if((\".log\" in f) or (\".zip\" in f)):\n shutil.move(source + f, specific_folder)", "def build_omex(storage, manifest_path='manifest.xml'):\n\n paths = extract_storage_manifest(storage, manifest_path)\n return _create_zip(filelist_generator(storage, paths))\n # return _process(storage.file, locations)", "def archive_experiment(experiment_dir: str,\n dst_dir: str,\n save_extensions: Union[str, Sequence[str]]='py',\n exclude_dirs: Union[str, Sequence[str]]='output',\n archive_format: str='zip',\n base_name: Optional[str]=None):\n # Format save_extensions for consistency\n # Make into a sequence\n if isinstance(save_extensions, str):\n save_extensions = [save_extensions]\n # Drop any .'s\n save_extensions = [s.strip('.') for s in save_extensions]\n # Format exclude_dirs for consistency\n if isinstance(exclude_dirs, str):\n exclude_dirs = [exclude_dirs]\n # Get default base name\n if base_name is None:\n experiment_path = os.path.abspath(experiment_dir)\n base_name = [p for p in experiment_path.split('/') if p][-1]\n\n # Full name of the archive name uses a time stamp\n timestamp = time.strftime('%b%d%Y_%H%M%S')\n archive_name = f'{base_name}_{timestamp}'\n\n # Use a temporary folder to create the archive\n tmp_folder = f'/tmp/{str(uuid.uuid4())}'\n if os.path.exists(tmp_folder):\n shutil.rmtree(tmp_folder)\n os.makedirs(tmp_folder)\n tmp_experiment = os.path.join(tmp_folder, archive_name)\n os.makedirs(tmp_experiment)\n\n # Recurse through the experiment directory and non-'output' subdirectories,\n # saving files to the temporary folder\n dirs_to_check = [experiment_dir]\n while len(dirs_to_check) > 0:\n # A directory to check (DTC), relative to the experiment_dir\n dtc = dirs_to_check.pop(0)\n # Full path to the DTC\n full_dtc = dtc if dtc == experiment_dir \\\n else os.path.join(experiment_dir, dtc)\n # List of all files and folders in the DTC\n dlist = os.listdir(full_dtc)\n # List of all files in the DTC\n files = [d for d in dlist\n if os.path.isfile(os.path.join(full_dtc, d))]\n # Check each file to see if it should be archived.\n for f in files:\n if f.split('.')[-1] in save_extensions:\n # Recreate the file structure inside experiment_dir, up to\n # the folder containing f\n tmp_save_dir = tmp_experiment if dtc == experiment_dir \\\n else os.path.join(tmp_experiment, dtc)\n os.makedirs(tmp_save_dir, exist_ok=True)\n # Save a copy of f\n shutil.copy2(os.path.join(full_dtc, f), tmp_save_dir)\n\n # Get non-excluded subdirectories\n subdirs = [d for d in dlist\n if os.path.isdir(os.path.join(full_dtc, d))\n and d not in exclude_dirs]\n # Track subdirectories as paths relative to the experiment dir\n if dtc != experiment_dir and len(subdirs) > 0:\n subdirs = [os.path.join(dtc, d) for d in subdirs]\n\n dirs_to_check += subdirs\n\n # At this point, all archivable files and folders are saved in tmp_folder.\n # Create an archive, coincidentally the same name as tmp_experiment's path\n 
tmp_archive = tmp_experiment[:]\n shutil.make_archive(tmp_archive, archive_format, tmp_folder, archive_name)\n # Get the full name of the archive. There should only be one file in\n # tmp_experiment\n tmp_archive_full = [f for f in os.listdir(tmp_folder)\n if os.path.isfile(os.path.join(tmp_folder, f))][0]\n # Copy the archive to its destination\n os.makedirs(dst_dir, exist_ok=True)\n shutil.move(os.path.join(tmp_folder, tmp_archive_full),\n os.path.join(dst_dir, tmp_archive_full),\n copy_function=shutil.copyfile)\n # Remove the temporary folder\n shutil.rmtree(tmp_folder)\n\n pass", "async def archive_dir(\n dir_to_compress: Path,\n destination: Path,\n *,\n compress: bool,\n store_relative_path: bool,\n exclude_patterns: set[str] | None = None,\n progress_bar: ProgressBarData | None = None,\n) -> None:\n if not progress_bar:\n progress_bar = ProgressBarData(steps=1)\n\n async with AsyncExitStack() as stack:\n\n folder_size_bytes = sum(\n file.stat().st_size\n for file in _iter_files_to_compress(dir_to_compress, exclude_patterns)\n )\n sub_progress = await stack.enter_async_context(\n progress_bar.sub_progress(folder_size_bytes)\n )\n thread_pool = stack.enter_context(\n non_blocking_thread_pool_executor(max_workers=1)\n )\n try:\n await asyncio.get_event_loop().run_in_executor(\n thread_pool,\n # ---------\n _add_to_archive,\n dir_to_compress,\n destination,\n compress,\n store_relative_path,\n functools.partial(_update_progress, sub_progress),\n asyncio.get_event_loop(),\n exclude_patterns,\n )\n except Exception as err:\n if destination.is_file():\n destination.unlink(missing_ok=True)\n\n raise ArchiveError(\n f\"Failed archiving {dir_to_compress} -> {destination} due to {type(err)}.\"\n f\"Details: {err}\"\n ) from err\n\n except BaseException:\n if destination.is_file():\n destination.unlink(missing_ok=True)\n raise", "def zipdata(filename: str) -> None:\n\n # Generate the path to the project TODO: check if this is entire project or server\n directoryName = ROOT.split(\"/\")[-3]\n codeDestination = \"/\".join(ROOT.split(\"/\")[:-2])\n\n # Create the output file\n zippedFile = zipfile.ZipFile(filename, \"w\", compression=zipfile.ZIP_DEFLATED)\n\n # Walk over the directory and save all files\n for abspath, dirnames, filenames in os.walk(codeDestination):\n local = abspath[abspath.index(directoryName):]\n [zippedFile.write(os.path.join(abspath, name), os.path.join(local, name)) for name in filenames]\n\n # Close the zip file\n zippedFile.close()", "def _BuildAndArchiveChromeSysroot(self):\n assert self.archive_path.startswith(self._build_root)\n extra_env = {}\n if self._run.config.useflags:\n extra_env['USE'] = ' '.join(self._run.config.useflags)\n in_chroot_path = path_util.ToChrootPath(self.archive_path)\n cmd = ['cros_generate_sysroot', '--out-dir', in_chroot_path, '--board',\n self._current_board, '--package', constants.CHROME_CP]\n cros_build_lib.RunCommand(cmd, cwd=self._build_root, enter_chroot=True,\n extra_env=extra_env)\n self._upload_queue.put([constants.CHROME_SYSROOT_TAR])", "def archive_folders(args, directory_list):\n # Archive each of the subfolders\n # If we haven't selected archive then we return immediately.\n if not args.archive:\n return\n\n # Otherwise a simple tar command should do\n tar_commands = []\n for directory in directory_list:\n tar_commands.append(\"tar -cf - %s --remove-files | pigz -9 -p 8 > %s.tar.gz\" %\n (directory, directory))\n\n # Multi-thread our tar command\n processes = (subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, 
stderr=subprocess.PIPE)\n for cmd in tar_commands)\n\n # We use the islice command to split our commands into five smaller lists.\n running_processes = list(itertools.islice(processes, args.num_threads))\n\n while running_processes:\n for i, process in enumerate(running_processes):\n if process.poll() is not None: # Means that the process is complete!\n stdout, stderr = process.communicate() # Get the output of the completed process\n if not stderr == \"\":\n print stderr\n running_processes[i] = next(processes, None)\n # Run the next number in the list.\n if running_processes[i] is None: # No more commands waiting to be processed.\n del running_processes[i] # Not a valid process.\n break", "def _copy_binaries_to_archive(archive: PyfmuArchive) -> PyfmuArchive:\n\n binaries_path = Resources.get().binaries_dir\n\n\n archive_binaries_path = archive.root / 'binaries'\n\n copytree(binaries_path,archive_binaries_path)\n\n # paths\n archive.binaries_dir = archive_binaries_path\n archive.wrapper_win64 = archive.binaries_dir / 'win64' / 'pyfmu.dll'\n archive.wrapper_linux64 = archive.binaries_dir / 'linux64' / 'pyfmu.so'\n\n return archive", "def extract(self, paths=None):\n\n all_files = self._get_package_files()\n if paths is None:\n extracted_files = all_files.values()\n else:\n extracted_files = [all_files[path] for path in paths]\n\n # filter already extracted file\n extracted_files = [pf for pf in extracted_files if not os.path.isfile(self.project.storage.fspath(pf.extract_path))]\n\n # group files by package\n files_by_package = defaultdict(list)\n for pf in extracted_files:\n files_by_package[pf.package].append(pf)\n\n package_files_path = f\"{self.path}/packages/files\"\n\n for package, files in files_by_package.items():\n with self.project.storage.stream(f\"{package_files_path}/{package}\") as reader:\n # sort files by offset to extract while streaming the bin file\n for pkgfile in sorted(files, key=lambda f: f.offset):\n logger.debug(f\"extracting {pkgfile.path}\")\n reader.skip_to(pkgfile.offset)\n fspath = self.project.storage.fspath(pkgfile.extract_path)\n with write_file_or_remove(fspath) as fout:\n if pkgfile.compressed:\n zobj = zlib.decompressobj(zlib.MAX_WBITS | 32)\n def writer(data):\n return fout.write(zobj.decompress(data))\n reader.copy(writer, pkgfile.size)\n fout.write(zobj.flush())\n else:\n reader.copy(fout.write, pkgfile.size)", "def TempDeploymentDir(paths):\n try:\n deployment_dir = tempfile.mkdtemp(prefix='deploy-')\n _PopulateDeploymentDir(deployment_dir, paths)\n yield deployment_dir\n finally:\n _CleanUp(deployment_dir)" ]
[ "0.6538356", "0.6479174", "0.64458525", "0.6059645", "0.6056643", "0.5963025", "0.596087", "0.5931735", "0.5828145", "0.5716116", "0.57026917", "0.56945693", "0.56655836", "0.5658968", "0.5611987", "0.5595439", "0.5587479", "0.5581423", "0.55702585", "0.5558787", "0.5543491", "0.5520394", "0.5510049", "0.5508748", "0.5494774", "0.54929924", "0.54857963", "0.54503274", "0.5448339", "0.54331535", "0.54262245", "0.5425151", "0.5393751", "0.5349091", "0.53349423", "0.533277", "0.5320148", "0.53020483", "0.5295554", "0.52510273", "0.5219557", "0.52002335", "0.5199463", "0.5172473", "0.51719856", "0.51522136", "0.51438934", "0.51409286", "0.5138405", "0.51352084", "0.51332635", "0.513101", "0.5127222", "0.5126812", "0.5122869", "0.5122041", "0.51199454", "0.5114386", "0.5104601", "0.51001453", "0.51000047", "0.5067204", "0.5065671", "0.50569826", "0.50495553", "0.50455046", "0.5037794", "0.50127643", "0.49937344", "0.49914742", "0.49860266", "0.49826086", "0.4980682", "0.49775103", "0.49753806", "0.49746847", "0.49701503", "0.4966248", "0.4965137", "0.49635375", "0.49584627", "0.49447945", "0.49447685", "0.49374053", "0.49202162", "0.49164766", "0.4906089", "0.4892502", "0.4891902", "0.48916286", "0.48879212", "0.48841104", "0.48807427", "0.48762065", "0.4875178", "0.48728633", "0.48709178", "0.4870464", "0.4868277", "0.4867281" ]
0.720668
0
Fetch |uri| and write the results to |output| (or return BytesIO).
def fetch_data(uri: str, output=None, verbose: bool = False, b64: bool = False): # This is the timeout used on each blocking operation, not the entire # life of the connection. So it's used for initial urlopen and for each # read attempt (which may be partial reads). 5 minutes should be fine. TIMEOUT = 5 * 60 if output is None: output = io.BytesIO() try: with urllib.request.urlopen(uri, timeout=TIMEOUT) as infp: mb = 0 length = infp.length while True: data = infp.read(1024 * 1024) if not data: break # Show a simple progress bar if the user is interactive. if verbose: mb += 1 print('~%i MiB downloaded' % (mb,), end='') if length: percent = mb * 1024 * 1024 * 100 / length print(' (%.2f%%)' % (percent,), end='') print('\r', end='', flush=True) if b64: data = base64.b64decode(data) output.write(data) except urllib.error.HTTPError as e: logging.error('%s: %s', uri, e) sys.exit(1) return output
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch(uri, output, b64=False):\n output = os.path.abspath(output)\n distdir, name = os.path.split(output)\n if os.path.exists(output):\n logging.info('Using existing download: %s', name)\n return\n\n logging.info('Downloading %s to %s', uri, output)\n os.makedirs(distdir, exist_ok=True)\n\n # Use kokoro build cache or Gentoo distdir if available.\n for envvar in ('KOKORO_GFILE_DIR', 'DISTDIR'):\n cache_dir = os.getenv(envvar)\n if cache_dir:\n cache_file = os.path.join(cache_dir, name)\n if os.path.exists(cache_file):\n logging.info(' Cache hit via %s', envvar)\n symlink(cache_file, output)\n return\n\n # Don't be verbose if running on CI systems.\n verbose = os.isatty(sys.stdout.fileno())\n\n # We use urllib rather than wget or curl to avoid external utils & libs.\n # This seems to be good enough for our needs.\n tmpfile = output + '.tmp'\n for _ in range(0, 5):\n try:\n with open(tmpfile, 'wb') as outfp:\n fetch_data(uri, outfp, verbose=verbose, b64=b64)\n break\n except ConnectionError as e:\n time.sleep(1)\n logging.warning('Download failed; retrying: %s', e)\n else:\n logging.error('Unabled to download; giving up')\n unlink(tmpfile)\n sys.exit(1)\n\n # Clear the progress bar.\n if verbose:\n print(' ' * 80, end='\\r')\n\n os.rename(tmpfile, output)", "def fetch_file(self, location, output=None):\n\n self.log.debug(\"Fetching '%s' file...\" % location)\n\n if not output:\n output = tempfile.mktemp(\"-dogen\")\n \n self.log.debug(\"File will be saved as '%s'...\" % output)\n\n with open(output, 'wb') as f:\n f.write(requests.get(location, verify=self.ssl_verify).content)\n\n return output", "def download(self, source_uri, output, **kwargs):\n raise NotImplementedError(\"Subclass needs to implement this method\")", "def fetch(self, url) -> bytes:\n buffer = self.download(url)\n zfs = ZipFileSystem(buffer, \"r\")\n return zfs.open(zfs.glob(\"*\")[0]).read()", "def download_img(self, url, output):\n try:\n print(\"Downloading from: %s\" % url)\n with open(output, 'wb') as f:\n f.write(urllib2.urlopen(url).read())\n print(\"Wrote to: %s\" % output)\n except IOError, e:\n print(e)", "def do_GET(self):\n self.send_head()\n f = io.BytesIO()\n f.write(self.output.encode())\n f.seek(0)\n shutil.copyfileobj(f, self.wfile)\n f.close()", "def url_retrieve(url, output_file):\n r = requests.get(url, allow_redirects=True)\n if r.status_code != 200:\n raise ConnectionError(f\"Could not download {url}\\nError code: {r.status_code}\")\n\n output_file.write_bytes(r.content)", "def get_output(self, download_dir, output=None, overwrite=False, callback=None, block=4096):\n if output:\n name = output.get('name', \"\")\n download = self._get_intermediate_output(output,\n download_dir,\n overwrite,\n callback=callback,\n block=block)\n\n elif self.output_url and self.output_filename:\n name = self.output_filename\n download = self._get_final_output(download_dir, overwrite,\n callback=callback, block=block)\n\n else:\n raise FileDownloadException(\n \"Job has no reference to an output file, \"\n \"please update to check if the output is ready\")\n\n if download.success:\n return os.path.join(download_dir, name)\n\n else:\n raise download.result", "def download():\n try:\n cli.run(\n [URL, '--output', TEMP_DIR],\n )\n except SystemExit:\n return None", "def fetch_save(url):\n\n name = url.split(\"/\")[-1]\n response = requests.get(url, stream=True)\n if response.status_code == 200:\n with open(f\"{DATA_PATH}/{name}\", \"wb\") as f:\n f.write(response.raw.read())\n else:\n logging.info(f\"Failed {url} 
download\")", "def __call__(self, url, output_file, pooch):\n kwargs = self.kwargs.copy()\n kwargs.setdefault(\"stream\", True)\n ispath = not hasattr(output_file, \"write\")\n if ispath:\n output_file = open(output_file, \"w+b\")\n try:\n response = requests.get(url, **kwargs)\n response.raise_for_status()\n content = response.iter_content(chunk_size=self.chunk_size)\n if self.progressbar:\n total = int(response.headers.get(\"content-length\", 0))\n # Need to use ascii characters on Windows because there isn't\n # always full unicode support\n # (see https://github.com/tqdm/tqdm/issues/454)\n use_ascii = bool(sys.platform == \"win32\")\n progress = tqdm(\n total=total,\n ncols=79,\n ascii=use_ascii,\n unit=\"B\",\n unit_scale=True,\n leave=True,\n )\n for chunk in content:\n if chunk:\n output_file.write(chunk)\n output_file.flush()\n if self.progressbar:\n # Use the chunk size here because chunk may be much\n # larger if the data are decompressed by requests after\n # reading (happens with text files).\n progress.update(self.chunk_size)\n # Make sure the progress bar gets filled even if the actual number\n # is chunks is smaller than expected. This happens when streaming\n # text files that are compressed by the server when sending (gzip).\n # Binary files don't experience this.\n if self.progressbar:\n progress.reset()\n progress.update(total)\n progress.close()\n finally:\n if ispath:\n output_file.close()", "def fetch(self, url):\r\n fname = os.path.join(self._cachedir, self._formatter(url))\r\n if not os.path.exists(fname):\r\n time.sleep(self._sleep)\r\n html = urllib.urlopen(url).read()\r\n with codecs.open(fname, 'w', 'utf-8') as f:\r\n soup = BeautifulSoup(html)\r\n f.write(unicode(soup))\r\n return fname", "def _download(self, url, output_dir, dataset, chunk_size=1024):\n r = self.session.get(url, stream=True, allow_redirects=True)\n if not r.ok:\n r = self.session.get(r.url, stream=True, allow_redirects=True, auth=(self._username, self._password))\n file_size = int(r.headers['Content-Length'])\n\n with tqdm(total=file_size, unit_scale=True, unit='B', unit_divisor=1024) as pbar:\n ### GET FILE NAME ###\n if \"Content-Disposition\" in r.headers.keys():\n local_filename = re.findall(\"filename=(.+)\", r.headers[\"Content-Disposition\"])[0]\n else:\n local_filename = url.split(\"/\")[-3]\n local_filename = self.api.lookup(dataset, local_filename)[0]\n local_filename = local_filename + util.convert_to_extension(r.headers['content-type'])\n print(\"*** FNAME\", local_filename)\n\n local_filename = os.path.join(output_dir, local_filename)\n\n ### WRITE FILE ###\n with open(local_filename, 'wb') as f:\n for chunk in r.iter_content(chunk_size=chunk_size):\n if chunk:\n f.write(chunk)\n pbar.update(chunk_size)\n return local_filename", "def get_url(self,url,output=None):\n parsed_url = urlparse(url)\n hostname = parsed_url[1]\n \n #Make the command\n cmd = \"wget %s -O -\" % url\n (ssh_input,ssh_output,ssh_err) = self.execute_command(cmd)\n \n if(output==None):\n p = urlparse(url)[2]\n filename = os.path.split(p)[1] \n output = filename\n # See if it's ok.\n err = sio.StringIO()\n dat = ssh_err.read(BLOCKSIZE)\n while(dat):\n err.write(dat)\n dat = ssh_err.read(BLOCKSIZE)\n \n err_out = err.getvalue()\n print >> sys.stderr, err_out\n err1 = re.compile(r\"failed\") # Failed to resolve hostname\n err2 = re.compile(r\"404 Not Found\") # File not found\n \n if(err1.search(err_out)):\n raise SSHError(\"ERROR: Failed to retrieve file! 
Hostname unknown\")\n elif(err2.search(err_out)):\n raise SSHError(\"ERROR: Failed to retrieve file. File not found\")\n # If it didn't fail, read the file.\n \n if(output==\"-\"):\n f = sys.stdout\n else:\n f = open(output,\"w+b\")\n dat = ssh_output.read(BLOCKSIZE)\n while(dat):\n f.write(dat)\n dat = ssh_output.read(BLOCKSIZE)", "def _download_file(url: str, output_path: str):\n\n def write_to_file(response: requests.Response, output_path: str) -> int:\n \"\"\"Write the response content to the given file.\n\n :param response: Response to be written to the output file.\n :param output_path: Path to the output file.\n :returns: Number of bytes read from the response content.\n \"\"\"\n read_bytes = 0\n with open(output_path, \"wb\") as output_file:\n # Use the same chunk size of `urlretrieve`\n for chunk in response.iter_content(chunk_size=1024 * 8):\n read_bytes += len(chunk)\n output_file.write(chunk)\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n break\n return read_bytes\n\n try:\n with requests.get(\n url, stream=True, timeout=FETCHER_REQUEST_TIMEOUT\n ) as response:\n response.raise_for_status()\n\n content_length = int(response.headers.get(\"Content-Length\", 0))\n if content_length > FETCHER_MAXIMUM_FILE_SIZE:\n raise REANAFetcherError(\"Maximum file size exceeded\")\n\n read_bytes = write_to_file(response, output_path)\n\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n os.remove(output_path)\n raise REANAFetcherError(\"Maximum file size exceeded\")\n except HTTPError as e:\n error = f\"Cannot fetch the workflow specification: {e.response.reason} ({response.status_code})\"\n if response.status_code == 404:\n error = \"Cannot find the given workflow specification\"\n raise REANAFetcherError(error)\n except Timeout:\n raise REANAFetcherError(\n \"Timed-out while fetching the workflow specification\"\n )\n except RequestException:\n raise REANAFetcherError(\n \"Something went wrong while fetching the workflow specification\"\n )", "def __fetch_output_task(\n self, task, download_dir, overwrite, changed_only, **extra_args):\n return task.fetch_output(\n download_dir, overwrite, changed_only, **extra_args)", "def get_output(self, output, download_dir, overwrite=False, callback=None, block=4096):\n download = self._get_file(output, download_dir, overwrite, callback=callback, block=block)\n if download.success:\n return os.path.join(download_dir, output.get('name', ''))\n else:\n raise download.result", "def download(url, output, encoding, insrs, format_name):\n\n folder = download_data(url, encoding)\n joined_file = join_files(folder)\n transform(joined_file, output, insrs, format_name)\n\n shutil.rmtree(folder)\n os.remove(joined_file)\n\n if not os.path.isfile(output):\n raise Error(\"Output file not created, the whole process failed\")\n else:\n logging.info(\"File %s successfuly created\" % output)", "def save(self, url, output):\n\n shutil.copy2(self.get(url), output)", "def url_fetch(self, url):\n user_agent = random.choice(self.conf.user_agents)\n if self.isCompress == True:\n headers = {\n 'Uesr-Agent': user_agent,\n \"Accept-Encoding\": \"gzip,deflate\",\n \"Accept-Charset\" : \"UTF-8,*\"\n }\n else:\n headers = {\n 'Uesr-Agent': user_agent,\n \"Accept-Charset\" : \"UTF-8,*\"\n }\n raw_data = ''\n try:\n conn = httplib.HTTPConnection(self.proxy, timeout=3.0)\n conn.request('GET', url, None, headers)\n response = conn.getresponse()\n raw_data = response.read()\n except Exception as err:\n self.logger.error('connect error[%s]' % err)\n return '999', 'Request failed', ''\n 
finally:\n conn.close()\n \n content = ''\n if self.isCompress == True:\n if response.status == 200:\n try:\n stream = StringIO.StringIO(raw_data)\n decompressor = gzip.GzipFile(fileobj=stream)\n content = decompressor.read()\n except:\n self.logger.error('status[%s] len_raw_data[%d]' % (response.status, len(raw_data)))\n return '998', 'content err', ''\n else:\n if response.status == 200:\n content = raw_data \n\n return response.status, response.reason, content", "def fetch(self, url: furl) -> str:\n try:\n contents = self._download(url)\n except requests.ConnectionError as err:\n logger.exception(f\"Request failed with {err}\")\n click.secho(\n f\"The URL {url} could not be downloaded. Either your network is unreachable or the URL is broken.\"\n f\" Check the URL, fix your connection, or use \"\n f\" {OptionEnum.OFFLINE.as_flake8_flag()} / {OptionEnum.OFFLINE.as_envvar()}=1\",\n fg=\"red\",\n err=True,\n )\n return \"\"\n return contents", "def download(self, outputfile: str, outputformat: str):\n pass", "def fetch(self) -> None:\n workflow_spec_path = os.path.join(self._output_dir, self._spec)\n self._download_file(self._parsed_url.original_url, workflow_spec_path)", "def read_and_save(res):\n fname = os.path.split(urlsplit(res.url).path)[-1]\n fpath = os.path.join(cfg.OUTPUT_DIR, fname)\n with open(fpath, 'wb') as f:\n for chunk in res.iter_content(cfg.CHUNK):\n f.write(chunk)", "def download_from_url(url, output_path):\n\n print('Pulling data from {} to {}'.format(url, output_path))\n wget.download(url, output_path)\n print('done')", "def fetch(self, url, body=None, headers=None):\r\n if body:\r\n # method = 'POST'\r\n # undo the URL encoding of the POST arguments\r\n data = parse_qs(body)\r\n response = self.client.post(url, data)\r\n else:\r\n # method = 'GET'\r\n data = {}\r\n if headers and 'Accept' in headers:\r\n data['CONTENT_TYPE'] = headers['Accept']\r\n response = self.client.get(url, data)\r\n\r\n # Translate the test client response to the fetcher's HTTP response abstraction\r\n content = response.content\r\n final_url = url\r\n response_headers = {}\r\n if 'Content-Type' in response:\r\n response_headers['content-type'] = response['Content-Type']\r\n if 'X-XRDS-Location' in response:\r\n response_headers['x-xrds-location'] = response['X-XRDS-Location']\r\n status = response.status_code\r\n\r\n return HTTPResponse(\r\n body=content,\r\n final_url=final_url,\r\n headers=response_headers,\r\n status=status,\r\n )", "def _get_file(self, output, download_dir, overwrite, callback=None, block=4096):\n if output.get('type') == 'TaskPreview':\n size = None\n\n else:\n output_props = self._api.props_output_file(url=output.get('link'))\n\n if output_props.success:\n size = output_props.result\n\n else:\n raise output_props.result\n\n return self._api.get_output_file(download_dir,\n size,\n overwrite,\n fname=output.get('name'),\n url=output.get('link'),\n callback=callback,\n block=block)", "def fetch_and_save(cls, url, path):\n content = cls.fetch_with_retry(url)\n if not content:\n return False\n # print(\"Saving {}\".format(os.path.basename(path)))\n with open(path, \"wb\") as file:\n file.write(content)\n return content", "def download_http(self, url):\n\n # Set things up.\n # ==============\n\n out = None\n headers = {}\n if (url.username is not None) and (url.password is not None):\n tmp = base64.b64encode(':'.join([url.username, url.password]))\n headers['Authorization'] = \"Basic %s\" % tmp\n\n\n # Toe the waters.\n # ===============\n # We start with an HTTP HEAD request 
to check the status.\n\n conn = httplib.HTTPConnection(url.netloc)\n conn.request(\"HEAD\", url.path, '', headers)\n r = conn.getresponse()\n conn.close()\n if self.verbose:\n print >> sys.stderr, url, r.status, ''\n\n\n # Bail.\n # =====\n # Short-cut when we just care whether it's a package.\n\n if url.path.endswith('/'):\n out = r.status == 200\n\n\n elif r.status == 200:\n\n # Wade in.\n # ========\n # If the status is positive we check to see if we've already\n # downloaded the latest copy.\n\n etag = r.getheader('etag', '')\n lm = r.getheader('last-modified', '')\n key = sha.new(str(url) + etag + lm).hexdigest()\n\n if not self.cachedir:\n raise ValueError(\"netimp.importer.cachedir not set\")\n if not os.path.isdir(self.cachedir):\n raise IOError( \"netimp.importer.cachedir not found \"\n + \"(%s)\" % self.cachedir\n )\n\n path = join(self.cachedir, key)\n if os.path.isfile(path):\n out = open(path, 'rb')\n else:\n\n # Dive in!\n # ========\n # We don't have this module locally yet: download it for real.\n\n conn = httplib.HTTPConnection(url.netloc)\n conn.request(\"GET\", url.path, '', headers)\n r = conn.getresponse()\n if r.status == 200: # just in case!\n fp = open(path, 'w+b')\n fp.write(r.read())\n fp.flush()\n fp.close()\n out = open(path, 'rb')\n conn.close()\n\n return out", "def fetch(url, filename):\n with open(filename, 'wb') as handle:\n response = requests.get(url, stream=True)\n\n if not response.ok:\n logger.error('Download failed')\n return False\n\n for block in response.iter_content(1024):\n if not block:\n break\n\n handle.write(block)\n\n logger.info(' -> Rewriting URIs')\n q = re.compile(r'http://data.ub.uio.no/realfagstermer/([0-9]+)')\n with open(filename, 'r') as infile:\n with open(filename + '.tmp', 'w') as outfile:\n outfile.write(q.sub('http://data.ub.uio.no/realfagstermer/c\\\\1', infile.read()))\n os.unlink(filename)\n os.rename(filename + '.tmp', filename)\n\n return True", "def download_file(url, outputfile):\r\n try:\r\n req = requests.get(url, stream=True, timeout=120)\r\n try:\r\n with open(outputfile, 'wb') as file_download:\r\n for chunk in req.iter_content(chunk_size=1024): \r\n if chunk: \r\n file_download.write(chunk)\r\n except IOError as error:\r\n print error\r\n except requests.exceptions.RequestException as err:\r\n print err\r\n except socket.error as err:\r\n print err\r\n return None", "def http(self, url):\n \n res = 'fail', url\n try: \n res = urllib2.urlopen(url).read(self.mt.buffer) \n except: pass\n finally:\n if self.verbose > 2: print res\n self.mt.out[url] = res\n self.queue.task_done()", "def fetch_output(self, path, name, working_directory, action_type, output_type):\n if output_type in ['output_workdir', 'output_metadata']:\n self._populate_output_path(name, path, action_type, output_type)\n elif output_type == 'output':\n self._fetch_output(path=path, name=name, action_type=action_type)\n else:\n raise Exception(\"Unknown output_type %s\" % output_type)", "def fetch(\n self, output_folder: Path, cache: Dict[str, str], fetch_opts: List[Dict[str, Any]]\n ) -> List[str]:\n return [\n download_snapshot(source_config[\"url\"], output_folder, **source_config.get(\"opts\", {}))\n for source_config in fetch_opts\n ]", "def fetch(self, url, timeout=None):\n\n # ISO-8859-1 is the default encoding for text files per the specs for\n # HTTP 1.0 (RFC 1945 sec 3.6.1) and HTTP 1.1 (RFC 2616 sec 3.7.1).\n # ref: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1\n encoding = \"iso-8859-1\"\n content = \"\"\n expires_header = 
None\n content_type_header = None\n self._response_code = 0\n self._source_url = url\n\n if self.user_agent:\n req = urllib_request.Request(url, None, \n { 'User-Agent' : self.user_agent })\n else:\n req = urllib_request.Request(url)\n\n try:\n if timeout:\n f = urllib_request.urlopen(req, timeout=timeout)\n else:\n f = urllib_request.urlopen(req)\n\n content = f.read(MAX_FILESIZE)\n if VERBOSE:\n print 'Response Headers:'\n print f.info()\n\n # As of Python 2.5, f.info() looks like it returns the HTTPMessage\n # object created during the connection. \n expires_header = f.info().get(\"expires\")\n content_type_header = f.info().get(\"Content-Type\")\n # As of Python 2.4, this file-like object reports the response \n # code, too. \n if hasattr(f, \"code\"):\n self._response_code = f.code\n else:\n self._response_code = 200\n f.close()\n except urllib_error.URLError:\n # This is a slightly convoluted way to get the error instance,\n # but it works under Python 2 & 3. \n error_instance = sys.exc_info()\n if len(error_instance) > 1:\n error_instance = error_instance[1]\n if hasattr(error_instance, \"code\"):\n self._response_code = error_instance.code\n if VERBOSE:\n print 'Code:%d\\nConnect to %s timeout.'%(self._response_code, url)\n \n # MK1996 section 3.4 says, \"...robots should take note of Expires \n # header set by the origin server. If no cache-control directives \n # are present robots should default to an expiry of 7 days\".\n \n # This code is lazy and looks at the Expires header but not \n # Cache-Control directives.\n self.expiration_date = None\n if self._response_code >= 200 and self._response_code < 300:\n # All's well.\n if expires_header:\n self.expiration_date = email_utils.parsedate_tz(expires_header)\n \n if self.expiration_date:\n # About time zones -- the call to parsedate_tz() returns a\n # 10-tuple with the time zone offset in the 10th element. \n # There are 3 valid formats for HTTP dates, and one of \n # them doesn't contain time zone information. (UTC is \n # implied since all HTTP header dates are UTC.) When given\n # a date that lacks time zone information, parsedate_tz() \n # returns None in the 10th element. mktime_tz() interprets\n # None in the 10th (time zone) element to mean that the \n # date is *local* time, not UTC. \n # Therefore, if the HTTP timestamp lacks time zone info \n # and I run that timestamp through parsedate_tz() and pass\n # it directly to mktime_tz(), I'll get back a local \n # timestamp which isn't what I want. To fix this, I simply\n # convert a time zone of None to zero. It's much more \n # difficult to explain than to fix. 
=)\n # ref: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.3.1\n if self.expiration_date[9] == None: \n self.expiration_date = self.expiration_date[:9] + (0,)\n \n self.expiration_date = email_utils.mktime_tz(self.expiration_date)\n if self.use_local_time: \n # I have to do a little more converting to get this \n # UTC timestamp into localtime.\n self.expiration_date = time.mktime(time.gmtime(self.expiration_date)) \n #else:\n # The expires header was garbage.\n\n if not self.expiration_date: self.expiration_date = self._now() + SEVEN_DAYS\n\n if (self._response_code >= 200) and (self._response_code < 300):\n # All's well.\n media_type, encoding = _parse_content_type_header(content_type_header)\n # RFC 2616 sec 3.7.1 -- \n # When no explicit charset parameter is provided by the sender, \n # media subtypes of the \"text\" type are defined to have a default\n # charset value of \"ISO-8859-1\" when received via HTTP.\n # http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1\n if not encoding: \n encoding = \"iso-8859-1\"\n elif self._response_code in (401, 403):\n # 401 or 403 ==> Go away or I will taunt you a second time! \n # (according to MK1996)\n content = \"User-agent: *\\nDisallow: /\\n\"\n elif self._response_code == 404:\n # No robots.txt ==> everyone's welcome\n content = \"\"\n else: \n # Uh-oh. I punt this up to the caller. \n _raise_error(urllib_error.URLError, self._response_code)\n\n if ((PY_MAJOR_VERSION == 2) and isinstance(content, str)) or \\\n ((PY_MAJOR_VERSION > 2) and (not isinstance(content, str))):\n # This ain't Unicode yet! It needs to be.\n \n # Unicode decoding errors are another point of failure that I punt \n # up to the caller.\n try:\n content = content.decode(encoding)\n except UnicodeError:\n _raise_error(UnicodeError,\n \"Robots.txt contents are not in the encoding expected (%s).\" % encoding)\n except (LookupError, ValueError):\n # LookupError ==> Python doesn't have a decoder for that encoding.\n # One can also get a ValueError here if the encoding starts with \n # a dot (ASCII 0x2e). See Python bug 1446043 for details. 
This \n # bug was supposedly fixed in Python 2.5.\n _raise_error(UnicodeError,\n \"I don't understand the encoding \\\"%s\\\".\" % encoding)\n if VERBOSE:\n print 'Response:'\n print content\n\n\n if not content:\n # 响应为空,清空自身数据集,跳过解析步骤\n self._sitemaps = [ ]\n self.__rulesets = [ ]\n return False\n else:\n # Now that I've fetched the content and turned it into Unicode, I \n # can parse it.\n self.parse(content)\n return True", "def download_file(url, output_filename):\n print(\"Downloading\", url, \"to\", output_filename)\n r = requests.get(url)\n r.raise_for_status()\n with open(output_filename, 'wb') as f:\n f.write(r.content)", "def run(url, output, loglevel, logfile):\n # Logging setup\n loader.logging.setup(level=loglevel, logfile=logfile)\n\n # Download page and get DOM\n dom = BeautifulSoup(loader.network.download(url), DEFAULT_PARSER)\n\n # Split URL to fragments\n scheme, net_loc, *_ = list(urlparse(url))\n\n # Get resource objects from DOM\n resources = loader.handler.get_resources(dom)\n\n if resources:\n # Build resource dirname\n local_dirname = loader.path.for_resource_dir(url)\n # Create dir for resource inside 'output'\n loader.storage.mkdir(os.path.join(output, local_dirname))\n\n web_resource_paths = []\n for resource in resources:\n # Get resource path from resource object\n web_resource_path = loader.handler.get_path(resource)\n # Build resource local path\n local_resource_path = os.path.join(\n local_dirname,\n loader.path.for_resource(web_resource_path),\n )\n # Set local path in resource object\n loader.handler.update_resource(\n resource=resource,\n new_link=local_resource_path,\n )\n web_resource_paths.append(web_resource_path)\n # Save modified DOM\n loader.storage.save(\n f_content=dom.encode(),\n output=output,\n filename=loader.path.for_page(url),\n )\n # Download resources\n for resource_path in tqdm(web_resource_paths, desc=BAR_DESC):\n resource_url = urlunsplit(\n [scheme, net_loc, resource_path, None, None],\n )\n try:\n loader.storage.save(\n f_content=loader.network.download(resource_url),\n output=os.path.join(output, local_dirname),\n filename=loader.path.for_resource(resource_path),\n )\n except loader.network.NetworkError as error:\n logging.debug(error, exc_info=sys.exc_info())", "def _fetch(self, output_type='xml'):\n\n # authenticate\n self._auth()\n\n # get the table\n response = self._do('GET', self.URLS['adp'])\n\n # load results\n self._results = self._parse_doc(response.text)", "def download(\n self,\n uri: str,\n path: Optional[str] = None,\n filename: Optional[str] = None,\n headers: Optional[Union[SequenceT[str], MappingT[str, str]]] = None,\n max_connections: Optional[int] = None,\n split: Optional[int] = None,\n continue_: Optional[bool] = None,\n retry_wait: Optional[int] = None,\n max_tries: Optional[int] = None,\n connect_timeout: Optional[int] = None,\n timeout: Optional[int] = None,\n no_netrc: Optional[bool] = None,\n ) -> str:\n\n if headers is not None:\n assert_type(\"headers\", headers, (Sequence, Mapping))\n\n if isinstance(headers, Mapping):\n headers = [f\"{k}: {v}\" for k, v in headers.items()]\n\n options = self.default_options.copy()\n update(\n options,\n {\n \"dir\": path,\n \"out\": filename,\n \"header\": headers,\n \"max-connection-per-server\": max_connections,\n \"split\": split,\n \"continue\": aria_bool(continue_),\n \"retry-wait\": retry_wait,\n \"max-tries\": max_tries,\n \"connect-timeout\": connect_timeout,\n \"timeout\": timeout,\n \"no-netrc\": aria_bool(no_netrc),\n },\n )\n\n gid = self.query(\"add_uri\", 
[uri], options)\n self.gids.add(gid)\n return gid", "def download(self):\n\n # os.open *should* give a thread-safe way to exlusivly open files\n filepath = self.film\n try:\n # os.O_BINARY is only avilable and needed on windows\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY | os.O_BINARY\n except:\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY\n try:\n fd = os.open(filepath, flags)\n except:\n return\n\n try:\n response = self.session.get(self.filmurl, stream=True)\n if response.status_code == 200:\n for chunk in response.iter_content(1024):\n os.write(fd, chunk)\n except:\n # Remove partial img file if request or stream fails\n os.close(fd)\n os.remove(filepath)", "def __call__(self, url, output_file, pooch):\n\n parsed_url = parse_url(url)\n ftp = ftplib.FTP(timeout=self.timeout)\n ftp.connect(host=parsed_url[\"netloc\"], port=self.port)\n ispath = not hasattr(output_file, \"write\")\n if ispath:\n output_file = open(output_file, \"w+b\")\n try:\n ftp.login(user=self.username, passwd=self.password, acct=self.account)\n command = f\"RETR {parsed_url['path']}\"\n if self.progressbar:\n # Make sure the file is set to binary mode, otherwise we can't\n # get the file size. See: https://stackoverflow.com/a/22093848\n ftp.voidcmd(\"TYPE I\")\n size = int(ftp.size(parsed_url[\"path\"]))\n use_ascii = bool(sys.platform == \"win32\")\n progress = tqdm(\n total=size,\n ncols=79,\n ascii=use_ascii,\n unit=\"B\",\n unit_scale=True,\n leave=True,\n )\n with progress:\n\n def callback(data):\n \"Update the progress bar and write to output\"\n progress.update(len(data))\n output_file.write(data)\n\n ftp.retrbinary(command, callback, blocksize=self.chunk_size)\n else:\n ftp.retrbinary(command, output_file.write, blocksize=self.chunk_size)\n finally:\n ftp.quit()\n if ispath:\n output_file.close()", "def fetch(self, url, headers=DEFAULTHEADERS):\n logger = self.loggers['http']\n request = urllib2.Request(url, headers=headers)\n try:\n response = urllib2.urlopen(request)\n except urllib2.HTTPError:\n logger.error(\"failed to retrieve the resource at %s\" % url)\n raise\n urlgot = response.geturl()\n rawcontent = response.read()\n if urlgot != url:\n logger.info(\"successfully retrieved resource from %s, redirected from %s\" % (urlgot, url))\n self.http['redirect'] = True\n else:\n logger.info(\"successfully retrieved resource from %s\" % url)\n self.http['redirect'] = False\n rheaders = response.info()\n \n # store useful info on the object for later access\n self.http['request'] = {}\n self.http['request']['headers'] = headers\n self.http['urlsought'] = url\n self.http['urlgot'] = urlgot\n self.http['response'] = response\n self.http['response_headers'] = {}\n for k in sorted(rheaders.keys()): \n logger.debug(\"response header %s: '%s'\" % (k, rheaders[k]))\n self.http['response_headers'][k.strip().lower()] = rheaders[k].strip() \n self.documenturl = urlgot\n self.rawcontent = rawcontent", "def fetch(file_url):\n\n tmp_file_handle = NamedTemporaryFile(delete=True)\n headers = {'User-Agent': 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36'}\n\n # download file and save to temp object\n with requests.get(file_url, headers=headers, stream=True) as r:\n tmp_file_handle.write(r.content)\n\n tmp_file_handle.flush()\n\n return tmp_file_handle", "def _download_epw_file(url):\n r = requests.get(url)\n if r.ok:\n # py2 and 3 compatible: binary write, encode text first\n log.debug(\" ... 
OK!\")\n return io.StringIO(r.text)\n else:\n log.error(\" connection error status code: %s\" % r.status_code)\n r.raise_for_status()", "def get_media(self, url, out_filename=None, raw_data=False):\n if not raw_data:\n if not out_filename:\n out_filename = os.path.join(settings.BW_MMS_DIRECTORY,\n url.split('/')[-1])\n\n if not os.path.isdir(os.path.dirname(out_filename)):\n raise ValueError('Invalid output directory: {} - '\n 'unable to download MMS'.\n format(os.path.dirname(out_filename)))\n\n if os.path.isfile(out_filename):\n logging.info('filename {}, already exists - will be '\n 'overwritten.....'.format(out_filename))\n\n try:\n resp = requests.get(url, auth=(self.token, self.secret))\n except requests.exceptions.RequestException as e:\n logging.info('Error while fetching media: {}'.format(e))\n return\n\n if resp.status_code == requests.codes.ok:\n try:\n if raw_data:\n return resp.content\n else:\n with open(out_filename, 'wb') as fd:\n fd.write(resp.content)\n\n return out_filename\n except Exception as e:\n logging.info('Error: {} while writing file: {}'.\n format(e, out_filename))\n return\n\n logging.info('Invalid URI or an error occured, response: {}, '\n 'response content: {}'.format(resp.status_code,\n resp.text))", "def read_url_all(url):\n\n\t\treturn write_file(read_url(url))", "def perform_download(url, outdir=None):\n if outdir is None:\n outdir = os.getcwd()\n\n direct_link_path = urlparse(url).path\n path_parts = direct_link_path.split('/')\n file_name = path_parts[-1]\n\n output_full_path = os.path.join(outdir, file_name)\n\n r = requests.get(url, stream=True)\n \n file_size = int(r.headers[\"Content-Length\"])\n \n print(\"Starting download of {0} to {1} (file size = {2} bytes)\".format(file_name, output_full_path, file_size))\n \n output_file = open(output_full_path, 'wb')\n \n counter = 0\n chunksize = 1024\n previousPerCent = 0\n\n sys.stdout.write(\n '\\n\\r0% 0/{0}'.format(file_size)\n )\n sys.stdout.flush()\n\n for chunk in r.iter_content(chunk_size=chunksize):\n if chunk:\n output_file.write(chunk)\n output_file.flush()\n \n currentPercent = int((counter * chunksize) * 100 / file_size)\n\n if currentPercent > previousPerCent:\n previousPerCent = currentPercent\n \n sys.stdout.write(\n '\\r{0}% {1}/{2}'.format(currentPercent, counter * chunksize, file_size)\n )\n sys.stdout.flush()\n \n counter += 1\n\n output_file.close()\n\n sys.stdout.write('\\r100% {0}/{1}\\n'.format(file_size, file_size))\n\n print('\\nCompleted downloading to {0}\\n'.format(output_full_path))", "def download(url, filename):\n response = requests.get(url, stream=True)\n with open(filename, \"wb\") as handle:\n for data in response.iter_content():\n handle.write(data)", "def download_file(url: str) -> str:\n\n assert len(url) > 0\n\n filename = url.split('/')[-1]\n\n with open(filename, 'wb') as output_file:\n response = requests.get(url, stream=True)\n total = response.headers.get('content-length')\n\n if total is None:\n output_file.write(response.content)\n else:\n downloaded = 0\n total = int(total)\n for data in response.iter_content(chunk_size=max(int(total / 1000), 1024 * 1024)):\n downloaded += len(data)\n output_file.write(data)\n done = int(50 * downloaded / total)\n sys.stdout.write('\\r[{}{}]'.format('█' * done, '.' 
* (50 - done)))\n sys.stdout.flush()\n sys.stdout.write('\\n')\n\n return filename", "def get(self, url, stream, chunk_size=BaseStorageConnector.CHUNK_SIZE):\n path = self.base_path / url\n with path.open(\"rb\", chunk_size) as f:\n for chunk in iter(lambda: f.read(chunk_size), b\"\"):\n stream.write(chunk)", "def fetch(url, user_agent=\"django-oembed/0.1\"):\r\n request = urllib2.Request(url)\r\n request.add_header('User-Agent', user_agent)\r\n request.add_header('Accept-Encoding', 'gzip')\r\n opener = urllib2.build_opener()\r\n f = opener.open(request)\r\n result = f.read()\r\n if f.headers.get('content-encoding', '') == 'gzip':\r\n result = gzip.GzipFile(fileobj=StringIO(result)).read()\r\n f.close()\r\n return result", "def download_file(self, url, filename):\n r = requests.get(url, stream=True)\n r.raise_for_status()\n\n with open(filename, 'wb') as f:\n for chunk in r.iter_content():\n if chunk:\n f.write(chunk)\n f.flush()", "def get_remote_bytes(file_url) -> io.BytesIO:\n result = urlfetch.fetch(file_url)\n return io.BytesIO(result.content)", "def httpretrieve_save_file(url, filename, querydata=None, postdata=None, \\\r\n httpheaders=None, proxy=None, timeout=None):\r\n\r\n # Open the output file object and http file-like object.\r\n outfileobj = open(filename, 'w')\r\n httpobj = httpretrieve_open(url, querydata=querydata, postdata=postdata, \\\r\n httpheaders=httpheaders, proxy=proxy, timeout=timeout)\r\n\r\n # Repeatedly read from the file-like HTTP object into our file, until the\r\n # response is finished.\r\n responsechunkstr = None\r\n while responsechunkstr != '':\r\n responsechunkstr = httpobj.read(4096)\r\n outfileobj.write(responsechunkstr)\r\n\r\n outfileobj.close()\r\n httpobj.close()", "def download_file(url, outfile=None):\n if not outfile:\n outfile = url.split(\"/\")[-1]\n info(\"Downloading %s to %s\" % (url, outfile))\n with requests.get(url, stream=True) as r:\n r.raise_for_status()\n with open(outfile, \"wb\") as f:\n for chunk in r.iter_content(chunk_size=8192):\n f.write(chunk)\n return outfile", "def urlfetch(self, url, **kwargs):\n logging.debug('Fetching %s with kwargs %s', url, kwargs)\n resp = urlfetch.fetch(url, deadline=999, **kwargs)\n\n if resp.status_code == 200:\n return resp.content\n else:\n logging.warning('GET %s returned %d:\\n%s',\n url, resp.status_code, resp.content)\n self.handler.response.headers.update(resp.headers)\n self.handler.response.out.write(resp.content)\n raise exc.status_map.get(resp.status_code)(resp.content)", "def fetch(self, url) -> StyleInfo:\n caching, caching_delta = parse_cache_option(self.cache_option)\n path = self._get_output_path(url)\n cached = self._get_from_cache(caching, url)\n if cached:\n return path, cached\n\n contents = self._do_fetch(url)\n if not contents:\n return None, \"\"\n\n self._save_to_cache(caching, caching_delta, url, contents)\n return path, contents", "def _download(self) -> None:\n download_url(\n self.url,\n self.root,\n filename=self.data_dir,\n md5=self.md5 if self.checksum else None,\n )\n self._extract()", "def run(self):\n if self.parsed_args.fetch_cache:\n issues = self.backend.fetch_from_cache()\n else:\n issues = self.backend.fetch(from_date=self.from_date)\n\n try:\n for issue in issues:\n obj = json.dumps(issue, indent=4, sort_keys=True)\n # self.outfile.write(issue['url']+\"\\n\")\n self.outfile.write(obj)\n self.outfile.write('\\n')\n except requests.exceptions.HTTPError as e:\n raise requests.exceptions.HTTPError(str(e.response.json()))\n except IOError as e:\n raise 
RuntimeError(str(e))\n except Exception as e:\n if self.backend.cache:\n self.backend.cache.recover()\n raise RuntimeError(str(e))", "def get_binary(url):\n a = requests.get(url, stream=True)\n return a.content", "def fetch_output(self, app, download_dir=None,\n overwrite=False, changed_only=True, **extra_args):\n assert isinstance(\n app, Task), \"Core.fetch_output: passed an `app` argument \" \\\n \"which is not a `Task` instance.\"\n if isinstance(app, Application):\n self.__fetch_output_application(\n app, download_dir, overwrite, changed_only, **extra_args)\n else:\n # generic `Task` object\n self.__fetch_output_task(\n app, download_dir, overwrite, changed_only, **extra_args)", "def fetch_web_cont(self):\n with open(self.input_file) as input_file:\n data = yaml.load(input_file, yaml.FullLoader)\n url_list = data.get(self.url_access)\n regex_list = data.get(self.regex_access)\n\n print('Fetching data:')\n\n for url in url_list:\n # This restores the same behavior as before.\n # Enabling certificate verification by default for stdlib http clients\n context = ssl._create_unverified_context()\n run_time = datetime.now().strftime(\"Date: %d-%m-%Y Time: %I:%M:%S:%f_%p\")\n start = time.perf_counter()\n web_resp = request.urlopen(url, context=context)\n respData = web_resp.read()\n resp_time = '%0.2f s' % (time.perf_counter() - start)\n\n for regex in regex_list:\n contents = re.findall(regex, str(respData))\n with open(self.output_file, 'a') as file:\n if not contents:\n print(run_time, ' | URL: ', url, '| content not found with this regex: ', regex,\n file=file)\n\n else:\n for content in contents:\n print(run_time, ' | URL: ', url, ' | Response Time: ', resp_time,\n url, ' | Contents: ', content, file=file)\n \n with open(self.output_file, 'a') as file:\n \n print('\\n#################################\\n', file=file)", "def download(self):\n data = urllib.urlopen(self.remoteurl).read()\n s = StringIO.StringIO(data)\n return Image.open(s)", "def download_from_uri(uri: str, dst: utils.ReadWritePath) -> str:\n if uri.startswith('github://'):\n raise NotImplementedError('Github sources not supported yet')\n\n path = utils.as_path(uri)\n if not path.exists():\n raise ValueError(f'Unsuported source: {uri}')\n\n # Download the main file\n python_module = path / f'{path.name}.py'\n python_module.copy(dst / python_module.name)\n\n # TODO(tfds): Should also support download on the extra files (e.g. 
label.txt,\n # util module,...)\n\n # Add the `__init__` file\n (dst / '__init__.py').write_text('')\n return python_module.stem", "def request_url(url, display, file=None):\n if file is not None:\n r = requests.get(url, stream=True)\n r.raise_for_status()\n with open(file, \"wb\") as fd:\n for chunk in r.iter_content(chunk_size=128):\n fd.write(chunk)\n return r.raise_for_status()\n else:\n r = requests.get(url)\n r.raise_for_status()\n if display == \"xml\":\n return xmltodict.parse(r.text)\n elif display == \"fasta\" or display == \"fastq\":\n return format_seq_content(r.text, display)\n else:\n return r.text", "def _download_from_url(self) -> bytes:\n response = requests.get(self.url, allow_redirects=True)\n return response.content", "async def fetch_file(self, download_url: str) -> bytes:\n log.debug(f\"Fetching file from branding repository: '{download_url}'.\")\n\n async with self.bot.http_session.get(download_url, params=PARAMS, headers=HEADERS) as response:\n if response.status != 200:\n raise RuntimeError(f\"Failed to fetch file due to status: {response.status}\")\n\n log.debug(\"Fetch successful, reading payload.\")\n return await response.read()", "def write_to_file(response: requests.Response, output_path: str) -> int:\n read_bytes = 0\n with open(output_path, \"wb\") as output_file:\n # Use the same chunk size of `urlretrieve`\n for chunk in response.iter_content(chunk_size=1024 * 8):\n read_bytes += len(chunk)\n output_file.write(chunk)\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n break\n return read_bytes", "def save_file(url, output_path):\n\n print(url)\n\n try:\n response = requests.get(url, stream = True)\n except:\n print(\"=> Download failed: %s\" % url)\n return False\n\n if (response.status_code == 200):\n try:\n with open(output_path, \"wb\") as f:\n for chunk in response.iter_content(chunk_size = 512):\n if (chunk):\n f.write(chunk)\n sys.stdout.write(\"\\r=> %s (%dkb)\" % (output_path,\n f.tell()/1024))\n sys.stdout.flush()\n sys.stdout.write(\"\\r=> %s (%dkb)\" % (output_path,\n f.tell()/1024))\n sys.stdout.flush()\n print(\"\")\n return True\n\n except Exception as err:\n print(\"\\n=> Error: %s (%s)\" % (err, url))\n\n else:\n print(\"=> Download failed: %s\" % url)\n return False", "def fetch(file):\n\tprint \"Fetching {0}...\".format(file.split(\"/\")[-1])\n\tsubprocess.call(\"wget {0} > /dev/null 2>&1\".format(file), shell=True)", "def download(self, outputfile:str, **format_options) -> str:\n return self.connection.download(self.graph, outputfile, format_options)", "def fetch(self, url):\n self.log.info(\"Fetching URL: \" + url)\n\n r = requests.get(url, verify=False)\n # raise an HTTPError on badness\n r.raise_for_status()\n\n # this decodes r.content using a guessed encoding\n return r.text", "def fetch(url):\n content = requests.get(url).text\n if \"Error\" in content:\n raise ValueError(f\"Cannot read from: {url}\")\n return content", "def downloadData(url):\n \n content = urllib2.urlopen(url)\n return content", "def web_get_file(self, url):\n try:\n print(url)\n response = requests.get(url, verify=False)\n file_buffer = BytesIO(response.content)\n file_buffer.seek(0)\n return file_buffer\n except:\n print(traceback.print_exc())\n return None", "def fetch(self):\n # This method also sets self._results_filtered and\n # self._urltable.\n page = self._conn.fetch_page(self._ddg_url.relative())\n\n if logger.isEnabledFor(logging.DEBUG):\n import tempfile\n fd, tmpfile = tempfile.mkstemp(prefix='ddgr-response-')\n os.close(fd)\n with open(tmpfile, 'w', 
encoding='utf-8') as fp:\n fp.write(page)\n logger.debug(\"Response body written to '%s'.\", tmpfile)\n\n parser = DdgParser(news=self._ddg_url.news)\n parser.feed(page)\n\n self.results = parser.results\n self._results_filtered = parser.filtered\n self._urltable = {}\n for r in self.results:\n self._urltable.update(r.urltable())", "def download_file(filename, url):\n with open(filename, 'wb') as fout:\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Write response data to file\n for block in response.iter_content(4096):\n fout.write(block)", "def fetchReviews(outputFile):\n print 'Fetching reviews...'\n oStream = open(outputFile, 'w')\n for i in xrange(_startRange, _stopRange + 1):\n iStream = urllib.urlopen(_baseUrl % i)\n data = iStream.read()\n data = data.replace('\\r\\n', ' ').replace('\\t', ' ')\n data = re.sub('[ ]+', ' ', data)\n iStream.close()\n\n try:\n [(title, review)] = _pattern.findall(data)\n print '%d. %s' % (i, title)\n\n print >> oStream, '%s|%s' % (title, review)\n oStream.flush()\n except (TypeError, ValueError):\n pass\n oStream.close()\n print 'Done'\n\n return", "def to_file(self, filename):\n resp = urlopen(self.url)\n self.file_size = self._get_content_length(resp.headers)\n block_size = 8192\n self.bytes_read = 0\n with open(filename, 'wb') as f:\n while True:\n buf = resp.read(block_size)\n if not buf:\n break\n self.bytes_read += len(buf)\n f.write(buf)\n self._dl_progress_bar()\n if self.show_progress:\n print(' ✓')", "def single_file_download(url: str, encoding: str = \"utf-8\") -> str:\n\n recipient = BytesIO() # the stream we will write into\n\n # print(\"Opening %r . . .\" % url)\n curl = pycurl.Curl()\n curl.setopt(curl.URL, url)\n curl.setopt(curl.WRITEDATA, recipient)\n curl.perform()\n curl.close()\n # print(\"Closed %r.\" % url)\n\n return recipient.getvalue().decode(encoding)", "def get_contents_to_file(self, fp, headers=None,\r\n cb=None, num_cb=10,\r\n torrent=False,\r\n version_id=None,\r\n res_download_handler=None,\r\n response_headers=None):\r\n if self.bucket != None:\r\n if res_download_handler:\r\n res_download_handler.get_file(self, fp, headers, cb, num_cb,\r\n torrent=torrent,\r\n version_id=version_id)\r\n else:\r\n self.get_file(fp, headers, cb, num_cb, torrent=torrent,\r\n version_id=version_id,\r\n response_headers=response_headers)", "def download(self):\n logging.debug('start thread:%s at %s' % (self.getName(), get_current_time()))\n headers = {'Range': 'bytes=%s-%s' % (self.start_pos, self.end_pos)}\n res = requests.get(self.url, headers=headers)\n self.fd.seek(self.start_pos)\n self.fd.write(res.content)\n\n logging.debug('Stop thread:%s at%s' % (self.getName(), get_current_time()))\n self.fd.close()", "def download_simple(url): # url(str)\n html = urlopen(url).read().decode()\n return html", "def download():\n\treturn response.download(request, db)", "def download(self, output):\n self.wait()\n path = 'auditlogEntryReport/download'\n with open(output, 'w') as f:\n f.write(self._session.get(path))\n LOGGER.info('log downloaded: {}'.format(output))", "def _fetch_and_unzip(url, file_name):\n res = requests.get(url, stream=True, verify=False)\n # get dataset size\n total_size = int(res.headers[\"Content-Length\"])\n temp_size = 0\n with open(file_name, \"wb+\") as f:\n for chunk in res.iter_content(chunk_size=1024):\n temp_size += len(chunk)\n f.write(chunk)\n f.flush()\n done = int(100 * temp_size / total_size)\n # show download progress\n sys.stdout.write(\"\\r[{}{}] {:.2f}%\".format(\"█\" * done, \" \" 
* (100 - done), 100 * temp_size / total_size))\n sys.stdout.flush()\n print(\"\\n============== {} is already ==============\".format(file_name))\n _unzip(file_name)\n os.remove(file_name)", "def do_GET(self):\n self.send_response(200)\n self.send_header(\"Content-type\", self.mimetype)\n self.end_headers()\n\n with open(filename, \"rb\") as file_:\n self.wfile.write(file_.read())\n file_.close()", "def fetch(self, remote, *args):\n return self.cmd('fetch', remote, *args)", "def fetch(thread=False):\r\n if thread:\r\n Fetch.start()\r\n else:\r\n urlretrieve(OBSURL,ZFILE)", "def FetchUrlContent(url):\n content = memcache.get(url)\n if content:\n return content\n\n request = urlfetch.fetch(url)\n\n if request.status_code == 200:\n content = request.content\n memcache.add(url, content, 60 * 60)\n return content\n\n raise LookupError('Unable to fetch URL. Response code: ' +\n str(request.status_code))", "def download(url, out_folder):\n \n filename = \"2.png\"\n \n outpath = os.path.join(out_folder, filename)\n \n if url.lower().startswith(\"http\"):\n urlretrieve(url, outpath)\n else:\n urlretrieve(urlparse.urlunparse(parsed), outpath)", "def download(uri: str) -> None:\n logger = logging.getLogger(__name__)\n logger.info('Download the dataset')\n\n # create destination dirs\n destination = project_dir / 'data' / 'raw'\n destination.mkdir(exist_ok=True, parents=True)\n\n # download the file\n urllib.request.urlretrieve(uri, destination / \"original.zip\")", "def FetchArtifact(ab_client, branch, target, build_id, filepath, output=None):\n # The \"branch\" is unused, so silent pylint warnings about it:\n _ = branch\n\n # Get the media id to download.\n # NOTE: For some reason the git branch is not needed here, which looks weird.\n # That means in the ab:// URL the branch name will be essentially ignored.\n media_id = ab_client.buildartifact().get_media(\n target=target,\n buildId=build_id,\n attemptId='latest',\n resourceId=filepath)\n\n if output is None:\n output = filepath\n\n # Create directory structure, if needed.\n outdir = os.path.dirname(output)\n if outdir and not os.path.isdir(outdir):\n os.makedirs(outdir)\n\n with open(output, 'wb') as f:\n downloader = apiclient.http.MediaIoBaseDownload(\n f, media_id, chunksize=DEFAULT_MEDIA_IO_CHUNKSIZE)\n done = False\n while not done:\n _, done = downloader.next_chunk()", "def retrieve(self, url, filename, reporthook=None, data=None, cont=None):\n url = urllib.unwrap(urllib.toBytes(url))\n if self.tempcache and url in self.tempcache:\n return self.tempcache[url]\n type, url1 = urllib.splittype(url)\n if filename is None and (not type or type == 'file'):\n try:\n fp = self.open_local_file(url1)\n hdrs = fp.info()\n del fp\n return urllib.url2pathname(urllib.splithost(url1)[1]), hdrs\n except IOError, msg:\n pass\n bs = 1024*8\n size = -1\n read = 0\n blocknum = 0\n if cont:\n localsize = self.continue_file(filename)\n read = localsize\n blocknum = localsize / bs\n fp = self.open(url, data)\n headers = fp.info()\n if cont:\n if (self.fetcher.proto == self.fetcher.PROTO_HTTP and\n not (headers.dict.get(\"content-range\") or\n headers.dict.get(\"Content-Range\"))):\n raise ResumeNotSupported\n tfp = open(filename, 'rb+')\n tfp.seek(-self.checksum_size, os.SEEK_END)\n local = tfp.read(self.checksum_size)\n remote = fp.read(self.checksum_size)\n if not local == remote:\n raise ResumeChecksumFailed\n else:\n tfp = open(filename, 'wb')\n result = filename, headers\n if self.tempcache is not None:\n self.tempcache[url] = result\n if reporthook:\n if 
\"content-length\" in headers:\n size = int(headers[\"Content-Length\"])\n if cont and self.fetcher.proto == self.fetcher.PROTO_HTTP:\n size = size + localsize - self.checksum_size\n reporthook(blocknum, bs, size)\n while 1:\n block = fp.read(bs)\n if block == \"\":\n break\n read += len(block)\n tfp.write(block)\n blocknum += 1\n if reporthook:\n reporthook(blocknum, bs, size)\n fp.close()\n tfp.close()\n del fp\n del tfp\n\n # raise exception if actual size does not match content-length header\n if size >= 0 and read < size:\n raise urllib.ContentTooShortError(\"retrieval incomplete: got only %i out \"\n \"of %i bytes\" % (read, size), result)\n\n return result", "def GetAndSave(self, url, save_suffix, unzip=False):\n self.connection.request('GET',\n '/data/' + url,\n headers={'content-type': 'text/plain'})\n response = self.connection.getresponse()\n file_name = Clean(url) + save_suffix\n destination = os.path.join(self.path, file_name)\n\n if response.status != 200:\n raise IOError(url)\n\n if unzip:\n s = StringIO.StringIO(response.read())\n content = gzip.GzipFile(fileobj=s).read()\n else:\n content = response.read()\n\n with open(destination, 'w') as f:\n f.write(content)\n return content", "def fetch(url, http, cache=False, force_download=False, wsdl_basedir=''):\n\n # check / append a valid schema if not given:\n url_scheme, netloc, path, query, fragment = urlsplit(url)\n if not url_scheme in ('http', 'https', 'file'):\n for scheme in ('http', 'https', 'file'):\n try:\n if not url.startswith(\"/\") and scheme in ('http', 'https'):\n tmp_url = \"%s://%s\" % (scheme, os.path.join(wsdl_basedir, url))\n else:\n tmp_url = \"%s:%s\" % (scheme, os.path.join(wsdl_basedir, url))\n log.debug('Scheme not found, trying %s' % scheme)\n return fetch(tmp_url, http, cache, force_download, wsdl_basedir)\n except Exception as e:\n log.error(e)\n raise RuntimeError('No scheme given for url: %s' % url)\n\n # make md5 hash of the url for caching...\n filename = '%s.xml' % hashlib.md5(url.encode('utf8')).hexdigest()\n if isinstance(cache, basestring):\n filename = os.path.join(cache, filename)\n if cache and os.path.exists(filename) and not force_download:\n log.info('Reading file %s' % filename)\n f = open(filename, 'r')\n xml = f.read()\n f.close()\n else:\n if url_scheme == 'file':\n log.info('Fetching url %s using urllib2' % url)\n f = urllib2.urlopen(url)\n xml = f.read()\n else:\n log.info('GET %s using %s' % (url, http._wrapper_version))\n response, xml = http.request(url, 'GET', None, {})\n if cache:\n log.info('Writing file %s' % filename)\n if not os.path.isdir(cache):\n os.makedirs(cache)\n f = open(filename, 'w')\n f.write(xml)\n f.close()\n return xml", "def fetch_page(self, url):\n try:\n self._raw_get(url)\n except (http.client.HTTPException, OSError) as e:\n logger.debug('Got exception: %s.', e)\n logger.debug('Attempting to reconnect...')\n self.renew_connection()\n try:\n self._raw_get(url)\n except http.client.HTTPException as e:\n logger.debug('Got exception: %s.', e)\n raise DDGConnectionError(\"Failed to get '%s'.\" % url)\n\n resp = self._resp\n if resp.status in {301, 302, 303, 307, 308}:\n redirection_url = resp.getheader('location', '')\n if 'sorry/IndexRedirect?' 
in redirection_url:\n raise DDGConnectionError('Connection blocked due to unusual activity.')\n self._redirect(redirection_url)\n resp = self._resp\n\n if resp.status != 200:\n raise DDGConnectionError('Got HTTP %d: %s' % (resp.status, resp.reason))\n\n payload = resp.read()\n try:\n return gzip.GzipFile(fileobj=io.BytesIO(payload)).read().decode('utf-8')\n except OSError:\n # Not gzipped\n return payload.decode('utf-8')", "def query(url):\n\n outputHeader = Storage()\n outputBody = Storage()\n\n query = pycurl.Curl()\n query.setopt(pycurl.URL, url)\n query.setopt(pycurl.PROXY, 'localhost')\n query.setopt(pycurl.PROXYPORT, SOCKS_PORT)\n query.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)\n # query.setopt(pycurl.HEADER, 1)\n query.setopt(pycurl.WRITEFUNCTION, outputBody.store)\n # query.setopt(pycurl.HEADERFUNCTION, outputHeader.store)\n\n try:\n query.perform()\n # query.close()\n # print \"Reached here\"\n print query.getinfo(query.SIZE_DOWNLOAD)\n print query.getinfo(query.TOTAL_TIME)\n query.close()\n return\n except pycurl.error as exc:\n return \"Unable to reach %s (%s)\" % (url, exc)", "def get_file(self, uri_type, no_copy=False):\n return self.copy(target_uri_type=uri_type, no_copy=no_copy)", "def catalog_get(self, args):\n headers = DEFAULT_HEADERS.copy()\n headers.update(args.headers)\n\n if args.output_format == \"json\":\n headers[\"accept\"] = \"application/json\"\n elif args.output_format == \"json-stream\":\n headers[\"accept\"] = \"application/x-json-stream\"\n elif args.output_format == \"csv\":\n headers[\"accept\"] = \"text/csv\"\n else:\n raise UsageException(\"Unsupported output format: %s\" % args.output_format)\n\n catalog = self.server.connect_ermrest(args.id)\n try:\n if args.output_file:\n catalog.getAsFile(args.path,\n destfilename=args.output_file,\n headers=headers,\n delete_if_empty=args.auto_delete)\n else:\n pp(catalog.get(args.path, headers=headers).json())\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n except:\n if args.output_file and os.path.isfile(args.output_file):\n logging.info(\"Deleting empty file: %s\" % args.output_file)\n os.remove(args.output_file)\n raise" ]
[ "0.6911675", "0.6535504", "0.6211069", "0.60043406", "0.59758717", "0.58903176", "0.5829563", "0.5752656", "0.57295847", "0.5723706", "0.57197624", "0.5693564", "0.5693242", "0.56680185", "0.5658385", "0.56308764", "0.5616187", "0.5565478", "0.5563205", "0.5561096", "0.55498993", "0.54965216", "0.54814184", "0.5476896", "0.54705817", "0.54317045", "0.5420145", "0.5413119", "0.5394613", "0.5384457", "0.53809035", "0.5375299", "0.5354711", "0.53271896", "0.5314086", "0.5308194", "0.53036463", "0.52998966", "0.5297852", "0.5296683", "0.5296132", "0.5278616", "0.5253441", "0.52494603", "0.5232339", "0.5229949", "0.52240384", "0.5213331", "0.5209735", "0.5209065", "0.51957446", "0.5183391", "0.51813114", "0.5179395", "0.5177456", "0.517284", "0.517242", "0.5170571", "0.51701605", "0.5159455", "0.5157305", "0.51543546", "0.5147419", "0.5139751", "0.51360226", "0.5119113", "0.511219", "0.5111657", "0.51104033", "0.5107577", "0.50862354", "0.5085158", "0.5084064", "0.50830245", "0.5081778", "0.50803554", "0.5078322", "0.5060568", "0.5057767", "0.5056084", "0.50548404", "0.505459", "0.50529045", "0.5052482", "0.50524485", "0.5052412", "0.5051416", "0.5049934", "0.5038865", "0.50372803", "0.5034609", "0.50309265", "0.5028603", "0.5028402", "0.50235146", "0.50201666", "0.50152284", "0.50112146", "0.50100183", "0.5004718" ]
0.7404381
0
Download |uri| and save it to |output|.
def fetch(uri, output, b64=False):
    output = os.path.abspath(output)
    distdir, name = os.path.split(output)
    if os.path.exists(output):
        logging.info('Using existing download: %s', name)
        return

    logging.info('Downloading %s to %s', uri, output)
    os.makedirs(distdir, exist_ok=True)

    # Use kokoro build cache or Gentoo distdir if available.
    for envvar in ('KOKORO_GFILE_DIR', 'DISTDIR'):
        cache_dir = os.getenv(envvar)
        if cache_dir:
            cache_file = os.path.join(cache_dir, name)
            if os.path.exists(cache_file):
                logging.info(' Cache hit via %s', envvar)
                symlink(cache_file, output)
                return

    # Don't be verbose if running on CI systems.
    verbose = os.isatty(sys.stdout.fileno())

    # We use urllib rather than wget or curl to avoid external utils & libs.
    # This seems to be good enough for our needs.
    tmpfile = output + '.tmp'
    for _ in range(0, 5):
        try:
            with open(tmpfile, 'wb') as outfp:
                fetch_data(uri, outfp, verbose=verbose, b64=b64)
            break
        except ConnectionError as e:
            time.sleep(1)
            logging.warning('Download failed; retrying: %s', e)
    else:
        logging.error('Unable to download; giving up')
        unlink(tmpfile)
        sys.exit(1)

    # Clear the progress bar.
    if verbose:
        print(' ' * 80, end='\r')

    os.rename(tmpfile, output)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download_img(self, url, output):\n try:\n print(\"Downloading from: %s\" % url)\n with open(output, 'wb') as f:\n f.write(urllib2.urlopen(url).read())\n print(\"Wrote to: %s\" % output)\n except IOError, e:\n print(e)", "def download(self, source_uri, output, **kwargs):\n raise NotImplementedError(\"Subclass needs to implement this method\")", "def url_retrieve(url, output_file):\n r = requests.get(url, allow_redirects=True)\n if r.status_code != 200:\n raise ConnectionError(f\"Could not download {url}\\nError code: {r.status_code}\")\n\n output_file.write_bytes(r.content)", "def download(self, url):\n try:\n webFile = urllib.urlopen(url)\n localFile = open(self.workdir + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n print(\"could not get url \" + url)", "def download_file(url, output_filename):\n print(\"Downloading\", url, \"to\", output_filename)\n r = requests.get(url)\n r.raise_for_status()\n with open(output_filename, 'wb') as f:\n f.write(r.content)", "def download(url, save_as):\n\topen(save_as, 'w').write(urllib2.urlopen(url).read())", "def download_from_url(url, output_path):\n\n print('Pulling data from {} to {}'.format(url, output_path))\n wget.download(url, output_path)\n print('done')", "def download(url, out_folder):\n \n filename = \"2.png\"\n \n outpath = os.path.join(out_folder, filename)\n \n if url.lower().startswith(\"http\"):\n urlretrieve(url, outpath)\n else:\n urlretrieve(urlparse.urlunparse(parsed), outpath)", "def save(self, url, output):\n\n shutil.copy2(self.get(url), output)", "def download(self, url):\n try:\n logging.info(self.log_format((\"downloading \" + url)))\n webFile = urllib.urlopen(url)\n localFile = open(self.paths['workspace'] + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n logging.error(self.log_format((\"could not get url \" + url)))", "def save_file(url, output_path):\n\n print(url)\n\n try:\n response = requests.get(url, stream = True)\n except:\n print(\"=> Download failed: %s\" % url)\n return False\n\n if (response.status_code == 200):\n try:\n with open(output_path, \"wb\") as f:\n for chunk in response.iter_content(chunk_size = 512):\n if (chunk):\n f.write(chunk)\n sys.stdout.write(\"\\r=> %s (%dkb)\" % (output_path,\n f.tell()/1024))\n sys.stdout.flush()\n sys.stdout.write(\"\\r=> %s (%dkb)\" % (output_path,\n f.tell()/1024))\n sys.stdout.flush()\n print(\"\")\n return True\n\n except Exception as err:\n print(\"\\n=> Error: %s (%s)\" % (err, url))\n\n else:\n print(\"=> Download failed: %s\" % url)\n return False", "def download():\n try:\n cli.run(\n [URL, '--output', TEMP_DIR],\n )\n except SystemExit:\n return None", "def download_file(self, url, path):\n print('\\tDownloading: ', path)\n with open(path, 'w') as outfile:\n try:\n response = self._http_client.get(url)\n outfile.write(response.text)\n finally:\n response.close()\n outfile.close()\n gc.collect()", "def download(url, filename=None):\n\t# requirements os, shutil, urllib.parse, urllib.request\n\tif not filename:\n\t\turl_parts = urllib.parse.urlparse(url)\n\t\tfilename = os.path.basename(url_parts.path)\n\turl_h = urllib.request.urlopen(url)\n\twith open(filename, 'wb') as file_h:\n\t\tshutil.copyfileobj(url_h, file_h)\n\turl_h.close()\n\treturn", "def download_file(download_url, save_path):\n url = \"https://www.encodeproject.org/\" + download_url\n urllib.request.urlretrieve(url, save_path)", "def 
download(url, output, encoding, insrs, format_name):\n\n folder = download_data(url, encoding)\n joined_file = join_files(folder)\n transform(joined_file, output, insrs, format_name)\n\n shutil.rmtree(folder)\n os.remove(joined_file)\n\n if not os.path.isfile(output):\n raise Error(\"Output file not created, the whole process failed\")\n else:\n logging.info(\"File %s successfuly created\" % output)", "def download(self, outputfile: str, outputformat: str):\n pass", "def download(self, url: str, dest: PathLike, force: bool = False):", "def _download(self, url, output_dir, dataset, chunk_size=1024):\n r = self.session.get(url, stream=True, allow_redirects=True)\n if not r.ok:\n r = self.session.get(r.url, stream=True, allow_redirects=True, auth=(self._username, self._password))\n file_size = int(r.headers['Content-Length'])\n\n with tqdm(total=file_size, unit_scale=True, unit='B', unit_divisor=1024) as pbar:\n ### GET FILE NAME ###\n if \"Content-Disposition\" in r.headers.keys():\n local_filename = re.findall(\"filename=(.+)\", r.headers[\"Content-Disposition\"])[0]\n else:\n local_filename = url.split(\"/\")[-3]\n local_filename = self.api.lookup(dataset, local_filename)[0]\n local_filename = local_filename + util.convert_to_extension(r.headers['content-type'])\n print(\"*** FNAME\", local_filename)\n\n local_filename = os.path.join(output_dir, local_filename)\n\n ### WRITE FILE ###\n with open(local_filename, 'wb') as f:\n for chunk in r.iter_content(chunk_size=chunk_size):\n if chunk:\n f.write(chunk)\n pbar.update(chunk_size)\n return local_filename", "def download_file(self, url, filename):\n r = requests.get(url, stream=True)\n r.raise_for_status()\n\n with open(filename, 'wb') as f:\n for chunk in r.iter_content():\n if chunk:\n f.write(chunk)\n f.flush()", "def download(uri: str) -> None:\n logger = logging.getLogger(__name__)\n logger.info('Download the dataset')\n\n # create destination dirs\n destination = project_dir / 'data' / 'raw'\n destination.mkdir(exist_ok=True, parents=True)\n\n # download the file\n urllib.request.urlretrieve(uri, destination / \"original.zip\")", "def download_to_file(url, filename):\n with browser_spoof_open(url) as download_conn:\n with open(filename, \"wb\") as out_file:\n shutil.copyfileobj(download_conn, out_file, 1024 * 8)", "def download_file(filename, url):\n with open(filename, 'wb') as fout:\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Write response data to file\n for block in response.iter_content(4096):\n fout.write(block)", "def _download_from_url(self, url):\n target_file_name = self.dir + \"/\" + url.split('/')[-1].split('?')[0]\n urllib.urlretrieve (url, target_file_name)", "def download(url, filename):\n response = requests.get(url, stream=True)\n with open(filename, \"wb\") as handle:\n for data in response.iter_content():\n handle.write(data)", "def download_file(url: str) -> str:\n\n assert len(url) > 0\n\n filename = url.split('/')[-1]\n\n with open(filename, 'wb') as output_file:\n response = requests.get(url, stream=True)\n total = response.headers.get('content-length')\n\n if total is None:\n output_file.write(response.content)\n else:\n downloaded = 0\n total = int(total)\n for data in response.iter_content(chunk_size=max(int(total / 1000), 1024 * 1024)):\n downloaded += len(data)\n output_file.write(data)\n done = int(50 * downloaded / total)\n sys.stdout.write('\\r[{}{}]'.format('█' * done, '.' 
* (50 - done)))\n sys.stdout.flush()\n sys.stdout.write('\\n')\n\n return filename", "def _download_file(url: str, output_path: str):\n\n def write_to_file(response: requests.Response, output_path: str) -> int:\n \"\"\"Write the response content to the given file.\n\n :param response: Response to be written to the output file.\n :param output_path: Path to the output file.\n :returns: Number of bytes read from the response content.\n \"\"\"\n read_bytes = 0\n with open(output_path, \"wb\") as output_file:\n # Use the same chunk size of `urlretrieve`\n for chunk in response.iter_content(chunk_size=1024 * 8):\n read_bytes += len(chunk)\n output_file.write(chunk)\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n break\n return read_bytes\n\n try:\n with requests.get(\n url, stream=True, timeout=FETCHER_REQUEST_TIMEOUT\n ) as response:\n response.raise_for_status()\n\n content_length = int(response.headers.get(\"Content-Length\", 0))\n if content_length > FETCHER_MAXIMUM_FILE_SIZE:\n raise REANAFetcherError(\"Maximum file size exceeded\")\n\n read_bytes = write_to_file(response, output_path)\n\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n os.remove(output_path)\n raise REANAFetcherError(\"Maximum file size exceeded\")\n except HTTPError as e:\n error = f\"Cannot fetch the workflow specification: {e.response.reason} ({response.status_code})\"\n if response.status_code == 404:\n error = \"Cannot find the given workflow specification\"\n raise REANAFetcherError(error)\n except Timeout:\n raise REANAFetcherError(\n \"Timed-out while fetching the workflow specification\"\n )\n except RequestException:\n raise REANAFetcherError(\n \"Something went wrong while fetching the workflow specification\"\n )", "def download(filename):\n print \"Downloading\", filename\n file_content = urlopen(\n urljoin(URL_PATH, filename)\n )\n write_data_to_file(\n file_content.read(),\n os.path.join(\n '/tmp',\n filename\n )\n )", "def download_file(url, outfile=None):\n if not outfile:\n outfile = url.split(\"/\")[-1]\n info(\"Downloading %s to %s\" % (url, outfile))\n with requests.get(url, stream=True) as r:\n r.raise_for_status()\n with open(outfile, \"wb\") as f:\n for chunk in r.iter_content(chunk_size=8192):\n f.write(chunk)\n return outfile", "def download_file(url_path):\n local_filename = url_path.split('/')[-3] + \"-\" + url_path.split('/')[-1]\n local_filename = OUT_DIR + local_filename\n print local_filename\n url = \"https://commoncrawl.s3.amazonaws.com/\" + url_path\n # NOTE the stream=True parameter\n req = requests.get(url, stream=True)\n with open(local_filename, 'wb') as write_f:\n for chunk in req.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n write_f.write(chunk)\n write_f.close()\n return local_filename", "def download_file(uri: str, target_path: str) -> None:\n\n if not HttpNavigationManager.__INSTANCE:\n raise Exception(\"HttpNavigationManager not Initialized.\")\n\n with open(target_path, 'wb') as file:\n file.write(\n HttpNavigationManager.__INSTANCE.get(uri).content\n )\n\n file.flush()\n file.close()", "def _download(url, outpath=None, dirname=None, branch='master', release=None):\n six.print_('downloading...')\n outfolder = outpath or os.getcwd()\n file, archive_url = get_archive_url(url, branch, release)\n six.print_(archive_url)\n if dirname:\n outfolder = \"{}/{}.zip\".format(outfolder, dirname)\n return file, wget.download(archive_url, out=outfolder)", "def to_file(self, filename):\n resp = urlopen(self.url)\n self.file_size = 
self._get_content_length(resp.headers)\n block_size = 8192\n self.bytes_read = 0\n with open(filename, 'wb') as f:\n while True:\n buf = resp.read(block_size)\n if not buf:\n break\n self.bytes_read += len(buf)\n f.write(buf)\n self._dl_progress_bar()\n if self.show_progress:\n print(' ✓')", "def download(self, url, filename):\n print(\"url\", url)\n print(\"filename\", filename)\n # open in binary mode\n with open(filename, \"wb\") as file:\n # get request\n try:\n r = requests.get(url)\n if r.status_code == 404:\n raise NotFoundException(\n \"URL: \", url, \" is not working. Status code 404\")\n # write to file\n file.write(r.content)\n print(\"file downloaded\")\n except ConnectionError as ex:\n print(ex)\n except NotFoundException as ex:\n print(ex)\n except Exception as ex:\n print(ex)", "def download_file(url, outputfile):\r\n try:\r\n req = requests.get(url, stream=True, timeout=120)\r\n try:\r\n with open(outputfile, 'wb') as file_download:\r\n for chunk in req.iter_content(chunk_size=1024): \r\n if chunk: \r\n file_download.write(chunk)\r\n except IOError as error:\r\n print error\r\n except requests.exceptions.RequestException as err:\r\n print err\r\n except socket.error as err:\r\n print err\r\n return None", "def fetch_file(self, location, output=None):\n\n self.log.debug(\"Fetching '%s' file...\" % location)\n\n if not output:\n output = tempfile.mktemp(\"-dogen\")\n \n self.log.debug(\"File will be saved as '%s'...\" % output)\n\n with open(output, 'wb') as f:\n f.write(requests.get(location, verify=self.ssl_verify).content)\n\n return output", "def _download(self) -> None:\n download_url(\n self.url,\n self.root,\n filename=self.data_dir,\n md5=self.md5 if self.checksum else None,\n )\n self._extract()", "def download(url, to):\n filename = url.rstrip('/').split('/')[-1] + '.zip'\n r = requests.get(url, stream=True)\n\n outpath = os.path.join(to, filename)\n\n with open(outpath, 'wb') as fd:\n for chunk in r.iter_content(1024 * 1024):\n fd.write(chunk)\n\n return outpath", "def download (url):\n path, url = url\n r = requests.get (url, stream = True)\n content = r.text\n #print (content)\n with open (path + '.txt', 'w') as f:\n f.write (content)", "def fetch_save(url):\n\n name = url.split(\"/\")[-1]\n response = requests.get(url, stream=True)\n if response.status_code == 200:\n with open(f\"{DATA_PATH}/{name}\", \"wb\") as f:\n f.write(response.raw.read())\n else:\n logging.info(f\"Failed {url} download\")", "def download(url, output_dir, output_name=None,\n callback=progress_callback, clobber=True, message=None):\n qisys.sh.mkdir(output_dir, recursive=True)\n if output_name:\n dest_name = os.path.join(output_dir, output_name)\n else:\n dest_name = url.split(\"/\")[-1]\n dest_name = os.path.join(output_dir, dest_name)\n error = None\n if os.path.exists(dest_name) and not clobber:\n return dest_name\n if message:\n ui.info(*message)\n try:\n dest_file = open(dest_name, \"wb\")\n except Exception as e:\n mess = \"Could not save %s to %s\\n\" % (url, dest_name)\n mess += \"Error was %s\" % e\n raise Exception(mess)\n url_split = urlparse.urlsplit(url)\n url_obj = None\n server_name = url_split.netloc\n try:\n if url_split.scheme == \"ftp\":\n # We cannot use urllib2 here because it has no support\n # for username/password for ftp, so we will use ftplib\n (username, password, root) = get_ftp_access(server_name)\n ftp = ftplib.FTP(server_name, username, password)\n if root:\n ftp.cwd(root)\n\n class Tranfert(object):\n \"\"\" Transfert Class \"\"\"\n pass\n\n # Set binary 
mode\n ftp.voidcmd(\"TYPE I\")\n size = ftp.size(url_split.path[1:])\n Tranfert.xferd = 0\n\n def retr_callback(data):\n \"\"\" Retr Callback \"\"\"\n Tranfert.xferd += len(data)\n if callback:\n callback(size, Tranfert.xferd)\n dest_file.write(data)\n\n cmd = \"RETR \" + url_split.path[1:]\n ftp.retrbinary(cmd, retr_callback)\n else:\n url_obj = authenticated_urlopen(url)\n if six.PY3:\n content_length = url_obj.headers.get('content-length')\n else:\n content_length = url_obj.headers.dict['content-length']\n size = int(content_length)\n buff_size = 100 * 1024\n xferd = 0\n while xferd < size:\n data = url_obj.read(buff_size)\n if not data:\n break\n xferd += len(data)\n if callback:\n callback(size, xferd)\n dest_file.write(data)\n except Exception as e:\n error = \"Could not download file from %s\\n to %s\\n\" % (url, dest_name)\n error += \"Error was: %s\" % e\n finally:\n dest_file.close()\n if url_obj:\n url_obj.close()\n if error:\n qisys.sh.rm(dest_name)\n raise Exception(error)\n return dest_name", "def torrent_download(download_url, torrent):\n webFile = urllib.urlopen(download_url)\n localFile = open(torrent, 'wb')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()", "def download_from_uri(uri: str, dst: utils.ReadWritePath) -> str:\n if uri.startswith('github://'):\n raise NotImplementedError('Github sources not supported yet')\n\n path = utils.as_path(uri)\n if not path.exists():\n raise ValueError(f'Unsuported source: {uri}')\n\n # Download the main file\n python_module = path / f'{path.name}.py'\n python_module.copy(dst / python_module.name)\n\n # TODO(tfds): Should also support download on the extra files (e.g. label.txt,\n # util module,...)\n\n # Add the `__init__` file\n (dst / '__init__.py').write_text('')\n return python_module.stem", "def download_file(url,file_name):\n #http://stackabuse.com/download-files-with-python/\n filedata = urllib2.urlopen(url)\n datatowrite = filedata.read()\n with open(file_name, 'wb') as f:\n f.write(datatowrite)", "def download_file(filename, url):\n print(\"downloading {0}\".format(url))\n with open(filename, \"wb\") as fout:\n response = requests.get(url, stream=True, verify=False)\n response.raise_for_status()\n # Write response data to file\n iblock = 0\n for block in response.iter_content(4096):\n if iblock % 10000 == 0:\n sys.stdout.write(\".\")\n sys.stdout.flush()\n iblock += 1\n fout.write(block)", "def download(self):\n if not self.url:\n raise RuntimeError(self.tips)\n\n download_file_name = os.path.join(\n self.raw_path, os.path.splitext(os.path.basename(self.url))[0]\n )\n file_format = self.url.split(\".\")[-1]\n if \"amazon\" in self.url:\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.json.{file_format}\"\n )\n else:\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.{file_format}\"\n )\n if \"1drv.ms\" in self.url:\n file_format = \"zip\"\n raw_file_path = os.path.join(\n self.raw_path, f\"{self.dataset_name}.{file_format}\"\n )\n if not os.path.exists(raw_file_path):\n print(f\"download_file: url: {self.url}, raw_file_path: {raw_file_path}\")\n download_file(self.url, raw_file_path)\n if \"amazon\" in raw_file_path:\n # amazon dataset do not unzip\n print(\"amazon dataset do not decompress\")\n return\n elif file_format == \"gz\":\n file_name = raw_file_path.replace(\".gz\", \"\")\n with gzip.open(raw_file_path, \"rb\") as fin:\n with open(file_name, \"wb\") as fout:\n shutil.copyfileobj(fin, fout)\n else:\n shutil.unpack_archive(\n raw_file_path, 
self.raw_path, format=get_format(file_format)\n )\n\n if not os.path.exists(download_file_name):\n return\n elif os.path.isdir(download_file_name):\n os.rename(\n download_file_name, os.path.join(self.raw_path, self.dataset_name)\n )\n else:\n os.rename(\n download_file_name,\n os.path.join(\n self.raw_path,\n f'{self.dataset_name}.{download_file_name.split(\".\")[-1]}',\n ),\n )", "def download(self, url, destination):\n fileDownloader = utils.HttpFileDownloader(url, destination)\n fileDownloader.download()", "def download(url, outfile=None, workdir=None):\n filename = Path(urlparse(url).path).name\n outfile = _format_path(outfile)\n if os.path.isdir(outfile):\n outfile /= f'{filename}.zip'\n\n workdir = _format_path(workdir)\n\n temp = tempfile.NamedTemporaryFile(delete=False, dir=workdir)\n temp.close()\n local_path = temp.name\n\n _download_raw_data(url, local_path)\n shutil.move(local_path, outfile)", "def download(self):\n file_url = posixpath.join(self.mirrors, self.resources)\n _urlretrieve(file_url, os.path.join(self.root, self.resources))", "def download(self):\n file_url = posixpath.join(self.mirrors, self.resources)\n _urlretrieve(file_url, os.path.join(self.root, self.resources))", "def get_url(self,url,output=None):\n parsed_url = urlparse(url)\n hostname = parsed_url[1]\n \n #Make the command\n cmd = \"wget %s -O -\" % url\n (ssh_input,ssh_output,ssh_err) = self.execute_command(cmd)\n \n if(output==None):\n p = urlparse(url)[2]\n filename = os.path.split(p)[1] \n output = filename\n # See if it's ok.\n err = sio.StringIO()\n dat = ssh_err.read(BLOCKSIZE)\n while(dat):\n err.write(dat)\n dat = ssh_err.read(BLOCKSIZE)\n \n err_out = err.getvalue()\n print >> sys.stderr, err_out\n err1 = re.compile(r\"failed\") # Failed to resolve hostname\n err2 = re.compile(r\"404 Not Found\") # File not found\n \n if(err1.search(err_out)):\n raise SSHError(\"ERROR: Failed to retrieve file! Hostname unknown\")\n elif(err2.search(err_out)):\n raise SSHError(\"ERROR: Failed to retrieve file. 
File not found\")\n # If it didn't fail, read the file.\n \n if(output==\"-\"):\n f = sys.stdout\n else:\n f = open(output,\"w+b\")\n dat = ssh_output.read(BLOCKSIZE)\n while(dat):\n f.write(dat)\n dat = ssh_output.read(BLOCKSIZE)", "def __download_file(self, filename):\r\n \r\n respons = requests.get(self.__url + filename, stream=True)\r\n save_filename = os.path.join(self.__folder, os.path.basename(filename))\r\n with open(save_filename, 'wb') as output_file:\r\n for chunk in respons.iter_content(chunk_size=128):\r\n output_file.write(chunk)", "def httpretrieve_save_file(url, filename, querydata=None, postdata=None, \\\r\n httpheaders=None, proxy=None, timeout=None):\r\n\r\n # Open the output file object and http file-like object.\r\n outfileobj = open(filename, 'w')\r\n httpobj = httpretrieve_open(url, querydata=querydata, postdata=postdata, \\\r\n httpheaders=httpheaders, proxy=proxy, timeout=timeout)\r\n\r\n # Repeatedly read from the file-like HTTP object into our file, until the\r\n # response is finished.\r\n responsechunkstr = None\r\n while responsechunkstr != '':\r\n responsechunkstr = httpobj.read(4096)\r\n outfileobj.write(responsechunkstr)\r\n\r\n outfileobj.close()\r\n httpobj.close()", "def download_file(url, fname_out=None) -> None:\n\n import ssl\n\n try:\n with urllib.request.urlopen(url) as f:\n if not fname_out:\n return f.read().decode(\"utf-8\")\n else:\n fdir = os.path.dirname(fname_out)\n if not os.path.exists(fdir):\n os.makedirs(fdir)\n\n with open(fname_out, \"wb\") as outfile:\n outfile.write(f.read())\n return fname_out\n\n except ssl.SSLError:\n print(\"WHAT!\")\n sys.exit(1)", "def download(self):\n\n # os.open *should* give a thread-safe way to exlusivly open files\n filepath = self.film\n try:\n # os.O_BINARY is only avilable and needed on windows\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY | os.O_BINARY\n except:\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY\n try:\n fd = os.open(filepath, flags)\n except:\n return\n\n try:\n response = self.session.get(self.filmurl, stream=True)\n if response.status_code == 200:\n for chunk in response.iter_content(1024):\n os.write(fd, chunk)\n except:\n # Remove partial img file if request or stream fails\n os.close(fd)\n os.remove(filepath)", "def fetch_data(uri: str, output=None, verbose: bool = False, b64: bool = False):\n # This is the timeout used on each blocking operation, not the entire\n # life of the connection. So it's used for initial urlopen and for each\n # read attempt (which may be partial reads). 5 minutes should be fine.\n TIMEOUT = 5 * 60\n\n if output is None:\n output = io.BytesIO()\n\n try:\n with urllib.request.urlopen(uri, timeout=TIMEOUT) as infp:\n mb = 0\n length = infp.length\n while True:\n data = infp.read(1024 * 1024)\n if not data:\n break\n # Show a simple progress bar if the user is interactive.\n if verbose:\n mb += 1\n print('~%i MiB downloaded' % (mb,), end='')\n if length:\n percent = mb * 1024 * 1024 * 100 / length\n print(' (%.2f%%)' % (percent,), end='')\n print('\\r', end='', flush=True)\n if b64:\n data = base64.b64decode(data)\n output.write(data)\n except urllib.error.HTTPError as e:\n logging.error('%s: %s', uri, e)\n sys.exit(1)\n\n return output", "def _download_epw_file(url):\n r = requests.get(url)\n if r.ok:\n # py2 and 3 compatible: binary write, encode text first\n log.debug(\" ... 
OK!\")\n return io.StringIO(r.text)\n else:\n log.error(\" connection error status code: %s\" % r.status_code)\n r.raise_for_status()", "def download_file(src_url, dst_path):\n logger.info(f'Downloading file from: {src_url}')\n with src_url.open(mode='r') as in_file:\n with open(dst_path, 'wb') as out_file:\n out_file.write(in_file.read())\n logger.info(f'Downloaded file path on disk: {dst_path}')\n return dst_path", "def main(url, localfile):\n ph.download_file(url, localfile)", "def download(self, item, save_dir='./'):\r\n try:\r\n os.makedirs(save_dir)\r\n except OSError as e:\r\n if e.errno == errno.EEXIST and os.path.isdir(save_dir):\r\n # another thread beat us to creating this dir\r\n pass\r\n else:\r\n # target dir exists as a file, or a different error\r\n raise\r\n\r\n item['url'] = item[item['type'] + 's']['standard_resolution']['url'].split('?')[0]\r\n # remove dimensions to get largest image\r\n item['url'] = re.sub(r'/s\\d{3,}x\\d{3,}/', '/', item['url']) \r\n\r\n base_name = item['url'].split('/')[-1]\r\n file_path = os.path.join(save_dir, base_name)\r\n\r\n if not os.path.isfile(file_path):\r\n\r\n with open(file_path, 'wb') as file:\r\n try:\r\n bytes = requests.get(item['url']).content\r\n except requests.exceptions.ConnectionError:\r\n\t\t\t\t\tsleep(5)\r\n\t\t\t\t\tbytes = requests.get(item['url']).content\r\n\t\t\t\t\t\r\n file.write(bytes)\r\n\r\n file_time = int(item['created_time'])\r\n os.utime(file_path, (file_time, file_time))", "def _download(url, file_name):\n # File length can only be approximated from the resulting GET, unfortunately\n r = requests.get(url, stream=True)\n if 'Content-Length' in r.headers:\n file_len = int(r.headers['Content-Length'])\n elif 'X-Original-Content-Length' in r.headers:\n file_len = int(r.headers['X-Original-Content-Length'])\n else:\n file_len = 0\n r.raw.decode_content = True\n with open(file_name, 'wb') as f:\n _copyfileobj(r.raw, f, chunks=(file_len / (64. 
* 1024)))\n r.close()\n\n return file_name", "def http_download(url, target_path):\n try:\n resp = urllib2.urlopen(url)\n except urllib2.URLError, e:\n if not hasattr(e, 'code'):\n raise\n resp = e\n if resp.code != 200:\n raise IOError(\"Request url(%s) expect 200 but got %d\" %(url, resp.code))\n\n with open(target_path, 'wb') as f:\n shutil.copyfileobj(resp, f)\n return target_path", "def download(self, url):\n req = self.request(url)\n inputfile, outputfile = BytesIO(urlopen(req).read()), BytesIO()\n\n img = Image.open(inputfile)\n img = img.convert(\"RGB\") if img.mode != \"RGB\" else img\n img.thumbnail((192, 192), Image.ANTIALIAS)\n img.save(outputfile, \"JPEG\")\n\n self.image.save(os.path.basename(\n self._clean_url(url)),\n ContentFile(outputfile.getvalue()),\n save=False,\n )", "def download_url(url, fd, handle=None):\n return _librepo.download_url(handle, url, fd)", "def downloadAndReplaceFile(file_path, download_url):\r\n file = urllib.request.urlopen(download_url)\r\n with open(file_path, 'wb') as output:\r\n output.write(file.read())", "def download(url: str, to_dir: str) -> str:\n to_file = os.path.join(to_dir, get_filename_from_url(url))\n logger.debug(\"Download %s to %s\", url, to_file)\n\n h = httplib2.Http(\".cache\")\n (_, content) = h.request(url, \"GET\")\n with open(to_file, 'wb') as f:\n f.write(content)\n return to_file", "def single_download(self, url, meta_mode=False):\n self.println(DL_HEAD)\n try:\n if self.djs_core is None or self.analyzer is None:\n print(\"Download failed, enter `help` for help.\")\n else:\n if meta_mode:\n self._meta_download([url, ])\n else:\n self._download([url, ])\n os.chdir(self.home)\n except Exception as e:\n self.println(\"Download failed and stopped.\")\n print(str(e))\n self.println(DL_TAIL)", "def download_file(url, filename):\n with requests.get(url, stream=True) as res:\n if res.status_code == 200:\n with open(filename, 'wb') as f:\n for chunk in res.iter_content(chunk_size=8192): \n f.write(chunk)\n else:\n raise ValueError(\"{} {}\".format(res.status_code, url))\n return filename", "def download_file(url, fname):\n urllib.request.urlretrieve(url, fname)", "def save_file(url, *, out_dir='sha_tmp/', out_name=None):\n exten_types = {'image/fits': '.fits',\n 'text/plain; charset=UTF-8': '.tbl',\n 'application/zip': '.zip',\n }\n # Make request\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Name file using ID at end\n if out_name is None:\n out_name = 'shaID_' + id_parse.findall(url)[0]\n # Determine extension\n exten = exten_types[response.headers['Content-Type']]\n # Check if path exists\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n # Write file\n with open(out_dir + out_name + exten, 'wb') as f:\n for block in response.iter_content(1024):\n f.write(block)", "def download(url, target):\n # Add progress bar via:\n # http://stackoverflow.com/a/22776/317916\n if not url:\n return None\n urlretrieve(url, target)\n return target", "def download_file(url, download_path):\n\n # Extract the filename from the URL\n parsed = urlparse(url)\n filename = basename(parsed.path)\n\n # Ensure the output directory exists\n if not os.path.exists(download_path):\n os.makedirs(download_path)\n\n # Get a temporary file path for the compressed file download\n downloaded_file = os.path.join(tempfile.gettempdir(), filename)\n\n # Download the file\n urlretrieve(url, downloaded_file)\n\n # Move the file to the destination folder\n destination_path = os.path.join(download_path, filename)\n 
os.rename(downloaded_file, destination_path)", "def _download(url):\n \n filename = url.split('/')[-1]\n if os.path.isfile(filename):\n info('Using pre-existed file {} from local system.'.format(filename))\n else:\n info('Downloading {} from OMA Database.'.format(url.split('/')[-1]))\n filename, _ = urlretrieve(url, filename)\n return filename", "def download(self, download_path):\n return", "def _download_from_url(self) -> bytes:\n response = requests.get(self.url, allow_redirects=True)\n return response.content", "def perform_download(url, outdir=None):\n if outdir is None:\n outdir = os.getcwd()\n\n direct_link_path = urlparse(url).path\n path_parts = direct_link_path.split('/')\n file_name = path_parts[-1]\n\n output_full_path = os.path.join(outdir, file_name)\n\n r = requests.get(url, stream=True)\n \n file_size = int(r.headers[\"Content-Length\"])\n \n print(\"Starting download of {0} to {1} (file size = {2} bytes)\".format(file_name, output_full_path, file_size))\n \n output_file = open(output_full_path, 'wb')\n \n counter = 0\n chunksize = 1024\n previousPerCent = 0\n\n sys.stdout.write(\n '\\n\\r0% 0/{0}'.format(file_size)\n )\n sys.stdout.flush()\n\n for chunk in r.iter_content(chunk_size=chunksize):\n if chunk:\n output_file.write(chunk)\n output_file.flush()\n \n currentPercent = int((counter * chunksize) * 100 / file_size)\n\n if currentPercent > previousPerCent:\n previousPerCent = currentPercent\n \n sys.stdout.write(\n '\\r{0}% {1}/{2}'.format(currentPercent, counter * chunksize, file_size)\n )\n sys.stdout.flush()\n \n counter += 1\n\n output_file.close()\n\n sys.stdout.write('\\r100% {0}/{1}\\n'.format(file_size, file_size))\n\n print('\\nCompleted downloading to {0}\\n'.format(output_full_path))", "def download_data(url, filename, dst_dir):\r\n fullpath = os.path.join(dst_dir, filename)\r\n if os.path.exists(fullpath):\r\n return\r\n\r\n # Try to open url\r\n try:\r\n page = urlopen(url)\r\n except Exception:\r\n shutil.copy(PLACEHOLDER, fullpath)\r\n return\r\n\r\n f = open(fullpath, 'wb')\r\n while True:\r\n buff = page.read(BLOCK_SZ)\r\n if not buff:\r\n break\r\n f.write(buff)\r\n f.close()\r\n pass", "def download():\n response = requests.get(URL, stream=True)\n\n file = open(FILE_NAME, 'wb')\n file.write(response.content)\n\n with zipfile.ZipFile(FILE_NAME, 'r') as zip_ref:\n zip_ref.extractall()\n\n file.close()\n os.remove(FILE_NAME)", "def download(url, target):\n r = requests.get(url, stream=True)\n r.raise_for_status()\n with open(target, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024): \n if chunk:\n f.write(chunk)", "def download_content(content_link, output_dir):\n if content_link is None: return None\n res = requests.get(content_link, stream=True)\n try:\n res.raise_for_status()\n except requests.exceptions.HTTPError:\n return None\n img_name, img_format = parse_image_url(res.url)\n filepath = '{}/{}.{}'.format(output_dir, img_name, img_format)\n\n with open(filepath, mode='wb') as image_file:\n for chunk in res.iter_content(chunk_size=chunk_size):\n image_file.write(chunk)\n\n return abspath(filepath)", "def download_addon(self, url, target_path):\n try:\n filename = url.split('?')[0].rstrip('/').rsplit('/', 1)[-1]\n target_path = os.path.join(target_path, filename)\n\n print \"Downloading %s to %s\" % (url, target_path)\n urllib.urlretrieve(url, target_path)\n\n return target_path\n except Exception, e:\n print e", "def download_url(filename, url):\n latest_package_url = request.urlopen(url).read().decode(\"utf-8\")\n print(\"Downloading 
latest package:\\n{}\".format(latest_package_url))\n request.urlretrieve(latest_package_url, filename, reporthook=download_progress_callback)", "def _Download(url):\n response = urllib2.urlopen(url)\n if response.code != 200:\n raise RuntimeError('Failed to download \"%s\".' % url)\n return response.read()", "def download(url, path):\n response = requests.get(url)\n\n if response.ok:\n print(\"response is ok file is downloading ... \")\n # start to download file from url.\n with open(path, \"wb\") as f:\n f.write(response.content)\n else:\n print(\"Error!\", response.status_code)\n return False\n\n print(\"File downloaded succusfully.\")\n return True", "def download(self, url, path_to_dir):\n\n if not os.path.exists(path_to_dir):\n os.makedirs(path_to_dir)\n\n raw_data = self.__class__.get_raw_data(url)\n path_to_image = os.path.join(path_to_dir, url.split('/')[-1].split('?')[0])\n with open(path_to_image, 'wb') as f:\n self.__class__.copy_to(raw_data, f)\n\n return path_to_image", "def get_file(url):\n helpers.make_workdir() # create temp working directory\n file_url = url + constant.MALICIOUS_LOCATION\n print(file_url)\n filename = wget.download(file_url, out=constant.WORKDIR)\n return filename", "def download_file_from_url(url, PATH, file_name):\n with requests.get(url) as r:\n with open(PATH+'/'+file_name, 'wb') as f:\n f.write(r.content)", "def download_http(self, url):\n\n # Set things up.\n # ==============\n\n out = None\n headers = {}\n if (url.username is not None) and (url.password is not None):\n tmp = base64.b64encode(':'.join([url.username, url.password]))\n headers['Authorization'] = \"Basic %s\" % tmp\n\n\n # Toe the waters.\n # ===============\n # We start with an HTTP HEAD request to check the status.\n\n conn = httplib.HTTPConnection(url.netloc)\n conn.request(\"HEAD\", url.path, '', headers)\n r = conn.getresponse()\n conn.close()\n if self.verbose:\n print >> sys.stderr, url, r.status, ''\n\n\n # Bail.\n # =====\n # Short-cut when we just care whether it's a package.\n\n if url.path.endswith('/'):\n out = r.status == 200\n\n\n elif r.status == 200:\n\n # Wade in.\n # ========\n # If the status is positive we check to see if we've already\n # downloaded the latest copy.\n\n etag = r.getheader('etag', '')\n lm = r.getheader('last-modified', '')\n key = sha.new(str(url) + etag + lm).hexdigest()\n\n if not self.cachedir:\n raise ValueError(\"netimp.importer.cachedir not set\")\n if not os.path.isdir(self.cachedir):\n raise IOError( \"netimp.importer.cachedir not found \"\n + \"(%s)\" % self.cachedir\n )\n\n path = join(self.cachedir, key)\n if os.path.isfile(path):\n out = open(path, 'rb')\n else:\n\n # Dive in!\n # ========\n # We don't have this module locally yet: download it for real.\n\n conn = httplib.HTTPConnection(url.netloc)\n conn.request(\"GET\", url.path, '', headers)\n r = conn.getresponse()\n if r.status == 200: # just in case!\n fp = open(path, 'w+b')\n fp.write(r.read())\n fp.flush()\n fp.close()\n out = open(path, 'rb')\n conn.close()\n\n return out", "def download_file(directory, file_name, output_dir):\n endpoint_url = BASE_URL + \"/\" + directory\n final_file = \"lib/\" + output_dir + \"/\" + file_name\n if not os.path.exists(\"lib/\" + output_dir):\n os.makedirs(\"lib/\" + output_dir)\n print('Downloading ' + endpoint_url + \"/\" + file_name + ' ...')\n opener = urllib.URLopener()\n opener.retrieve(endpoint_url + \"/\" + file_name, final_file)\n os.chmod(final_file, 0o755)", "def download_and_save(url, file_name,file_extension):\n #make a request for 
the file\n response = requests.get(url, allow_redirects =True)\n\n #compose the file + extension\n file_to_be_saved = f\"{file_name}.{file_extension}\"\n \n #Create a new file with \"file_to_be_saved\" in the current directory\n # And save this file and print the directory with the OS module\n with open(file_to_be_saved, 'wb') as file:\n print(\"saving file.... \\n\")\n file.write(response.content)\n print('done....\\n')\n print('file saved as: ', file_to_be_saved )\n print('in: ', os.getcwd() )", "def download(url, dest):\n response = requests.get(url, stream=True)\n with open(dest, 'wb') as f:\n for chunk in response.iter_content(chunk_size=1024):\n if chunk:\n f.write(chunk)", "def download(url, dest):\n response = requests.get(url, stream=True)\n with open(dest, 'wb') as f:\n for chunk in response.iter_content(chunk_size=1024):\n if chunk:\n f.write(chunk)", "def download (httpfile, path_unzip = None, outfile = None) :\n if path_unzip is None : path_unzip = GetPath ()\n file = _check_source (httpfile, path_unzip = path_unzip, outfile = outfile)\n return file", "def download_file(url: str, fdst):\n split = urlsplit(url)\n filename = os.path.basename(split.path)\n\n print('Downloading {}'.format(filename))\n\n with urllib.request.urlopen(url) as response:\n length = response.getheader('content-length')\n if length:\n total = int(length)\n copyfileobj_with_progress(response, fdst, total=total)", "def filedownload(source, destination):\n\n # Initiate the download\n urllib.request.urlretrieve(source, destination)", "def maybe_download_and_extract():\n dest_directory = FLAGS.model_dir\n if not os.path.exists(dest_directory):\n os.makedirs(dest_directory)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.')\n tarfile.open(filepath, 'r:gz').extractall(dest_directory)", "def download_file(url, target_path):\n\n r = requests.get(url, stream=True)\n\n with open(target_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n if chunk:\n f.write(chunk)", "def download(urls, dest_folder):\n pass", "def _maybe_download(self, url):\n filename = os.path.basename(url)\n download_path = os.path.join(self._model_dir, filename)\n if os.path.exists(download_path):\n return download_path\n\n def _progress(count, block_size, total_size):\n sys.stdout.write(\n '\\r>> Downloading %s %.1f%%' %\n (filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n\n urllib.request.urlretrieve(url, download_path, _progress)\n statinfo = os.stat(download_path)\n print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.')\n return download_path", "def download_url(url):\n # use url_checker to verify URL is using the full address\n url_name = url_checker(url)\n if url_name:\n print(f'Requesting page {url_name}')\n tstamp = get_tstamp()\n # set the headers like we are a browser\n headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko)'\n ' Chrome/72.0.3626.109 Safari/537.36'}\n # download the page\n response = requests.get(url, headers=headers)\n\n # create directory for saving 
file\n URL_DIR_NAME = os.path.join(OUTPUT_DIR, str(url_name))\n URL_TM_DIR_NAME = os.path.join(URL_DIR_NAME, str(tstamp))\n # create directory using url name and timestamp for directories\n ensure_dir(URL_TM_DIR_NAME)\n # save downloaded page as a .txt file\n with open(f'{URL_TM_DIR_NAME}{slash}response.html', 'w') as f:\n print(response.text, file=f)\n # use beautiful soup to extract links\n links = []\n soup = BeautifulSoup(response.text, 'html.parser')\n tags = soup.find_all('a')\n # append links to links list\n for tag in tags:\n links.append(tag.get('href'))\n # get only unique values and sort\n my_set = set(links)\n u_links = list(my_set)\n u_links.sort()\n # save links as a .txt file\n with open(f'{URL_TM_DIR_NAME}{slash}links.txt', 'w') as f:\n for list_item in u_links:\n f.write(f'{list_item}\\n')" ]
[ "0.75725585", "0.7476779", "0.74181694", "0.7360501", "0.73380905", "0.7268354", "0.72478145", "0.71812546", "0.7168818", "0.70615435", "0.7010822", "0.69799095", "0.696379", "0.69605494", "0.6942747", "0.6924495", "0.69024444", "0.68827945", "0.6880968", "0.6865058", "0.6858983", "0.6833913", "0.6801083", "0.6799624", "0.6786758", "0.67765975", "0.67526484", "0.6750834", "0.67463064", "0.67116565", "0.6708867", "0.66680235", "0.6664826", "0.66646135", "0.66597337", "0.6626594", "0.6615251", "0.6597246", "0.6572606", "0.65665114", "0.6536975", "0.6534876", "0.65297437", "0.6521598", "0.65149784", "0.651143", "0.6506154", "0.64896685", "0.6461538", "0.6461538", "0.6450199", "0.64388543", "0.64320385", "0.6417696", "0.640732", "0.6406928", "0.64014566", "0.6389559", "0.6386731", "0.6386255", "0.63841784", "0.6375767", "0.6361602", "0.63570255", "0.63563234", "0.63526314", "0.632706", "0.6325498", "0.6310002", "0.630934", "0.63080686", "0.62990344", "0.6282644", "0.62815267", "0.62715", "0.62651306", "0.6252991", "0.6249289", "0.62391365", "0.62379843", "0.6214592", "0.62111884", "0.620149", "0.62010086", "0.6191341", "0.6189028", "0.61883724", "0.61830515", "0.6182694", "0.6182385", "0.61797816", "0.61797816", "0.6164086", "0.6163751", "0.6161016", "0.61476415", "0.61378795", "0.61360276", "0.6134025", "0.6124618" ]
0.7142565
9
Download our copies of node & npm to our tree and updates env ($PATH).
def node_and_npm_setup():
    # We have to update modules first as it'll nuke the dir node lives under.
    node.modules_update()
    node.update()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_requirements():\n with cd(REMOTE_REPO_DIR):\n cmd = ['npm install']\n # cmd += ['--requirement %s' % os.path.join(CODE_DIR,'requirements.txt')]\n run(' '.join(cmd))", "def InstallNodeDependencies():\n logging.info('entering ...')\n # Install the project dependencies specified in package.json into\n # node_modules.\n logging.info('installing AMP Validator engine dependencies ...')\n subprocess.check_call(\n ['npm', 'install', '--userconfig', '../.npmrc'],\n stdout=(open(os.devnull, 'wb') if os.environ.get('CI') else sys.stdout))\n logging.info('installing AMP Validator nodejs dependencies ...')\n subprocess.check_call(['npm', 'install', '--userconfig', '../../../.npmrc'],\n cwd='js/nodejs',\n stdout=(open(os.devnull, 'wb')\n if os.environ.get('CI') else sys.stdout))\n logging.info('... done')", "def nodejs(self):\n self.summarize_operation(\"Installing Nodejs\")\n process = Popen(shlex.split(\"curl --silent --location https://deb.nodesource.com/setup_5.x \"), stdout=subprocess.PIPE)\n process_stdout = Popen(shlex.split(\"sudo -E bash -\"), stdin=process.stdout)\n process_stdout.communicate()[0]\n self.install_package(\"nodejs\")\n self.npm_install_globally(\"npm@latest\")", "def install_frontend_deps():\n\n with lcd(FRONTENDDIR):\n cmd = '%(npm)s install' % {'npm': get_npm()}\n local(cmd)\n cmd = '%(bower)s install' % {'bower': get_bower()}\n local(cmd)", "def update_npm():\n path = os.path.join(settings.PROJECT_PATH, 'rnacentral', 'portal', 'static')\n with env.cd(path):\n env.run('npm update --loglevel info')", "def node_prereqs_installation():\n\n # NPM installs hang sporadically. Log the installation process so that we\n # determine if any packages are chronic offenders.\n shard_str = os.getenv('SHARD', None)\n if shard_str:\n npm_log_file_path = f'{Env.GEN_LOG_DIR}/npm-install.{shard_str}.log'\n else:\n npm_log_file_path = f'{Env.GEN_LOG_DIR}/npm-install.log'\n npm_log_file = open(npm_log_file_path, 'wb') # lint-amnesty, pylint: disable=consider-using-with\n npm_command = 'npm install --verbose'.split()\n\n # The implementation of Paver's `sh` function returns before the forked\n # actually returns. Using a Popen object so that we can ensure that\n # the forked process has returned\n proc = subprocess.Popen(npm_command, stderr=npm_log_file) # lint-amnesty, pylint: disable=consider-using-with\n retcode = proc.wait()\n if retcode == 1:\n # Error handling around a race condition that produces \"cb() never called\" error. This\n # evinces itself as `cb_error_text` and it ought to disappear when we upgrade\n # npm to 3 or higher. TODO: clean this up when we do that.\n print(\"npm install error detected. Retrying...\")\n proc = subprocess.Popen(npm_command, stderr=npm_log_file) # lint-amnesty, pylint: disable=consider-using-with\n retcode = proc.wait()\n if retcode == 1:\n raise Exception(f\"npm install failed: See {npm_log_file_path}\")\n print(\"Successfully installed NPM packages. 
Log found at {}\".format(\n npm_log_file_path\n ))", "def update_dependencies():\n pip = env.virtualenv.child('bin', 'pip')\n reqs = env.code_dir.child('deploy-requirements.txt')\n sudo('%s -q install -U pip' % pip)\n sudo('%s -q install -r %s' % (pip, reqs))", "def NPMInstall(infra_root):\n cipd_npm = os.path.join(\n infra_root, 'cipd', 'lib', 'node_modules', 'npm', 'bin', 'npm-cli.js')\n return RunNode(infra_root, [cipd_npm, 'install'])", "def do_base_setup(run_as_user, branch, base_path, dist_path):\n #change time to UTC\n runcmd(\"ln -sf /usr/share/zoneinfo/UTC /etc/localtime\")\n\n #install some necessary base deps\n runcmd(\"apt-get update\")\n runcmd(\"apt-get -y install git-core software-properties-common python-software-properties build-essential ssl-cert ntp runit\")\n \n #install node-js\n #node-gyp building has ...issues out of the box on Ubuntu... use Chris Lea's nodejs build instead, which is newer\n runcmd(\"apt-get -y remove nodejs npm gyp\")\n runcmd(\"add-apt-repository -y ppa:chris-lea/node.js\")\n runcmd(\"apt-get update\")\n runcmd(\"apt-get -y install nodejs\") #includes npm\n gypdir = None\n try:\n import gyp\n gypdir = os.path.dirname(gyp.__file__)\n except:\n pass\n else:\n runcmd(\"mv %s %s_bkup\" % (gypdir, gypdir))\n #^ fix for https://github.com/TooTallNate/node-gyp/issues/363\n\n #Create xcp user, under which the files will be stored, and who will own the files, etc\n try:\n pwd.getpwnam(USERNAME)\n except:\n logging.info(\"Creating user '%s' ...\" % USERNAME)\n runcmd(\"adduser --system --disabled-password --shell /bin/false --group %s\" % USERNAME)\n \n #Create xcpd user (to run counterpartyd, counterblockd, insight, bitcoind, nginx) if not already made\n try:\n pwd.getpwnam(DAEMON_USERNAME)\n except:\n logging.info(\"Creating user '%s' ...\" % DAEMON_USERNAME)\n runcmd(\"adduser --system --disabled-password --shell /bin/false --ingroup nogroup --home %s %s\" % (USER_HOMEDIR, DAEMON_USERNAME))\n \n #add the run_as_user to the xcp group\n runcmd(\"adduser %s %s\" % (run_as_user, USERNAME))\n \n #Check out counterpartyd-build repo under this user's home dir and use that for the build\n git_repo_clone(\"counterpartyd_build\", \"https://github.com/CounterpartyXCP/counterpartyd_build.git\",\n os.path.join(USER_HOMEDIR, \"counterpartyd_build\"), branch, for_user=run_as_user)\n\n #enhance fd limits for the xcpd user\n runcmd(\"cp -af %s/linux/other/xcpd_security_limits.conf /etc/security/limits.d/\" % dist_path)", "def update_go_deps(self):\n self.go_version()\n env = self.m.step.get_from_context('env', {})\n env.update(self.go_env)\n with self.m.step.context({'env': env}):\n self.m.run.with_retry(\n self.m.step,\n 'update go pkgs',\n UPDATE_GO_ATTEMPTS,\n cmd=[self.go_exe, 'get', '-u', '-t', '%s/...' 
% INFRA_GO_PKG])", "def deploy_node(app, deltas={}):\n\n virtualenv_path = join(ENV_ROOT, app)\n node_path = join(ENV_ROOT, app, \"node_modules\")\n node_modules_symlink = join(APP_ROOT, app, \"node_modules\")\n npm_prefix = abspath(join(node_path, \"..\"))\n env_file = join(APP_ROOT, app, 'ENV')\n deps = join(APP_ROOT, app, 'package.json')\n\n first_time = False\n if not exists(node_path):\n echo(\"-----> Creating node_modules for '{}'\".format(app), fg='green')\n makedirs(node_path)\n first_time = True\n\n env = {\n 'VIRTUAL_ENV': virtualenv_path,\n 'NODE_PATH': node_path,\n 'NPM_CONFIG_PREFIX': npm_prefix,\n \"PATH\": ':'.join([join(virtualenv_path, \"bin\"), join(node_path, \".bin\"), environ['PATH']])\n }\n if exists(env_file):\n env.update(parse_settings(env_file, env))\n\n # include node binaries on our path\n environ[\"PATH\"] = env[\"PATH\"]\n\n version = env.get(\"NODE_VERSION\")\n node_binary = join(virtualenv_path, \"bin\", \"node\")\n installed = check_output(\"{} -v\".format(node_binary), cwd=join(APP_ROOT, app), env=env, shell=True).decode(\"utf8\").rstrip(\n \"\\n\") if exists(node_binary) else \"\"\n\n if version and check_requirements(['nodeenv']):\n if not installed.endswith(version):\n started = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))\n if installed and len(started):\n echo(\"Warning: Can't update node with app running. Stop the app & retry.\", fg='yellow')\n else:\n echo(\"-----> Installing node version '{NODE_VERSION:s}' using nodeenv\".format(**env), fg='green')\n call(\"nodeenv --prebuilt --node={NODE_VERSION:s} --clean-src --force {VIRTUAL_ENV:s}\".format(**env),\n cwd=virtualenv_path, env=env, shell=True)\n else:\n echo(\"-----> Node is installed at {}.\".format(version))\n\n if exists(deps) and check_requirements(['npm']):\n if first_time or getmtime(deps) > getmtime(node_path):\n copyfile(join(APP_ROOT, app, 'package.json'), join(ENV_ROOT, app, 'package.json'))\n if not exists(node_modules_symlink):\n symlink(node_path, node_modules_symlink)\n echo(\"-----> Running npm for '{}'\".format(app), fg='green')\n call('npm install --prefix {} --package-lock=false'.format(npm_prefix), cwd=join(APP_ROOT, app), env=env, shell=True)\n return spawn_app(app, deltas)", "def bootstrap(execute=dummy_execute):\n path = node(['-p',\n 'try { require.resolve(\"@prometheusresearch/react-scripts/bin/react-scripts.js\") } catch (e) {\"\"}'],\n quiet=True)\n if not path.strip():\n def bootstrap_yarn():\n url, md5_hash = download.parse_url(YARN_URL)\n yarn_data = download.download(url, md5_hash=md5_hash)\n yarn_path = os.path.join(sys.prefix, 'bin', 'yarn')\n with open(yarn_path, 'w') as f:\n f.write(yarn_data)\n yarn_stat = os.stat(yarn_path)\n os.chmod(yarn_path, yarn_stat.st_mode | stat.S_IEXEC)\n\n def bootstrap_npm():\n npm_path = find_executable('npm', 'npm')\n out, err = exe(npm_path, ['--version'])\n npm_version = out.strip()\n if npm_version[0] not in ('4', '3', '2'):\n npm(['install', '--global', '[email protected]'])\n npm(['install', '--global', 'npm@' + NPM_VERSION])\n\n def bootstrap_react_scripts():\n deps = [\n '@prometheusresearch/react-scripts@%s' % REACT_SCRIPTS_VERSION,\n '[email protected]', # this is required for yarn to function propely\n ]\n npm(['install', '--global'] + deps)\n\n execute(bootstrap_yarn, (), 'Installing yarn')\n execute(bootstrap_npm, (), 'Installing npm')\n execute(bootstrap_react_scripts, (), 'Installing react-scripts')", "def install_with_npm_fast_install(self, directory, silent=False):\n timer = Timer()\n program_name = 
'npm-fast-install'\n if not self.context.test('which', 'npm-fast-install'):\n program_name = os.path.join(directory, 'node_modules', '.bin', 'npm-fast-install')\n if not self.context.exists(program_name):\n logger.verbose(\"Installing npm-fast-install locally (because it's not globally installed) ..\")\n self.context.execute('npm', 'install', 'npm-fast-install', directory=directory, silent=silent)\n package_file = os.path.join(directory, 'package.json')\n original_contents = self.context.read_file(package_file)\n metadata = dict(dependencies={}, devDependencies={})\n metadata.update(json.loads(auto_decode(original_contents)))\n need_patch = metadata['devDependencies'] and not self.production\n try:\n # Temporarily change the contents of the package.json file?\n if need_patch:\n logger.debug(\"Temporarily patching %s ..\", package_file)\n patched_data = copy.deepcopy(metadata)\n patched_data['dependencies'].update(patched_data['devDependencies'])\n patched_data.pop('devDependencies')\n self.context.write_file(package_file, json.dumps(patched_data).encode('UTF-8'))\n # Run the npm-fast-install command.\n logger.info(\"Running command: %s\", quote(program_name))\n self.context.execute(program_name, directory=directory, silent=silent)\n finally:\n # Restore the original contents of the package.json file?\n if need_patch:\n logger.debug(\"Restoring original contents of %s ..\", package_file)\n self.context.write_file(package_file, original_contents)\n logger.verbose(\"Took %s to install with npm-fast-install.\", timer)", "def _compile_web_assets_npm(project_root_dir):\n clientdir = os.path.join(project_root_dir, 'client')\n modulesdir = os.path.join(clientdir, 'node_modules')\n if os.path.isdir(modulesdir):\n log(\"removing \" + str(modulesdir))\n try:\n rmtree(modulesdir)\n except OSError as exception:\n log(exception.strerror + \": \" + exception.filename)\n return 1\n log(\"installing node modules under \" + str(clientdir))\n cmd = 'npm i'\n cr = container_users.make_host_user_command_runner()\n cr.set_working_dir(clientdir)\n res = cr.run(cmd, stream_log=True)\n return res.get_exit_code()", "def test_npm_install_url_referenced_package(modules, npm, npm_version, states):\n ret = npm.installed(\n name=\"request/request#v2.88.2\",\n registry=\"https://registry.npmjs.org/\",\n )\n assert ret.result is True\n ret = npm.removed(\n name=\"git://github.com/request/request\",\n )\n assert ret.result is True", "def npm_install(self, package):\n self.summarize_operation(\"Installing \" + package)\n print subprocess.call(shlex.split(\"sudo npm install --save \" + package))", "def _set_environment_vars(self):\n os.environ[\"PATH\"] = os.path.join(self.source_folder, \"depot_tools\") + os.pathsep + os.environ[\"PATH\"]\n os.environ[\"DEPOT_TOOLS_PATH\"] = os.path.join(self.source_folder, \"depot_tools\")\n if tools.os_info.is_windows:\n os.environ[\"DEPOT_TOOLS_WIN_TOOLCHAIN\"] = \"0\"\n os.environ[\"GYP_MSVS_VERSION\"] = \"2017\" if str(self.settings.compiler.version) == \"15\" else \"2019\"", "def _install_system_requirements_linux(self):\n self.output.info(\"Calling v8/build/install-build-deps.sh\")\n os.environ[\"PATH\"] += os.pathsep + os.path.join(self.source_folder, \"depot_tools\")\n sh_script = self.source_folder + \"/v8/build/install-build-deps.sh\"\n self.run(\"chmod +x \" + sh_script)\n cmd = sh_script + \" --unsupported --no-arm --no-nacl --no-backwards-compatible --no-chromeos-fonts --no-prompt \"\n cmd = cmd + (\"--syms\" if str(self.settings.build_type) == \"Debug\" else \"--no-syms\")\n 
cmd = \"export DEBIAN_FRONTEND=noninteractive && \" + cmd\n self.run(cmd)", "def npm_install_globally(self, package):\n self.summarize_operation(\"Installing \" + package)\n print subprocess.call(shlex.split(\"sudo npm install -g \" + package))", "def pip_install():\n _require_environment()\n remote(PIP_INSTALL_PREFIX)", "def install_backend_deps():\n with lcd(BACKENDDIR):\n cmd = '%(pip)s install -r %(requirements_file)s' % {\n 'pip': get_pip(),\n 'requirements_file': requirements_file\n }\n local(cmd)\n # Install Pandoc\n local(\"sudo apt-get install pandoc\")\n # Install Pyandoc\n with lcd(HOMEDIR):\n if not os.path.isdir(os.path.join(HOMEDIR, 'pyandoc')):\n local(\"git clone [email protected]:kennethreitz/pyandoc.git\")\n with lcd(\"pyandoc\"):\n if not env.local:\n\t with prefix('. /home/ubuntu/virtualenvs/venv-system/bin/activate'):\n local(\"python setup.py install\")\n else:\n local(\"python setup.py install\")", "def install_deps():\n pipenv_dev = run('pipenv install --dev'.split(), check=True)\n print('Installed dependencies and virtual environment. Type `pipenv shell` to activate later.')", "def install_deps():\n click.echo(\"install_deps\")", "def install_js_deps():\n click.echo('-> Installing JavaScript dependencies for the Vue.js client...')\n subprocess.check_call(['npm',\n '--prefix={0}'.format(os.path.join(os.path.dirname(aliquis.__file__),\n 'aliquisjs')),\n 'install'])\n click.echo('-> JavaScript dependencies succesfully installed.')", "def YumInstall(vm):\n vm.Install('build_tools')\n vm.InstallEpelRepo()\n vm.InstallPackages(YUM_PACKAGES)", "def local_install(self):\n import subprocess\n\n print(\"Making local install\")\n from pathlib import Path\n\n root = Path(__file__).parent.parent\n\n def run(args, shell=False):\n print(\"---\", \" \".join(args))\n return subprocess.check_call(args, cwd=curdir, shell=shell)\n\n def get_version():\n import json\n\n p = Path(curdir / \"package.json\")\n contents = json.loads(p.read_text())\n return contents[\"version\"]\n\n print(\"--- installing RobotFramework Language Server\")\n curdir = root / \"robotframework-ls\"\n run(\"python -m dev vendor_robocorp_ls_core\".split())\n run(\"vsce package\".split(), shell=sys.platform == \"win32\")\n run(\n f\"code --install-extension robotframework-lsp-{get_version()}.vsix\".split(),\n shell=sys.platform == \"win32\",\n )\n run(\"python -m dev remove_vendor_robocorp_ls_core\".split())\n\n print(\"\\n--- installing Robocorp Code\")\n curdir = root / \"robocorp-code\"\n run(\"python -m dev vendor_robocorp_ls_core\".split())\n run(\"vsce package\".split(), shell=sys.platform == \"win32\")\n run(\n f\"code --install-extension robocorp-code-{get_version()}.vsix\".split(),\n shell=sys.platform == \"win32\",\n )\n run(\"python -m dev remove_vendor_robocorp_ls_core\".split())", "def install():\n\n if (Path.cwd() / \"src\" / \"environment.yml\").is_file():\n call([\"conda\", \"install\", \"--file\", \"src/environment.yml\", \"--yes\"])\n\n pip_command = [\"install\", \"-U\", \"-r\", \"src/requirements.txt\"]\n\n if os.name == \"posix\":\n python_call(\"pip\", pip_command)\n else:\n command = [sys.executable, \"-m\", \"pip\"] + pip_command\n subprocess.Popen(command, creationflags=subprocess.CREATE_NEW_CONSOLE)", "def install_for_spec(self):\n self.create_package_json()\n os.system('npm install json-refs')\n os.system('npm install json2yaml')\n os.system('npm install yamljs')\n os.system('npm install swagger-split') # package only required while splitting hence being installed here\n 
self.delete_package_json()", "def sub_install_packages():\n sudo('apt-get update') # Update repository links\n sudo('apt-get -y upgrade') # Upgrade the system\n package_str = ' '.join(INSTALL_PACKAGES)\n sudo('apt-get -y install ' + package_str) # Install the packages", "def _install():\n download_file='http://www.ipol.im/pub/art/2015/136/inpaint_8.tgz'\n tools.download_and_extract(download_file) \n this_file_path=os.path.dirname(__file__)\n subprocess.call(' mkdir build; cd build; cmake ..; make', shell=True,cwd=exec_folder)", "def sync_virtualenv(ctx):\n if not path.isfile(\"./pyenv/bin/pip\"):\n ctx.run(\"virtualenv --no-site-packages --python=/usr/bin/python2.7 pyenv\")\n ctx.run(\"PIP_DOWNLOAD_CACHE=/var/tmp/ ./pyenv/bin/pip install -r requirements.txt\")\n print(\"\"\"\n Installation completed. Please check any error messages above.\n\n If you are going to use `openstack` or ansible directly on the command line, run\n\n . ./pyenv/bin/activate\n\n or even add it to your ~/.bashrc\n \"\"\")", "def pip_installs():\n pip = r'pip-2.7 install --install-option=\"--install-scripts=$PWD/bin\" --install-option=\"--install-lib=$PWD/lib/python2.7\" '\n with settings(warn_only=True):\n run(\"mkdir $HOME/tmp\")\n with cd(remote_dir):\n for installation in install_list:\n run(\"export TEMP=$HOME/tmp && %s %s\" % (pip, installation))\n run(\"echo '#%s' >> $HOME/.bash_profile\" % python_add_str)", "def upgrade_dependencies():\n # upgrade pip\n print(\"Upgrading/installing any required dependencies...\")\n subprocess.run([\"python\", \"-m\", \"pip\", \"install\", \"--user\",\n \"--upgrade\", \"pip\", \"--no-warn-script-location\"],\n shell=True, check=True)\n print(\"pip package manager has been upgraded to the latest version\")\n\n # upgrade/install dependencies such as robot framework\n subprocess.run([\"python\", \"-m\", \"pip\", \"install\", \"--user\",\n \"--upgrade\", \"--no-warn-script-location\", \"-r\",\n os.path.join(os.path.curdir, \"requirements.txt\")],\n shell=True, check=True)\n print(\"Robot framework has been upgraded to the latest version\")\n print(\"PyQT5 has been upgraded to the latest version\")", "def install_deps():\n dist = check_distribution()\n if dist == Distribution.TEXLIVE:\n texlive_install_deps()\n elif dist == Distribution.MIKTEX:\n miktex_install_deps()\n\n install_pygments()", "def _init_remote():\r\n require('path', provided_by = [staging])\r\n\r\n create_project_dir()\r\n deploy_nosyncdb()\r\n create_virtualenv()\r\n install_requirements()\r\n create_db()\r\n create_secret_settings()\r\n syncdb()\r\n createsuperuser()\r\n install_site()\r\n reload()", "def texlive_install_deps():\n print('Installing dependencies...')\n subprocess.run([\"tlmgr\", \"install\"] + read_deps())\n print('Dependencies installed')", "def install_with_npm_cache(self, directory, silent=False):\n timer = Timer()\n program_name = 'npm-cache'\n if not self.context.test('which', program_name):\n program_name = os.path.join(directory, 'node_modules', '.bin', 'npm-cache')\n if not self.context.exists(program_name):\n logger.verbose(\"Installing npm-cache locally (because it's not globally installed) ..\")\n self.context.execute('npm', 'install', 'npm-cache', directory=directory, silent=silent)\n install_command = [program_name, 'install', 'npm', self.production_option]\n logger.info(\"Running command: %s\", quote(install_command))\n self.context.execute(*install_command, directory=directory, silent=silent)\n logger.verbose(\"Took %s to install with npm-cache.\", timer)", "def set_up(dev=False):\n 
_install_dependencies()", "def setup():\n global venvs\n \n try:\n os.mkdir(basedir)\n except OSError as e:\n if e.errno != errno.EEXIST:\n raise\n os.chdir(basedir)\n \n # Delete virtualenvs and recreate\n for venv in glob('venv-*'):\n shutil.rmtree(venv)\n for py in available_python_versions():\n check_call(['virtualenv', '-p', py, '--system-site-packages', 'venv-%s' % py])\n venvs.append((py, 'venv-%s' % py))\n \n # Check out and update the repository\n if not os.path.exists('ipython'):\n try :\n check_call(['git', 'clone', ipy_repository])\n except CalledProcessError :\n check_call(['git', 'clone', ipy_http_repository])\n os.chdir(repodir)\n check_call(['git', 'checkout', 'master'])\n try :\n check_call(['git', 'pull', ipy_repository, 'master'])\n except CalledProcessError :\n check_call(['git', 'pull', ipy_http_repository, 'master'])\n os.chdir(basedir)", "def install():\n remote_egg_path = os.path.join(remote_egg_dir, get_egg_name())\n sudo('easy_install -U %s' % remote_egg_path)\n sudo('rm %s' % remote_egg_path)", "def _install_dependencies():\n logger.info('Installing S2E dependencies')\n\n all_install_packages = _compute_dependencies()\n if not all_install_packages:\n return\n\n install_packages = []\n deb_package_urls = []\n for package in all_install_packages:\n if '.deb' in package:\n deb_package_urls.append(package)\n else:\n install_packages.append(package)\n\n install_opts = ['--no-install-recommends']\n env = {}\n\n env['DEBIAN_FRONTEND'] = 'noninteractive'\n install_opts = ['-y'] + install_opts\n\n try:\n # Enable 32-bit libraries\n dpkg_add_arch = sudo.bake('dpkg', add_architecture=True, _fg=True)\n dpkg_add_arch('i386')\n\n # Perform apt-get install\n apt_get = sudo.bake('apt-get', _fg=True, _env=env)\n apt_get.update()\n apt_get.install(install_opts + install_packages)\n except ErrorReturnCode as e:\n raise CommandError(e) from e\n\n # Install deb files at the end\n for url in deb_package_urls:\n logger.info('Installing deb %s...', url)\n filename, _ = urllib.request.urlretrieve(url)\n os.rename(filename, f'{filename}.deb')\n apt_get = sudo.bake('apt-get', _fg=True, _env=env)\n apt_get.install(install_opts + [f'{filename}.deb'])", "def test_scan_and_find_dependencies_npm():\n manifests = [{\n \"filename\": \"npmlist.json\",\n \"filepath\": \"/bin/local\",\n \"content\": open(str(Path(__file__).parent / \"data/npmlist.json\")).read()\n }]\n res = DependencyFinder().scan_and_find_dependencies(\"npm\", manifests)\n assert \"result\" in res\n assert res['result'][0]['details'][0]['_resolved'][0]['package'] == \"body-parser\"\n assert len(res['result'][0]['details'][0]['_resolved'][0]['deps']) == 2", "def install_with_npm(self, directory, silent=False):\n timer = Timer()\n install_command = ['npm', 'install', self.production_option]\n logger.info(\"Running command: %s\", quote(install_command))\n self.context.execute(*install_command, directory=directory, silent=silent)\n logger.verbose(\"Took %s to install with npm.\", timer)", "def bootstrap():\n\n require('environment', provided_by=env.environments)\n sudo('mkdir -p %(root)s' % env, user=env.deploy_user)\n clone_repo()\n setup_dirs()\n link_config_files()\n update_services()\n create_virtualenv()\n update_requirements()\n create_local_settings()", "def install_deps_temp(self):\n if self.distribution.install_requires:\n self.distribution.fetch_build_eggs(\n self.distribution.install_requires)\n if self.distribution.tests_require:\n self.distribution.fetch_build_eggs(self.distribution.tests_require)", "def bootstrap():\n 
sub_install_packages()\n sub_install_virtualenv()\n sub_create_virtualenv()\n sub_install_python_requirements()", "def test_in_virtualenv(self):\n new_executor = self.executor.in_virtualenv('/appenv')\n output, _err = new_executor.pip.install('a-local-package').batch()\n self.assertEqual(output, 'a-local-package installed')\n new_executor_one = self.executor.patch_env(PATH='/bin')\n new_executor_two = new_executor_one.in_virtualenv('/appenv')\n output, _err = new_executor_two.pip.install('a-local-package').batch()\n self.assertEqual(output, 'a-local-package installed')", "def install(self):\n\n self.clean_git_checkout(self.git_repo, '/src')\n\n self.__copy_config_templates();\n\n self.local(\"sudo pip install -r src/requirements.txt --upgrade\")\n\n if not self.is_local():\n PiService.install(self) #copy to remote\n\n self.sudo(\"pip install -r src/requirements.txt --upgrade\")", "def setUpEnvironmentVariables(basedir):\n\tif sys.platform == 'win32':\n\t\toldpath = os.environ[\"PATH\"]\n\t\tcwd = os.getcwd()\n\t\tos.environ[\"PATH\"] = oldpath + ';' + cwd + fileSeperator + basedir + fileSeperator + \"platform-tools\"\n\t\tprint os.environ[\"PATH\"]\n\telse:\n\t\tcwd = os.getcwd()\n\t\toldpath = os.environ[\"PATH\"]\n\t\tnewpath = cwd + fileSeperator + basedir + fileSeperator + \"tools:\" + fileSeperator + cwd + fileSeperator + basedir + fileSeperator + \"platform-tools\"\n\t\tos.environ[\"PATH\"] = oldpath + fileSeperator + newpath", "def set_normal_environment(self):\n if 'RUSTUP_DIST_SERVER' in os.environ:\n self._download_url = os.environ['RUSTUP_DIST_SERVER']\n else:\n self._download_url = 'https://static.rust-lang.org'", "def use_npm_ci(path):\n # https://docs.npmjs.com/cli/ci#description\n with open(os.devnull, 'w') as fnull:\n if ((os.path.isfile(os.path.join(path,\n 'package-lock.json')) or\n os.path.isfile(os.path.join(path,\n 'npm-shrinkwrap.json'))) and\n subprocess.call(\n [NPM_BIN, 'ci', '-h'],\n stdout=fnull,\n stderr=subprocess.STDOUT) == 0):\n return True\n return False", "def install_requirements():\n run('source %(env_path)s/bin/activate; pip install -U -r %(repo_path)s/requirements.txt' % env)", "def install_packages():\n with open(\"requirements.txt\", \"w\") as requirements_file:\n subprocess.run([\"pipenv\", \"lock\", \"-r\"], stdout=requirements_file)\n\n subprocess.run(\n [\"pip\", \"install\", \"-r\", \"requirements.txt\", \"--no-deps\", \"-t\", BUILD_DIR]\n )", "def use_npm_ci(path):\n # https://docs.npmjs.com/cli/ci#description\n with open(os.devnull, 'w') as fnull:\n if ((os.path.isfile(os.path.join(path,\n 'package-lock.json')) or\n os.path.isfile(os.path.join(path,\n 'npm-shrinkwrap.json'))) and\n subprocess.call(\n ['npm', 'ci', '-h'],\n stdout=fnull,\n stderr=subprocess.STDOUT\n ) == 0):\n return True\n return False", "def setup_cross():\n if not os.path.exists(cross_prefix):\n docmd(\"mkdir %s\" % cross_prefix)\n epath = os.environ[\"PATH\"]\n set_evar(\"PATH\", \"%s/bin:%s\" % (cross_prefix, epath))", "def YumInstall(vm):\n vm.InstallPackages(YUM_PACKAGES)\n _Install(vm)", "def install(where='local'):\n config = get_config(where)\n print 'using configuration: %s' % config\n with settings(host_string=config['host_string']):\n if not files.exists(config['installation_dir']):\n run('git clone %(git_repo)s %(installation_dir)s' % config)\n with cd(config['installation_dir']):\n run('git submodule init')\n run('git submodule update --init')\n\n with settings(host_string=config['host_string']), cd(config['installation_dir']):\n run('python2.7 bootstrap.py -c 
%(cfg)s' % config)\n deploy(where)\n secs = 4\n sleep(secs)\n init_db(where)", "def update_path():\n\timport sys\n\tsys.path.append(directory_root())", "def setup():\r\n global venvs\r\n\r\n try:\r\n os.mkdir(basedir)\r\n except OSError, e:\r\n if e.errno != errno.EEXIST:\r\n raise\r\n os.chdir(basedir)\r\n\r\n # Delete virtualenvs and recreate\r\n for venv in glob('venv-*'):\r\n shutil.rmtree(venv)\r\n for py in available_python_versions():\r\n check_call(['virtualenv', '-p', py,\r\n '--system-site-packages', 'venv-%s' % py])\r\n venvs.append((py, 'venv-%s' % py))\r\n\r\n # Check out and update the repository\r\n if not os.path.exists('Theano'):\r\n try:\r\n check_call(['git', 'clone', ipy_repository])\r\n except CalledProcessError:\r\n check_call(['git', 'clone', ipy_http_repository])\r\n os.chdir(repodir)\r\n check_call(['git', 'checkout', 'master'])\r\n try:\r\n check_call(['git', 'pull', ipy_repository, 'master'])\r\n except CalledProcessError:\r\n check_call(['git', 'pull', ipy_http_repository, 'master'])\r\n os.chdir(basedir)", "def setup():\n require('hosts', provided_by=[prod])\n require('code_root')\n sudo('apt-get update')\n sudo('apt-get install -y python-setuptools')\n sudo('easy_install pip')\n sudo('pip install virtualenv')\n sudo('aptitude install -y apache2')\n sudo('aptitude install -y libapache2-mod-wsgi')\n sudo('apt-get install -y nginx')\n update_webserver_config()\n sudo('mkdir -p %s; cd %s; virtualenv .;' % (env.code_root, env.code_root))\n sudo('cd %s;mkdir releases; mkdir shared; mkdir packages; mkdir shared/media; mkdir shared/media/file;' % (env.code_root))\n deploy()", "def install_binaries(url):\n cmd = ('curl -sSL {url}'\n '\\n| tar zx -C /opt'\n '\\n&& cp /opt/fsl/etc/fslconf/fsl.sh /etc/profile.d/'\n '\\n&& FSLPYFILE=/opt/fsl/etc/fslconf/fslpython_install.sh'\n '\\n&& [ -f $FSLPYFILE ] && $FSLPYFILE -f /opt/fsl -q || true'\n ''.format(url=url))\n cmd = indent(\"RUN\", cmd)\n\n env_cmd = (\"FSLDIR=/opt/fsl\"\n \"\\nPATH=/opt/fsl/bin:$PATH\")\n env_cmd = indent(\"ENV\", env_cmd)\n\n return \"\\n\".join((cmd, env_cmd))", "def set_dev(session):\n set_environment_variables(PYBAMM_ENV, session=session)\n envbindir = session.bin\n session.install(\"-e\", \".[all]\")\n session.install(\"cmake\")\n if sys.platform == \"linux\" or sys.platform == \"darwin\":\n session.run(\n \"echo\",\n \"export\",\n f\"LD_LIBRARY_PATH={PYBAMM_ENV['LD_LIBRARY_PATH']}\",\n \">>\",\n f\"{envbindir}/activate\",\n external=True, # silence warning about echo being an external command\n )", "def update():\n\n # update plone\n with cd(env.directory):\n sudo('git pull', user=env.deploy_user)\n\n with cd(env.directory):\n stop()\n sudo('git checkout {}'.format(env.branch), user=env.deploy_user)\n\n # bootstrap\n\n if env.latest:\n sudo('./bin/pip install --no-cache-dir -r https://raw.githubusercontent.com/plone/buildout.coredev/5.2/requirements.txt', user=env.deploy_user) # noqa: E501\n sudo('rm -rf ./src-mrd', user=env.deploy_user)\n else:\n sudo('./bin/pip install --no-cache-dir -r requirements.txt', user=env.deploy_user) # noqa: E501\n\n sudo('rm -rf ./var/blobstorage ./var/filestorage .installed.cfg ', user=env.deploy_user) # noqa: E501\n\n # buildout\n sudo('./bin/buildout', user=env.deploy_user)\n\n # start zope\n start()\n sudo(\"sleep 10\")\n\n # create plonesite with addons (uses different ports for py2 and py3)\n if env.latest:\n if env.python3:\n with cd(env.directory):\n sudo(\"/usr/bin/wget -O- --user=admin --password=admin 
--post-data='site_id=Plone&form.submitted=True&title=Website&default_language=de&portal_timezone=Europe/Berlin&extension_ids=plonetheme.barceloneta:default&extension_ids=plone.app.contenttypes:plone-content&extension_ids=plonedemo.site:default' http://127.0.0.1:{zeoclient_port}/@@plone-addsite &> ./var/log/wget_demo-plone-latest-py3.log\".format(zeoclient_port=env.zeoclient_port), user=env.deploy_user) # noqa: E501\n else:\n with cd(env.directory):\n sudo(\"/usr/bin/wget -O- --user=admin --password=admin --post-data='site_id=Plone&form.submitted=True&title=Website&default_language=de&portal_timezone=Europe/Berlin&extension_ids=plonetheme.barceloneta:default&extension_ids=plone.app.contenttypes:plone-content&extension_ids=plonedemo.site:default' http://127.0.0.1:{zeoclient_port}/@@plone-addsite &> ./var/log/wget_demo-plone-latest-py2.log\".format(zeoclient_port=env.zeoclient_port), user=env.deploy_user) # noqa: E501\n else:\n with cd(env.directory):\n sudo(\"/usr/bin/wget -O- --user=admin --password=admin --post-data='site_id=Plone&form.submitted=True&title=Website&default_language=de&portal_timezone=Europe/Berlin&extension_ids=plonetheme.barceloneta:default&extension_ids=plone.app.contenttypes:plone-content&extension_ids=plonedemo.site:default' http://127.0.0.1:{zeoclient_port}/@@plone-addsite &> ./var/log/wget_demo-plone.log\".format(zeoclient_port=env.zeoclient_port), user=env.deploy_user) # noqa: E501\n\n # load page to warmup\n sudo('/usr/bin/wget -S -qO- {domain} > /dev/null'.format(domain=env.domain), user=env.deploy_user) # noqa: E501", "def _prepare_cli(self):\n self.logger.info('installing cli...')\n\n self._get_resource(self.cli_package_url, ops='-LO', sudo=True)\n self._get_resource('https://bootstrap.pypa.io/get-pip.py',\n pipe_command='sudo python2.7 -')\n self._execute_command('pip install virtualenv', sudo=True)\n\n last_ind = self.cli_package_url.rindex('/')\n return self.cli_package_url[last_ind + 1:]", "def install():\n return InstallGit()", "def install(self, directory, silent=False):\n timer = Timer()\n package_file = os.path.join(directory, 'package.json')\n modules_directory = os.path.join(directory, 'node_modules')\n dependencies = self.extract_dependencies(package_file)\n logger.info(\"Installing Node.js package(s) in %s ..\", format_path(directory))\n if dependencies:\n file_in_cache = self.get_cache_file(dependencies)\n logger.verbose(\"Checking the cache (%s) ..\", file_in_cache)\n if self.read_from_cache and self.context.is_file(file_in_cache):\n self.install_from_cache(file_in_cache, modules_directory)\n logger.info(\"Done! Took %s to install %s from cache.\",\n timer, pluralize(len(dependencies), \"dependency\", \"dependencies\"))\n else:\n self.installer_method(directory, silent=silent)\n self.prune_dependencies(directory)\n if self.write_to_cache:\n self.add_to_cache(modules_directory, file_in_cache)\n logger.info(\"Done! Took %s to install %s using npm.\",\n timer, pluralize(len(dependencies), \"dependency\", \"dependencies\"))\n self.clean_cache()\n else:\n logger.info(\"Nothing to do! 
(no dependencies to install)\")\n return dependencies", "def upgrade_packages():\n\n require('environment', provided_by=env.environments)\n system.update_apt_sources()\n system.upgrade_apt_packages()", "def install_p2k():\n if 'pkgs' not in env:\n env.pkgs = []\n\n pkgs = [\n 'python2',\n 'git',\n 'mercurial',\n 'emacs',\n # For flymake\n 'xmlstarlet',\n #'csslint-git',\n ]\n require.arch.packages(pkgs)\n\n python_cmd = 'python2.7'\n virtualenv = '.virtualenvs/emacs_p2k'\n require.python.pip(python_cmd=python_cmd)\n require.python.package(\n 'virtualenv',\n python_cmd=python_cmd,\n use_sudo=True,\n )\n require.python.package(\n 'virtualenvwrapper',\n python_cmd=python_cmd,\n use_sudo=True,\n )\n require.python.virtualenv(\n virtualenv,\n python_cmd=python_cmd,\n venv_python='python2.7',\n )\n\n with python.virtualenv(virtualenv):\n here = os.path.dirname(__file__)\n requirements = '%(here)s/requirements.txt' % locals()\n put(requirements, '/tmp/requirements.txt')\n require.python.requirements(\n '/tmp/requirements.txt',\n )\n\n # Synchronize user\n dotfiles.sync('fabrecipes/emacs/emacs_p2k/user/', '$HOME/')\n dotfiles.sync('fabrecipes/emacs/emacs_p2k/sys/', '/', use_sudo='true')", "def install_dependencies():\n\n # check python version and verify we are using Python 3\n if sys.version[0] < '3':\n print(\"ERROR: python version 3 required. You are using version \"\n \"{}\".format(sys.version))\n print(\"You must install python 3 from https://www.python.org\")\n print(\"Make sure to check the 'pip' package manager option when\")\n print(\"installing python\")\n return\n try:\n import pip\n except ModuleNotFoundError:\n print(\"The python 'pip' package manager is required.\")\n print(\"Go to https://www.python.org and download Python 3\")\n print(\"When re-installing, select 'modify' and make sure\")\n print(\"to check the 'pip' option\")\n return\n\n print(\"Python 3 and pip is installed\")\n\n # upgrade/install dependencies such as robot framework\n subprocess.run([\"python\", \"-m\", \"pip\", \"install\", \"-q\", \"--user\",\n \"--no-warn-script-location\", \"-r\",\n os.path.join(os.path.curdir, \"requirements.txt\")],\n shell=True, check=True)\n print(\"Robot framework is installed and up to date\")\n print(\"PyQT5 is installed and up to date\")", "def install_deps():\n default = open('requirements.txt', 'r').readlines()\n new_pkgs = []\n links = []\n for resource in default:\n if 'git+https' in resource:\n pkg = resource.split('#')[-1]\n links.append(resource.strip())\n new_pkgs.append(pkg.replace('egg=', '').rstrip())\n else:\n new_pkgs.append(resource.strip())\n return new_pkgs, links", "def upgrade_kernel():\n execute(\"upgrade_kernel_node\", env.host_string)", "def bootstrap():\n _require_environment()\n\n adduser()\n install_python()\n install_git()\n install_apache()\n install_mysql()\n setup_project()", "def do_base_setup(run_as_user, branch, base_path, dist_path):\n #install some necessary base deps\n runcmd(\"apt-get update\")\n runcmd(\"apt-get -y install git-core software-properties-common python-software-properties build-essential ssl-cert\")\n runcmd(\"apt-get update\")\n #node-gyp building for insight has ...issues out of the box on Ubuntu... 
use Chris Lea's nodejs build instead, which is newer\n runcmd(\"apt-get -y remove nodejs npm gyp\")\n runcmd(\"add-apt-repository -y ppa:chris-lea/node.js\")\n runcmd(\"apt-get update\")\n runcmd(\"apt-get -y install nodejs\") #includes npm\n\n #Create xcp user (to run bitcoind, counterpartyd, counterblockd) if not already made\n try:\n pwd.getpwnam(USERNAME)\n except:\n logging.info(\"Creating user '%s' ...\" % USERNAME)\n runcmd(\"adduser --system --disabled-password --shell /bin/bash --group %s\" % USERNAME)\n \n #add the run_as_user to the xcp group\n runcmd(\"adduser %s %s\" % (run_as_user, USERNAME))\n \n #Check out counterpartyd-build repo under this user's home dir and use that for the build\n git_repo_clone(branch, \"counterpartyd_build\", \"https://github.com/CounterpartyXCP/counterpartyd_build.git\", run_as_user)", "def _node_dependencies(self):\n dependencies = []\n if self._requires_extensions():\n self._inject_extensions_build(dependencies)\n if self._requires_npm():\n dependencies.append('- script: |')\n dependencies.append(' npm install')\n dependencies.append(' npm run build --if-present')\n dependencies.append(' npm prune --production')\n\n return dependencies", "def set_installed_packages():\n global INSTALLED_PACKAGES, REQUIRED_VERSION\n if INSTALLED_PACKAGES:\n return\n\n if os.path.exists(BIN_PYTHON):\n pip = subprocess.Popen(\n (BIN_PYTHON, '-m', 'pip', 'freeze'),\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE\n )\n (stdout, stderr) = pip.communicate()\n pip.wait()\n\n INSTALLED_PACKAGES = [normalize_package_name(r.decode().split('==')[0].lower()) for r in stdout.split()]\n REQUIRED_VERSION = next((package for package in INSTALLED_PACKAGES if re.match(r'^lore[!<>=]', package)), None)\n if REQUIRED_VERSION:\n REQUIRED_VERSION = re.split(r'[!<>=]', REQUIRED_VERSION)[-1]", "def setup(self):\n\n if os.name == 'nt':\n windows_env_start.print_banner(bootstrap=True, no_shell_file=False)\n else:\n enable_colors()\n\n steps = [\n ('CIPD package manager', self.cipd),\n ('Python environment', self.virtualenv),\n ('Host tools', self.host_tools),\n ]\n\n if self._is_windows:\n steps.append((\"Windows scripts\", self.win_scripts))\n\n self._log(\n Color.bold('Downloading and installing packages into local '\n 'source directory:\\n'))\n\n max_name_len = max(len(name) for name, _ in steps)\n\n self._env.comment('''\nThis file is automatically generated. 
DO NOT EDIT!\nFor details, see $PW_ROOT/pw_env_setup/py/pw_env_setup/env_setup.py and\n$PW_ROOT/pw_env_setup/py/pw_env_setup/environment.py.\n'''.strip())\n\n if not self._is_windows:\n self._env.comment('''\nFor help debugging errors in this script, uncomment the next line.\nset -x\nThen use `set +x` to go back to normal.\n'''.strip())\n\n self._env.echo(\n Color.bold(\n 'Activating environment (setting environment variables):'))\n self._env.echo('')\n\n for name, step in steps:\n self._log(' Setting up {name:.<{width}}...'.format(\n name=name, width=max_name_len),\n end='',\n flush=True)\n self._env.echo(\n ' Setting environment variables for {name:.<{width}}...'.\n format(name=name, width=max_name_len),\n newline=False,\n )\n\n start = time.time()\n spin = spinner.Spinner()\n with spin():\n result = step(spin)\n stop = time.time()\n\n self._log(result.status_str(stop - start))\n\n self._env.echo(result.status_str())\n for message in result.messages():\n sys.stderr.write('{}\\n'.format(message))\n self._env.echo(message)\n\n if not result.ok():\n return -1\n\n self._log('')\n self._env.echo('')\n\n self._env.finalize()\n\n self._env.echo(Color.bold('Checking the environment:'))\n self._env.echo()\n\n self._env.doctor()\n self._env.echo()\n\n self._env.echo(\n Color.bold('Environment looks good, you are ready to go!'))\n self._env.echo()\n\n with open(self._shell_file, 'w') as outs:\n self._env.write(outs)\n\n deactivate = os.path.join(\n self._install_dir,\n 'deactivate{}'.format(os.path.splitext(self._shell_file)[1]))\n with open(deactivate, 'w') as outs:\n self._env.write_deactivate(outs)\n\n config = {\n # Skipping sysname and nodename in os.uname(). nodename could change\n # based on the current network. sysname won't change, but is\n # redundant because it's contained in release or version, and\n # skipping it here simplifies logic.\n 'uname': ' '.join(getattr(os, 'uname', lambda: ())()[2:]),\n 'os': os.name,\n }\n\n with open(os.path.join(self._install_dir, 'config.json'), 'w') as outs:\n outs.write(\n json.dumps(config, indent=4, separators=(',', ': ')) + '\\n')\n\n if self._json_file is not None:\n with open(self._json_file, 'w') as outs:\n self._env.json(outs)\n\n return 0", "def update_envrionment():\n l.critical(\"Try getting Git remote repo...\")\n try:\n import git\n repo = git.Repo()\n o = repo.remotes.origin\n l.info(o.fetch())\n l.info(o.pull())\n except Exception as err:\n l.error(\n \"An error happened while updating COMET source code.\", exc_info=True\n )\n\n l.critical(\"Checking conda environment requirements...\")\n try:\n osType = sys.platform\n if \"win\" in osType.lower():\n version = \"COMET/resources/requirements_Winx86.yml\"\n elif \"linux\" in osType.lower():\n version = \"COMET/resources/requirements_LINUX_x86_64.yml\"\n else:\n version = \"COMET/resources/requirements_MacOS.yml\"\n os.system(\n \"conda env update --prefix ./env --file {} --prune\".format(version)\n )\n except Exception as err:\n l.error(\n \"An error happened while updating COMET environment.\", exc_info=True\n )\n\n l.critical(\"Please restart COMET for the updates to have an effect!\")", "def install_test_deps():\n workon = '.'\n if VENVWRAPPER:\n workon=os.getenv(\"WORKON_HOME\")\n cmd = '{workon}/{env}/bin/pip install nose-cov webtest mock'.format(\n envs=ENVS, env=VENV, workon=workon)\n print(cmd)\n subprocess.call(cmd.split())", "def set_download(self):\n print 'Setting download command...'\n wget = 0\n urllib = 0\n # JULIE : Cut proxy stuff...was causing problems (see scalapack 
installer if you want it back)\n if urllib == 0:\n # if urllib2 is not present checks if wget is present\n # in the PATH and if yes it sets the download command\n # to be wget\n print \"Checking availablility of wget...\",\n path=str(os.getenv('PATH')).split(os.pathsep)\n for i in path:\n if (os.path.isfile(os.path.join(i,'wget'))):\n print \"available\"\n wget = 1\n break\n if wget:\n # test wget\n print \"Testing wget...\",\n comm = 'wget --tries=2 --timeout=5 http://www.netlib.org/lapack/index'\n (output, error, retz) = runShellCommand(comm)\n if(retz != 0):\n print 'not working.'\n wget = -1\n else:\n print \"working\"\n self.downcmd=\"wget\"\n os.remove(\"index\")\n return\n else:\n # wget not available\n print \"not available\"\n wget=0", "def install_requirements():\n _git_pull()\n _install_requirements()\n _syncdb()\n _migrate()\n _restart_webserver()", "def build_virtualenv():\n\n puts(yellow(\"Install dependencies from requirements.txt\"))\n with cd(env.source_dir):\n with prefix('source %s' % in_rwd('bin/activate')):\n sudo('pip install -r %s' % env.requirements_file,\n user=env.app_user)\n sudo('python setup.py develop', user=env.app_user)", "def deps_install(deps, existing_env, env_opts):\n if not existing_env:\n # first remove previous pytorch-deps env\n cmd = [\"conda\", \"env\", \"remove\", \"--yes\"] + env_opts\n p = subprocess.run(cmd, check=True)\n # install new deps\n inst_opt = \"install\" if existing_env else \"create\"\n cmd = [\"conda\", inst_opt, \"--yes\", \"--no-deps\"] + env_opts + deps\n p = subprocess.run(cmd, check=True)", "def launch_analysis_v2():\n\n # add explicit instructions for user\n\n os.system(\"pip install -r requirements.txt\")\n os.chdir(f'{os.getcwd()}/gui')\n\n # explicit version checking\n if os.system(\"node -v\") != 0:\n print(\"Please install node before proceeding.\")\n exit(-1)\n\n if os.system(\"npm install\") != 0:\n print(\"Could not install npm packages. \")\n\n os.system(\"npm run start-backend &\")\n os.system(\"npm start\")", "def Install(vm):\n vm.Install('openjdk')\n # TODO(user): replace with Python 3 when supported.\n # https://github.com/brianfrankcooper/YCSB/issues/1459\n vm.Install('python')\n vm.InstallPackages('curl')\n ycsb_url = (\n _ycsb_tar_url\n or FLAGS.ycsb_tar_url\n or YCSB_URL_TEMPLATE.format(FLAGS.ycsb_version)\n )\n install_cmd = (\n 'mkdir -p {0} && curl -L {1} | '\n 'tar -C {0} --strip-components=1 -xzf - '\n # Log4j 2 < 2.16 is vulnerable to\n # https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44228.\n # YCSB currently ships with a number of vulnerable jars. 
None are used by\n # PKB, so simply exclude them.\n # After https://github.com/brianfrankcooper/YCSB/pull/1583 is merged and\n # released, this will not be necessary.\n # TODO(user): Update minimum YCSB version and remove.\n \"--exclude='**/log4j-core-2*.jar' \"\n )\n vm.RemoteCommand(install_cmd.format(YCSB_DIR, ycsb_url))\n if _GetVersion(FLAGS.ycsb_version) >= 11:\n vm.Install('maven')\n vm.RemoteCommand(install_cmd.format(HDRHISTOGRAM_DIR, HDRHISTOGRAM_TAR_URL))\n # _JAVA_OPTIONS needed to work around this issue:\n # https://stackoverflow.com/questions/53010200/maven-surefire-could-not-find-forkedbooter-class\n # https://stackoverflow.com/questions/34170811/maven-connection-reset-error\n vm.RemoteCommand(\n 'cd {hist_dir} && _JAVA_OPTIONS=-Djdk.net.URLClassPath.'\n 'disableClassPathURLCheck=true,https.protocols=TLSv1.2 '\n '{mvn_cmd}'.format(\n hist_dir=HDRHISTOGRAM_DIR, mvn_cmd=maven.GetRunCommand('install')\n )\n )", "def rebuild_core(self):\n return_dir = os.getcwd()\n os.chdir(self.FRONTEND_SOURCE_DIR)\n if os.path.exists(self.FRONTEND_BUILD_DIR):\n shutil.rmtree(self.FRONTEND_BUILD_DIR)\n packages_installed = subprocess.call([\"npm\", \"install\"])\n if packages_installed != 0:\n raise Exception(\n \"please make sure npm is installed, otherwise view \"\n \"the above error for more info.\"\n )\n webpack_complete = subprocess.call([\"npm\", \"run\", \"dev\"])\n if webpack_complete != 0:\n raise Exception(\n \"Webpack appears to have failed to build your \"\n \"frontend. See the above error for more information.\"\n )\n os.chdir(return_dir)", "def setup_environment(self):\n self.run_command(\"cd {}\".format(quote(str(self.builddir))))\n env_vars = self._build_env_variables_string()\n if env_vars:\n env_vars = quote(env_vars)\n command = \"{} DISTRO={} MACHINE={} . {} build-{}\".format(\n env_vars,\n quote(self.distro),\n quote(self.machine),\n quote(self.init_env_file),\n quote(self.distro),\n )\n self.run_command(command)", "def download_and_install(self):\n if self.is_installed_from_bin:\n try:\n self.installer.install_from_rpm_py_package()\n return\n except RpmPyPackageNotFoundError as exc:\n Log.warn('RPM Py Package not found. 
reason: {0}'.format(exc))\n\n # Download and install from the source.\n top_dir_name = self.downloader.download_and_expand()\n rpm_py_dir = os.path.join(top_dir_name, 'python')\n\n setup_py_in_found = False\n with Cmd.pushd(rpm_py_dir):\n if self.installer.setup_py.exists_in_path():\n setup_py_in_found = True\n self.installer.run()\n\n if not setup_py_in_found:\n self.installer.install_from_rpm_py_package()", "def install_project(ctx, path):\n with ctx.cd(path):\n ctx.run(\"newt -v upgrade\")", "def test_npm_installed_pkgs(npm):\n ret = npm.installed(\n name=\"unused\",\n pkgs=[\"[email protected]\", \"[email protected]\"],\n registry=\"https://registry.npmjs.org/\",\n )\n assert ret.result is True", "def update_requirements():\n\n require('code_root', provided_by=env.environments)\n requirements = os.path.join(env.code_root, 'requirements')\n sdists = os.path.join(requirements, 'sdists')\n base_cmd = ['pip install']\n base_cmd += ['-q -E %(virtualenv_root)s' % env]\n base_cmd += ['--no-index --find-links=file://%s' % sdists]\n # install GDAL by hand, before anything else that might depend on it\n cmd = base_cmd + ['--no-install \"GDAL==1.6.1\"']\n sudo(' '.join(cmd), user=env.deploy_user)\n # this directory won't exist if GDAL was already installed\n if files.exists('%(virtualenv_root)s/build/GDAL' % env):\n sudo('rm -f %(virtualenv_root)s/build/GDAL/setup.cfg' % env, user=env.deploy_user)\n with cd('%(virtualenv_root)s/build/GDAL' % env):\n sudo('%(virtualenv_root)s/bin/python setup.py build_ext '\n '--gdal-config=gdal-config '\n '--library-dirs=/usr/lib '\n '--libraries=gdal1.6.0 '\n '--include-dirs=/usr/include/gdal '\n 'install' % env, user=env.deploy_user)\n # force reinstallation of OpenBlock every time\n with settings(warn_only=True):\n sudo('pip uninstall -y -E %(virtualenv_root)s ebpub ebdata obadmin' % env)\n for file_name in ['ebpub.txt', 'ebdata.txt', 'obadmin.txt', 'openrural.txt']:\n apps = os.path.join(requirements, file_name)\n cmd = base_cmd + ['--requirement %s' % apps]\n sudo(' '.join(cmd), user=env.deploy_user)", "def run(self):\n install = self.distribution.get_command_obj('install')\n install.install_scripts = self.temp_install_dir\n install.install_base = self.temp_install_dir\n install.install_platlib = self.temp_install_dir \n install.install_purelib = self.temp_install_dir \n install.install_data = self.temp_install_dir \n install.install_lib = self.temp_install_dir \n install.install_headers = self.temp_install_dir \n install.run()\n\n # Horrible trick to reload nipy with our temporary instal\n for key in sys.modules.keys():\n if key.startswith('nipy'):\n sys.modules.pop(key, None)\n sys.path.append(os.path.abspath(self.temp_install_dir))\n # Pop the cwd\n sys.path.pop(0)\n import nipy", "def install_requirements():\n require(\"release\", provided_by=[deploy])\n with cd(\"%(path)s\" % env):\n sudo(\"./bin/pip install -r ./releases/%(release)s/requirements.txt\" % env)", "def update():\n with cd(env.directory):\n\n # update plone\n result = sudo('git pull', user=env.deploy_user)\n quick_update = 'Already up-to-date.' 
in result\n\n if quick_update:\n # Plonesite Recipe replaces site on the fly\n print 'UPDATE: No full Buildout required: {0:s}'.format(result)\n # buildout\n stop()\n sudo('./bin/buildout install plonesite', user=env.deploy_user)\n start()\n\n else:\n stop()\n sudo('git checkout {}'.format(env.branch), user=env.deploy_user)\n\n # bootstrap\n sudo('./bin/pip install -r requirements.txt', user=env.deploy_user)\n\n sudo('rm -rf ./var/blobstorage', user=env.deploy_user)\n sudo('rm -rf ./var/filestorage', user=env.deploy_user)\n sudo('rm .installed.cfg', user=env.deploy_user)\n\n # buildout\n sudo('./bin/buildout', user=env.deploy_user)\n\n # start zope\n start()\n sudo('./bin/zeoclient_debug adduser admin admin', user=env.deploy_user) # noqa: E501\n\n # load page twice to fill cache and prevent a bug showing raw html\n sudo('/usr/bin/wget -S -qO- demo.starzel.de > /tmp/demo.starzel.de.html', user=env.deploy_user) # noqa: E501\n sudo('/usr/bin/wget -S -qO- demo.starzel.de > /tmp/demo.starzel.de.html', user=env.deploy_user) # noqa: E501", "def install():\n return {\n \"actions\": [TaskCreator.get_pip() + \" install --upgrade dist/*.whl\"],\n \"verbosity\": 2,\n \"setup\": [\"make_distribution\"],\n }", "def _install_dependencies(self):\n\n requirements_file = self.app_directory.joinpath('requirements.txt')\n\n package_copy_required = False\n if requirements_file.exists():\n cmd = [\n sys.executable,\n '-m',\n 'pip',\n 'install',\n '-r',\n str(requirements_file),\n '-t',\n str(self.build_directory),\n ]\n package_copy_required = True\n else:\n cmd = [\n sys.executable,\n '-m',\n 'pip',\n 'install',\n '.',\n '-t',\n str(self.build_directory),\n ]\n\n logger.debug('Running subprocess cmds: %s', cmd)\n\n try:\n _ = subprocess.run(cmd, check=True)\n except Exception:\n logger.error('Pip failed to install the app using cmd=[%s].', cmd)\n raise\n\n if package_copy_required:\n shutil.copytree(\n self.package_dir, self.build_directory.joinpath(self.package_name)\n )", "def sub_install_virtualenv():\n sudo('pip install virtualenv') # Need sudo b/c installing to system Python", "def build_essential(self):\n self.install_package(\"build-essential\")", "def update_project():\n with cd(env.code_dir):\n with _virtualenv():\n run('git pull origin master')\n install_requirements()\n perform_migration()\n collect_static()", "def install():\n deploy()\n configure()", "def sub_install_python_requirements_aws():\n # Activate the virtualenv\n activate = 'source {0}/{1}/bin/activate'.format(\n env.virtualenv['dir'], env.virtualenv['name'])\n run(activate)\n\n # make sure the directory is there\n run('mkdir -p /home/ubuntu')\n\n # put the local directory '/Users/jenniferchen/github/HS698-project'\n # - it contains files or subdirectories\n # to the ubuntu server\n put('/Users/jenniferchen/github/HS698-project',\n '/home/ubuntu')\n\n # Install Python requirements\n install = 'pip install -r ' \\\n '/home/ubuntu/HS698-project/Flask_app/requirements.txt'\n\n # Join and execute the commands\n sudo(install)\n # Run the file app.py to start the Flask app\n dev_server = 'python HS698-project/Flask_app/app.py'\n run(dev_server)" ]
[ "0.6407233", "0.63381755", "0.6324544", "0.6251576", "0.6231938", "0.6057195", "0.5836138", "0.5828892", "0.5789863", "0.5718106", "0.56944567", "0.56539077", "0.5616942", "0.5609459", "0.5526702", "0.5480729", "0.5467356", "0.54643357", "0.5417278", "0.5364148", "0.53312916", "0.53000194", "0.5296201", "0.5286228", "0.5278628", "0.527807", "0.52618295", "0.52430105", "0.5237947", "0.5236408", "0.5230619", "0.52288735", "0.5223743", "0.5218902", "0.5192772", "0.51892906", "0.51780736", "0.5176079", "0.5160649", "0.51597035", "0.514942", "0.51211697", "0.5116566", "0.51072943", "0.5101761", "0.5093161", "0.5069732", "0.5064167", "0.505874", "0.5045396", "0.50448006", "0.50382745", "0.5037758", "0.50347555", "0.50298584", "0.50262016", "0.5017432", "0.50030106", "0.49823275", "0.49814576", "0.49770728", "0.49693885", "0.49671173", "0.4965763", "0.49613348", "0.49445024", "0.49429098", "0.4936998", "0.49346533", "0.4929872", "0.4927141", "0.4924404", "0.49170145", "0.4912925", "0.48953924", "0.488611", "0.48818237", "0.4880531", "0.48752353", "0.4871253", "0.48710534", "0.48684394", "0.48659217", "0.4863237", "0.4861387", "0.48428392", "0.48399848", "0.483453", "0.48328096", "0.48273617", "0.48172364", "0.48172364", "0.4817057", "0.481422", "0.48131323", "0.4802675", "0.4802307", "0.47971737", "0.47966608", "0.4794028" ]
0.7617056
0
Load a module from the filesystem.
def load_module(name, path):
    loader = importlib.machinery.SourceFileLoader(name, path)
    module = types.ModuleType(loader.name)
    loader.exec_module(module)
    return module
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_module(module_name: str, module_path: str) -> object:\n spec = module_util.spec_from_file_location(module_name, module_path)\n module = module_util.module_from_spec(spec)\n spec.loader.exec_module(module) # type: ignore\n return module", "def load_module(path):\n spec = spec_from_file_location(\"module.name\", path)\n module = module_from_spec(spec)\n try:\n spec.loader.exec_module(module)\n except Exception as err:\n # ToDo: Append functions found from spec.loader.get_code(\"module.name\")\n # To some hidden attribute of the module object to be returned.\n warn(f'Exception when loading module {path}: \\n{err}')\n return module", "def load_module(path: os.PathLike):\n path = Path(path)\n pwd = Path(os.getcwd())\n os.chdir(path.parent)\n try:\n mod = import_module(path.stem)\n except ModuleNotFoundError as err:\n raise err\n finally:\n os.chdir(pwd)\n return mod", "def _load_module(modulepath):\n\n mod = __import__(modulepath)\n path = []\n for token in modulepath.split(\".\")[1:]:\n path.append(token)\n mod = getattr(mod, token)\n return mod", "def LoadModule(filename):\n (name, ext) = os.path.splitext(filename)\n\n fh = open(filename, \"r\")\n try:\n return imp.load_module(name, fh, filename, (ext, \"r\", imp.PY_SOURCE))\n finally:\n fh.close()", "def loadModule(path, doReload=False):\n relPath = Files.relName(path)\n context = Context.getContext()\n parentMod = context.package\n if parentMod is not None:\n modName = \"%s.%s\" % (parentMod.__name__,\n relPath.replace(\"/\", \".\")[:-3])\n else:\n modName = \"%s\" % (relPath.replace(\"/\", \".\")[:-3])\n if not doReload and path in _loadedModules:\n return _loadedModules[path]\n\n ns = {}\n here = os.getcwd()\n subDir = os.path.dirname(path)\n if subDir:\n os.chdir(subDir)\n\n global _loading, _curScriptPackage\n try:\n try:\n try:\n _loading = os.path.basename(path)\n _curScriptPackage = parentMod\n mod = imp.load_source(modName, os.path.basename(path))\n except Unsupported as exc:\n return\n except Exception as exc:\n print(formatImportFailure(modName, exc))\n print(\"Hmm\", exc)\n raise\n except Unsupported:\n return\n finally:\n os.chdir(here)\n return mod", "def import_module_from_module_path(path):\n return SourceFileLoader('', path).load_module()", "def loadmodule(self, name):\n\n if name in self._modules:\n return self._modules[name]()\n\n raise Error(\"No such module: {0}\".format(name))", "def load(identifier, path):\r\n\tloader = importlib.machinery.SourceFileLoader(identifier, path)\r\n\thandle = loader.load_module(identifier)\r\n\treturn handle", "def load_module(module):\n try:\n return import_module(module)\n except ImportError:\n sys.stderr.write('Unable to load the module: %s.\\n' % module)\n exit(-1)", "def load_module(module_name, file_name):\n from importlib.machinery import SourceFileLoader\n home_dir = os.path.expanduser(\"~\")\n valid_paths = [\n os.path.join(home_dir, \"Google Drive\"),\n os.path.join(home_dir, \"GoogleDrive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"Google Drive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"GoogleDrive\"),\n os.path.join(\"C:/\", \"GoogleDrive\"),\n os.path.join(\"C:/\", \"Google Drive\"),\n os.path.join(\"D:/\", \"GoogleDrive\"),\n os.path.join(\"D:/\", \"Google Drive\"),\n ]\n\n drive_path = None\n for path in valid_paths:\n if os.path.isdir(path):\n drive_path = path\n break\n\n if drive_path is None:\n raise Exception(\"Couldn't find google drive folder!\")\n\n utils_path = os.path.join(drive_path, \"_pyutils\")\n print(\"Loading [{}] 
package...\".format(os.path.join(utils_path,file_name)),flush = True)\n logger_lib = SourceFileLoader(module_name, os.path.join(utils_path, file_name)).load_module()\n print(\"Done loading [{}] package.\".format(os.path.join(utils_path,file_name)),flush = True)\n\n return logger_lib", "def load_module(file_name):\n path = temp.relpath(file_name)\n m = _load_module(path)\n logger.info(\"load_module %s\", path)\n return m", "def load_module(cls, bytes, options=None):\n\t\traise NotImplementedError(\"load_module must be implemented\")", "def load(path):\n pass", "def load_module (self, name):\n module = sys.modules.get (name)\n if module is not None:\n return module\n\n containment = self.containments.get (name)\n if containment is None:\n raise ImportError ('No such module: \\'{}\\''.format (name))\n source, filename, ispkg = containment\n\n module = imp.new_module (name)\n module.__loader__ = self\n module.__file__ = filename\n if ispkg:\n module.__path__ = [os.path.dirname (filename)]\n module.__package__ = name\n else:\n module.__package__ = name.rpartition ('.') [0]\n\n module.__initializing__ = True\n sys.modules [name] = module\n try:\n Exec (compile (source, module.__file__, 'exec'), module.__dict__)\n return module\n except Exception:\n sys.modules.pop (name, None)\n raise\n finally:\n module.__initializing__ = False", "def load(self):\n\n\t\tif self.module is None:\n\t\t\t# Cause the interpreter to load the module in local namespace ...\n\t\t\texec \"import \" + self.name\n\n\t\t\t# Store the module object ...\n\t\t\tobject.__setattr__(self, 'module', eval(self.name))", "def import_module(self, path):\n\n try:\n module = import_module(path)\n except ImportError:\n self.error('Failed to Load module: {0}'.format(path))\n return False\n else:\n self.out('Loaded module: {0}'.format(path))\n return module", "def loadmodule( conf ):\n try:\n #conf = routes[ route ]\n # try to load the module\n module_name = conf['module']['name']\n module_path = conf['module']['path']\n \n mod_name, file_ext = os.path.splitext( os.path.split( module_path )[ -1] )\n if file_ext.lower() == '.py':\n py_mod = imp.load_source( mod_name, module_path )\n elif file_ext.lower() == '.pyc':\n py_mod = imp.load_compiled( mod_name, module_path )\n else:\n raise Exception(\"Cannot handle module for route: \" + route )\n except Exception, e:\n import traceback\n traceback.print_exc( file=sys.stdout )\n # TODO log error + msg\n return py_mod", "def load_module(module_name, file_name):\n from importlib.machinery import SourceFileLoader\n home_dir = os.path.expanduser(\"~\")\n valid_paths = [\n os.path.join(home_dir, \"Google Drive\"),\n os.path.join(home_dir, \"GoogleDrive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"Google Drive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"GoogleDrive\"),\n os.path.join(\"C:/\", \"GoogleDrive\"),\n os.path.join(\"C:/\", \"Google Drive\"),\n os.path.join(\"D:/\", \"GoogleDrive\"),\n os.path.join(\"D:/\", \"Google Drive\"),\n ]\n\n drive_path = None\n for path in valid_paths:\n if os.path.isdir(path):\n drive_path = path\n break\n\n if drive_path is None:\n logger_lib = None\n print(\"Logger library not found in shared repo.\", flush = True)\n #raise Exception(\"Couldn't find google drive folder!\")\n else: \n utils_path = os.path.join(drive_path, \"_pyutils\")\n print(\"Loading [{}] package...\".format(os.path.join(utils_path,file_name)),flush = True)\n logger_lib = SourceFileLoader(module_name, os.path.join(utils_path, file_name)).load_module()\n print(\"Done 
loading [{}] package.\".format(os.path.join(utils_path,file_name)),flush = True)\n\n return logger_lib", "def load_module(module_name, file_name):\n from importlib.machinery import SourceFileLoader\n home_dir = os.path.expanduser(\"~\")\n valid_paths = [\n os.path.join(home_dir, \"Google Drive\"),\n os.path.join(home_dir, \"GoogleDrive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"Google Drive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"GoogleDrive\"),\n os.path.join(\"C:/\", \"GoogleDrive\"),\n os.path.join(\"C:/\", \"Google Drive\"),\n os.path.join(\"D:/\", \"GoogleDrive\"),\n os.path.join(\"D:/\", \"Google Drive\"),\n ]\n\n drive_path = None\n for path in valid_paths:\n if os.path.isdir(path):\n drive_path = path\n break\n\n if drive_path is None:\n logger_lib = None\n print(\"Logger library not found in shared repo.\", flush = True)\n #raise Exception(\"Couldn't find google drive folder!\")\n else: \n utils_path = os.path.join(drive_path, \"_pyutils\")\n print(\"Loading [{}] package...\".format(os.path.join(utils_path,file_name)),flush = True)\n logger_lib = SourceFileLoader(module_name, os.path.join(utils_path, file_name)).load_module()\n print(\"Done loading [{}] package.\".format(os.path.join(utils_path,file_name)),flush = True)\n\n return logger_lib", "def load_module(module_file: Path):\n try:\n name = module_file.stem\n spec = importlib.util.spec_from_file_location(name, module_file)\n module = importlib.util.module_from_spec(spec)\n sys.modules[name] = module\n spec.loader.exec_module(module)\n return module\n except Exception as err:\n _LOGGER.exception(err)\n raise", "def load_module(name_or_path):\n if os.path.exists(name_or_path):\n path = name_or_path.rstrip(\"/\")\n modname = os.path.splitext(os.path.basename(path))[0]\n if os.path.isdir(path):\n path = os.path.join(path, \"__init__.py\")\n spec = importlib.util.spec_from_file_location(modname, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n else:\n mod = importlib.import_module(name_or_path)\n try:\n path = mod.__path__[0]\n except AttributeError:\n path = mod.__file__\n return mod, path", "def load_module(file_name):\n mod_name = file_module_name(file_name)\n spec = imputil.spec_from_file_location(mod_name, file_name)\n if spec is None:\n raise ImportError(f'cannot import from {file_name!r}')\n mod = imputil.module_from_spec(spec)\n spec.loader.exec_module(mod)\n return mod", "def load_from_module_path(self, filename: str) -> None:\n # pylint: disable=import-outside-toplevel\n import importlib.util\n spec = importlib.util.spec_from_file_location(\"base_config\", filename)\n module = importlib.util.module_from_spec(spec)\n if spec.loader is not None:\n spec.loader.exec_module(module)\n else:\n raise Exception(\"Could not get module loader from spec\")\n self.load_from_module(module)", "def load_module(module_name, root_dir):\n module_filepath = os.path.join(root_dir, module_name)\n python_version = sys.version_info[:2]\n\n module = None\n if python_version <= (2, 7):\n import imp\n module = imp.load_source(module_name, module_filepath)\n else:\n import importlib\n loader = importlib.machinery.SourceFileLoader(module_name, module_filepath)\n if python_version <= (3, 4):\n module = loader.load_module()\n else:\n spec = importlib.util.spec_from_loader(loader.name, loader)\n module = importlib.util.module_from_spec(spec)\n loader.exec_module(module)\n\n return module", "def load_module(module_name):\n try:\n module = resolve_name(module_name)\n except 
ImportError:\n raise error.NotFound(msg=module_name)\n\n return module", "def load_mod_from_file(self, fpath):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\tfpath = os.path.abspath(fpath)\n\t\tfile_ext = os.path.splitext(os.path.split(fpath)[-1])[-1]\n\t\tif file_ext.lower() != '.py':\n\t\t\treturn\n\t\twith open(fpath) as f:\n\t\t\tcontent = f.read().splitlines()\n\t\tok = False\n\t\tfor line in content:\n\t\t\tif line.strip() == 'from shutit_module import ShutItModule':\n\t\t\t\tok = True\n\t\t\t\tbreak\n\t\tif not ok:\n\t\t\tself.log('Rejected file: ' + fpath,level=logging.DEBUG)\n\t\t\treturn\n\t\t# Note that this attribute will only be set for 'new style' module loading, # this should be ok because 'old style' loading checks for duplicate # existing modules.\n\t\t# TODO: this is quadratic complexity\n\t\texistingmodules = [\n\t\t\tm for m in self.shutit_modules\n\t\t\tif getattr(m, '__module_file', None) == fpath\n\t\t]\n\t\tif existingmodules:\n\t\t\tself.log('Module already seen: ' + fpath,level=logging.DEBUG)\n\t\t\treturn\n\t\t# Looks like it's ok to load this file\n\t\tself.log('Loading source for: ' + fpath,level=logging.DEBUG)\n\n\t\t# Add this directory to the python path iff not already there.\n\t\tdirectory = os.path.dirname(fpath)\n\t\tif directory not in sys.path:\n\t\t\tsys.path.append(os.path.dirname(fpath))\n\t\t# TODO: use bytearray to encode?\n\t\tmod_name = base64.b32encode(fpath.encode()).decode().replace('=', '')\n\t\tpymod = imp.load_source(mod_name, fpath)\n\n\t\t# Got the python module, now time to pull the shutit module(s) out of it.\n\t\ttargets = [\n\t\t\t('module', self.shutit_modules), ('conn_module', self.conn_modules)\n\t\t]\n\t\tself.build['source'] = {}\n\t\tfor attr, target in targets:\n\t\t\tmodulefunc = getattr(pymod, attr, None)\n\t\t\t# Old style or not a shutit module, nothing else to do\n\t\t\tif not callable(modulefunc):\n\t\t\t\treturn\n\t\t\tmodules = modulefunc()\n\t\t\tif not isinstance(modules, list):\n\t\t\t\tmodules = [modules]\n\t\t\tfor module in modules:\n\t\t\t\tsetattr(module, '__module_file', fpath)\n\t\t\t\tShutItModule.register(module.__class__)\n\t\t\t\ttarget.add(module)\n\t\t\t\tself.build['source'][fpath] = open(fpath).read()", "def load_from_path(cls, module_path: str) -> \"FilebaseApiModuleInfo\":\n module = try_load_module_dynamic_with_timestamp(module_path)\n if module is None:\n return None\n\n if not hasattr(module, \"__filebase_api_module_info\"):\n # thread blocking command\n module.__filebase_api_module_info = cls(module)\n\n return module.__filebase_api_module_info", "def import_module(module_name, path):\n file, path, description = imp.find_module(module_name, [path])\n # Close the .so file after load.\n with file:\n return imp.load_module(module_name, file, path, description)", "def load_datamodule(cls, path: Union[str, Path]):\n if isinstance(path, str):\n path = Path(path)\n if not path.exists():\n raise FileNotFoundError(f\"{path} does not exist.\")\n datamodule = joblib.load(path)\n return datamodule", "def import_from_file(module_name: str, filepath: str):\n return SourceFileLoader(module_name, filepath).load_module()", "def importModule(filename):\n\tfrom os.path import abspath, split, splitext\n\tfrom sys import path\n\tif isPython2():\n\t\tfrom imp import reload\n\telse:\n\t\tfrom importlib import reload\n\t\n\tfilename = adaptPath(filename)\n\tmodulePath = abspath(split(filename)[0])\n\tmoduleName = splitext(split(filename)[1])[0]\n\t\n\tif not modulePath in path:\n\t\tpath.append 
(modulePath)\n\tmodule = __import__(moduleName)\n\treload (module)\n\treturn module", "def import_module(name, path):\n spec = importlib.util.spec_from_file_location(name, path)\n module = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(module)\n return module", "def _load_module(self, filename):\n logging.debug(\"Loading module %s\" % filename)\n module_name, dummy = os.path.splitext(os.path.basename(filename))\n f = file(filename, \"r\")\n try:\n module = imp.load_module(module_name, f, filename,\n (\".py\", \"r\", imp.PY_SOURCE))\n except Exception, e: # pragma: no cover\n logging.warning(\"Failed to load plugin '%s' (%s)\" % \n (module_name, e))\n return None\n f.close()\n return module", "def _load_modules(self):\n modules_src = os.path.abspath(\"src/modules\")\n\n # perform a tree walk over modules directory\n for file_name, file_path in self._tree_walk(modules_src):\n try:\n # try to find a spec for this file and construct a module\n # from it\n spec = spec_from_file_location(file_name, file_path)\n assert spec is not None\n module = module_from_spec(spec)\n assert spec.loader is not None\n spec.loader.exec_module(module)\n self.modules.append(module)\n self._loaded_modules_names.append(module.__name__)\n except:\n pass", "def load_module(name):\n return __import__(\"metaswitch.%s\" % name,\n fromlist=[\"ROUTES\"])", "def import_(filename):\n (path, name) = os.path.split(filename)\n (name, ext) = os.path.splitext(name)\n try:\n return sys.modules[name]\n except KeyError:\n pass\n try:\n file, filename, data = imp.find_module(name, [path])\n except ImportError:\n print('No module {} found'.format(name))\n try:\n mod = imp.load_module(name, file, filename, data)\n return mod\n except UnboundLocalError:\n pass\n finally:\n # Since we may exit via an exception, close fp explicitly.\n try:\n if file:\n file.close()\n except UnboundLocalError:\n if not os.path.exists(path):\n os.makedirs(path)\n from shutil import copyfile\n if os.name == 'nt':\n copyfile(os.path.join(path_to_module, 'models\\myfitmodels.py'), filename)\n else:\n copyfile(os.path.join(path_to_module, './models/myfitmodels.py'), filename)\n # open(filename, 'a').close()", "def import_file(name: Text, file_path: Text):\n\n spec = spec_from_file_location(f\"luh3417.{name}\", file_path)\n module = module_from_spec(spec)\n spec.loader.exec_module(module)\n\n return module", "def loadModule(self, module = \"\", modType=\"python\"):\n\t\tif modType == \"python\":\t\t\t\n\t\t\tmod = None\t\t\t#try:\n\t\t\tsys.path.insert(0, os.path.dirname(module))\n\t\t\tfile = os.path.basename(module)\n\t\t\tfile = file[:file.rfind('.')]\n\t\t\t#print \"Try: \", file, \"over\", sys.path\n\t\t\ttry:\n\t\t\t\tmod = __import__(file)\n\t\t\texcept:\n\t\t\t\tprint \"Invalid CSL API Module '%s' ignored.\" % (file)\n\t\t\t\tsys.path.pop(0)\n\t\t\t\treturn None\n\t\t\tsys.path.pop(0)\n\t\t\t#print \"Loaded Module Info:\", dir(mod)\n\t\t\tif \"CSLAPI_NAME\" in dir(mod):\t\t\t\t\n\t\t\t\tmod.CSLValue = CSLValue\n\t\t\t\tmod.debug = self.debug\n\t\t\t\tvtbl = mod.getFuncTable()\n\t\t\t\t#print \"CSL Module loader:\", module\n\t\t\t\tvtbl_names = vtbl.keys()\n\t\t\t\tfor i in vtbl_names:\n\t\t\t\t\t#print \"\\tAdded Function '%s' from module: %s (%s)\" % (i, mod.__file__, vtbl[i].__class__)\n\t\t\t\t\tself.cslAPIS[i] = vtbl[i]\n\t\t\t\tself.cslAPIMods.append(mod)", "def loadModule(mod):\n try:\n # from pyrominfo import gameboy, etc\n pyrominfo = __import__(\"pyrominfo\", globals(), locals(), [mod])\n except ImportError:\n import os\n 
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n os.sys.path.insert(0, parentdir)\n pyrominfo = __import__(\"pyrominfo\", globals(), locals(), [mod])\n try:\n return getattr(pyrominfo, mod)\n except AttributeError:\n raise ImportError(\"testutils.loadModule() can't find module %s in pyrominfo package\" % mod)", "def load_module(self, file_path: Path) -> Module:\n if file_path.suffix != \".wasm\":\n raise Exception(\"Unsupported file type: {file_path.suffix}\")\n\n with file_path.open(\"rb\") as wasm_file:\n try:\n module = parse_module(wasm_file)\n except ParseError as err:\n raise MalformedModule from err\n\n try:\n validate_module(module)\n except ValidationError as err:\n raise InvalidModule from err\n\n return module", "def _load_module(modname):\n if modname in sys.modules:\n raise ImportError(\"Stock module %r already loaded\" % modname)\n searchpath = [HERE]\n if \"DEFUSED_EXPAT\" in os.environ:\n # for unit testing\n searchpath.extend(os.environ[\"DEFUSED_EXPAT\"].split(os.pathsep))\n fh = None\n try:\n fh, filename, description = imp.find_module(modname, searchpath)\n mod = imp.load_module(modname, fh, filename, description)\n finally:\n if fh is not None:\n fh.close()\n modpath = getattr(sys.modules[modname], \"__file__\", \"\")\n if not modpath.startswith(HERE):\n raise ValueError(\"Unpatched module %r loaded (%s != %s)\" %\n (mod, moddir, HERE))\n return mod", "def _load_module(self):\n self.log(logging.INFO, \"Checking file\", (self.filename, os.getpid()))\n\n try:\n return self.load_module(self.filename)\n except KeyboardInterrupt:\n raise\n except BaseException as e:\n # don't re-raise the error, just proceed without a module object\n # this can happen with scripts that aren't intended to be imported\n if not self.has_file_level_ignore():\n traceback.print_exc()\n if self.tree.body:\n node = self.tree.body[0]\n else:\n node = None\n self.show_error(\n node,\n \"Failed to import {} due to {!r}\".format(self.filename, e),\n error_code=ErrorCode.import_failed,\n )\n return None, False", "def _load_module(module_name):\n last_dot = module_name.rfind('.')\n if last_dot == -1:\n return __import__(module_name, globals(), locals())\n from_module = module_name[:last_dot]\n import_module = module_name[last_dot+1:]\n m = __import__(from_module, globals(), locals(), [import_module])\n return getattr(m, import_module)", "def load_module(self, fqn):\n trace(\"load_module\", fqn)\n trace(\"sys.modules\", sys.modules)\n p = lookupWithMapper(self.mapper, fqn)\n trace(\"load_module\", fqn, \"done\", id(p))\n\n if fqn in _sysModulesSpecialCases:\n # This module didn't have access to our isolated sys.modules when it\n # did its sys.modules modification. 
Replicate it here.\n for submoduleName in _sysModulesSpecialCases[fqn]:\n subfqn = '.'.join([fqn, submoduleName])\n sys.modules[subfqn] = getattr(p, submoduleName, None)\n return p", "def load(self, path):\n pass", "def load(self, path):\n pass", "def as_module(file_path, name):\n\n with lock:\n with open(file_path, 'U') as module_file:\n prev = sys.dont_write_bytecode\n sys.dont_write_bytecode = True\n module = imp.load_module(name, module_file, file_path, (\".py\", 'U', imp.PY_SOURCE))\n sys.dont_write_bytecode = prev\n sys.modules[name] = module\n return module", "def load_module(id=None, datatype=None, action=None,\n version='0.0', fields=[]):\n\n icon = {\n 'URI': config.IMAGES + \"load.png\",\n 'terminals': {\n 'output': (20, 10, 1, 0),\n }\n }\n \n terminals = [\n dict(id='output',\n datatype=datatype,\n use='out',\n description='data',\n ),\n ]\n\n files_field = {\n \"type\":\"[file]\",\n \"label\": \"Files\",\n \"name\": \"files\",\n \"value\": '',\n }\n intent_field = {\n \"type\":\"string\",\n \"label\":\"Intent\",\n \"name\": \"intent\",\n \"value\": '',\n }\n \n # Combine everything into a module.\n module = Module(id=id,\n name='Load',\n version=version,\n description=action.__doc__,\n #icon=icon,\n terminals=terminals,\n fields=[files_field, intent_field] + fields,\n action=action,\n )\n\n return module", "def load(self, path: str):\n pass", "def _import_compiled_module(self, fullname):\n\n vfile = vfs.getFile(self.filename, False)\n\n # We can only import a compiled module if it already exists on\n # disk. This means if it's a truly virtual file that has no\n # on-disk equivalent, we have to write it to a temporary file\n # first.\n if hasattr(vfile, 'getMount') and \\\n isinstance(vfile.getMount(), VirtualFileMountSystem):\n # It's a real file.\n filename = self.filename\n else:\n # It's a virtual file. Dump it.\n filename = Filename.temporary('', self.filename.getBasenameWoExtension(),\n '.' + self.filename.getExtension(),\n type = Filename.TDso)\n filename.setExtension(self.filename.getExtension())\n fin = open(vfile, 'rb')\n fout = open(filename, 'wb')\n data = fin.read(4096)\n while data:\n fout.write(data)\n data = fin.read(4096)\n fin.close()\n fout.close()\n\n module = imp.load_module(fullname, None, filename.toOsSpecific(),\n self.desc)\n module.__file__ = self.filename.cStr()\n return module", "def import_from_path(module: str, path: str, name: str):\n\n spec = importlib.util.spec_from_file_location(module, path)\n foo = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(foo)\n return getattr(foo, name)", "def load_module(cls, *args, **kwargs): # real signature unknown\n pass", "def load_module(cls, *args, **kwargs): # real signature unknown\n pass", "def load_module(cls, *args, **kwargs): # real signature unknown\n pass", "def load(path):\n return ActWrapper.load(path)", "def load_script(filename):\n path, module_name, ext = _extract_script_components(filename)\n add_search_path(path)\n return importlib.import_module(module_name)\n # return _load_module(module_name)", "def _import(module_name, dir_name):\n\n # assign module a name that's not likely to conflict\n safe_name = 'confab.data.' 
+ module_name\n\n # check if module is already loaded\n existing = sys.modules.get(safe_name)\n if existing:\n return existing\n\n # try to load module\n module_info = imp.find_module(module_name, [dir_name])\n module = imp.load_module(safe_name, *module_info)\n return module", "def load_module(self, fullname):\n LOGGER.info('Loading module {0}'.format(fullname))\n if fullname in sys.modules:\n return sys.modules[fullname]\n\n splitted_names = fullname.split('.')\n if 'github' in splitted_names:\n if len(splitted_names) >= 3:\n self.username = splitted_names[splitted_names.index('github') + 1]\n if len(splitted_names) >= 4:\n self.repository_name = splitted_names[splitted_names.index('github') + 2]\n\n if self.username and self.repository_name:\n self.clone_github_repo()\n\n if len(splitted_names) == 2:\n return super().load_module(fullname)\n if len(splitted_names) == 3:\n username_directory = os.path.join(MODULES_PATH, 'github', self.username)\n if not os.path.exists(username_directory):\n os.mkdir(username_directory)\n init_filename = os.path.join(username_directory, '__init__.py')\n open(init_filename, 'a').close()\n return super().load_module(fullname)\n if len(splitted_names) >= 4:\n module = super().load_module(fullname)\n parent, _, current_module = fullname.rpartition('.')\n root_modules = [\n 'packyou.github.{0}.{1}'.format(self.username, self.repository_name),\n 'packyou.github.{0}.{1}.{1}'.format(self.username, self.repository_name)\n ]\n LOGGER.info('Current module is {0}'.format(current_module))\n if fullname in root_modules:\n self.root_module = fullname\n sys.modules[current_module] = module\n return module\n\n else:\n ipdb.set_trace()\n module = super().load_module(fullname)\n sys.modules[fullname] = module\n if not module:\n raise ImportError\n return module", "def load(path, num_cpu=16):\n return ActWrapper.load(path, num_cpu=num_cpu)", "def _import_module(name):\r\n __import__(name)\r\n return sys.modules[name]", "def _import_module(name):\r\n __import__(name)\r\n return sys.modules[name]", "def _import_module(name):\r\n __import__(name)\r\n return sys.modules[name]", "def load_module(self, module_name): # pragma: no cover\r\n try:\r\n module = import_module('SoftLayer.CLI.modules.%s' % module_name)\r\n for _, obj in inspect.getmembers(module):\r\n if inspect.isclass(obj) and issubclass(obj, CLIRunnable):\r\n self.add_plugin(obj)\r\n return module\r\n except ImportError:\r\n raise InvalidModule(module_name)", "def get_module(filename_with_path):\n try:\n with open(filename_with_path) as config_file:\n Module.temp_path = filename_with_path\n this_module = yaml.load(config_file, Loader=Loader)\n Module.temp_path = \"\"\n return this_module\n except IOError:\n raise ModulePathError(filename_with_path)\n except yaml.scanner.ScannerError:\n raise ModuleConstraintParseError(\"Parsing of module {} failed. 
This is likely caused by a typo in the file.\"\n \"\".format(filename_with_path))", "def load_module_2or3(model_name, model_def_path):\n if six.PY3:\n import importlib.util\n spec = importlib.util.spec_from_file_location(model_name, model_def_path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n else:\n import importlib\n dirname = os.path.dirname(model_def_path)\n sys.path.insert(0, dirname)\n module_name = os.path.splitext(os.path.basename(model_def_path))[0]\n mod = importlib.import_module(module_name)\n return mod", "def _import_module(name):\n __import__(name)\n return sys.modules[name]", "def _import_module(name):\n __import__(name)\n return sys.modules[name]", "def require(path,className=None):\n (dirname, basename) = os.path.split(path)\n packageName = dirname.replace('/','.')\n moduleName = basename.rstrip('.py')\n\n logging.getLogger().debug(\"Loading: %s.%s[%s]\" %(packageName,moduleName,className))\n\n mod = __import__(packageName+'.'+moduleName, globals(), locals(), [className])\n if className:\n return getattr(mod, className)\n\n return mod", "def import_pymodule(scheme):\n if not SchModule._ready:\n raise ValueError(u\"not mounted\")\n\n p = SchModule.DIR.hpath(scheme)\n p = path.join(p, SchModule.PYMODULE)\n p = p.encode(sys.getfilesystemencoding())\n # In load_source(name, path): name is name of module (without extension),\n # path is full path to the file of module\n return imp.load_source(path.splitext(SchModule.PYMODULE)[0], p)", "def importFromPath(filename):\n try:\n path, name = os.path.split(filename)\n name, ext = os.path.splitext(name)\n file, filename, data = imp.find_module(name, [path])\n importedModule = imp.load_module(name, file, filename, data)\n except Exception as ae:\n raise Exception('Importing module '+ filename + ' at ' + path + os.sep + name + ' failed with error '+ str(ae))\n return importedModule", "def import_module(self, location, name):", "def loadModule(self, module_name):\n\t\tmodule = ROOT.TChain(module_name)\n\t\tself.modules.append(module)\n\t\treturn module", "def import_file(filename, context=None):\n\n # First thing to try: see if this is a module and not a file\n if not filename.endswith('.py'):\n module = None\n try:\n # is the module already imported?\n module = sys.modules[filename]\n except KeyError:\n try:\n module = __import__(filename)\n except ImportError:\n pass\n if module is not None:\n if not context is None:\n context[name] = module\n return module\n\n #\n # Parse the filename to get the name of the module to be imported\n #\n if '/' in filename:\n name = (filename).split(\"/\")[-1]\n elif '\\\\' in filename:\n name = (filename).split(\"\\\\\")[-1]\n else:\n name = filename\n\n # NB: endswith accepts tuples of strings starting in python 2.5.\n # For 2.4 compatibility we will call endswith() twice.\n if name.endswith('.py') or name.endswith('.pyc'):\n name = name.rsplit('.', 1)[0]\n if '.' in name:\n raise RuntimeError(\"Invalid python module name '%s'. 
The head of the filename cannot contain a period.\" % filename)\n\n #\n # Get the module if it already exists, and otherwise\n # import it\n #\n try:\n module = sys.modules[name]\n except KeyError:\n dirname = os.path.dirname( os.path.abspath(filename) )\n sys.path.insert( 0, dirname )\n try:\n module = imp.load_source( name, filename )\n except Exception:\n e = sys.exc_info()[1]\n import logging\n logger = logging.getLogger('pyutilib.misc')\n logger.error(\"Failed to load python module=\"+str(filename)+\\\n \":\\n\" + str(e))\n raise\n except:\n import logging\n logger = logging.getLogger(\"pyutilib.misc\")\n logger.error(\"Failed to load python module=\"+str(filename))\n raise\n finally:\n sys.path.remove( dirname )\n #\n # Add module to the give context\n #\n if not context is None:\n context[name] = module\n return module", "def import_module(name):\n __import__(name)\n return sys.modules[name]", "def load_module(self, name, quiet=True):\n full_name = '%s.%s' % (self.name, name)\n try:\n return import_module(full_name)\n except ImportError:\n if quiet:\n return None\n raise", "def load(module_name):\r\n if module_name.startswith('http://'):\r\n pico_url, module_name = module_name.split('/pico/')\r\n global url\r\n url = pico_url + '/pico/'\r\n module_dict = get(url + module_name)\r\n module = imp.new_module(module_name)\r\n module.__doc__ = module_dict['__doc__']\r\n functions = module_dict['functions']\r\n for function_def in functions:\r\n name = function_def['name']\r\n args = function_def['args']\r\n args_string = ', '.join([\"%s=%s\"%(arg, json.dumps(default).replace(\"null\", \"None\")) for arg, default in args if arg != None])\r\n stream = function_def['stream']\r\n docstring = function_def['doc']\r\n exec(\"\"\"\r\ndef f(%s):\r\n \\\"\\\"\\\" %s \\\"\\\"\\\"\r\n return _call_function('%s', '%s', locals(), %s)\r\n\"\"\"%(args_string, docstring, module_name, name, stream))\r\n setattr(module, name, f)\r\n return module", "def load(path, reset=False):\n pass", "def import_module(module):\n return importlib.import_module(module)", "def loadConfigModule(name, options, tags):\n if isinstance(name, str):\n LOG.info('Loading %s', name)\n d = {}\n module = __import__(name[:-3], d, d)\n else:\n module = reload(name)\n onload = module.__dict__.get('onload')\n if callable(onload):\n try:\n onload(options, tags)\n except:\n LOG.fatal('Exception while loading %s', name)\n raise\n return module", "def test_load_simple_module():\n loader = Loader()\n main_fname = loader.load(\"https://gist.githubusercontent.com/miohtama/80391980c2e73b285cfe/raw/dd89a55497ba33a6014453d9bb7432ab424c01cf/kivyhello.py#main\")\n mod = path_to_mod_name(main_fname)\n result = loader.run(mod, \"hello\")\n assert result == \"Hello there\"\n loader.close()", "def loadModule(*args, allModules: bool=True, load: AnyStr=\"\", scan: bool=True,\n **kwargs)->List[AnyStr]:\n pass", "def load(self):\n\n self.commands = {\n # Usual text commands (e.g. 
\"/echo 123\")\n 'user': {},\n 'owner': {\n 'load': self.load,\n 'modprobe': self.modprobe,\n 'rmmod': self.rmmod\n },\n # Modules for bot's reaction to a different message types\n 'text': {},\n 'photo': {},\n 'audio': {},\n 'video': {},\n 'sticker': {},\n 'voice': {}\n }\n\n for file in os.listdir('modules'):\n if file.endswith('.py'):\n command_type, command = file.split('_', 1)\n self.modprobe(self, command[:-3])", "def import_python_module_by_filename(name, module_filename):\n\n sys.path.append(abspath(dirname(module_filename)))\n spec = importlib.util.spec_from_file_location(\n name,\n location=module_filename)\n imported_module = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(imported_module)\n return imported_module", "def _load(self, directory):\n pass", "def mod_load(self):\n raise NotImplementedError(\"Mod load isn't overriden\")", "def import_module(module_name,module_path):\n try:\n if not isinstance(module_path,list):\n module_path = [module_path]\n file,filename,desc = imp.find_module(module_name,module_path)\n globals()[module_name] = imp.load_module(module_name, file, filename, desc)\n return\n except Exception as err:\n print 'import_module error', err\n traceback.print_exc()\n\n sys.exit()", "def _import_from(mod, path, mod_dir=None):\n\n if mod_dir is None:\n mod_dir = mod\n\n if not os.path.exists(path):\n return None\n\n if not os.path.exists(os.path.join(path, mod_dir)):\n return None\n\n try:\n mod_info = imp.find_module(mod_dir, [path])\n return imp.load_module(mod, *mod_info)\n except ImportError:\n return None", "def modules_load(machine_config):\n\t#---modules in LOCAL configuration must be loaded before checking version\n\timport importlib\n\tif 'module_path' in machine_config: module_path = machine_config['module_path']\n\telse:\n\t\tmodule_parent = os.environ.get('MODULESHOME','/usr/share/Modules/default')\n\t\tmodule_path = os.path.join(module_parent,'init','python.py')\n\tincoming = {}\n\tif sys.version_info<(3,0): execfile(module_path,incoming)\n\telse: exec(open(module_path).read(),incoming)\n\t#---note that modules that rely on dynamically-linked C-code must use EnvironmentModules\n\tmodlist = machine_config['modules']\n\tif type(modlist)==str: modlist = modlist.split(',')\n\tfor mod in modlist:\n\t\t#---always unload gromacs to ensure correct version\n\t\tincoming['module']('unload','gromacs')\n\t\tprint('[STATUS] module load %s'%mod)\n\t\tincoming['module']('load',mod)", "def _import_string(module_name, content):\n\n # assign module a name that's not likely to conflict\n safe_name = 'confab.data.' 
+ module_name\n\n # check if module is already loaded\n existing = sys.modules.get(safe_name)\n if existing:\n return existing\n\n # try to load module\n module = imp.new_module(safe_name)\n exec content in module.__dict__\n return module", "def load_modules_manually():\n #cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0]))\n cmd_folder = '../myutils/'\n if cmd_folder not in sys.path:\n sys.path.insert(0, cmd_folder)\n #print sys.path", "def import_module_from_file(f_path, verbose=True):\n # get absolute path\n f_path = os.path.abspath(f_path)\n assert os.path.isfile(f_path)\n\n f_dir = os.path.dirname(f_path)\n f_name = os.path.basename(f_path)\n f_id = os.path.splitext(f_name)[0]\n\n try:\n # add f_dir to system path for later import\n sys.path.insert(0, f_dir)\n # import module by name\n module = importlib.import_module(f_id)\n return module\n except ImportError:\n err_str = \"ERROR: Could not import module '{}' from '{}'.\\n\"\n err_str = err_str.format(f_name, f_dir)\n raise ImportError(err_str)", "def _import(self, module, name):\n try:\n return getattr(__import__(module, fromlist=[name]), name)\n except (AttributeError, ImportError):\n msg = \"Failed to load %s from %s: %s\" % (name, module,\n sys.exc_info()[1])\n if not self.fail_silently:\n print(msg)\n else:\n _debug(msg)\n return None", "def load_model_module(path):\n if not os.path.exists(path):\n raise ValueError(\"Model configuration not found in %s\" % path)\n dirname, filename = os.path.split(path)\n module_name, _ = os.path.splitext(filename)\n sys.path.insert(0, os.path.abspath(dirname))\n module = importlib.import_module(module_name)\n sys.path.pop(0)\n\n if not hasattr(module, \"model\"):\n raise ImportError(\"No model defined in {}\".format(path))\n\n return module", "def dynamicallyLoadModule(name):\n f, file, desc=imp.find_module(name, [ROLES_DIR])\n return imp.load_module(ROLES_PKG_NAME+'.'+name, f, file, desc)", "def import_from_path(path_to_module, obj_name = None):\n module_name = path_to_module.replace(\"/\",\".\").strip(\".py\")\n module = import_module(module_name)\n if obj_name == None:\n return module\n obj = getattr(module, obj_name)\n return obj", "def load_from_path(path):\n module, attr = path.rsplit('.', 1)\n mod = importlib.import_module(module)\n return getattr(mod, attr)", "def load_from_module(self, module: ModuleType) -> None:\n for key in dir(module):\n if key.startswith(\"__\") and key.endswith(\"__\"):\n continue\n value = getattr(module, key)\n self[key] = value", "def loadModule(module_name, class_name = None):\n mod = importlib.import_module(module_name)\n if class_name == None: return mod\n else: return getattr(mod, class_name)", "def load_instance_from_file(klass, modpath):\n mod_name, file_ext = os.path.splitext(os.path.split(modpath)[-1])\n logging.info('attempting problem import from {} module'.format(mod_name))\n\n ext = file_ext.lower()\n if ext == '.py':\n py_mod = imp.load_source(mod_name, modpath)\n elif ext == '.pyc':\n py_mod = imp.load_compiled(mod_name, modpath)\n else:\n raise NotModule('{} is not Python source or bytecode'.format(modpath))\n\n for attr in dir(py_mod):\n mod_obj = getattr(py_mod, attr)\n if isinstance(mod_obj, klass):\n return mod_obj\n\n return None" ]
[ "0.7140824", "0.7115812", "0.7082071", "0.7029506", "0.7029197", "0.6992073", "0.6935706", "0.6864377", "0.6848782", "0.6841059", "0.68201846", "0.6806607", "0.67745715", "0.6732326", "0.6725582", "0.6686308", "0.6678542", "0.6671717", "0.6595986", "0.6595986", "0.6583503", "0.6569107", "0.6548343", "0.65345293", "0.64482105", "0.64325804", "0.64281946", "0.64126027", "0.64002603", "0.63949096", "0.63928396", "0.6362112", "0.63520366", "0.6349731", "0.63468033", "0.6314005", "0.6290271", "0.6289207", "0.6280404", "0.627196", "0.62593013", "0.6237112", "0.62369025", "0.62298554", "0.61837596", "0.6175656", "0.6175656", "0.6171435", "0.6151955", "0.6124158", "0.6120976", "0.6118561", "0.6114206", "0.6114206", "0.6114206", "0.61016804", "0.6085861", "0.6077756", "0.6076577", "0.6050389", "0.60464233", "0.60464233", "0.60464233", "0.60410684", "0.60401994", "0.60253525", "0.60069054", "0.60069054", "0.59971344", "0.5989943", "0.5989194", "0.598108", "0.59778297", "0.5950552", "0.59490025", "0.5946873", "0.59323794", "0.5923076", "0.59177876", "0.5910196", "0.59041196", "0.5890436", "0.58810985", "0.587866", "0.58735025", "0.5859458", "0.58552", "0.58503157", "0.58371603", "0.5836263", "0.58134043", "0.581331", "0.58116597", "0.5799349", "0.5789335", "0.577573", "0.57671744", "0.57472044", "0.5739391", "0.57363594" ]
0.74431866
0
Load & cache the program module.
def _module(self):
    if self._module_cache is None:
        self._module_cache = load_module(self._name, self._path)
    return self._module_cache
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load(self):\n\n\t\tif self.module is None:\n\t\t\t# Cause the interpreter to load the module in local namespace ...\n\t\t\texec \"import \" + self.name\n\n\t\t\t# Store the module object ...\n\t\t\tobject.__setattr__(self, 'module', eval(self.name))", "def load(self):\n \"\"\"Load a program into memory.\"\"\"\n\n if len(sys.argv) != 2:\n print(\"format: ls8.py [filename]\")\n sys.exit(1)\n\n program = sys.argv[1]\n address = 0\n\n # For now, we've just hardcoded a program:\n\n # program = [\n # # From print8.ls8\n # 0b10000010, # LDI R0,8\n # 0b00000000,\n # 0b00001000,\n # 0b01000111, # PRN R0\n # 0b00000000,\n # 0b00000001, # HLT\n # ]\n\n #open file\n with open(program) as file:\n #read the lines\n for line in file:\n #parse out comments\n line = line.strip().split(\"#\")[0]\n #cast numbers from strings to ints\n val = line.strip()\n #ignore blank lines\n if line == \"\":\n continue\n\n value = int(val, 2)\n self.ram[address] = value\n address +=1", "def load(self):\n\n address = 0\n\n program = sys.argv[1]\n\n with open(program) as p:\n for instruction in p:\n if instruction[0] == '#':\n continue\n\n instruction = instruction.strip()\n temp = instruction.split()\n\n if len(temp) == 0:\n continue\n\n self.ram[address] = int(temp[0], 2)\n address += 1\n \n # print(\"======= PROGRAM =========\")\n # for i in self.ram[:35]:\n # print(i)", "def _load_program(self, kernel):\n return cl.Program(\n self.context, open('kernels/{0}'.format(kernel)).read()\n ).build()", "def load_program(self, program):\n for idx, val in enumerate(program):\n self.memory[idx] = val", "def load(self):\n\n address = 0\n\n if len(sys.argv) < 2:\n print('ERROR - Provide program address to load')\n return\n\n program_filename = sys.argv[1]\n\n program_text = open(program_filename).read()\n program_lines = program_text.split('\\n')\n program = []\n\n for line in program_lines:\n blocks = line.split()\n if len(blocks) > 0:\n if blocks[0] != '#':\n inst = blocks[0]\n program.append(int(inst, 2))\n\n for instruction in program:\n self.ram[address] = instruction\n address += 1", "def reload_programs(self):\r\n print(\"Reloading programs:\")\r\n for name, program in self._programs.items():\r\n if getattr(program, 'program', None):\r\n print(\" - {}\".format(program.meta.label))\r\n program.program = resources.programs.load(program.meta)", "def do_workload(self):\n module_manager = self._core.get_module_manager()\n module = module_manager.get_module_by_name(self._values[\"name\"])\n module_manager.update_module(module)", "def prog():\n global program\n return program", "def mod_load(self):\n raise NotImplementedError(\"Mod load isn't overriden\")", "def load(self, program):\n\n address = 0\n\n try:\n with open(program, 'r') as f:\n for line in f:\n # strip out comment, if any, and whitespace\n instruction = line.split('#')[0].strip()\n if instruction == '':\n continue\n self.ram[address] = int(instruction, base=2)\n address += 1\n\n except FileNotFoundError:\n print(f'File not found. 
path: {program}')\n sys.exit(2)", "def _load_program():\n filepath = os.path.join(os.getcwd(), os.path.dirname(__file__), PROGRAM_TXT)\n f = open(filepath, 'r')\n program = f.read()\n f.close()\n return program.strip().split('\\n')", "def exec_module(self, module):\n pass", "def pymod_cache():\n pymod.cache.cache = Singleton(pymod.cache.factory)", "def load(self, program):\n\n #print(f\"Program in memory {program}\")\n address = 0\n\n for instruction in program:\n self.ram[address] = instruction\n address += 1", "def load():\n out = load_as_root_module()\n parser = create_parser(os.path.basename(sys.argv[0]))\n opts = parser.parse_args(sys.argv[1:])\n load_env(opts, out.opt)\n\n return out", "def load(self):\n\n self.commands = {\n # Usual text commands (e.g. \"/echo 123\")\n 'user': {},\n 'owner': {\n 'load': self.load,\n 'modprobe': self.modprobe,\n 'rmmod': self.rmmod\n },\n # Modules for bot's reaction to a different message types\n 'text': {},\n 'photo': {},\n 'audio': {},\n 'video': {},\n 'sticker': {},\n 'voice': {}\n }\n\n for file in os.listdir('modules'):\n if file.endswith('.py'):\n command_type, command = file.split('_', 1)\n self.modprobe(self, command[:-3])", "def load(self, program):\n address = 0\n\n with open(program) as lines:\n for line in lines:\n line = line.split('#')\n # print(line)\n try:\n value = int(line[0], 2)\n except ValueError:\n continue\n self.ram[address] = value\n address += 1", "def load(self):\n\n address = 0\n program = []\n\n if len(sys.argv) < 2:\n print(\"Please pass in a second file.\")\n sys.exit()\n\n file_name = sys.argv[1]\n try:\n with open(file_name) as file:\n for line in file:\n split_line = line.split('#')[0]\n command = split_line.strip()\n\n if command == '':\n continue\n\n program.append(int(command, 2))\n\n except FileNotFoundError:\n print(f'{sys.argv[0]}: {sys.argv[1]} file was not found')\n sys.exit()\n\n for instruction in program:\n self.ram[address] = instruction\n address += 1", "def load(self, program):\n\n address = 0\n\n # For now, we've just hardcoded a program:\n\n # program = [\n # # From print8.ls8\n # 0b10000010, # LDI R0,8\n # 0b00000000,\n # 0b00001000,\n # 0b01000111, # PRN R0\n # 0b00000000,\n # 0b00000001, # HLT\n # ]\n\n # for instruction in program:\n # self.ram[address] = instruction\n # address += 1\n\n try:\n with open(program) as program:\n for line in program:\n line_split = line.split('#')\n value = line_split[0].strip()\n \n if value == \"\":\n continue\n formatted_value = int(value, 2)\n \n self.ram[address] = formatted_value\n address += 1\n except FileNotFoundError:\n print(f\"{program} not found\")\n sys.exit(1)", "def exe():\n e = entry()\n if e:\n return load(e)", "def load(self):\n\n # Extract filename from command line\n try:\n filename = sys.argv[1]\n print(filename)\n except IndexError:\n print(\"Usage: python3 ls8.py <program_name>\")\n sys.exit(1)\n\n # Validate filetype and confirm file exists\n if filename[-4:] != '.ls8':\n print(\"You must supply a '.ls8' binary.\")\n sys.exit(2)\n try:\n f = open(filename)\n except FileNotFoundError:\n print(f\"File not found: {filename}\")\n sys.exit(2)\n\n # Read the contents of the file\n address = 0\n for line in f:\n try:\n opcode = line.split()[0]\n except IndexError:\n continue\n if opcode == '#':\n continue\n self.ram[address] = int(opcode, 2)\n address += 1\n f.close()\n\n # Double-check the file wasn't empty\n if address == 0:\n print(\"Error: Empty source file\")\n sys.exit(2)", "def get_programs(self):\n self.logger.info(\"Preparing 
programs...\")\n current_dir = Path()\n dir_path = current_dir / \"data\" / \"break_data\" / \"programs\"\n\n file_name = \"programs_\" + self.dataset_split + \".pkl\"\n if not (dir_path / file_name).is_file():\n self.create_matching_programs(dir_path, file_name)\n data = load_obj(dir_path, file_name)\n\n self.logger.info(\"Programs ready.\")\n return data", "def initialize_cache(inputs, outputs, programs, max_decode_len, config):\n target_shape = (programs.shape[0], max_decode_len)\n initial_variables = models.ProgramTransformer(config).init(\n jax.random.PRNGKey(0),\n jnp.ones(inputs.shape, config.dtype),\n jnp.ones(outputs.shape, config.dtype),\n jnp.ones(target_shape, config.dtype))\n return initial_variables['cache']", "def load(self):\n with self.__lock:\n self._d.update(self.backend.load())\n log.debug(\"load: {}\".format(self.backend.filename))", "def __put_module_in_sys_cache(module_name, module_obj):\n #try:\n #if hasattr(sys, 'stypy_module_cache'):\n sys.stypy_module_cache[module_name] = module_obj\n # else:\n # __preload_sys_module_cache()\n # sys.stypy_module_cache[module_name] = module_obj\n # except:\n # pass\n # finally:\n # return None", "def do_workload(self):\n module_manager = self._core.get_module_manager()\n module_manager.install_module(self.get_meta())", "def __preload_sys_module_cache():\n # Preload sys module\n sys.stypy_module_cache = {\n 'sys': __load_python_module_dynamically('sys', False)} # By default, add original sys module clone\n\n # Preload builtins module\n sys.stypy_module_cache['__builtin__'] = __load_python_module_dynamically('__builtin__', False)\n sys.stypy_module_cache['ctypes'] = __load_python_module_dynamically('ctypes', False)", "def reload(self):\n\n\t\tif self.module is None:\n\t\t\t# Do nothing, as the module will be imported on attribute access.\n\t\t\tpass\n\t\telse:\n\t\t\texec \"reload(\" + self.name + \")\"\n\t\t\t# The module object is still identical, only its code has been\n\t\t\t# replaced. 
Thus no eval(self.name) is necessary.", "def load(self):\n\n address = 0\n\n # For now, we've just hardcoded a program:\n\n if len(sys.argv) != 2:\n print(\"Usage: cpu.py filename\")\n sys.exit(1)\n \n filename = sys.argv[1]\n\n try:\n with open(filename) as f:\n for line in f:\n \n instruction = line.split(\"#\")[0].strip()\n \n if instruction == \"\":\n continue\n\n val = int(instruction, 2) \n\n self.ram_write(address, val)\n\n address += 1\n\n except FileNotFoundError:\n print(f\"File {filename} not found\")\n sys.exit(2)", "def load(self):\n # Proceed only if singleton instance has been created\n if self.initialized:\n # The cache manager will work on manifest and cache tasks on an\n # in-process basis as load() is only called during startup from\n # the server process.\n if self.is_server_process:\n # Remove all existing manifest files from previous processes\n self._remove_all_manifest_files()\n\n # Start the watchdog if it's not alive, prevents redundant starts\n if not self.observer.is_alive():\n self.observer.start()\n\n # Fetch all component catalog instances and trigger their add to the\n # component cache if this is not already happening (it seems some server\n # test fixtures could be loading the server extensions multiple times).\n if not self.cache_manager.is_refreshing():\n self.refresh()", "def __load_python_module_dynamically(module_name, put_in_cache=True):\n if module_name in sys.modules:\n module_obj = sys.modules[module_name]\n else:\n exec (\"import {0}\".format(module_name))\n module_obj = eval(module_name)\n\n module_obj = type_inference_proxy_copy.TypeInferenceProxy(module_obj).clone()\n if put_in_cache:\n __put_module_in_sys_cache(module_name, module_obj)\n return module_obj", "def load(self, prog_file):\n\n prog = open(prog_file, \"r\")\n address = 0\n # program = []\n\n for inst in prog:\n # print(\"Inst: \", inst)\n if inst[0] != \"#\":\n if \"1\" in inst or \"0\" in inst:\n inst = inst[slice(8)]\n # inst = inst[0]\n self.ram[address] = int(inst, 2)\n address += 1\n\n # print(\"RAM: \", self.ram)\n # print(f\"Sp: {self.sp} Reg Len: {len(self.reg)}\")\n # For now, we've just hardcoded a program:\n\n # program = [\n # # From print8.ls8\n # 0b10000010, # LDI R0,8\n # 0b00000000,\n # 0b00001000,\n # 0b01000111, # PRN R0\n # 0b00000000,\n # 0b00000001, # HLT\n # ]\n\n # for instruction in program:\n # self.ram[address] = int(instruction)\n # address += 1\n # print(\"RAM: \", self.ram)", "def main():\n run_program()", "def load_cache(self):\n self.mu.load(self.cached_mu)\n self.var.load(self.cached_var)\n self.count.load(self.cached_count)", "def load(app, verbose, replay, exp_config=None):\n if replay:\n exp_config = exp_config or {}\n exp_config[\"replay\"] = True\n log(header, chevrons=False)\n loader = LoaderDeployment(app, Output(), verbose, exp_config)\n loader.run()", "def cache_code(self):\n\n # Generate the prologue\n self._synthesize_prologue()\n\n # Don't have a real epilogue.\n self.add(spu.stop(0x2000))\n # self._check_alignment(self._code, 'spu code')\n\n # self.exec_module.make_executable(self._code.buffer_info()[0], len(self._code))\n\n # Append our instructions to the prologue's, first making sure the alignment is correct.\n if len(self._prologue._code) % 2 == 1: # Odd number of instructions\n self._prologue.add(spu.lnop(0))\n\n self._prologue._code.extend(self._code)\n self._prologue._check_alignment(self._prologue._code, 'spu prologue')\n \n self._epilogue = self \n self._cached = True\n return", "def load_mod_from_file(self, 
fpath):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\tfpath = os.path.abspath(fpath)\n\t\tfile_ext = os.path.splitext(os.path.split(fpath)[-1])[-1]\n\t\tif file_ext.lower() != '.py':\n\t\t\treturn\n\t\twith open(fpath) as f:\n\t\t\tcontent = f.read().splitlines()\n\t\tok = False\n\t\tfor line in content:\n\t\t\tif line.strip() == 'from shutit_module import ShutItModule':\n\t\t\t\tok = True\n\t\t\t\tbreak\n\t\tif not ok:\n\t\t\tself.log('Rejected file: ' + fpath,level=logging.DEBUG)\n\t\t\treturn\n\t\t# Note that this attribute will only be set for 'new style' module loading, # this should be ok because 'old style' loading checks for duplicate # existing modules.\n\t\t# TODO: this is quadratic complexity\n\t\texistingmodules = [\n\t\t\tm for m in self.shutit_modules\n\t\t\tif getattr(m, '__module_file', None) == fpath\n\t\t]\n\t\tif existingmodules:\n\t\t\tself.log('Module already seen: ' + fpath,level=logging.DEBUG)\n\t\t\treturn\n\t\t# Looks like it's ok to load this file\n\t\tself.log('Loading source for: ' + fpath,level=logging.DEBUG)\n\n\t\t# Add this directory to the python path iff not already there.\n\t\tdirectory = os.path.dirname(fpath)\n\t\tif directory not in sys.path:\n\t\t\tsys.path.append(os.path.dirname(fpath))\n\t\t# TODO: use bytearray to encode?\n\t\tmod_name = base64.b32encode(fpath.encode()).decode().replace('=', '')\n\t\tpymod = imp.load_source(mod_name, fpath)\n\n\t\t# Got the python module, now time to pull the shutit module(s) out of it.\n\t\ttargets = [\n\t\t\t('module', self.shutit_modules), ('conn_module', self.conn_modules)\n\t\t]\n\t\tself.build['source'] = {}\n\t\tfor attr, target in targets:\n\t\t\tmodulefunc = getattr(pymod, attr, None)\n\t\t\t# Old style or not a shutit module, nothing else to do\n\t\t\tif not callable(modulefunc):\n\t\t\t\treturn\n\t\t\tmodules = modulefunc()\n\t\t\tif not isinstance(modules, list):\n\t\t\t\tmodules = [modules]\n\t\t\tfor module in modules:\n\t\t\t\tsetattr(module, '__module_file', fpath)\n\t\t\t\tShutItModule.register(module.__class__)\n\t\t\t\ttarget.add(module)\n\t\t\t\tself.build['source'][fpath] = open(fpath).read()", "def load(self):\n address = 0\n\n program = []\n try:\n with open(sys.argv[1]) as document:\n for line in document:\n if line[0].startswith(\"0\") or line[0].startswith(\"1\"):\n # split before and after any comment symbol '#'\n comment_split = line.split(\"#\")[0]\n # convert the pre-comment portion (to the left) from binary to a value\n # extract the first part of the split to a number variable\n # and trim whitespace\n num = comment_split.strip()\n\n # ignore blank lines / comment only lines\n if len(num) == 0:\n continue\n\n # set the number to an integer of base 2\n value = int(num, 2)\n program.append(value)\n except FileNotFoundError:\n print(f\"{sys.argv[0]}: {sys.argv[1]} not found\")\n sys.exit(2)\n\n for instructions in program:\n self.ram[address] = instructions\n address += 1", "def get(self):\r\n #python = sys.executable\r\n #os.execl(python, python, * sys.argv)\r\n os.execl(sys.executable, *([sys.executable] + sys.argv))", "def _load(self):\n module = importlib.import_module(self.__name__)\n self._parent_module_globals[self._local_name] = module\n\n if self._warning:\n logger.warning(self._warning)\n # Make sure to only warn once.\n self._warning = None\n\n # Update this object's dict so that if someone keeps a reference to the\n # LazyLoader, lookupts are efficient (__getattr__ is only called on lookups\n # that fail).\n self.__dict__.update(module.__dict__)\n return 
module", "def exec_module(self, module):\n\n if not self.filename.endswith(config.FILE_EXT) and not self.filename.endswith(\n \"__init__.py\"\n ):\n print(\"Fatal error: ExtensionLoader is asked to load a normal file.\")\n print(\"filename:\", self.filename)\n print(\"Expected extension:\", config.FILE_EXT)\n raise SystemExit\n\n name = module.__name__\n if module.__name__ == config.MAIN_MODULE_NAME:\n module.__name__ = \"__main__\"\n config.MAIN_MODULE_NAME = None\n\n with open(self.filename) as f:\n source = f.read()\n\n transforms.identify_requested_transformers(source)\n\n if config.TRANSFORMERS:\n original = source\n source = transforms.add_all_imports(source)\n source = transforms.apply_source_transformations(source)\n\n if config.DIFF and original != source:\n self.write_html_diff(name, original, source)\n\n if config.CONVERT and self.filename.endswith(config.FILE_EXT):\n print(\"############### Original source: ############\\n\")\n print(original)\n print(\"\\n############### Converted source: ############\\n\")\n print(source)\n print(\"=\" * 50, \"\\n\")\n\n source = transforms.apply_ast_transformations(source)\n exec(source, vars(module))", "def loadModule(mod):\n try:\n # from pyrominfo import gameboy, etc\n pyrominfo = __import__(\"pyrominfo\", globals(), locals(), [mod])\n except ImportError:\n import os\n parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n os.sys.path.insert(0, parentdir)\n pyrominfo = __import__(\"pyrominfo\", globals(), locals(), [mod])\n try:\n return getattr(pyrominfo, mod)\n except AttributeError:\n raise ImportError(\"testutils.loadModule() can't find module %s in pyrominfo package\" % mod)", "def LoadProgramState(self, restored_checkpoint_path=None, sess=None):\n pass", "def load(self):\n\n # address = 0\n\n # # For now, we've just hardcoded a program:\n\n # program = [\n # # From print8.ls8\n # 0b10000010, # LDI R0,8\n # 0b00000000,\n # 0b00001000,\n # 0b01000111, # PRN R0\n # 0b00000000,\n # 0b00000001, # HLT\n # ]\n\n # for instruction in program:\n # self.ram[address] = instruction\n # address += 1\n\n if len(sys.argv) != 2:\n print(\"usage: python3 ls8.py examples/filename\")\n sys.exit(1)\n\n try:\n address = 0\n\n with open(sys.argv[1]) as f:\n for line in f:\n t = line.split('#')\n n = t[0].strip()\n\n if n == '':\n continue\n\n try:\n n = int(n, 2)\n except ValueError:\n print(f\"Invalid number '{n}'\")\n sys.exit(1)\n\n self.ram[address] = n\n address += 1\n\n except FileNotFoundError:\n print(f\"File not found: {sys.argv[1]}\")\n sys.exit(2)", "def onReload(self,moduleName=\"NeedleFinder\"):\n if profiling : profbox()\n #framework\n globals()[moduleName] = slicer.util.reloadScriptedModule(moduleName)", "def reload(self):\n\n if not path.isfile(self.definition_filename):\n msg = \"Program %s does not exist (no definition file %s)\"\n raise errors.ProgramNotFound(msg % (self.name, self.definition_filename))\n\n with open(self.definition_filename) as df:\n self.raw_data = yaml.safe_load(df)\n\n self.mk_data()", "def load_cache():\n return {}", "def getcached(cls, name, mediatype=None):\n if name not in cls.__cache:\n cls.__cache[name] = Executable(name, mediatype)\n return cls.__cache[name]", "def __init__(self):\n ScriptedLoadableModuleLogic.__init__(self)", "def modules():", "def load(self):\n\n try:\n address = 0\n with open(sys.argv[1]) as f:\n for line in f:\n comment_split = line.strip().split('#')\n value = comment_split[0].strip()\n self.ram[address] = int(value, 2)\n address += 1\n print(self.ram)\n except 
FileNotFoundError:\n print('File not Found')\n sys.exit(2)", "def load(self):\n address = 0\n if len(sys.argv) < 2:\n print(\"Please pass in a second file name: python3 ls8.py second_filename.py\")\n sys.exit()\n file_name = sys.argv[1]\n try:\n file = open(file_name, \"r\")\n except FileNotFoundError:\n print(f\"{sys.argv[0]}: {sys.argv[1]} file was not found.\")\n sys.exit()\n \n for line in file.readlines():\n instruction = line.split(\"#\")[0]\n instruction = instruction.strip() \n if len(instruction) > 0:\n self.ram_write(address, int(instruction, 2))\n address += 1 \n file.close()", "def load(self):\n basePath = './examples/'\n file = \"print8.ls8\"\n # file = \"mult.ls8\"\n # file = \"stack.ls8\"\n # file = \"call.ls8\"\n file = \"sctest.ls8\"\n if len(sys.argv) > 1:\n file = sys.argv[1]\n address = 0\n\n with open(basePath + file, \"r\") as f:\n for line in f:\n line = line.split(\"#\")\n\n try:\n v = int(line[0], 2)\n except ValueError:\n continue\n # print(v)\n self.ram[address] = v\n address += 1", "def main() -> None:\n\n cache: Dict[str, Any] = {}\n datadir = util.get_abspath(sys.argv[1])\n for yaml_path in glob.glob(os.path.join(datadir, \"*.yaml\")):\n with open(yaml_path) as yaml_stream:\n cache_key = os.path.relpath(yaml_path, datadir)\n cache[cache_key] = yaml.load(yaml_stream)\n\n cache_path = os.path.join(datadir, \"yamls.pickle\")\n with open(cache_path, \"wb\") as cache_stream:\n pickle.dump(cache, cache_stream)", "def _load_cache():\n BASE_DIR = os.path.dirname(os.path.abspath(__file__))\n fname = os.path.join(BASE_DIR, \"model_cache.json\")\n with open(fname) as f:\n models_cache = json.load(f)\n return models_cache", "def load_module(name):\n return __import__(\"metaswitch.%s\" % name,\n fromlist=[\"ROUTES\"])", "def load(self):\n\n if len(sys.argv) != 2:\n print(\"Error\")\n sys.exit(1)\n\n address = 0\n\n with open(sys.argv[1]) as f:\n for line in f:\n string_val = line.split(\"#\")[0].strip()\n if string_val == '':\n continue\n v = int(string_val, 2)\n self.ram[address] = v\n address += 1", "def test_load_simple_module():\n loader = Loader()\n main_fname = loader.load(\"https://gist.githubusercontent.com/miohtama/80391980c2e73b285cfe/raw/dd89a55497ba33a6014453d9bb7432ab424c01cf/kivyhello.py#main\")\n mod = path_to_mod_name(main_fname)\n result = loader.run(mod, \"hello\")\n assert result == \"Hello there\"\n loader.close()", "def force_load(self):\n pass", "def onReload(self, moduleName=\"NeedleFinder\"):\r\n if profiling : profbox()\r\n # framework\r\n globals()[moduleName] = slicer.util.reloadScriptedModule(moduleName)", "def module_runner(module):\n task_queue.put(1)\n result = sys.modules[module].run()\n task_queue.get()\n store_module_result(result) # Store the result in our repo", "def _reload(mod,larch=None,**kw):\n\n if isinstance(mod, str):\n return larch.import_module(mod, do_reload=True)\n\n for k,v in chain(larch.symtable._sys.modules.iteritems(), sys.modules.iteritems()):\n if v == mod:\n modname = k\n break\n try:\n return larch.import_module(modname,do_reload=True)\n except NameError:\n pass", "def get_data(self, filename):\n pyfile = filename + '.py'\n if not os.path.exists(pyfile) or os.path.getmtime(pyfile) < os.path.getmtime(filename):\n # recompile the Haxe file\n status = subprocess.call([self.haxe_bin, '-cp', os.path.dirname(filename), os.path.basename(filename), '-python', pyfile])\n if status:\n raise ImportError(\"Haxe compilation of {} failed with status {}\".format(filename, status))\n with open(pyfile) as f:\n data = f.read()\n return 
data", "def load(self):\n\n address = 0\n\n # For now, we've just hardcoded a program:\n\n program = [\n # From print8.ls8\n 0b10000010, # LDI R0,8\n 0b00000000,\n 0b00001000,\n 0b01000111, # PRN R0\n 0b00000000,\n 0b00000001, # HLT\n ]\n\n for instruction in program:\n self.ram[address] = instruction\n address += 1", "def load(self):\n\n address = 0\n\n # For now, we've just hardcoded a program:\n\n program = [\n # From print8.ls8\n 0b10000010, # LDI R0,8\n 0b00000000,\n 0b00001000,\n 0b01000111, # PRN R0\n 0b00000000,\n 0b00000001, # HLT\n ]\n\n for instruction in program:\n self.ram[address] = instruction\n address += 1", "def load(ctx):\n if not is_owner(ctx.update):\n return\n global cmds\n cmds.load_ext(ctx.args[0], ctx.update)", "def refresh(self):\n self.modules.clear()\n module_files = []\n module_paths = os.environ['MAYA_MODULE_PATH'].split(os.pathsep)\n for p in module_paths:\n try:\n module_files += [os.path.join(p, x).replace(os.sep, os.altsep or os.sep) for x in os.listdir(p) if\n x.lower()[-3:] == \"mod\"]\n except OSError:\n pass # ignore bad paths\n for eachfile in module_files:\n for eachmod in self.parse_mod(eachfile):\n self.modules[\"{0.name} ({0.version})\".format(eachmod)] = eachmod", "def reload(self,module):\n try:\n code = 'import %s; reload(%s)' % ((module.__name__,)*2)\n except AttributeError:\n code = 'import %s; reload(%s)' % ((module,)*2)\n self.workers.exec_code(code)", "def load(path, num_cpu=16):\n return ActWrapper.load(path, num_cpu=num_cpu)", "def loadapp(self, app, params=None):\n if not TESTMODE:\n app = 'snakewm.' + app\n\n _app = importlib.import_module(app)\n _app.load(self.MANAGER, params)", "def main():\n obj = UnityFilesystem()\n obj.perform_module_operation()", "def loadmodule( conf ):\n try:\n #conf = routes[ route ]\n # try to load the module\n module_name = conf['module']['name']\n module_path = conf['module']['path']\n \n mod_name, file_ext = os.path.splitext( os.path.split( module_path )[ -1] )\n if file_ext.lower() == '.py':\n py_mod = imp.load_source( mod_name, module_path )\n elif file_ext.lower() == '.pyc':\n py_mod = imp.load_compiled( mod_name, module_path )\n else:\n raise Exception(\"Cannot handle module for route: \" + route )\n except Exception, e:\n import traceback\n traceback.print_exc( file=sys.stdout )\n # TODO log error + msg\n return py_mod", "def modules_load(machine_config):\n\t#---modules in LOCAL configuration must be loaded before checking version\n\timport importlib\n\tif 'module_path' in machine_config: module_path = machine_config['module_path']\n\telse:\n\t\tmodule_parent = os.environ.get('MODULESHOME','/usr/share/Modules/default')\n\t\tmodule_path = os.path.join(module_parent,'init','python.py')\n\tincoming = {}\n\tif sys.version_info<(3,0): execfile(module_path,incoming)\n\telse: exec(open(module_path).read(),incoming)\n\t#---note that modules that rely on dynamically-linked C-code must use EnvironmentModules\n\tmodlist = machine_config['modules']\n\tif type(modlist)==str: modlist = modlist.split(',')\n\tfor mod in modlist:\n\t\t#---always unload gromacs to ensure correct version\n\t\tincoming['module']('unload','gromacs')\n\t\tprint('[STATUS] module load %s'%mod)\n\t\tincoming['module']('load',mod)", "def command_load(interface,command,args):\n try:\n modules.add_module(args)\n interface.reply(\"Loaded %s\"%args)\n except ImportError, e:\n interface.reply(str(e))\n except modules.ModuleAlreadyLoaded, e:\n interface.reply(str(e))", "def load_module(module):\n try:\n return import_module(module)\n except 
ImportError:\n sys.stderr.write('Unable to load the module: %s.\\n' % module)\n exit(-1)", "def get_module_from_sys_cache(module_name):\n try:\n if hasattr(sys, 'stypy_module_cache'):\n return sys.stypy_module_cache[module_name]\n else:\n __preload_sys_module_cache()\n return sys.stypy_module_cache[module_name]\n except:\n return None", "def load_module(cls, bytes, options=None):\n\t\traise NotImplementedError(\"load_module must be implemented\")", "def recache(self, phys):\r\n self.myOutputCache.initialize(phys.app)\r\n\r\n for output in self.myOutputs:\r\n output.initialize(phys.app)\r\n output.run(1)", "def load(self, path):\n address = 0\n\n with open(path) as f:\n # with open(sys.argv[1]) as f:\n for line in f:\n comment_split = line.split('#')\n\n value = comment_split[0].strip()\n\n if value == '':\n continue\n\n num = int(value, 2)\n self.ram[address] = num\n # self.ram_write(num, address)\n address += 1\n\n\n # with open(sys.argv[1], 'r')as f:\n # line = f.readlines()\n # for line in f:\n # stripped = line.strip()\n # if len(sys.argv) != 2:\n # print(\"Error\")\n # sys.exit(1)\n # try:\n # address = 0\n # with open(sys.argv[1]) as f:\n # for instruction in f:\n # split_excess = instruction.split('#')\n # split = split_excess[0].strip()\n # if split == '':\n # continue\n # val = int(split, 2)\n # self.ram_write(address, val)\n # address += 1\n # except FileNotFoundError:\n # print(f\"FileNotFound: {sys.argv}\")\n # sys.exit(2)\n\n\n # except FileNotFoundError:\n # print('file not found')\n # sys.exit(2)\n\n # For now, we've just hardcoded a program:\n\n # program = [\n # # From print8.ls8\n # 0b10000010, # LDI R0,8\n # 0b00000000,\n # 0b00001000,\n # 0b01000111, # PRN R0\n # 0b00000000,\n # 0b00000001, # HLT\n # ]\n\n # for instruction in program:\n # self.ram[address] = instruction\n # address += 1", "def load(bot, feature) :\n try :\n f = sys.modules[feature]\n\n except KeyError :\n f = False\n\n if f :\n imp.reload(f)\n initalize(bot, f)\n\n else :\n f = importlib.import_module(\"mandelbot.features.\" + feature)\n initalize(bot, f)\n sys.modules[feature] = f", "def _import(self, module_name):\n # load keywords\n kw = __import__('keywords')\n # set real rpc proxy\n kw.var_cache['proxy'] = device_proxy\n kw.var_cache['reflection'] = reflection_proxy\n kw.var_cache['local'] = local_proxy\n # load script\n __import__(module_name)\n # register all kw func from keywords.kw_func\n self.kw_func.update(kw.kw_func)", "def add_module(self, module):\n getattr(module, 'load_bench')(self)", "def load(self, arr = None):\n address = 0\n \n if arr:\n for inst in arr:\n self.mem[address] = inst\n address +=1\n \"\"\"\n print('loading')\n if len(sys.argv) != 2:\n print(f\"usage: {sys.argv[0]} filename\")\n sys.exit(1)\n try:\n with open(sys.argv[1]) as f:\n for line in f:\n num = line.split('#', 1)[0]\n if num.strip() == '': # ignore comment only lines\n continue\n num = int(num, 2)\n self.mem[address] = num\n address += 1\n except FileNotFoundError:\n print(f\"{sys.argv[0]}: {sys.argv[1]} not found\")\n sys.exit(2)\n \"\"\"", "def getProgram(self) -> ghidra.program.model.listing.Program:\n ...", "def _rai_module(self) -> str:\n module = [\"--loadmodule\", CONFIG.redisai]\n if self.queue_threads:\n module.append(f\"THREADS_PER_QUEUE {self.queue_threads}\")\n if self.inter_threads:\n module.append(f\"INTER_OP_PARALLELISM {self.inter_threads}\")\n if self.intra_threads:\n module.append(f\"INTRA_OP_PARALLELISM {self.intra_threads}\")\n return \" \".join(module)", "def load_cache(name, typ=\"pkl\"):\n 
filename = cache_name(name, typ)\n if typ == \"str\":\n with open(filename, 'r') as fin:\n return fin.read()\n elif typ == \"pkl\":\n with open(filename, 'rb') as fin:\n return pickle.load(fin)\n elif typ == \"h5\":\n import keras\n return keras.models.load_model(filename)\n else:\n raise ValueError(\"Invalid type '{}'.\".format(typ))", "def main():\n module = IRODSPermissionModule()\n module.run()", "def load(self, program_file):\n\n if self.debug:\n print()\n print_heading(\"reading program from file...\", width=40)\n\n program = []\n\n with open(program_file) as file:\n\n for line in file:\n\n line_str = line.split(\"#\")[0].strip()\n\n if line_str:\n\n word = int(line_str, base=2)\n program.append(word)\n\n if self.debug:\n print(self.format_value(word))\n\n if self.debug:\n print()\n print_heading(\"writing program to memory...\", width=40)\n\n for (i, word) in enumerate(program):\n\n self.write_memory(i, word)\n\n if self.debug:\n print(\"[{}]: {}\".format(*self.format_iterable(i, word)))\n\n return", "def load_module(name, path):\n loader = importlib.machinery.SourceFileLoader(name, path)\n module = types.ModuleType(loader.name)\n loader.exec_module(module)\n return module", "def load(self):\n self.uniquify_name()\n code = self.get_code()\n self.store.load_user_function(self.get_name(), self.get_num_params(), code)", "def run():\n import hmmmAssembler ; reload(hmmmAssembler) # import helpers\n hmmmAssembler.main(Random) # this runs the code!", "def load_module(self, fqn):\n trace(\"load_module\", fqn)\n trace(\"sys.modules\", sys.modules)\n p = lookupWithMapper(self.mapper, fqn)\n trace(\"load_module\", fqn, \"done\", id(p))\n\n if fqn in _sysModulesSpecialCases:\n # This module didn't have access to our isolated sys.modules when it\n # did its sys.modules modification. 
Replicate it here.\n for submoduleName in _sysModulesSpecialCases[fqn]:\n subfqn = '.'.join([fqn, submoduleName])\n sys.modules[subfqn] = getattr(p, submoduleName, None)\n return p", "def load_python_startup_script(name):\n\n try:\n return sys.modules[name]\n except KeyError:\n pass\n\n (fp, pathname, description) = imp.find_module(name)\n try:\n module = imp.load_module(name, fp, pathname, description)\n # Special to GPS: if the module has a on_gps_started function,\n # execute it\n module.on_gps_started('gps_started')\n except AttributeError:\n pass\n finally:\n\n if fp:\n fp.close()\n\n return module", "def init():\n if not _module_init():\n _pypm.Initialize()\n _module_init(True)\n atexit.register(quit)", "def boot(self):\n self.job = Bootstrap.loadJobDefinition()\n self.task = Bootstrap.loadTask(self.job)\n\n stepSpaceMod = __import__(self.stepModule,\n globals(), locals(), ['stepSpace'], 0)\n\n self.stepSpace = stepSpaceMod.stepSpace\n\n self.step = self.task.getStep(self.stepSpace.stepName)\n\n self.script = getScript(scriptModule)\n self.script.task = self.task\n self.script.step = self.step\n self.script.job = self.job\n self.script.stepSpace = self.stepSpace", "def load(self):\n code = self.get_code()\n determ = self.get_deterministic()\n if self.uniquify:\n self.uniquify_name()\n self.store.load_user_function(self.get_name(), self.get_num_params(), code, deterministic=determ)", "def AddModule (self, module):\n getattr (module, 'load_bench') (self)", "def reload_module(module_name):\n try:\n reload(eval(module_name))\n except:\n pass", "def load_modules_manually():\n #cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0]))\n cmd_folder = '../myutils/'\n if cmd_folder not in sys.path:\n sys.path.insert(0, cmd_folder)\n #print sys.path" ]
[ "0.64689153", "0.63654965", "0.62676257", "0.62425035", "0.62108284", "0.62063473", "0.61392486", "0.6114491", "0.602539", "0.60204077", "0.60017616", "0.59714663", "0.588076", "0.5877579", "0.58205396", "0.5767681", "0.5746382", "0.57439184", "0.57372934", "0.5737229", "0.5736307", "0.57307464", "0.571669", "0.5713525", "0.56753576", "0.5653245", "0.5648725", "0.5646026", "0.56441456", "0.5632725", "0.56274676", "0.56269956", "0.56106514", "0.5606673", "0.5597151", "0.55962723", "0.5567711", "0.5541844", "0.5533124", "0.551728", "0.5497645", "0.54948765", "0.5492229", "0.54907054", "0.547919", "0.5476581", "0.5467591", "0.5461114", "0.54158497", "0.5402137", "0.5399135", "0.53974533", "0.539143", "0.53896457", "0.5384241", "0.53810596", "0.5354846", "0.53520316", "0.5336769", "0.5334025", "0.53274643", "0.531954", "0.53174394", "0.5312009", "0.5309686", "0.5305503", "0.5299564", "0.5287561", "0.528215", "0.5281712", "0.5275544", "0.52671", "0.5242637", "0.5233846", "0.5232975", "0.52307224", "0.5227999", "0.52228296", "0.52203965", "0.5219936", "0.52178466", "0.5215698", "0.5210587", "0.5204321", "0.5194816", "0.5193342", "0.5188877", "0.5186529", "0.51829207", "0.51767075", "0.5176549", "0.5174559", "0.51719254", "0.51691216", "0.51638323", "0.5154631", "0.5152993", "0.51451373", "0.5144842", "0.5143705" ]
0.6589164
0
Dynamic forwarder to module members.
def __getattr__(self, name): return getattr(self._module, name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self,input):\n\t\traise RuntimeError(\"All subclasses of Module must implement a forward method\")", "def forward(self, X):\n res = X\n for i, module in enumerate(self.modules):\n res = module.forward(res)\n\n return res", "def forward(self, x):\n result = x\n for module in self.modules:\n result = module.forward(result)\n return result", "def forward_pass(self):", "def forward(self, *args, **kwargs):\n pass", "def forward(self, *args, **kwargs) -> Dict[str, Any]:\n pass", "def forward(self):\n pass", "def forward(self):\n pass", "def forward(self, *args):\n raise NotImplementedError", "def forward(self, *args):\n raise NotImplementedError", "def proxyModule(original, **replacements):\n class _ModuleProxy(object):\n def __getattribute__(self, name):\n if name in replacements:\n return replacements[name]\n else:\n return getattr(original, name)\n\n def __repr__(self):\n return \"<Proxy for %r: %s replaced>\" % (\n original, ', '.join(replacements.keys()))\n return _ModuleProxy()", "def forward(self, *args, **kwargs):\n raise NotImplementedError", "def forward(self)->None:", "def get_forward_mapping(self):", "def make_module_instance(self, *args, **kwargs):\r\n\r\n # Function to go through member lists and dictionaries recursively,\r\n # to look for submodules on which make_module_instance needs to be called\r\n def recurse(v):\r\n if isinstance(v,list):\r\n iterv = enumerate(v)\r\n else:\r\n iterv = v.iteritems()\r\n #backport\r\n #iter = enumerate(v) if isinstance(v,list) else v.iteritems()\r\n for sk,sv in iterv:\r\n if isinstance(sv,(list,dict)):\r\n sv = recurse(sv)\r\n elif isinstance(sv,Module):\r\n sv = sv.make_module_instance(args,kwargs)\r\n v[sk] = sv\r\n return v\r\n\r\n for k,v in self.local_attr.iteritems():\r\n if isinstance(v,Module):\r\n v = v.make_module_instance(args,kwargs)\r\n self[k] = self.__wrapper__(v)\r\n elif isinstance(v,Method):\r\n self.__setitem__(k,v)\r\n else:\r\n # iterate through lists and dictionaries to wrap submodules\r\n if isinstance(v,(list,dict)):\r\n self[k] = self.__wrapper__(recurse(v))\r\n try:\r\n self[k] = self.__wrapper__(v)\r\n except Exception:\r\n if isinstance(v, Component):\r\n raise\r\n else:\r\n self.__dict__[k] = v\r\n return self", "def _forward_message(self, name, message):\n unhashed = self.message_hashes[repr(name)]\n if unhashed in self.handlers:\n for handler in self.handlers[unhashed]:\n handler(message)", "def forward(self, *args, **kwargs):\n\n raise NotImplementedError()", "def _createModuleObj(self):\n ModuleTimeWeakening.__init__(self)\n return", "def process_module_list(self, modules):", "def register(self):\n for _, member in inspect.getmembers(self):\n if isinstance(member, Route):\n member.set_parent(self)\n member.register(self.core)", "def __call__(self, *args, **kwargs):\n return self.forward(*args, **kwargs)", "def forward_test(self, *args, **kwargs):\n pass", "def modules():", "def members(self, items):\n pass", "def patch(self):\n\t\t\n\t\t# Create tunnels\n\t\t(module, self.tunnel_source) = create_tunnel(self.remote_source_info)\n\t\tself.modules += [ module ]\n\t\t(module, self.tunnel_sink) = create_tunnel(self.remote_sink_info)\n\t\tself.modules += [ module ]\n\t\t\n\t\t# Connect them to the local devices\n\t\tself.modules = self.modules + [\n\t\t\tadd_loopback(self.tunnel_source, self.local_sink),\n\t\t\tadd_loopback(self.local_source, self.tunnel_sink)\n\t\t]", "def _forward(self, z):\n raise NotImplementedError(\"Forward shouldn't be called!\")", "def setHook(self, module):\n self.hook = 
module.register_forward_hook(self.hook_fn)", "def on_module_event(self, event: str, *args, **kwargs):", "def get_members():", "def forward(self, x):\n pass", "def _forward(self, X, **kwargs):\n raise NotImplementedError()", "def forward_op(x, module_dict, **kwargs):\n if not isinstance(x, dict):\n raise ValueError(\"The input x should be a dictionary.\")\n res = {}\n if not isinstance(module_dict, dict) and not isinstance(module_dict, nn.ModuleDict):\n for key in x:\n res[key] = module_dict(x[key], **kwargs)\n else:\n for key in x:\n res[key] = module_dict[key](x[key], **kwargs)\n return res", "def forward(self, x):\n return self.main(x)", "def pre_move_hook(self, from_module, to_module):\n raise NotImplementedError()", "def forwarder(self, forwarder: ICNForwarder):\n self._forwarder = forwarder", "def forward(self, x):\n outputs = {} \n #features = self.bn(self.linear(features))\n #x = self.relu(self.bn(self.linear(features)))\n # x = self.dropout(x)\n \n for i in range(len(self.module_list)): \n output = self.module_list[i](x)\n outputs[i] = output\n\n return outputs", "def base_forward(self, x):\r\n pass", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, value)", "def __setattr__(self, name, value):\r\n self.assert_valid()\r\n\r\n\r\n\r\n\r\n self.__dict__.setdefault(\"_members\",{})[name] = value\r\n\r\n return _swig_setattr(self, self.__class__, name, 
value)", "def forward(self, x, **kwargs):\n pass", "def forward(self, s):", "def set_hook(self, module):\n self.hook = module.register_forward_hook(self.hook_fn)", "def _handle_member_chunk(self, members: list):\n if self._chunks_left >= 1:\n # We have a new chunk, so decrement the number left.\n self._chunks_left -= 1\n\n for member_data in members:\n id = int(member_data[\"user\"][\"id\"])\n if id in self._members:\n member_obj = self._members[id]\n else:\n member_obj = dt_member.Member(self._bot, **member_data)\n\n member_obj.nickname = member_data.get(\"nick\", member_obj.nickname)\n member_obj.guild_id = self.id\n\n self._members[member_obj.id] = member_obj", "def forward_on_instance(self, instance):\n sentence, length = instance\n return self.forward(sentence, length)", "def module_transfer_to_device(self) -> None:\n for name, module in self.modules.items():\n module.to(self.device)\n if self.device.type == 'cuda':\n self.modules[name] = torch.nn.DataParallel(module, self.gpu_ids)\n return", "def receiverMapping():", "def forward(self, x):\n x = self.main(x)\n return x", "def forward(self, x):\n x = self.main(x)\n return x", "def _get_forwarding_groups(self):\n return self.__forwarding_groups", "def _get_forwarding_groups(self):\n return self.__forwarding_groups", "def _get_forwarding_groups(self):\n return self.__forwarding_groups", "def make_module_hook(self):\n res = \\\n\"\"\"{fname} = shared_object.{fname}\n {fname}.restype = POINTER({structname})\n {varname} = {fname}()\n\n\"\"\"\n fragments ={\n \"varname\": self._namespace_mangle(self.namespace) + \"_plugin\",\n \"fname\": \"___madz_LANG_python_get_out_struct\" if self.namespace == \"\" else \"___madz_LANG_python_get_\"+self._namespace_mangle(self.namespace) + \"_struct\",\n \"structname\": self.python_madz_types + (\"OUTSTRUCT\" if self.namespace == \"\" else self._namespace_mangle(self.namespace))\n }\n\n return res.format(**fragments)", "def forward(self, x):\n x = _ReversibleModuleFunction.apply(x, self.reversible_blocks, self.eagerly_discard_variables)\n return x", "def forward(self, features):\n outputs = {} \n #features = self.bn(self.linear(features))\n for i in range(len(self.module_list)): \n x = self.module_list[i](features)\n outputs[i] = x\n\n return outputs", "def exec_module(cls, *args, **kwargs): # real signature unknown\n pass", "def exec_module(cls, *args, **kwargs): # real signature unknown\n pass", "def add_member(self, member, neighbors):\n self.members.append(member)\n \n # self.neighbors[member] = neighbors\n for n in neighbors:\n self.neighbors.append(n)\n self.calculate_a()", "def autodiscover():\n autodiscover_modules('broadcasts')", "def visit_Module(self, node):\n for item in node.body:\n self.visit(item)", "def visit_Module(self, node):\n for item in node.body:\n self.visit(item)", "def _createModuleObj(self):\n raise NotImplementedError(\"Implement in derived class.\")", "def forward(self, output, target):\n raise NotImplementedError", "def forward(self):\n raise NotImplemented", "def forward(self):\n raise NotImplemented", "def forward(self):\n raise NotImplemented", "def LocalProxies(self): # real signature unknown; restored from __doc__\n pass", "def members(self, members: object):\n\n self._members = members", "def getMembers():", "def getMembers():", "def getMembers():", "def getMembers():", "def _createModuleObj(self):\n ModuleFaultCohesiveKin.__init__(self)\n return", "def __init__(self, source, target):\n\n self._verify_class_dicts()\n self.source = source\n # this relies on payload 
module names matching payload class names\n self.module = getattr(regrws.payload, target.__module__.split('.')[-1])\n self.payload = target()", "def forward(self, obs):\n\t\tpass", "def forwards(self, orm):\r\n def get_modulestore(ms_type, key):\r\n \"\"\"\r\n Find the modulestore of the given type trying the key first\r\n \"\"\"\r\n try:\r\n store = modulestore(key)\r\n if isinstance(store, MixedModuleStore):\r\n store = store.modulestores[key]\r\n if store.get_modulestore_type(None) == ms_type:\r\n return store\r\n else:\r\n return None\r\n except KeyError:\r\n return None\r\n\r\n # Note: Remember to use orm['appname.ModelName'] rather than \"from appname.models...\"\r\n loc_map_collection = loc_mapper().location_map\r\n xml_ms = get_modulestore(XML_MODULESTORE_TYPE, 'xml')\r\n mongo_ms = get_modulestore(MONGO_MODULESTORE_TYPE, 'default')\r\n if mongo_ms is None:\r\n mongo_ms = get_modulestore(MONGO_MODULESTORE_TYPE, 'direct')\r\n\r\n query = Q(name__startswith='staff') | Q(name__startswith='instructor') | Q(name__startswith='beta_testers')\r\n for group in orm['auth.Group'].objects.filter(query).exclude(name__contains=\"/\").all():\r\n def _migrate_users(correct_course_key, role):\r\n \"\"\"\r\n Get all the users from the old group and migrate to this course key in the new table\r\n \"\"\"\r\n log.info(\r\n u'Giving %s users access to %s',\r\n group.name, correct_course_key\r\n )\r\n for user in orm['auth.user'].objects.filter(groups=group).all():\r\n entry = orm['student.courseaccessrole'](\r\n role=role,\r\n user=user,\r\n org=correct_course_key.org,\r\n course_id=correct_course_key,\r\n )\r\n try:\r\n entry.save()\r\n except IntegrityError:\r\n pass\r\n\r\n parsed_entry = self.GROUP_ENTRY_RE.search(group.name)\r\n if parsed_entry is None:\r\n log.warn('Ignoring an unexpected unparsable entry %s', group.name)\r\n continue\r\n role = parsed_entry.group('role_id')\r\n course_id_string = parsed_entry.group('course_id_string')\r\n # if it's a full course_id w/ dots, ignore it\r\n entry = loc_map_collection.find_one({\r\n 'course_id': re.compile(r'^{}$'.format(course_id_string), re.IGNORECASE)\r\n })\r\n if entry is None:\r\n # check new table to see if it's been added as org permission\r\n if not orm['student.courseaccessrole'].objects.filter(\r\n role=role,\r\n org__iexact=course_id_string,\r\n ).exists():\r\n # old auth was of form role_coursenum. 
Grant access to all such courses wildcarding org and run\r\n # look in xml for matching courses\r\n if xml_ms is not None:\r\n for course in xml_ms.get_courses():\r\n if course_id_string == course.id.course.lower():\r\n _migrate_users(course.id, role)\r\n\r\n if mongo_ms is not None:\r\n mongo_query = re.compile(ur'^{}$'.format(course_id_string), re.IGNORECASE)\r\n for mongo_entry in mongo_ms.collection.find(\r\n {\"_id.category\": \"course\", \"_id.course\": mongo_query}, fields=[\"_id\"]\r\n ):\r\n mongo_id_dict = mongo_entry['_id']\r\n course_key = SlashSeparatedCourseKey(\r\n mongo_id_dict['org'], mongo_id_dict['course'], mongo_id_dict['name']\r\n )\r\n _migrate_users(course_key, role)", "def members(self, members):\n\n self._members = members", "def add_members(self, members):\n self.__add_remove_members(members)", "def move_members(_) -> int:\n return 1 << 24", "def move_members(_) -> int:\n return 1 << 24", "def __init__(self):\n ScriptedLoadableModuleLogic.__init__(self)", "def _delegate_methods(receiver, target):\n receiver_methods = frozenset(dir(receiver))\n target_methods = frozenset(dir(target))\n\n forward_methods = target_methods.difference(receiver_methods)\n for method in forward_methods:\n if not method.startswith('_'):\n ref = getattr(target, method)\n setattr(receiver, method, ref)", "def __init__(self, fritz_box, call_forwarding_dict):\n self.fritz_box = fritz_box\n self._name = \"callforwarding_\" + call_forwarding_dict['uid']\n self.uid = call_forwarding_dict['uid']\n self.from_number = call_forwarding_dict['from_number']\n self.to_number = call_forwarding_dict['to_number']\n self.connection_type = call_forwarding_dict['connection_type']\n self.enabled = call_forwarding_dict['enabled']", "def fastforward(self):\n self.run_command('fastforward')", "def handleModuleMessage(self, data, datapathSendFcn):\n print (\"handler not implemented in silverline.\")", "def opaque_module(self, modobj):\n for var, val in modobj.__dict__.iteritems():\n if isinstance(val, type(Lumpy)):\n self.opaque_class(val)", "def _forward_impl(self, *inputs, **kwargs):\n raise NotImplementedError('Abstract method.')", "def add_function (self, module, name) :\n setattr (module, name, self._wrapped (module, name))", "def map_from_app_modules(self, app):\n if 'modules' in app and len(app['modules']) > 0:\n empty_fieldlist(self.modules)\n for module in app.get('modules', []):\n self.modules.append_entry()\n form_module = self.modules.entries[-1].form\n form_module.map_from_app(module)", "def emit(self, *path):\n path = list(path)\n for module in self.modules.values():\n module.emit_local(*path)", "def make_modules(self, config):\n pass", "def _swap_child_modules(\n module: torch.nn.Module,\n static_mappings: Dict[Callable, Any],\n dynamic_mappings: Dict[Callable, Any],\n) -> None:\n\n reassign = {}\n for name, mod in module.named_children():\n # both fused modules and observed custom modules are\n # swapped as one unit\n if not isinstance(mod, _FusedModule):\n _swap_child_modules(mod, static_mappings, dynamic_mappings)\n\n qconfig = getattr(mod, 'qconfig', None)\n if not qconfig:\n continue\n activation_int8_quantized = activation_is_int8_quantized(qconfig)\n op_int8_dynamically_quantized = op_is_int8_dynamically_quantized(qconfig)\n if activation_int8_quantized:\n if not type(mod) in static_mappings:\n continue\n reassign[name] = swap_module(mod, static_mappings, {})\n elif op_int8_dynamically_quantized:\n if not type(mod) in dynamic_mappings:\n continue\n reassign[name] = swap_module(mod, 
dynamic_mappings, {})\n # TODO(future PR): add support for other dtypes\n\n for key, value in reassign.items():\n module._modules[key] = value" ]
[ "0.61396086", "0.6047265", "0.6046735", "0.5913129", "0.5799977", "0.5715483", "0.5634315", "0.5634315", "0.5568903", "0.5568903", "0.5526504", "0.5521884", "0.5437717", "0.5414434", "0.5402117", "0.54014426", "0.53860456", "0.5361227", "0.52834386", "0.5244702", "0.5238629", "0.5228822", "0.51377445", "0.51348126", "0.51038283", "0.5100785", "0.509586", "0.5064351", "0.5059515", "0.5055066", "0.5051707", "0.50420266", "0.50344396", "0.50333333", "0.50192666", "0.5009743", "0.50018543", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.5001437", "0.49935704", "0.49888378", "0.49710146", "0.4961332", "0.4960798", "0.49544185", "0.4946531", "0.49451217", "0.49451217", "0.4929547", "0.4929547", "0.4929547", "0.49265924", "0.4924971", "0.4919523", "0.4916022", "0.4916022", "0.4913124", "0.49033707", "0.4898573", "0.4898573", "0.4894729", "0.48924404", "0.48849252", "0.48849252", "0.48849252", "0.48549727", "0.48534086", "0.48387146", "0.48387146", "0.48387146", "0.48387146", "0.48375914", "0.4829391", "0.4818357", "0.48015976", "0.47984135", "0.4789588", "0.4761479", "0.4761479", "0.4756641", "0.47512403", "0.4743251", "0.4738234", "0.4725516", "0.47200537", "0.4715497", "0.47118798", "0.47094414", "0.47015175", "0.47011366", "0.46987364" ]
0.0
-1
Set the packet length.
def _set_packet_len(self, packet_len): self._packet_len = packet_len
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setPacketLength(self):\n self.packetLength = len(self) - PRIMARY_HEADER_BYTE_SIZE - 1", "def setLength(self, new_length):\n\n self.length = new_length", "def length(self, length):\n\n self._length = length", "def set_length(self, ak_tpl: BKT, newLength: float): # -> None:\n ...", "async def gpt2_set_length(self, ctx, *, arg=None):\n print('Command gpt2_set_length triggered')\n if arg:\n try:\n i = int(arg)\n assert (i > 0) and (i < 1024)\n except ValueError or AssertionError:\n ctx.send(\"ERROR: Argument must be a positive integer number\")\n self.update_config(length=arg)\n else:\n await ctx.send(\"ERROR: Argument required\")", "def set_length(self, new_length):\n if(new_length == None):\n self._logger.write(\"Error! new_length cannot be a NoneType\")\n elif(type(new_length) != float):\n self._logger.write(\"Error! new_length must be of type float\")\n else:\n try:\n self._length = new_length\n except Exception as e:\n self._logger.write(\"Error! Could not set the new length:\\n %s\" % e)", "def set_length(self, length):\n if length < 0:\n raise AttributeError('length should be positive')\n self.progress_char_length = length", "def change_tail_length(self, value):\n self.layer.tail_length = value", "def set_length(self, ak_spec: Union[str, BKT], val: float) -> None:\n ...", "def token_length(self, token_length):\n\n self._token_length = token_length", "def _set_maskLength(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"maskLength\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"maskLength must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"maskLength\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__maskLength = t\n if hasattr(self, '_set'):\n self._set()", "def length(self, length: Union[int, float]):\n self._length = length\n self._update_length()\n self.events.length()\n\n self.refresh()", "def change_length(self, value):\n self.layer.length = value\n self.lengthSpinBox.clearFocus()\n self.setFocus()", "def setLength(self, length):\n self.vector.norm = length", "def read_packetlen(self):\n packetlen = int(struct.unpack('!I', b\"\".join(self.__input))[0])\n self.__input = []\n self.set_terminator(packetlen)\n self.found_terminator = self.read_milter_data", "def setDataSize(self, head,payload,eop):\n self.dataSize = len(head)+len(payload)+len(eop)", "def setGoalLength(self, length):\n assert isinstance(length, int)\n self.goal_length = length", "def update_total_length(self):\n self.total_length = len(bytes(self))", "def setSplitLength(self, value):\n return self._set(splitLength=value)", "def length_changed(self, value):\n self.message.dlc = value\n self.validate_data_input(value)", "def set_part_length(self, seconds):\n self._part_length = seconds", "def sent_len(self) -> int:\n raise NotImplementedError(\"must be implemented by subclasses\")", "def network_byte_length(self) -> int:", "def __set_size(self, size):\n if not isinstance(size, int):\n raise TypeError('The size should be an integer')\n if size < 64 or size > 1500: # It should be in the Standard Ethernet Payload range\n raise ValueError('The size should be in the range of Standard Ethernet frames [64,1500] bytes')\n self.__size = size", "def _setVals(self, cmd_length=0):\n self.cmd_length = cmd_length", "def length(self, value):\n raise TypeError(\"Cannot delete {class-name} length 
property.\")", "def _on_len_change(self, event=None):\n with self.layer.events.length.blocker():\n self.lengthSpinBox.setValue(self.layer.length)", "def setsize(self, size):\n self.__size = size", "def _set_length(self, length):\n self.bottom.pos.y = self.top.pos.y + length", "def _update_length(self, field, tag_id, value):\n # pylint: disable=unused-argument\n if tag_id not in {8, 9, 10}:\n self._message_length += len(field) + 1\n if self._message_length >= self._max_length:\n raise FIXLengthTooLongError(\n f'message too long: {self._message_length}')", "def random_password_length(self, random_password_length):\n\n self._random_password_length = random_password_length", "def length(self):\n return struct.unpack('<B', self.pkt.payload[2:3])[0]", "def length(self):\n return struct.unpack('<B', self.pkt.payload[2:3])[0]", "def content_len(self, value):\n self.set_header('CONTENT-LENGTH', value)", "def setLengthUnits(self, *args):\n return _libsbml.Model_setLengthUnits(self, *args)", "def content_length(self, content_length):\n self._content_length = content_length", "def setMaxLength(self, value):\n return self._set(maxLength=value)", "def setMaxLength(self, value):\n return self._set(maxLength=value)", "def setMaxLength(self, value):\n return self._set(maxLength=value)", "def setMaxLength(self, value):\n return self._set(maxLength=value)", "def set_size(self, size):\n self.dtSize = size", "def length(self):\n return struct.unpack('<B', self.pkt.payload[1:2])[0]", "def length(self):\n return struct.unpack('<H', self.pkt.payload[6:8])[0]", "def as_length(self, value):\n new_vec = self.copy()\n new_vec.length = value\n return new_vec", "def setNewLen(self):\n self.wordLen = randint(3, 31)", "def length(self) -> int:\n pass", "def length(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'length'\")", "def __len__(self) -> int:\n return self._length", "def length(self):\n return struct.unpack('<H', self.pkt.payload[2:4])[0]", "def svn_info_t_size_set(svn_info_t_self, apr_size_t_size): # real signature unknown; restored from __doc__\n pass", "def setMinOutputLength(self, value):\n return self._set(minOutputLength=value)", "def setMinOutputLength(self, value):\n return self._set(minOutputLength=value)", "def set_width(self, width):\n self.__width = width", "def __len__(self) -> int:\n return len(self.length)", "def length(self):\n pass", "def set_last_segment_length(self, length):\n prior_length = self.segments[-1].get_length()\n if prior_length != -1:\n self.end_time -= prior_length\n\n self.segments[-1].set_length(length)\n self.end_time += length", "def __len__(self) -> int:\n return self.length", "def content_length(self, value):\r\n self.set_header('CONTENT-LENGTH', str(value))", "def set_line_width(self, val):\n self.lwidth = val", "def getLength(self):\n return self.length", "def __padlen(self,l):\n return Utils.padlen(l,self.com.granularity)", "def size(self, value):\n self.width = value", "def set_size(self, length, width=None):\n\n length = float(length)\n try:\n width = float(width)\n except:\n pass\n if width is not None:\n self.ang_size = np.sqrt(length * width)\n else:\n self.ang_size = length\n\n ang_size_in_rad = self.ang_size / 60 * np.pi / 180\n self.sr = ct.angle_to_solid_angle(ang_size_in_rad)", "def __len__(self) -> int:\n return self._len", "def __len__(self):\n return(self.data_len)", "def set_width(self, width):\n self.width = width", "def get_tcp_packet_payload_len_with_options(pkt: dpkt.ethernet.Ethernet) -> int:\n if isinstance(pkt, 
dpkt.ethernet.Ethernet):\n ip = pkt.data\n elif isinstance(pkt, dpkt.ip.IP):\n ip = pkt\n else:\n return None\n return ip.len - ip.hl * 4 - 20", "def get_payload_length(packet):\n adaptation_field_len = TS.get_adaptation_field_length(packet)\n return 188 - 4 - adaptation_field_len", "def get_length(self):\n return self._length", "def get_length(self):\n return self._length", "def set_width(self, *args):\n return _ida_hexrays.lvar_t_set_width(self, *args)", "def get_length(self):\n\n return self.length", "def setMaxWindowLen(self, length):\n return self._set(maxWindowLen=length)", "def setMaxWindowLen(self, length):\n return self._set(maxWindowLen=length)", "def length(self) -> int:\r\n\r\n return self.__length", "def length_in_bits(self):\n if hasattr(self, '_m_length_in_bits'):\n return self._m_length_in_bits if hasattr(self, '_m_length_in_bits') else None\n\n self._m_length_in_bits = ((self.len - 1) * 8)\n return self._m_length_in_bits if hasattr(self, '_m_length_in_bits') else None", "def setWidth(self, width):\n self._reconfig(\"width\", width)", "def set_length(vec, length):\n return normalized(vec) * length", "def _setVals(self, tp: CQCType = 0, length: int = 0) -> None:\n self.type = tp\n self.length = length", "def change_length_signal(signal, length=None):\n if length is None:\n length = len(signal)\n if len(signal) >= length:\n signal = sumpf.modules.CutSignal(signal=signal, start=0, stop=length).GetOutput()\n else:\n signal = append_zeros(signal, length)\n return signal", "def _add_slice_length(self, length):\n if length < pow(2, 7):\n self.add_int8(length << 1)\n elif length < pow(2, 14):\n self.add_int16(1 | length << 2)\n elif length < pow(2, 21):\n self.add_int24(3 | length << 3)\n elif length < pow(2, 29):\n self.add_int32(7 | length << 3)\n else:\n raise SliceLengthOutOfRange(\"slice length {} is out of range\".format(length))", "def __len__(self):\n return self._length # pylint: disable = E1101", "def set_body_size(self, length: int) -> None:\n self._body = [Coord2D(0, 0) for _ in range(length)]\n self._tail_visited = set()\n self.record_tail_location()", "def len23(self, len): # -> None:\n ...", "def lib_size(self, lib_size):\n self.logger.debug(\"In 'lib_size' setter.\")\n\n self._lib_size = lib_size", "def length(self) -> 'int':\n return self._info.len", "def cal_data_length(self, data_length):\n fn = self._lib['resampleObj_calDataLength']\n fn.argtypes = [POINTER(OpaqueResample), c_int]\n fn.restype = c_int\n fn(self._obj, c_int(data_length))", "def _set_prefix_length(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"prefix-length\", rest_name=\"prefix-length\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix_length must be of a type compatible with uint32\"\"\",\n 'defined-type': \"uint32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), 
is_leaf=True, yang_name=\"prefix-length\", rest_name=\"prefix-length\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)\"\"\",\n })\n\n self.__prefix_length = t\n if hasattr(self, '_set'):\n self._set()", "def get_length(self):\n\n return self._length", "def payload_length(self):\n return self._payload_length", "def getLen(self):\n return self.len", "def length(self):\n raise NotImplementedError(\"(%s).length\" % self)", "def set_gesture_pulse_count_and_length(self, pulse_count, pulse_length):\n if not (1 <= pulse_count <= 64):\n raise ValueError(\"pulse_count must be in range [1-64].\")\n if not (APDS_9960.PULSE_LEN_4_MICROS <= pulse_length\n <= APDS_9960.PULSE_LEN_32_MICROS):\n raise ValueError(\"pulse_length must be one of PULSE_LEN_N_MICROS.\")\n\n reg_value = (pulse_count - 1) | (pulse_length << 6)\n self.write_byte_data(reg_value, APDS_9960.GESTURE_PULSE_COUNT_AND_LEN_REG_ADDRESS)", "def set_base_length_entry(self, base_length):\n self.entries[\"ent_base_length\"].delete(0, END)\n self.entries[\"ent_base_length\"].insert(\n 0, str(base_length))", "def length(self):\n ...", "def _get_length(self):\n return self._length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length" ]
[ "0.7967779", "0.7632073", "0.72674304", "0.7185319", "0.7084449", "0.70364594", "0.6882304", "0.66527474", "0.6593729", "0.6497038", "0.64803594", "0.6373639", "0.63581616", "0.63335747", "0.6284977", "0.6236442", "0.6221398", "0.6134403", "0.6132038", "0.6003637", "0.597266", "0.59429586", "0.59172124", "0.59083015", "0.58775926", "0.58236974", "0.58028436", "0.57977897", "0.5788708", "0.5780997", "0.57799125", "0.5735965", "0.5735965", "0.5732699", "0.57295305", "0.57164156", "0.5703339", "0.5703339", "0.5703339", "0.5703339", "0.5698901", "0.56821907", "0.56764823", "0.5653249", "0.56521994", "0.5650801", "0.56416893", "0.5638124", "0.5637013", "0.562164", "0.5590347", "0.5590347", "0.55895454", "0.558931", "0.55722255", "0.5570504", "0.5560576", "0.5557601", "0.5556602", "0.5525722", "0.5519424", "0.55172914", "0.5516462", "0.5501868", "0.54912645", "0.5482546", "0.5482542", "0.5478047", "0.5477772", "0.5477772", "0.54728025", "0.54677", "0.5464321", "0.5464321", "0.5463634", "0.54515517", "0.5450474", "0.54469246", "0.5434429", "0.54339", "0.5433591", "0.54314524", "0.5424478", "0.54164493", "0.5408369", "0.5401785", "0.5400137", "0.5398293", "0.5398251", "0.5390379", "0.5381538", "0.53771514", "0.536911", "0.53656036", "0.53576404", "0.53574884", "0.5355783", "0.5355783", "0.5355783", "0.5355783" ]
0.8634613
0
Increment control counter for flow control of Ethernet traffic.
def update_control(self): self._control_ctr += 0x01
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def increment_counter(self) -> None:", "def increment_etherscan_calls():\n _increment_counter(\"etherscan_calls\")", "def inc(self):\n \n self.count += 1", "def inc( self ):\n self.count += 1", "def _inc_counter(self) -> None:\n self._state_storage.increment_counter()", "def increment_pc(self):\n self.program_counter[-1] += 1", "def increment_counter(self) -> None:\n self._fail_counter += 1", "def inc_xor_gateways(self):\r\n self.num_xor_gateways += 1", "def incr_logical_clock(self):\n self._logical_clock += 1", "def increment_counter(self) -> None:\n try:\n self._redis.incr(self._namespace(\"fail_counter\"))\n except RedisError:\n self.logger.error(\"RedisError\", exc_info=True)", "def increment_instr(self):\n self.instruction_count += 1", "def increment(self):\n self.data[self.pointer] += 1\n self.data[self.pointer] %= 256", "def add_count(self):\n self.count += 1", "def set_uplink_cnt(self, uplink_counter: int) -> None:\n\n if uplink_counter < 0 or uplink_counter > 4294967295:\n raise ValueError('Bad uplink counter')\n\n cmd = b'\\x52\\x04' + uplink_counter.to_bytes(4, 'little', signed=False)\n\n try:\n self._serial.transmit(cmd)\n self._get_reply(0x52, 0, 0.25)\n finally:\n self._gpio.sleep()\n\n return", "def enable(ctx):\n fc_info = {}\n fc_info['FLEX_COUNTER_STATUS'] = 'enable'\n ctx.obj.mod_entry(\"FLEX_COUNTER_TABLE\", \"FLOW_CNT_ROUTE\", fc_info)", "def flowcnt_route(ctx):\n exit_if_route_flow_counter_not_support()\n ctx.obj = ConfigDBConnector()\n ctx.obj.connect()", "async def increment(self):\n async with self.lock:\n self.counter += 1", "def increase_count(self, number=1):\n self.count += number", "def increment_count(self):\n self.image_count +=1\n if self.image_count > self.max_count:\n self.image_count = self.count_start # overflow", "def inc_counter(self, *_, **__): # pylint: disable=arguments-differ\n pass", "def inc(self):\n self._value += 1", "def increment(self, inc):\n self.done += inc", "def incrementWriteCount(self):\n self.writeCount += 1", "def incInstCount(self):\n self.instCount += 1", "async def counter_inc(self,\n row: bytes,\n column: bytes,\n value: int = 1) -> None:\n self._counters[(row, column)] += value\n await self._check_send()", "def increment_requests_count(self, type):\n if type not in self._requests_count:\n self._requests_count[type] = 0\n self._requests_count[type] += 1", "def increase_counter(self):\n self.values = self.values + 1", "def increment(self, status: StatusEnum):\n if status == StatusEnum.OK:\n self.ok_count += 1\n elif status == StatusEnum.MISSING:\n self.missing_count += 1\n elif status == StatusEnum.MINOR:\n self.minor_count += 1\n elif status == StatusEnum.MAJOR:\n self.major_count += 1\n elif status == StatusEnum.LIFE_THREATENING:\n self.life_threatening_count += 1", "def incr_cond(self):\n pass", "def _increment_turn(self):\r\n\r\n self.turn_number += 1", "def incrementTimers(self):\n # online servers\n for server in self.online_servers:\n self.online_servers[server][0] += 1\n # offline servers\n for server in self.offline_servers:\n self.offline_servers[server][0] += 1\n \n return", "def _tally(self, user_gpio, level, tick):\n self.count += 1", "def _increase_counter(self, response):\n response_id = response.meta['__id']\n spot = self._request_registry[response_id]\n spot['counter'] = spot.get('counter', 0) + 1", "def incr_counter(cls, cname):\n if not cname in cls.__counters: cls.__counters[cname] = -1\n cls.__counters[cname] += 1\n return cls.__counters[cname]", "def updateCounter(self):\n self.counter = self.counter + 
1\n self.syncDataStructure[\"+\"][str(self.instanceID)] = self.counter", "def send_req(self):\n self.n_send_req += 1", "def inc_para_gateways(self):\r\n self.num_para_gateways += 1", "def touch_packet (self, byte_count, now=None):\n if now is None: now = time.time()\n self.byte_count += byte_count\n self.packet_count += 1\n self.last_touched = now", "def increment(cls):\n index = random.randint(0, SimpleCounterShard.NUM_SHARDS - 1)\n shard_name = 'shard' + str(index)\n counter = SimpleCounterShard.objects.get_or_create(pk=shard_name)[0]\n counter.count += 1\n counter.save()", "def increment_login_attemtps(self):\r\n self.login_attempts += 1", "def increment_number_served(self, increment):\n self.number_served += increment", "def increment_number_served(self, increment):\n self.number_served += increment", "def increment_number(self):\n # self.number += 1\n print('fuckwit')\n # print(self.number)", "def set_counter_increase(self, val=1):\r\n return self._arm.set_counter_increase(val)", "def tick(self):\n self.count += 1", "def counter(self) -> int:", "def counter(self) -> int:", "def incr_registers(self):\n pass", "def increment_login_attempts(self):\n self.login_attempts += 1", "def increment_login_attempts(self):\n self.login_attempts += 1", "def increment_login_attempts(self):\n self.login_attempts += 1", "def increment_login_attempts(self):\n self.login_attempts += 1", "def increment_login_attempts(self):\n self.login_attempts += 1", "def increment_login_attempts(self):\n self.login_attempts += 1", "def increment_login_attempts(self):\n self.login_attempts += 1", "def increment_login_attempts(self):\n\t\tself.login_attempts += 1", "def increment_login_attempts(self):\n\t\tself.login_attempts += 1", "def inc_pc(self, size):\n current_pc = self.get_register('PC')\n self.set_pc(current_pc + size)", "def count_inside(self):\n time.sleep(2) #1\n self.count += 1", "def incTurn(self):\n self.turnOn = (self.turnOn+1)%self.turns", "def enable(ctx):\n fc_info = {}\n fc_info['FLEX_COUNTER_STATUS'] = 'enable'\n ctx.obj.mod_entry(\"FLEX_COUNTER_TABLE\", \"FLOW_CNT_TRAP\", fc_info)", "async def incr(req):\n key, ttl, err = validate_params(req)\n if err is not None:\n return err\n\n counter = incr_with_ttl(key, ttl)\n return web.json_response(data={'status': 'success', 'counter': counter})", "def get_uplink_cnt(self) -> int:\n try:\n self._serial.transmit(b'\\x53\\x00')\n response = self._get_reply(0x53, 4, 0.25)\n finally:\n self._gpio.sleep()\n\n return int.from_bytes(response[2:6], 'little', signed=False)", "def post_seqnoincrease(self):", "def throttle_increment(self, commit=True):\n self.throttling_failure_timestamp = timezone.now()\n self.throttling_failure_count += 1\n if commit:\n self.save()", "def incr_no_of_attacks(self):\n\t\tself.__anom += 1\n\t\tself.__anom_lbl.setText(str(self.__anom))", "def increase_view_count(self):\n try:\n self.view_counter += 1\n self.save(update_fields=['view_counter'])\n except:\n warnings.warn(\"Unable to increase view count for advert {}\".format(self.pk))", "def setNextIface(self): \n self.nextIface+=1", "def increment_login_attempts(self):\n self.attributes['login_attempts'] += 1", "def getPacketCount(self):\n return 1", "def increment(self, amount):\n pass", "def testSetCount(self):\n\t\tc = Controller()\n\t\ta = BaseAction('x')\n\t\tc.record(a)\n\t\tself.failUnless(a.playbackPolicy.remaining == 1)\n\t\tc.setCount(2)\n\t\tself.failUnless(a.playbackPolicy.remaining == 2)", "def _increment_state(self, bytes_read):\n self._read_state[StateKey.POSITION] += 
bytes_read", "def tick():\n global counter\n counter += 1", "def incr_circuit_remix_count(self, circuit_id):\n key = ':'.join(\n [CIRCUIT_NMBR_RMX_1, \n str(circuit_id), \n CIRCUIT_NMBR_RMX_2]\n )\n self.RS.incr(key)", "def increaseFlow(self, edge, value):\r\n self.flow[edge] += value\r\n self.flow[edge[::-1]] -= value", "def inc(self, labels: dict[str, str]):\n\n val = self.get(labels)\n\n if val is None:\n val = 0\n\n val += 1\n\n self.set(labels, val)", "def increment(self, n=1):\n with self.current_counter.get_lock():\n self.current_counter.value += n", "def increment_node_index(self):\n self.node_index += 1", "def update_counter(ai_counter):\n if ai_counter < 140:\n ai_counter += 1\n else:\n ai_counter = 60\n return ai_counter", "def increment(self):\n self._deltas += 1", "def inc_rolls(self):\n self._rolls += 1", "def incr_counter(self, path):\n res = self.read_counter(path)\n # print 'incr_counter:', path, res, '->', res + 1\n res += 1\n self.cursor.execute('REPLACE INTO counter(fullpath, count) VALUES(?, ?)', (path, res))\n self.conn.commit()\n pass", "def ctrl_sequence_of_oper(self) -> int:\n return self.cluster.get(\"ctrl_sequence_of_oper\", 0xFF)", "def increment_login_attempts(self, increment):\r\n increment == self.login_attempts\r\n self.login_attempts += 1", "def increment(self, player):\n if not bblocks.counter.next():\n bblocks.check_end(player)\n\n if self._increment(player):\n for tile in board.cross_neighbours(self):\n tile.increment(player)\n board.draw()", "def calee(num):\n state.inc(num)\n print(\"caleee\")", "def control_edge_count(self) -> int:\n return int(self.graph_tuple_stats.control_edge_count or 0)", "def test_should_return_the_correct_integer(self):\n\n tcp_flags = TCPControlBits(['SYN', 'ACK'])\n assert_equal(tcp_flags.to_int(), 18)", "def traffic_control(self, *args, **kwargs):\n self.check_res(self.hltapi.traffic_control(**kwargs))", "def _increment_state(self, increment):\n self._read_state[StateKey.POSITION] += increment", "def increment_steps(self):\n self.num_steps += 1", "def increment(cls, value):\r\n value.value += 1", "def __numHeads(self):\n count = 1\n\n while (self.__coinFlip() == 1):\n count += 1\n return count", "def inc(self):\n return self._inc", "def _on_progress(self, num):\n self._num_progresses += num\n self._log.info(\"Progress incrementing by {}\".format(num))\n self._host_comms.send_msg(\"progress\", num)", "def increment_views(self):\n self.views += 1\n self.save()", "def recv_req(self):\n self.n_recv_req += 1", "def at_ctrl(seq, num):\n at(\"CTRL\", seq, [num, 0])", "def inc_cycles(self, cycles):\n self.cycle += cycles\n self.global_cycle += cycles" ]
[ "0.64278454", "0.642254", "0.6130352", "0.60675645", "0.5942708", "0.5894269", "0.5890736", "0.58173895", "0.5758865", "0.5732553", "0.5684082", "0.567313", "0.55923927", "0.55792695", "0.55606544", "0.554892", "0.55463547", "0.5544628", "0.5536674", "0.55294377", "0.55287164", "0.5477743", "0.54717904", "0.5468333", "0.5419579", "0.5419331", "0.54158425", "0.5415841", "0.54158324", "0.54144216", "0.54084504", "0.5401043", "0.539353", "0.53610224", "0.53507036", "0.5326365", "0.5306717", "0.53059053", "0.5299115", "0.52692145", "0.526113", "0.526113", "0.52447057", "0.523724", "0.523569", "0.52322525", "0.52322525", "0.5230821", "0.52257246", "0.52257246", "0.52257246", "0.52257246", "0.52257246", "0.52257246", "0.52257246", "0.5220749", "0.5220749", "0.5215539", "0.5207329", "0.52029467", "0.5202699", "0.5200584", "0.5190664", "0.51820457", "0.518051", "0.517223", "0.5154458", "0.51470184", "0.51451194", "0.51435417", "0.51409787", "0.5135416", "0.51217145", "0.5108148", "0.51005673", "0.5093448", "0.50888354", "0.50824916", "0.5074618", "0.5072434", "0.5071415", "0.5060602", "0.5059436", "0.5056645", "0.505393", "0.5042348", "0.504078", "0.50354266", "0.5027721", "0.5024318", "0.5021723", "0.50162846", "0.50143784", "0.50099665", "0.49988604", "0.4996411", "0.4995149", "0.499347", "0.49926302", "0.49896494" ]
0.5772331
8
Return bytes representation of Ethernet header.
def __bytes__(self): return pack("<HH", self._packet_len, self._control_ctr)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def header_bytes(self, zero_checksum=False):\n b = bytearray()\n\n counter = 0\n integer = 0\n\n # The values are packed into 4-byte integers, since the largest\n # fixed-size header fields are 4-byte IP addresses. After that, they are\n # added to the byte array as 4 bytes.\n for field, bits in self.fields:\n assert counter <= 32, \"Bit counter somehow got over 32!\"\n\n if counter < 32:\n if field == \"header_checksum\" and zero_checksum:\n value = 0\n else:\n value = getattr(self, field)\n\n integer += (value << (32 - bits - counter))\n counter += bits\n\n if counter == 32:\n b.append((integer >> 24) & 255)\n b.append((integer >> 16) & 255)\n b.append((integer >> 8) & 255)\n b.append(integer & 255)\n\n integer = 0\n counter = 0\n\n return bytes(b)", "def encode(self) -> bytes:\n version_ihl = self._version << 4 | self._ihl\n flags_fragoffset = self._flags << 13 | self._frag_offset\n\n raw = struct.pack(\n IPHeader.fmt,\n version_ihl,\n self._tos,\n self.len,\n self.id,\n flags_fragoffset,\n self._ttl,\n self.proto,\n self._csum,\n self.saddr,\n self.daddr,\n )\n return raw + self.payload", "def __bytes__(self):\n return self.header_bytes() + self.payload", "def __bytes__(self):\n if self._header.value_type in b'ZH':\n value = self._buffer[:-1] # Omit the trailing Null\n elif self._header.value_type in b'AB':\n value = self._buffer\n else:\n value = str(self._buffer.value).encode('ASCII')\n return self._header.tag + b':' + (self._header.value_type if self._header.value_type in b'AifZHB' else b'i') + b':' + value", "def __bytes__(self):\n return (\n bytes(self.block_header) +\n pack_compact_int(len(self.txns)) +\n b''.join([bytes(t) for t in self.txns])\n )", "def __bytes__(self) -> bytes:\n from hathor.merged_mining.bitcoin import encode_bytearray, encode_list\n struct_bytes = self.header_head\n struct_bytes += encode_bytearray(self.coinbase_head)\n struct_bytes += encode_bytearray(self.coinbase_tail)\n struct_bytes += encode_list(self.merkle_path)\n struct_bytes += self.header_tail\n return struct_bytes", "def get_byte_string(self):\n return \"\".join(['%02X' % i for i in self._data]).decode('hex')", "def bytes(self):\n return bytes(self._packet)", "def encode(self) -> bytes:\n\n # unsigned char dmac[6];\n # unsigned char smac[6];\n # uint16_t ethertype;\n # unsigned char payload[];\n\n t = struct.pack(\"H\", socket.htons(self.typ))\n return self.dmac + self.smac + t + self.payload", "def header(self):\n return encode_as_str([self.unsealed_header(), self.seal_data], sep='`')", "def header(self):\n return encode_as_str([self.unsealed_header(), self.seal_data], sep='`')", "def unsealed_header(self):\n return encode_as_str([self.height, self.timestamp, self.target, self.parent_hash, self.is_genesis, self.merkle], sep='`')", "def unsealed_header(self):\n return encode_as_str([self.height, self.timestamp, self.target, self.parent_hash, self.is_genesis, self.merkle], sep='`')", "def __repr__(self):\n return \"{}:{}:{}\".format(self._header.tag.decode('ASCII'),\n self._header.value_type.decode('ASCII') if self._header.value_type in b'AifZHB' else 'i', _to_str(self._buffer))", "def to_header(self):\n\n return self._header_block", "def to_bytes(self):\n return self._bytes", "def headerNameAsBytes(name):\n # type: (String) -> bytes\n if isinstance(name, bytes):\n return name\n else:\n return name.encode(HEADER_NAME_ENCODING)", "def getHeader(self):\n length = self.getInt()\n dest = self._getStr(definition.ADDRESS_LENGTH)\n origin = self._getStr(definition.ADDRESS_LENGTH)\n msgType = 
self._getStr(definition.MSG_TYPE_LENGTH)\n msgNr = self.getInt()\n return (length, dest, origin, msgType, msgNr)", "def mail_header(self):\n return self._hdr", "def _encode_header(self):\n\t\theader = self.config.get('header')\n\t\tif header is not None:\n\t\t\treturn self._encode_tuple(header)", "def tobytes(self):\n\n self.meta.msg_id = self.body.__msg_id__\n self.meta.msg_class = self.body.__msg_class__\n offset = HEADER_STRUCT_SIZE\n if (\n isinstance(self.body, Modern)\n and not self.body.binary is None\n and self.meta.msg_class in (MSG_CLASS_MODERN_BINARY, MSG_CLASS_MODERN_OTHER)\n ):\n offset += 4\n buffer = bytearray()\n (body_len, bin_offset) = self.body.__pack_into__(self.meta, buffer, offset)\n self.meta.__pack_into__(buffer, body_len, bin_offset)\n return bytes(buffer)", "def create_message(self, packet):\n self._header.packet_len = len(bytes(packet))\n \n frame_bytes = super(EthernetTransport, self).create_message(packet) \n \n # Update control counter for next frame\n self._header.update_control()\n \n return bytes(frame_bytes)", "def to_bytes(self):\n return bytes(self.data)", "def test_udp_header_native(self):\n header = UDP_HEADER(\n source_port = 8080,\n dest_port = 8080,\n length = 2,\n checksum = 0xbeef\n )\n\n expected_val = struct.pack('HHHH', 8080, 8080, 2, 0xbeef)\n\n self.assertEqual(header.to_bytes(), expected_val)", "def to_bytes(self) -> bytes:", "def encode(self):\n data = self.fhdr.encode() + struct.pack('B', self.fport) + \\\n self.frmpayload\n return data", "def decode(cls, raw: bytes) -> \"EthernetHeader\":\n # unsigned char dmac[6];\n # unsigned char smac[6];\n # uint16_t ethertype;\n # unsigned char payload[];\n dmac = raw[:6]\n smac = raw[6:12]\n typ = socket.htons(struct.unpack(\"H\", raw[12:14])[0])\n payload = raw[14:]\n return EthernetHeader(dmac=dmac, smac=smac, typ=typ, payload=payload)", "def Header(self):\n return (bytes(bytearray([constants.VERSION])) +\n util.Base64WSDecode(self.hash_id))", "def to_bytes(self) -> bytes:\n return pack('4B', self.tag, self.flags, self.reserved, self.params_count)", "def getHeader(self):\n return self.data.header", "def empty_header_data(cls):\n empty_struct = struct.Struct(cls.HEADER_STRUCT_FORMAT_STR)\n packed_data = empty_struct.pack(0, 0, 0, 0)\n return packed_data", "def dump(self) -> bytes:\n header = pack(\n \">3I3B\",\n self.width,\n self.height,\n self.row_bytes,\n self.color_mode,\n self.channels,\n self.bits,\n )\n return header + self.data", "def as_bytes(self) -> bytes:\n\n return bytes(self.data_bytes)", "def to_header(self):\n if not self.filled:\n return ''\n\n return \"\\n\".join(self.data)", "def to_h(self):\n return str(self).encode('hex')", "def to_bytes(self, padding: bool = True) -> bytes:\n self.header.params_count = len(self.params)\n data = self.header.to_bytes()\n data += pack(f\"<{self.header.params_count}I\", *self.params)\n if padding and len(data) < self.SIZE:\n data += bytes([self.EMPTY_VALUE] * (self.SIZE - len(data)))\n return data", "def toBytes(self):\n return self._bytes", "def _pack(self):\n header = struct.pack(self.PACKAGING_FORMAT, self.cmd_length)\n return header", "def encode(self):\n datarate_txpower = 0 | (self.datarate << 4) | self.txpower\n redundancy = 0 | (self.chmaskcntl << 4) | self.nbrep\n data = struct.pack('<BBHB', self.cid, datarate_txpower, self.chmask, redundancy)\n return data", "def headerValueAsBytes(value):\n # type: (String) -> bytes\n if isinstance(value, bytes):\n return value\n else:\n return value.encode(HEADER_VALUE_ENCODING)", "def _pack(self):\n 
header = struct.pack(self.PACKAGING_FORMAT, self.qubit_id)\n return header", "def dump_protocol_header(major: int, minor: int, revision: int) -> bytes:\n return pack('>5sBBB', b'AMQP\\x00', major, minor, revision)", "def as_bits(self):\r\n return self.header.as_bits() + self.payload.as_bits() + self.padding.as_bits()", "def get_nt_header(self):\n\n if self.e_magic != 0x5a4d:\n raise ValueError('e_magic {0:04X} is not a valid DOS signature.'.format(self.e_magic))\n\n nt_header = obj.Object(\"_IMAGE_NT_HEADERS\",\n offset = self.e_lfanew + self.obj_offset,\n vm = self.obj_vm,\n native_vm = self.obj_native_vm)\n\n if nt_header.Signature != 0x4550:\n raise ValueError('NT header signature {0:04X} is not a valid'.format(nt_header.Signature))\n\n return nt_header", "def dump(self):\n avps = self.get_all_avps_contents()\n auth = self.compute_authenticator(avps)\n header = struct.pack(RadiusMessage.RADIUS_HDR_TMPL, self.code,\n self.pid, len(self), auth)\n return b\"\".join([header, avps])", "def to_bytes(self):\n\t\treturn self.buffer.tobytes();", "def network_bytes(self) -> List[bytes]:", "def bytes(self):\n return self._payload", "def to_knx(self) -> bytes:\n return (\n bytes(\n (\n TunnellingRequest.HEADER_LENGTH,\n self.communication_channel_id,\n self.sequence_counter,\n 0x00, # Reserved\n )\n )\n + self.raw_cemi\n )", "def test_tcp_header_native(self):\n header = TCP_HEADER(\n source_port = 8080,\n dest_port = 8080,\n seq_num = 0xbeefcafe,\n ack_num = 0xcafebeef,\n data_offset = 0xf,\n flag_ns = 1,\n flag_cwr = 1,\n flag_ece = 1,\n flag_urg = 1,\n flag_ack = 1,\n flag_psh = 1,\n flag_rst = 1,\n flag_syn = 1,\n flag_fin = 1,\n window_size = 12,\n checksum = 0xffff\n )\n\n expected_data = [\n 8080, 8080, 0xbeefcafe, 0xcafebeef, int('10001111', 2), 0xff, 12, 0xffff\n ]\n\n expected_val = struct.pack('HHIIBBHH', *expected_data)\n\n self.assertEqual(header.to_bytes(), expected_val)", "def _pack(self):\n if self._cqc_version < 2:\n header = struct.pack(self.PACKAGING_FORMAT_V1, self.remote_app_id, self.remote_node, self.remote_port)\n else:\n header = struct.pack(self.PACKAGING_FORMAT, self.remote_app_id, self.remote_port, self.remote_node)\n return header", "def ethernet_frame(packet):\n dest_mac, src_mac, proto = struct.unpack('! 
6s 6s H', packet[:14])\n return get_mac_addr(dest_mac), get_mac_addr(src_mac), socket.htons(proto), packet[14:]", "def Header(self):\n return chr(keyczar.VERSION) + util.Decode(self.hash)", "def pack(self):\n # TODO Avoid copying data\n return bytearray(self._header) + bytearray(self._buffer)", "def _pack(self):\n header = struct.pack(self.PACKAGING_FORMAT, self.datetime)\n return header", "def _pack(self):\n header = struct.pack(self.PACKAGING_FORMAT, self.outcome)\n return header", "def str(self):\n return struct.pack(\n '!IIIIIIIIIII',\n self.magic,\n self.totalsize,\n self.off_dt_struct,\n self.off_dt_strings,\n self.off_mem_rsvmap,\n self.version,\n self.last_comp_version,\n self.size_dt_strings,\n self.size_dt_struct\n )", "def generate_ens_header(ens_num, payload_size):\n\n header = []\n\n # Get the Header ID\n for cnt in range(0, 16):\n header.append(0x80)\n\n # Ensemble Number and inverse\n header += Ensemble.int32_to_bytes(ens_num)\n header += struct.pack(\"i\", ~ens_num)\n\n # Payload size and inverse\n header += Ensemble.int32_to_bytes(payload_size)\n header += struct.pack(\"i\", ~payload_size)\n\n return header", "def __bytes__(self):\n # Mac address case\n if self.subtype == 3:\n return bytes([self.type * 2, 1 + 6, self.subtype]) + self.value\n # ip address case\n elif self.subtype == 4:\n if self.value.version == 4:\n # ipv4 case\n return bytes([self.type * 2, 1 + 5, self.subtype ,1]) + self.value.packed\n else:\n # ipv6 case\n return bytes([self.type * 2, 1 + 17, self.subtype, 2]) + self.value.packed\n\n #all other cases:\n else:\n if len(self.value) > 255:\n return bytes([(self.type * 2) + 1, len(self.value) - 256, self.subtype]) + bytes(self.value, 'utf-8')\n else:\n return bytes([self.type * 2, len(self.value) + 1, self.subtype]) + bytes(self.value,'utf-8')", "def encode_header(value):\n return str(Header(unicode(value), 'iso-8859-1'))", "def to_bytes(self) -> bytes:\n\n # for extendability\n version = ACCOUNT_DATA_STRUCTURE_VERSION\n\n flags = 0\n if self._locked:\n flags |= AccountFlag.LOCKED\n if self._c_rep:\n flags |= AccountFlag.C_REP\n\n return Account._struct.pack(version, self._type, flags, self._icx.to_bytes(DEFAULT_BYTE_SIZE, DATA_BYTE_ORDER))", "def serialize(self) -> bytes:\n headers = \"\\r\\n\".join(\n f\"{header}: {value}\" for header, value in self.headers.items()\n )\n return (\n f\"HTTP/1.0 {self.status} {STATUS_CODES[self.status]}\\r\\n\"\n f\"{headers}\\r\\n\"\n \"\\r\\n\"\n ).encode(\"utf-8\")", "def to_bytes(self, byteorder=\"little\"):\n return self._value.to_bytes(self.width(), byteorder=byteorder)", "def pack_hdr(self):\n d = bytearray()\n\n # Field setter functions affect _hdr value,\n # so insert _hdr at the end of this function\n\n # Accept checksum as int or bytes types\n # and convert to bytes if given as an int\n if self.chksum:\n self.hdr_co = 0\n if type(self.chksum) is int:\n self.chksum = struct.pack(\"!H\", self.chksum)\n d.extend(self.chksum)\n else:\n self.hdr_co = 1\n self.chksum = APv6Udp.DEFAULT_CHKSUM\n\n # Accept source port as int or bytes types\n # and make sure we have the bytes in src_port\n # and the int in src_port_int\n if type(self.src_port) is int:\n self.src_port_int = self.src_port\n self.src_port = struct.pack(\"!H\", self.src_port)\n elif type(self.src_port) is bytes:\n self.src_port_int = struct.unpack(\"!H\", self.src_port)[0]\n\n # Accept dest port as int or bytes types\n # and make sure we have the bytes in dst_port\n # and the int in dst_port_int\n if type(self.dst_port) is int:\n self.dst_port_int = 
self.dst_port\n self.dst_port = struct.pack(\"!H\", self.dst_port)\n elif type(self.dst_port) is bytes:\n self.dst_port_int = struct.unpack(\"!H\", self.dst_port)[0]\n\n if ((self.src_port_int & 0xFFF0) == 0xF0B0 and\n (self.dst_port_int & 0xFFF0) == 0xF0B0):\n self.hdr_ports = APv6Udp.HDR_PORTS_SRC_F0BX_DST_F0BX\n src_nbl = self.src_port_int & 0x000F\n dst_nbl = self.dst_port_int & 0x000F\n d.append(src_nbl << 4 | dst_nbl)\n\n elif (self.src_port_int & 0xFF00) == 0xF000:\n self.hdr_ports = APv6Udp.HDR_PORTS_SRC_F0XX_DST_INLN\n d.append(self.src_port_int & 0x00FF)\n d.extend(self.dst_port)\n\n elif (self.dst_port_int & 0xFF00) == 0xF000:\n self.hdr_ports = APv6Udp.HDR_PORTS_SRC_INLN_DST_F0XX\n d.extend(self.src_port)\n d.append(self.dst_port_int & 0x00FF)\n\n else:\n self.hdr_ports = APv6Udp.HDR_PORTS_SRC_INLN_DST_INLN\n d.extend(self.src_port)\n d.extend(self.dst_port)\n\n return super().pack_hdr() + bytes(d)", "def get_header(self):\n return self._header", "def encode(self):\n return (struct.pack(b\"<iii\", self.size, self.id, self.type) +\n self.body.encode(\"ascii\") + b\"\\x00\\x00\")", "def header(self):\r\n return self.__header", "def makePacketHeader(bytes):\n id = struct.unpack('!H', bytes[0:2])[0]\n length = struct.unpack('!H', bytes[2:4])[0]\n packet_count = struct.unpack('!I',bytes[4:8])[0]\n return PacketHeader(id, length, packet_count)", "def _pack(self):\n header = struct.pack(self.PACKAGING_FORMAT, self.step)\n return header", "def encode(self):\n b = 0 | self.mtype << 5 | self.major\n data = struct.pack('B', b)\n return data", "def serialize_to_bytes(self):\n return \\\n struct.pack(\n self._format,\n self.cpu_svn.serialize_to_bytes(),\n self.misc_select,\n self._RESERVED_1,\n self.attributes.serialize_to_bytes(),\n self.mr_enclave.serialize_to_bytes(),\n self._RESERVED_2,\n self.mr_signer.serialize_to_bytes(),\n self._RESERVED_3,\n self.isv_prod_id,\n self.isv_svn,\n self._RESERVED_4,\n self.report_data.serialize_to_bytes())", "def _serialize_header_v1(header, signer=None):\n ec_serialized = aws_encryption_sdk.internal.formatting.encryption_context.serialize_encryption_context(\n header.encryption_context\n )\n header_start_format = (\n \">\" # big endian\n \"B\" # version\n \"B\" # type\n \"H\" # algorithm ID\n \"16s\" # message ID\n \"H\" # encryption context length\n \"{}s\" # serialized encryption context\n ).format(len(ec_serialized))\n header_bytes = bytearray()\n header_bytes.extend(\n struct.pack(\n header_start_format,\n header.version.value,\n header.type.value,\n header.algorithm.algorithm_id,\n header.message_id,\n len(ec_serialized),\n ec_serialized,\n )\n )\n\n serialized_data_keys = bytearray()\n for data_key in header.encrypted_data_keys:\n serialized_data_keys.extend(serialize_encrypted_data_key(data_key))\n\n header_bytes.extend(struct.pack(\">H\", len(header.encrypted_data_keys)))\n header_bytes.extend(serialized_data_keys)\n\n header_close_format = (\n \">\" # big endian\n \"B\" # content type (no framing vs framing)\n \"4x\" # reserved (formerly content AAD length)\n \"B\" # nonce/IV length, this applies to all IVs in this message\n \"I\" # frame length\n )\n header_bytes.extend(\n struct.pack(header_close_format, header.content_type.value, header.algorithm.iv_len, header.frame_length)\n )\n output = bytes(header_bytes)\n if signer is not None:\n signer.update(output)\n return output", "def encode(self):\n data = struct.pack('BBB', self.cid, self.margin, self.gwcnt)\n return data", "def packetize(self):\n byte_str = b''\n\n # Bit string to accumulate 
bit values until we are ready to convert it into bytes\n bit_str = \"\"\n\n for field in self.fields:\n #if the current field is a special type, the bit_str value to the byte string and clear the accumulated bit_str.\n if not isinstance(field.size, int) and len(bit_str) != 0:\n byte_str += self.encode_bit_str(bit_str)\n bit_str = \"\"\n if field.size == NULL_TERMINATE:\n byte_str += self.encode_null_term(field.value)\n elif field.size == PREFIX_LENGTH:\n byte_str += self.encode_prefix_length(field.value)\n elif field.size == PREFIX_LEN_NULL_TERM:\n byte_str += self.encode_prefix_length_null_term(field.value)\n elif field.size == IPv4:\n byte_str += self.encode_ipv4(field.value)\n elif field.size == 1: # One bit, just add it to our bit string.\n bit_str += \"0\" if field.value == 0 else \"1\"\n else:\n if isinstance(field.value, int):\n bit_str += \"0\" * (field.size - len(bin(field.value)[2:])) + bin(field.value)[2:]\n elif isinstance(field.value, bytes):\n bit_str += field.value.decode('latin-1')\n #clear the bit string one last time\n if len(bit_str) != 0:\n byte_str += self.encode_bit_str(bit_str)\n bit_str = \"\"\n\n return byte_str", "def pack(self):\n\n datalen = 0\n if self._data:\n datalen = len(self._data)\n\n header = struct.pack(CCPMessage.HEADER_FMT,\n self._version, self._msg_type, datalen, self._conn_id )\n\n if datalen > 0:\n msg = header + self._data\n else:\n msg = header\n\n return msg", "def hex(self):\n return binascii.hexlify(self.data)", "def encode(self):\n packed = struct.pack(\"<2Bl\",\n self.device_number,\n self.command_number,\n self.data)\n if self.message_id is not None:\n packed = packed[:5] + struct.pack(\"B\", self.message_id)\n return packed", "def get_binary(self):\n data = bytes()\n\n for tag in self._tags:\n value = 0\n if tag in self.fields.keys():\n value = self.fields[tag]\n try:\n data += struct.pack(\"<I\", value)\n except struct.error as e:\n raise TypeError(f\"expected integer value for {tag} but got {type(value)}: {value}\")\n\n return data", "def get_bytes(self):\n return self.bytes[:-len(ImageStream.END_SEQUENCE)]", "def get_octets(self):\n if self.method == 'GET':\n return '{}{}{}{}'.format(self._get_status_line(),\n self._get_headers(),\n 2 * CRLF,\n self.data)\n else:\n return '{}{}{}'.format(self._get_status_line(),\n self._get_headers(),\n 2 * CRLF)", "def header(self):\n return self._header", "def header(self):\n return self._header", "def header(self):\n return self._header", "def create_apdu_header(self, apdu):\n if not type(apdu) is bytes:\n return \"ERROR: An APDU has to be a bytestring.\"\n start = b'\\x68'\n apdu_length = len(apdu)\n if apdu_length > 253:\n return \"ERROR: APDU too long.\"\n return start + struct.pack(\"B\", len(apdu))", "def _encode_message_header(cls, client_id, correlation_id, request_key,\n version=0):\n return struct.pack('>hhih%ds' % len(client_id),\n request_key, # ApiKey\n version, # ApiVersion\n correlation_id, # CorrelationId\n len(client_id), # ClientId size\n client_id) # ClientId", "def __repr__(self):\n return encode_as_str([self.header(), \"!\".join([str(tx) for tx in self.transactions])], sep=\"`\")", "def __repr__(self):\n return encode_as_str([self.header(), \"!\".join([str(tx) for tx in self.transactions])], sep=\"`\")", "def header_text(self):\n return os.linesep.join(map(str, self.headers))", "def GetHeader(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_GetHeader(self)", "def serialize(self):\n messageLen = len(self._messageBuf) + 1 # 1 byte for the 
message type\n header = pack(self.headerFormat, messageLen)\n msgType = pack(self.messageTypeFormat, self.messageType)\n payload = bytes(self._messageBuf)\n return header + msgType + payload", "def _to_bytes(self):\n return self.to_uri().to_text().encode('ascii')", "def testnet_bytes(self):\n return self._serialize(True)", "def as_bytearray(self):\n icon_type_as_bytearray = self.icon_type.as_bytearray()\n length = len(icon_type_as_bytearray) + 4 + len(self.icon)\n length_packed = struct.pack('>I', length)\n return icon_type_as_bytearray + bytearray(length_packed) + self.icon", "def ccp4i_header(self):\n return self.__ccp4i_header", "def bytes_encoded(self):\n # type: () -> int\n return self._bytes_encoded", "def getHeader():\n return _HEADER", "def bytes(self) -> int:\n return self._bytes", "def bytes(self) -> int:\n return self._bytes", "def GetHeader(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_GetHeader(self)", "def header(self):\n header_str = self._base[0:self.s_allocator_header].tostring()\n magic, pos, used = struct.unpack(str('III'), header_str)\n\n assert magic == self._magic_num, \\\n 'invalid header magic[%d] in shared memory' % (magic)\n return self._header_pages, self._total_pages, pos, used" ]
[ "0.71295446", "0.6759291", "0.67367053", "0.6716264", "0.65858597", "0.6580915", "0.6509498", "0.64819175", "0.6466296", "0.6400534", "0.6400534", "0.62959313", "0.62959313", "0.6222679", "0.61640424", "0.6083347", "0.60510254", "0.6036172", "0.60090053", "0.59898037", "0.5986125", "0.59802866", "0.596922", "0.5963869", "0.59620476", "0.59607416", "0.5960523", "0.5957915", "0.5950058", "0.5914405", "0.59091717", "0.5902894", "0.589158", "0.5869236", "0.5840802", "0.58337754", "0.5810802", "0.58106494", "0.5810406", "0.57980365", "0.57978463", "0.57737994", "0.57736236", "0.5767535", "0.57582384", "0.5752676", "0.5732565", "0.5728052", "0.5713052", "0.56925625", "0.5680816", "0.5675159", "0.5674795", "0.56668794", "0.565911", "0.5654517", "0.56514007", "0.5645548", "0.5634714", "0.5628557", "0.5611107", "0.5605963", "0.5598428", "0.55980664", "0.55712444", "0.55683243", "0.55633724", "0.5531349", "0.5530259", "0.55294544", "0.552797", "0.5525718", "0.5522713", "0.5521086", "0.5510457", "0.548707", "0.54863393", "0.54723066", "0.54711777", "0.54673696", "0.54665774", "0.54665774", "0.54665774", "0.54651743", "0.54560894", "0.5451559", "0.5451559", "0.54506606", "0.5450375", "0.54466945", "0.5445622", "0.5442563", "0.54390746", "0.5425322", "0.5424586", "0.54227877", "0.54129773", "0.54129773", "0.54108775", "0.5398141" ]
0.6176237
14
Creates an XCP Ethernet frame
def create_message(self, packet): self._header.packet_len = len(bytes(packet)) frame_bytes = super(EthernetTransport, self).create_message(packet) # Update control counter for next frame self._header.update_control() return bytes(frame_bytes)
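For reference, the method above only touches self._header, so the actual XCP-on-Ethernet framing stays implicit; a minimal stand-alone sketch of the same pattern is shown below, assuming the common 2-byte little-endian packet-length field followed by a 2-byte control counter. The names XcpEthernetHeader and wrap_frame are illustrative and are not part of this dataset entry.

import struct

class XcpEthernetHeader:
    """Assumed layout: LEN (uint16, little-endian) + CTR (uint16, little-endian), 4 bytes total."""
    def __init__(self):
        self.packet_len = 0
        self.ctr = 0

    def update_control(self):
        # The control counter increments once per frame and wraps at 16 bits.
        self.ctr = (self.ctr + 1) & 0xFFFF

    def to_bytes(self):
        return struct.pack("<HH", self.packet_len, self.ctr)

def wrap_frame(header, packet):
    # Same flow as create_message(): set the length, build header + payload,
    # then bump the counter so it is ready for the next frame.
    header.packet_len = len(packet)
    frame = header.to_bytes() + bytes(packet)
    header.update_control()
    return frame

hdr = XcpEthernetHeader()
print(wrap_frame(hdr, b"\xff\x00").hex())  # 02000000ff00 (an XCP CONNECT request framed)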
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gen_ieee_packet(self, data):\n\t\tpacket = Dot15d4FCS() / Dot15d4Data() / Raw(load=data)\n\n\t\tpacket.fcf_srcaddrmode = 2\n\t\tpacket.fcf_destaddrmode = 2\n\n\t\tpacket.fcf_panidcompress = True\n\t\tpacket.fcf_ackreq = True\n\t\tpacket.seqnum = self.seqnum\n\n\t\tpacket.dest_panid = self.link_config.dest_panid\n\n\t\tpacket.dest_addr = self.link_config.destination.get_short_address()\n\t\tpacket.src_addr = self.link_config.source.get_short_address()\n\n\t\treturn packet.build()", "def _create_packet(self, request):\n\n data_len = struct.pack('<Q', len(request))\n packet = b'ZBXD\\x01' + data_len + request\n\n def ord23(x):\n if not isinstance(x, int):\n return ord(x)\n else:\n return x\n\n logger.debug('Packet [str]: %s', packet)\n logger.debug('Packet [hex]: %s', ':'.join(hex(ord23(x))[2:] for x in packet))\n return packet", "def create_tcp_pkt(smac: bytes, dmac: bytes, sip: bytes, dip: bytes, ip_id: int, sp: int, dp: int,\n flags: int =dpkt.tcp.TH_SYN, payload: bytes = b\"\") -> dpkt.ethernet.Ethernet:\n tcp_pkt = dpkt.tcp.TCP(sport=sp, dport=dp, flags=flags)\n tcp_pkt.data = payload\n ip_pkt = dpkt.ip.IP(id=ip_id, p=6, src=sip, dst=dip)\n ip_pkt.data = tcp_pkt\n ip_pkt.len += len(ip_pkt.data)\n eth_pkt = dpkt.ethernet.Ethernet(src=smac, dst=dmac)\n eth_pkt.data = ip_pkt\n return eth_pkt", "def create(self):\n\t\t\n\t\tflagbyte = 0\n\t\tif self.synf: flagbyte += 1\n\t\tif self.ackf: flagbyte += 2\n\t\t\n\t\tself.header = struct.pack(\">IBIII\", self.connid, flagbyte, self.seqn, self.ackn, self.recv)\n\t\t\n\t\tself.data = self.header+self.payload", "def define_ethernet_header(self, src=None, dst=None, typeeth=None, tag=None):\n ether_header = Ether()\n if (dst == None):\n ether_header.dst = BCAST_MAC\n else:\n ether_header.dst = dst\n ether_header.src = src\n return ether_header", "def ethernet_frame(packet):\n dest_mac, src_mac, proto = struct.unpack('! 
6s 6s H', packet[:14])\n return get_mac_addr(dest_mac), get_mac_addr(src_mac), socket.htons(proto), packet[14:]", "def make_packet(message, host):\n\tRESOURCE = \"/\"\t\t\t\t# dummy resource\n\t\n\t# First line is the request\n\trequest = HTTPConstants.GET_REQUEST + \" \" + RESOURCE + \" \" + HTTPConstants.VERSION + HTTPConstants.CRLF\n\t\n\t# Next are the headers\n\theaders = \"Host: {0}\".format(host) + HTTPConstants.CRLF\n\t\n\t# Construct the head\n\thead = request + headers\n\t\n\t# Construct the body\n\tbody = message + HTTPConstants.CRLF\n\t\n\t# Assembly into a packet, where the head and body (message) are separated by a blank line (CRLF), and the EOM is\n\t# denoted by a blank line\n\treturn head + HTTPConstants.CRLF + body + HTTPConstants.CRLF", "def main():\n connection = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.htons(0x03))\n\n # Start the main loop.\n while True:\n # 65536 is the biggest buffer size that can be used.\n raw_data, addr = connection.recvfrom(65536)\n dest_mac, src_mac, eth_proto, data = ethernet_frame(raw_data)\n print('\\nEthernet Frame:')\n print('Destination: {}, Source: {}, Protocol: {}'.format(dest_mac, src_mac, eth_proto))", "def create_coa_packet(self, **args):\n return host.Host.create_coa_packet(self, secret=self.secret, **args)", "def create_frame(data, opcode):\r\n if opcode == ABNF.OPCODE_TEXT and isinstance(data, unicode):\r\n data = data.encode(\"utf-8\")\r\n # mask must be set if send data from client\r\n return ABNF(1, 0, 0, 0, opcode, 1, data)", "def createPacket(id):\n\n # Header is type (8), code (8), checksum (16), id (16), sequence (16)\n header = getHeaderData(0, id)\n\n data = 192 * 'Q'\n\n checksum = getChecksum(header + data)\n\n header = getHeaderData(socket.htons(checksum), id)\n\n return header + data", "def init_from_body(knxip_body: KNXIPBody):\n knxipframe = KNXIPFrame(knxip_body.xknx)\n knxipframe.header.service_type_ident = knxip_body.__class__.service_type\n knxipframe.body = knxip_body\n knxipframe.normalize()\n return knxipframe", "def fusion_api_create_ethernet_network(self, body, api=None, headers=None):\n return self.ethernet_network.create(body, api, headers)", "def build_frame(self, message):\r\n header = BytesIO()\r\n if 0x3 <= self.opcode <= 0x7 or 0xB <= self.opcode:\r\n raise WebSocketProtocolError('Opcode cannot be a reserved opcode')\r\n ## +-+-+-+-+-------+\r\n ## |F|R|R|R| opcode|\r\n ## |I|S|S|S| (4) |\r\n ## |N|V|V|V| |\r\n ## | |1|2|3| |\r\n ## +-+-+-+-+-------+\r\n header.write(i2b(((self.fin << 7)\r\n | (self.rsv1 << 6)\r\n | (self.rsv2 << 5)\r\n | (self.rsv3 << 4)\r\n | self.opcode)))\r\n ## +-+-------------+-------------------------------+\r\n ## |M| Payload len | Extended payload length |\r\n ## |A| (7) | (16/63) |\r\n ## |S| | (if payload len==126/127) |\r\n ## |K| | |\r\n ## +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +\r\n ## | Extended payload length continued, if payload len == 127 |\r\n ## + - - - - - - - - - - - - - - - +-------------------------------+\r\n if self.masking_key:\r\n mask_bit = 1 << 7\r\n else:\r\n mask_bit = 0\r\n length = self.payload_length \r\n if length < 126:\r\n header.write(i2b(mask_bit | length))\r\n elif length < (1 << 16):\r\n header.write(i2b(mask_bit | 126))\r\n header.write(pack('!H', length))\r\n elif length < (1 << 63):\r\n header.write(i2b(mask_bit | 127))\r\n header.write(pack('!Q', length))\r\n else:\r\n raise WebSocketProtocolError('Frame too large')\r\n ## + - - - - - - - - - - - - - - - +-------------------------------+\r\n ## | 
|Masking-key, if MASK set to 1 |\r\n ## +-------------------------------+-------------------------------+\r\n ## | Masking-key (continued) | Payload Data |\r\n ## +-------------------------------- - - - - - - - - - - - - - - - +\r\n ## : Payload Data continued ... :\r\n ## + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +\r\n ## | Payload Data continued ... |\r\n ## +---------------------------------------------------------------+\r\n if not self.masking_key:\r\n header.write(message)\r\n else:\r\n header.write(self.masking_key)\r\n header.write(self.mask(message))\r\n return header.getvalue()", "def testFramepack2(self):\n # Check bad frame generation:\n frame = stomper.Frame()\n frame.cmd = 'DISCONNECT'\n result = frame.pack()\n correct = 'DISCONNECT\\n\\n\\x00\\n'\n self.assertEqual(result, correct)", "def create_packet_definition(packet_to_send):\n source_mac = \"00:00:00:00:00:01\"\n destination_mac = \"00:00:00:00:00:02\"\n source_ip = \"10.10.10.1\"\n destination_ip = \"10.10.10.2\"\n source_ip6 = 'fe80::214:f2ff:fe07:af0'\n destination_ip6 = 'ff02::1'\n sport = 1\n dport = 2\n tos = 4\n if packet_to_send[\"type\"] == \"ip\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x0800}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}},\n {\"TCP\": {}})\n elif packet_to_send[\"type\"] == \"tagged_ip\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x8100}},\n {\"Dot1Q\": {\"vlan\": packet_to_send[\"vlan\"],\n \"prio\": packet_to_send[\"priority\"]}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}})\n elif packet_to_send[\"type\"] == \"tcp\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x0800}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}},\n {\"TCP\": {\"sport\": sport, \"dport\": dport}})\n elif packet_to_send[\"type\"] == \"udp\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x0800}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}},\n {\"UDP\": {\"sport\": sport, \"dport\": dport}})\n elif packet_to_send[\"type\"] == \"double_tagged_ip\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x8100}},\n {\"Dot1Q\": {\"vlan\": packet_to_send[\"outer_vlan\"], \"type\": 0x8100,\n \"prio\": packet_to_send[\"outer_priority\"]}},\n {\"Dot1Q\": {\"vlan\": packet_to_send[\"inner_vlan\"], \"type\": 0x0800,\n \"prio\": packet_to_send[\"inner_priority\"]}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}})\n elif packet_to_send[\"type\"] == \"arp\":\n packet_definition = (\n {\"Ether\": {\"src\": source_mac, \"dst\": 'FF:FF:FF:FF:FF:FF', \"type\": 0x0806}},\n {\"ARP\": {\"op\": 1, \"hwsrc\": source_mac,\n \"psrc\": source_ip, \"pdst\": destination_ip}},)\n elif packet_to_send[\"type\"] == \"arp_reply_tagged\":\n packet_definition = ({\"Ether\": {\"src\": source_mac, \"dst\": destination_mac, \"type\": 0x8100}},\n {\"Dot1Q\": {\"vlan\": 2}},\n {\"ARP\": {\"op\": 2, \"hwsrc\": source_mac, \"hwdst\": destination_mac,\n \"pdst\": destination_ip, \"psrc\": source_ip}}, )\n elif packet_to_send[\"type\"] == \"icmp\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x0800}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"proto\": 1}},\n {\"ICMP\": {\"type\": 8, \"code\": 0}})\n elif 
packet_to_send[\"type\"] == \"ipv6\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x86dd}},\n {\"IPv6\": {\"dst\": destination_ip6, \"src\": source_ip6, \"version\": 6,\n \"hlim\": 255, \"plen\": 64, \"tc\": 225}})\n elif packet_to_send[\"type\"] == \"tcp6\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x86dd}},\n {\"IPv6\": {\"dst\": destination_ip6, \"src\": source_ip6, \"version\": 6,\n \"hlim\": 255, \"tc\": 224, \"nh\": 6}},\n {\"TCP\": {\"sport\": sport, \"dport\": dport}})\n elif packet_to_send[\"type\"] == \"udp6\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x86dd}},\n {\"IPv6\": {\"dst\": destination_ip6, \"src\": source_ip6, \"version\": 6,\n \"hlim\": 255, \"tc\": 224, \"nh\": 17}},\n {\"UDP\": {\"sport\": sport, \"dport\": dport}})\n elif packet_to_send[\"type\"] == \"icmp6\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x86dd}},\n {\"IPv6\": {\"dst\": destination_ip6, \"src\": source_ip6, \"version\": 6,\n \"hlim\": 255, \"tc\": 224, \"nh\": 1}},\n {\"ICMP\": {\"type\": 8, \"code\": 0}})\n return packet_definition", "def _send_knxipframe(self, knxipframe: KNXIPFrame) -> None:\n self.transport.send(knxipframe)", "def make_packet(self, type, data): \n return (\"{}\\x00{}\\x00{}\".format(type, data, self.ID)).encode()", "def _create_pgframe(nodes=None, edges=None):\n pass", "def new_packet():\n return rtmp_packet.RtmpPacket()", "def makeMessage( name, *structure ):\n return X12Message( name, *structure )", "def packetize(cls, source, raw_data):\n pkt = cls(source, raw_data)\n\n if pkt.type not in DGTL.descriptors.keys():\n raise Warning('Unsupported packet type! (%s)' % pkt.type)\n\n pkt.set_decoder(DGTL.descriptors[pkt.type][2])\n\n return pkt", "def create_frame_blob(self):\n # self.image_blob = cv2.dnn.blobFromImage(\n # cv2.resize(self.frame, (300, 300)), 1.0, (300, 300),\n # (104.0, 177.0, 123.0), swapRB=False, crop=False)\n self.image_blob = cv2.dnn.blobFromImage(cv2.resize(self.frame, (300, 300)),\n 0.007843, (300, 300), 127.5)", "def setUp(self):\n self.message = PhyPort()\n self.message.port_no = 1\n self.message.hw_addr = HWAddress('9a:da:11:8a:f4:0c')\n self.message.name = 's1-eth1'\n self.message.state = PortState.OFPPS_STP_LISTEN\n self.message.curr = (PortFeatures.OFPPF_10GB_FD |\n PortFeatures.OFPPF_COPPER)", "def __init__(\n self, env, source, destination, size_bytes, message_type,\n data=None):\n if not isinstance(size_bytes, int):\n raise FT4FTTSimException(\"Message size must be integer\")\n if not (ethernet.MIN_FRAME_SIZE_BYTES <= size_bytes <=\n ethernet.MAX_FRAME_SIZE_BYTES):\n raise FT4FTTSimException(\n \"Message size must be between {} and {}, but is {}\".format(\n ethernet.MIN_FRAME_SIZE_BYTES,\n ethernet.MAX_FRAME_SIZE_BYTES,\n size_bytes))\n self.env = env\n self._identifier = Message.next_identifier\n Message.next_identifier += 1\n # source of the message. Models the source MAC address.\n self.source = source\n # destination of the message. It models the destination MAC address. 
It\n # is a list to allow multicast addressing.\n self.destination = destination\n self.size_bytes = size_bytes\n self.message_type = message_type\n self.data = data\n self.name = \"({:03d}, {}, {}, {:d}, {}, {})\".format(\n self.identifier, self.source, self.destination, self.size_bytes,\n self.message_type, self.data)\n log.debug(\"{} created\".format(self))", "async def test_create_knxipframe_err(self):\n xknx = XKNX()\n udp_client = UDPClient(xknx, (\"192.168.1.1\", 0), (\"192.168.1.2\", 1234))\n request_response = RequestResponse(xknx, udp_client, DisconnectResponse)\n request_response.timeout_in_seconds = 0\n\n with self.assertRaises(NotImplementedError):\n await request_response.start()", "def genFrame(self):\n # generate frame-specific data\n frameData = self._genFrameData()\n\n # call parent function to create the complete frame (as bytearray)\n frame = self._genDigiMeshFrame(frameData)\n\n # OBS: never escape-sequence local msg\n return frame", "def setupPacket(self):\n return None", "def build_packets(self):\n from scapy.all import IP, TCP\n return IP()/TCP()", "def __init__(self, bytes = None):\n hrd = pcs.Field(\"hrd\", 16, default = 1)\n pro = pcs.Field(\"pro\", 16, default = 0x800)\n hln = pcs.Field(\"hln\", 8, default = 6)\n pln = pcs.Field(\"pln\", 8, default = 4)\n op = pcs.Field(\"op\", 16)\n sha = pcs.StringField(\"sha\", 48)\n spa = pcs.Field(\"spa\", 32)\n tha = pcs.StringField(\"tha\", 48)\n tpa = pcs.Field(\"tpa\", 32)\n \n pcs.Packet.__init__(self, [hrd, pro, hln, pln, op,\n sha, spa, tha, tpa], bytes = bytes)\n self.description = \"ARP\"\n self.data = None", "def scapy_create_send_layer4_with_l2header(self, ipdst, ipsrc, \\\n layer4proto=\"udp\", macdst=None, macsrc=None):\n ip_header = self.define_ip_header(dst=ipdst, src=ipsrc,\\\n ttl=self.ipttl, version=self.version)\n ether_header = self.define_ethernet_header(src=macsrc, \\\n dst=macdst)\n if layer4proto == \"tcp\":\n tcp_header = self.define_tcp_header(sport=self.sourceport,\n dport=self.destport, seq=self.tcpseq, ack=self.tcpack,\n flags=self.tcpflags)\n sendp(ether_header/ip_header/tcp_header, verbose=self.verbose, \\\n iface=self.sourceiface)\n elif layer4proto == \"udp\":\n udp_header = self.define_udp_header(sport=self.sourceport,\n dport=self.destport)\n sendp(ether_header/ip_header/udp_header, verbose=self.verbose, \\\n iface=self.sourceiface)", "def define_ip_header(self, version=4, tos=None, ttl=None, proto=None,\n src=None, dst=None):\n\n if version == 4:\n ip_pkt = IP()\n else:\n ip_pkt = IPv6()\n\n if src:\n ip_pkt.src = src\n\n if dst:\n ip_pkt.dst = dst\n\n if tos:\n ip_pkt.tos = tos\n\n if ttl:\n ip_pkt.ttl = ttl\n\n if proto:\n ip_pkt.proto = proto\n\n return ip_pkt", "def scapy_create_send_layer4(self, ipdst, ipsrc, layer4proto=\"udp\", \\\n macdst=None, macsrc=None, payload=None):\n if payload == None:\n # Payload cannot be None type, hence default to ''.\n payload = ''\n ip_header = self.define_ip_header(dst=ipdst, src=ipsrc,\\\n ttl=self.ipttl, version=self.version)\n ether_header = self.define_ethernet_header(src=macsrc, \\\n dst=macdst)\n if layer4proto == \"tcp\":\n tcp_header = self.define_tcp_header(sport=self.sourceport,\n dport=self.destport, seq=self.tcpseq, ack=self.tcpack,\n flags=self.tcpflags)\n if (macdst == None):\n send(ip_header/tcp_header, verbose=self.verbose)\n else:\n sendp(ether_header/ip_header/tcp_header, verbose=self.verbose, \\\n iface=self.sourceiface)\n elif layer4proto == \"udp\":\n udp_header = self.define_udp_header(sport=self.sourceport,\n 
dport=self.destport)\n if (macdst == None):\n send(ip_header/udp_header, verbose=self.verbose)\n else:\n sendp(ether_header/ip_header/udp_header/payload, verbose=self.verbose, \\\n iface=self.sourceiface)", "def pack(self):\r\n return pack('!BBHHBBBHBBHBBB',\r\n 17, # size\r\n 0xe0, # pdu type: CR\r\n self.dst_ref,\r\n self.src_ref,\r\n 0, # flag\r\n 0xc1, 2, self.src_tsap,\r\n 0xc2, 2, self.dst_tsap,\r\n 0xc0, 1, self.tpdu_size)", "def decode(cls, raw: bytes) -> \"EthernetHeader\":\n # unsigned char dmac[6];\n # unsigned char smac[6];\n # uint16_t ethertype;\n # unsigned char payload[];\n dmac = raw[:6]\n smac = raw[6:12]\n typ = socket.htons(struct.unpack(\"H\", raw[12:14])[0])\n payload = raw[14:]\n return EthernetHeader(dmac=dmac, smac=smac, typ=typ, payload=payload)", "def create_empty_pack_tcp(type, data):\n camps = ['', '', '', '']\n llargada_camps = (7, 13, 7, 50)\n index_camps = 0\n for llargada in llargada_camps:\n camps[index_camps] = camps[index_camps].zfill(llargada)\n index_camps += 1\n return struct.pack('c7s13s7s150s', chr(type), '', '', '', data)", "def initConnTermFrame(self,referenceID):\r\n # Strip any colons in the mac address\r\n self.referenceID = referenceID\r\n\r\n # Set the frame content\r\n self.content = \"\"\r\n\r\n # Set the content length\r\n self.contentLength = 0\r\n\r\n # Set the correct frame message type\r\n self.mesgType = MULTIPLEXER_CONN_TERM", "def to_physical_layer(frame, sender):\r\n frame_msg = frame.get_frame()\r\n print(f\"[to_physical_layer] frame:{frame_msg}\")\r\n sender.send(frame_msg)", "def __str__(self):\n return f'<KNXIPFrame {self.header}\\n body=\"{self.body}\" />'", "def _pack(self):\n if self._cqc_version < 2:\n header = struct.pack(self.PACKAGING_FORMAT_V1, self.remote_app_id, self.remote_node, self.remote_port)\n else:\n header = struct.pack(self.PACKAGING_FORMAT, self.remote_app_id, self.remote_port, self.remote_node)\n return header", "def _create_rx_frame(self, rx_window):\n self.window = Frame(rx_window) # we create a special Frame on the main window for the rx frames\n self.window.grid(row=0, column=0)\n\n self.printRec = False\n\n self.logText = ScrolledText(self.window, width=70) # log text\n self.logText.grid(row=1, column=1)\n\n self.buttonStart = Checkbutton(self.window, text=\" Receive info \", command=self.change_receive, bg='bisque',\n cursor='hand2')\n self.buttonStart.grid(row=3, column=1)\n\n self.buttonClear = Button(self.window, text=\" Clear \", command=self.clear, cursor='hand2')\n self.buttonClear.grid(row=4, column=1)\n\n self.buttonConnect = Button(self.window, text=\" Set Com \", command=self.clear, cursor='hand2')\n self.buttonClear.grid(row=4, column=1)\n\n self.logText.insert(END, \"Detected lasers :\" + '\\n')", "def _marshal(self, pieces):\n payload = b''.join(pieces)\n return struct.pack('>BHI', self.frame_type, self.channel_number,\n len(payload)) + payload + bytes((spec.FRAME_END,))", "def __init__(self, bytes = None):\n version = pcs.Field(\"version\", 4, default = 4)\n hlen = pcs.Field(\"hlen\", 4)\n tos = pcs.Field(\"tos\", 8)\n length = pcs.Field(\"length\", 16)\n id = pcs.Field(\"id\", 16)\n flags = pcs.Field(\"flags\", 3)\n offset = pcs.Field(\"offset\", 13)\n ttl = pcs.Field(\"ttl\", 8, default = 64)\n protocol = pcs.Field(\"protocol\", 8)\n checksum = pcs.Field(\"checksum\", 16)\n src = pcs.Field(\"src\", 32)\n dst = pcs.Field(\"dst\", 32)\n pcs.Packet.__init__(self,\n [version, hlen, tos, length, id, flags, offset,\n ttl, protocol, checksum, src, dst],\n bytes = bytes)\n # Description MUST 
be set after the PCS layer init\n self.description = \"IPv4\"\n\n\n if (bytes != None):\n offset = self.hlen << 2\n self.data = self.next(bytes[offset:len(bytes)])\n else:\n self.data = None", "def __init__(self, bytes = None, timestamp = None):\n from socket import IPPROTO_TCP\n src = pcs.Field(\"src\", 32)\n dst = pcs.Field(\"dst\", 32)\n reserved = pcs.Field(\"reserved\", 8, default = 0)\n protocol = pcs.Field(\"protocol\", 8, default = IPPROTO_TCP)\n length = pcs.Field(\"length\", 16)\n pcs.Packet.__init__(self, [src, dst, reserved, protocol, length],\n bytes = bytes)\n # Description MUST be set after the PCS layer init\n self.description = inspect.getdoc(self)\n if timestamp == None:\n self.timestamp = time.time()\n else:\n self.timestamp = timestamp\n\n self.data = None", "def _apply_vpp_tcp(node):\n\n active_open_sessions = node[\"tcp\"][\"active_open_sessions\"]\n aos = int(active_open_sessions)\n\n passive_open_sessions = node[\"tcp\"][\"passive_open_sessions\"]\n pos = int(passive_open_sessions)\n\n # Generate the api-segment gid vpp sheit in any case\n if (aos + pos) == 0:\n tcp = \"\\n\".join([\"api-segment {\", \" gid vpp\", \"}\"])\n return tcp.rstrip(\"\\n\")\n\n tcp = \"\\n\".join(\n [\n \"# TCP stack-related configuration parameters\",\n \"# expecting {:d} client sessions, {:d} server sessions\\n\".format(\n aos, pos\n ),\n \"heapsize 4g\\n\",\n \"api-segment {\",\n \" global-size 2000M\",\n \" api-size 1G\",\n \"}\\n\",\n \"session {\",\n \" event-queue-length {:d}\".format(aos + pos),\n \" preallocated-sessions {:d}\".format(aos + pos),\n \" v4-session-table-buckets {:d}\".format((aos + pos) // 4),\n \" v4-session-table-memory 3g\\n\",\n ]\n )\n if aos > 0:\n tcp = (\n tcp + \" v4-halfopen-table-buckets {:d}\".format((aos + pos) // 4) + \"\\n\"\n )\n tcp = tcp + \" v4-halfopen-table-memory 3g\\n\"\n tcp = (\n tcp\n + \" local-endpoints-table-buckets {:d}\".format((aos + pos) // 4)\n + \"\\n\"\n )\n tcp = tcp + \" local-endpoints-table-memory 3g\\n\"\n tcp = tcp + \"}\\n\\n\"\n\n tcp = tcp + \"tcp {\\n\"\n tcp = tcp + \" preallocated-connections {:d}\".format(aos + pos) + \"\\n\"\n if aos > 0:\n tcp = tcp + \" preallocated-half-open-connections {:d}\".format(aos) + \"\\n\"\n tcp = tcp + \"}\\n\\n\"\n\n return tcp.rstrip(\"\\n\")", "def transmitFrame(self):\n\n\t\ttransmitFrame = Frame(self)\n\t\ttransmitFrame.grid(column=0, columnspan=2, row=0, rowspan=3)\n\t\ttransmitFrame.config(bg = \"white\")\n\n\t\ttransmitLabel = Label(transmitFrame, text=\"Transmit\", font=(\"Sans Serif\", 20, \"bold\"), fg=\"#006400\", bg = \"white\")\n\t\ttransmitEntryLabel = Label(transmitFrame, text = \"(Instructions for Transmit Input)\", font = (\"Times New Roman\", 9), fg=\"black\", bg = \"white\")\n\t\tself.transmitEntry = Entry(transmitFrame, width=30, fg=\"green\", highlightthickness = 2, highlightcolor = \"green\", highlightbackground = \"light slate gray\")\n\t\tself.transmitEntry.bind(\"<Return>\", lambda e: self.transmit())\n\t\ttransmitButton = Button(transmitFrame, text=\"SEND\", font=(\"Arial\", 8, \"bold\"), fg=\"white\", bg=\"green\", activebackground = \"DarkGreen\", command=self.transmit)\n\n\t\ttransmitLabel.pack(pady= '10 0')\n\t\ttransmitEntryLabel.pack(padx = 10, pady = \"35 10\")\n\t\tself.transmitEntry.pack(padx = 10)\n\t\ttransmitButton.pack(pady = 10)", "def sendData(packet: FrameStruct, repeats: int) -> NoReturn:\n ftype = b'\\x08\\x00'\n dur = b'\\x00\\x00'\n # random hex stream, could be used as additional space of bits\n src = b'\\x08\\x00\\x27\\x8e\\x75\\x44'\n # 
broadcast address is used to stop certain drivers retransmitting frames\n dst = b'\\xff\\xff\\xff\\xff\\xff\\xff'\n bssid = src\n # semi unique id, annoyingly not usable due to lack of bits for this appli\n sn = (random.randint(0, 4096))\n sn = sn << 4\n seq = sn.to_bytes(4, 'little')\n\n # generate 80211 header\n header80211 = ftype + dur + dst + src + bssid + seq\n\n # combine header with other data to create valid frame\n data = globaldat.RADIO_TAP + header80211 + b\"\\x72\\x6f\\x62\\x6f\\x74\" + \\\n packet # attach radiotap headers, 80211 headers and yodel payload\n #globaldat.bytesPrint(data)\n #print(repeats)\n for i in range(repeats): # re-transmmit message a couple times\n globaldat.yodelSocket.send(data) # send the data", "def _pack(self):\n xtraH = struct.pack(\n self.PACKAGING_FORMAT,\n self.qubit_id,\n self.remote_app_id,\n self.remote_node,\n self.datetime,\n self.remote_port,\n self.outcome,\n 0,\n )\n return xtraH", "def build_header_2_40(self):\n self.header_2 = b'\\x0e\\x00\\x00\\x00AssignmentList\\x01\\x00\\x00\\x000' + \\\n (b'\\x0c\\x00\\x00\\x00ComputerName' + len(self.agent_hostname).to_bytes(4, 'little') + self.agent_hostname) + \\\n (b'\\n\\x00\\x00\\x00DomainName\\t\\x00\\x00\\x00WORKGROUP'\n b'\\x12\\x00\\x00\\x00EventFilterVersion\\x01\\x00\\x00\\x000'\n b'\\x19\\x00\\x00\\x00GuidRegenerationSupported\\x01\\x00\\x00\\x001'\n b'\\t\\x00\\x00\\x00IPAddress\\x0f\\x00\\x00\\x00192.168.236.199') + \\\n b'\\n\\x00\\x00\\x00NETAddress' + len(self.agent_mac_address).to_bytes(4, 'little') +self.agent_mac_address + \\\n (b'\\x0b\\x00\\x00\\x00PackageType\\x0b\\x00\\x00\\x00AgentPubKey'\n b'\\n\\x00\\x00\\x00PlatformID\\n\\x00\\x00\\x00W2KW:5:0:4'\n b'\\r\\x00\\x00\\x00PolicyVersion\\x01\\x00\\x00\\x000'\n b'\\x0c\\x00\\x00\\x00PropsVersion\\x0e\\x00\\x00\\x0020170724000500'\n b'\\x0e\\x00\\x00\\x00SequenceNumber\\x01\\x00\\x00\\x003') + \\\n b'\\r\\x00\\x00\\x00ServerKeyHash' + len(self.serverkeyhash).to_bytes(4, 'little') + self.serverkeyhash + \\\n (b'\\x0f\\x00\\x00\\x00SiteinfoVersion\\x01\\x00\\x00\\x000'\n b'\\x15\\x00\\x00\\x00SupportedSPIPEVersion\\x0b\\x00\\x00\\x003.0;4.0;5.0'\n b'\\x0b\\x00\\x00\\x00TaskVersion\\x01\\x00\\x00\\x000') + \\\n b'\\x0f\\x00\\x00\\x00TransactionGUID' + len(self.transaction_guid).to_bytes(4, 'little') + self.transaction_guid\n return self.header_2", "def _generate_generic_frame(naxes, unit, names=None, physical_types=None):\n axes_order = tuple(range(naxes))\n\n name = None\n axes_type = \"CUSTOM\"\n\n if isinstance(unit, (u.Unit, u.IrreducibleUnit, u.CompositeUnit)):\n unit = tuple([unit] * naxes)\n\n if all([u.m.is_equivalent(un) for un in unit]):\n axes_type = \"SPATIAL\"\n\n if all([u.pix.is_equivalent(un) for un in unit]):\n name = \"PixelFrame\"\n axes_type = \"PIXEL\"\n\n axes_type = tuple([axes_type] * naxes)\n\n return cf.CoordinateFrame(naxes, axes_type, axes_order, unit=unit,\n axes_names=names, name=name, axis_physical_types=physical_types)", "def _create_msg(self, tr_id, payload, confirm, expire_time, encoding):\n tmp = [\"<SSAP_message><transaction_type>INSERT</transaction_type>\",\n \"<message_type>REQUEST</message_type>\"]\n tmp.extend([\"<transaction_id>\", str(tr_id), \"</transaction_id>\"])\n tmp.extend([\"<node_id>\", str(self.node_id), \"</node_id>\"])\n tmp.extend([\"<space_id>\", str(self.targetSS), \"</space_id>\"])\n tmp.extend(['<parameter name=\"insert_graph\" encoding=\"%s\">' % encoding.upper(),\n str(payload), \"</parameter>\"])\n tmp.extend(['<parameter name = \"confirm\">',\n str(confirm).upper(),\n 
\"</parameter>\",\n \"</SSAP_message>\"])\n return \"\".join(tmp)", "def CreateFrame (audioSampleX, audioSampleY, statusList, statusCursor, bitDepth):\n if bitDepth == 3:\n if statusCursor == 0:\n FirstSubFrame = [b'0xD8', audioSampleX, 1, 0, int(statusList[statusCursor]), 1]\n else:\n FirstSubFrame = [b'0xD2', audioSampleX, 1, 0, int(statusList[statusCursor]), 1]\n SubFrameY = [b'0xD4', audioSampleY, 1, 0, int(statusList[statusCursor]), 1]\n Frame = [FirstSubFrame, SubFrameY]\n else: # Frame with less than 24 bits sample\n if statusCursor == 0:\n FirstSubFrame = [b'0xD8', b'0x0', audioSampleX, 1, 0, int(statusList[statusCursor]), 1]\n else:\n FirstSubFrame = [b'0xD2', b'0x0', audioSampleX, 1, 0, int(statusList[statusCursor]), 1]\n SubFrameY = [b'0xD4', b'0x0', audioSampleY, 1, 0, int(statusList[statusCursor]), 1]\n Frame = [FirstSubFrame, SubFrameY]\n print(Frame)\n return Frame", "def deauth(self):\n # https://man7.org/linux/man-pages/man7/packet.7.html\n s = socket.socket(socket.AF_PACKET, socket.SOCK_RAW)\n s.bind((self.interface, 0))\n\n # The RadioTap version is always 0\n rt_version = 0\n # The padding is always 0\n rt_padding = 0\n # The empty RadioTap frame has length of 8 bytes\n rt_length = 8\n # The RadioTap flags are irrelevant\n rt_flags = 0 \n # Construct the empty RadioTap frame (1,1,2,4 bytes)\n # https://docs.python.org/3/library/struct.html\n rt_frame = struct.pack(\n 'BBHI',\n rt_version,\n rt_padding,\n rt_length,\n rt_flags\n )\n\n # The 802.11 de-authentication subtype(4bits), type(2bits), version(2bits)\n dot11_type = int(b'11000000', 2)\n # The 802.11 flags are irrelevant\n dot11_flags = 0 \n # The 802.11 duration is irrelevant\n dot11_dur = 0\n # The 802.11 receiver address\n dot11_ra = bytes(map(lambda x: int(x, 16) , self.target_addr.split(':')))\n # The 802.11 transmitter address\n dot11_ta = bytes(map(lambda x: int(x, 16) , self.access_point.split(':')))\n # The 802.11 access point address\n dot11_ap = dot11_ta\n # The 802.11 sequence control is irrelevant\n dot11_sc = 0\n # The 802.11 reason code is irrelevant (0 is fine)\n dot11_reason = 0\n # Construct the 802.11 frame (1,1,2,6,6,6,2,2 bytes)\n # https://docs.python.org/3/library/struct.html\n dot11_frame = struct.pack(\n 'BBH6s6s6sHH',\n dot11_type,\n dot11_flags,\n dot11_dur,\n dot11_ra,\n dot11_ta,\n dot11_ap,\n dot11_sc,\n dot11_reason\n )\n\n # Construct the full payload (RadioTap + 802.11)\n payload = rt_frame + dot11_frame \n\n # Send packets while running and sending\n while 1:\n while self.sending:\n s.send(payload)\n time.sleep(1)", "def duplicate_tcp_pkt(src_pkt: dpkt.ethernet.Ethernet, seq: int, ack: int, flags: int = dpkt.tcp.TH_ACK,\n payload: bytes = b\"\") -> dpkt.ethernet.Ethernet:\n new_pkt = copy.deepcopy(src_pkt)\n new_pkt.data.data.seq = seq\n new_pkt.data.data.ack = ack\n new_pkt.data.data.flags = flags\n new_pkt.data.data.data = payload\n if payload:\n new_pkt.data.len -= src_pkt.data.len - (4 * src_pkt.data.hl) - (4 * src_pkt.data.data.off)\n new_pkt.data.len += len(payload)\n new_pkt = tcp_fix_checksum(new_pkt)\n return new_pkt", "def generate_pkt(self, dsn, ):\n assert self.state == SubflowState.Available\n\n e = SenderEvent(\n None,\n self.name,\n self.fowd,\n dsn,\n self.sp_cwnd * self.sp_mss\n )\n # e.delay = self.fowd\n # e.subflow_id = self.name\n # e.dsn = dsn\n # e.size = self.sp_cwnd * self.sp_mss\n\n # print(\"packet size %r\"% e.size)\n\n self.state = SubflowState.WaitingAck\n return e", "def makePacketHeader(bytes):\n id = struct.unpack('!H', bytes[0:2])[0]\n length = 
struct.unpack('!H', bytes[2:4])[0]\n packet_count = struct.unpack('!I',bytes[4:8])[0]\n return PacketHeader(id, length, packet_count)", "def setPacket(self, packet):\n\t\tself.clear()\n\t\tself.packet = packet\n\t\t\n\t\tfields = self.fields\n\t\t\n\t\tfields.append(['Reception time', '%s:%s:%s.%s' % tuple(packet.time), None])\n\t\t\n\t\tif self.packet.isInvalid:\n\t\t\treturn\n\t\t\n\t\tfields.append(['Transmission info', 'CRC passed: %s, LQI: %s, RSSI: %s' % (packet.CRCOk, packet.LQI, packet.RSSI), None])\n\t\tfields.append(['PHY fields', '', None])\n\t\tphy = len(fields) - 1\n\t\tfields.append(['Frame length', len(packet.load), phy])\n\t\t\n\t\tfields.append(['MAC fields', '', None])\n\t\tmac = len(fields) - 1\n\t\tfields.append(['Frame control', packet.frameControl, mac])\n\t\tfields.append(['Frame Type', packet.frameType, mac])\n\t\tfields.append(['Security enabled', packet.securityEnabled, mac])\n\t\tfields.append(['Frame pending', packet.framePending, mac])\n\t\tfields.append(['Ack. request', packet.ackRequest, mac])\n\t\tfields.append(['Intra-PAN', packet.intraPAN, mac])\n\t\tfields.append(['Dest. addressing mode', packet.dstAddrMode, mac])\n\t\tfields.append(['Source addressing mode', packet.srcAddrMode, mac])\n\t\tfields.append(['Sequence number', packet.seqNumber, mac])\n\t\t\n\t\tif hasattr(packet, 'dstPANID'):\n\t\t\tfields.append(['Destination PAN-ID', packet.dstPANID, mac])\n\t\t\n\t\tif hasattr(packet, 'dstAddr'):\n\t\t\tfields.append(['Destination address', packet.dstAddr, mac])\n\t\t\n\t\tif hasattr(packet, 'srcPANID'):\n\t\t\tfields.append(['Source PAN-ID', packet.srcPANID, mac])\n\t\t\t\n\t\tif hasattr(packet, 'srcAddr'):\n\t\t\tfields.append(['Source address', packet.srcAddr, mac])\n\t\t\t\n\t\tif hasattr(packet, 'payload'):\n\t\t\tfields.append(['Payload', packet.payload, mac])\n\t\t\n\t\tif hasattr(packet, 'commandType'):\n\t\t\tfields.append(['Command type', packet.commandType, mac])\n\t\t\n\t\tif hasattr(packet, 'commandPayload'):\n\t\t\tfields.append(['Command payload', packet.commandPayload, mac])\n\t\t\n\t\tif hasattr(packet, 'superFrameSpec'):\n\t\t\tfields.append(['Superframe specification', packet.superFrameSpec, mac])\n\t\t\tsfs = len(fields) - 1\n\t\t\tfields.append(['Beacon order', packet.beaconOrder, sfs])\n\t\t\tfields.append(['Superframe order', packet.superFrameOrder, sfs])\n\t\t\tfields.append(['finalCAPSlot', packet.finalCAPSlot, sfs])\n\t\t\tfields.append(['Batt. 
life extension', packet.battLifeExt, sfs])\n\t\t\tfields.append(['PAN Coordinator', packet.PANCoord, sfs])\n\t\t\tfields.append(['Association permit', packet.assocPermit, sfs])\n\t\t\n\t\tif hasattr(packet, 'GTS'):\n\t\t\tfields.append(['GTS specification', packet.GTS, mac])\n\t\t\tgts = len(fields) - 1\n\t\t\tfields.append(['GTS descriptor count', packet.GTSDescrCount, gts])\n\t\t\tfields.append(['GTS permit', packet.GTSPermit, gts])\n\t\t\tif int(packet.GTSDescrCount, 16) > 0:\n\t\t\t\tfields.append(['GTS directions', packet.GTSDirections, gts])\n\t\t\t\tfields.append(['GTS descriptors list', '', gts])\n\t\t\t\tdscList = len(fields) - 1\n\t\t\t\tfor i in xrange(int(packet.GTSDescrCount, 16)):\n\t\t\t\t\tfields.append(['Descriptor #'+str(i), '', dscList])\n\t\t\t\t\td = len(fields) - 1\n\t\t\t\t\tfields.append(['Device short address', packet.GTSDescriptors[i].deviceShortAddr, d])\n\t\t\t\t\tfields.append(['GTS starting slot', packet.GTSDescriptors[i].GTSStartingSlot, d])\n\t\t\t\t\tfields.append(['GTS length', packet.GTSDescriptors[i].GTSLength, d])\n\t\t\t\n\t\t\tfields.append(['Pending addresses list', '', gts])\n\t\t\tpnd = len(fields) - 1\n\t\t\tif int(packet.numShortAddrPnd, 16) > 0 or int(packet.numShortAddrPnd, 16) > 0:\n\t\t\t\tfor i in xrange(int(self.numShortAddrPnd, 16)):\n\t\t\t\t\tfields.append(['Short addr. #%i' % i, packet.shortAddrPndList[i], pnd])\n\n\t\t\t\tfor i in xrange(int(self.numLongAddrPnd, 16)):\n\t\t\t\t\tfields.append(['Long addr. #%i' % i, packet.longAddrPndList[i], pnd])\n\t\t\n\t\tif hasattr(packet, 'bcnPayload'):\n\t\t\tfields.append(['Beacon payload', packet.bcnPayload, mac])\n\t\t\n\t\tself.beginInsertRows(QModelIndex(), 0, len(self.fields)+1)\n\t\tself.endInsertRows()\n\t\tfor field in fields:\n\t\t\tprint field", "def send(self):\n # Copy the base packet then add the channel array\n packet = self._base_packet[:]\n packet.extend(self._channels)\n self._socket.sendto(packet, (self._host, self._port))\n logging.debug(\"Sending Art-Net frame\")", "def create_stream(cls, packet_count=test_packet_count):\n for i in range(0, packet_count):\n info = cls.create_packet_info(cls.src_dst_if, cls.src_dst_if)\n payload = cls.info_to_payload(info)\n p = (\n Ether(dst=cls.src_dst_if.local_mac, src=cls.src_dst_if.remote_mac)\n / IP(\n id=info.index,\n src=cls.src_dst_if.remote_ip4,\n dst=cls.src_dst_if.local_ip4,\n )\n / ICMP(type=\"echo-request\", id=1234)\n / Raw(payload)\n )\n cls.extend_packet(p, 1518, cls.padding)\n info.data = p", "def frame(self):\n return _generate_generic_frame(len(self.table), self.unit, self.names, self.physical_types)", "def make_message(parsed):\n frame = {\n 'technology': 'LoRa',\n 'freq': parsed[3],\n 'bw': parsed[4],\n 'sf': parsed[5],\n 'snr': parsed[9] / 100.0,\n 'length': parsed[11],\n 'payload': str(parsed[14]).decode('latin-1').encode(\"utf-8\")\n }\n print frame\n return frame", "def initDataFrame(self,referenceID, content):\r\n # Strip any colons in the mac address\r\n self.referenceID = referenceID\r\n\r\n # Set the frame content\r\n self.content = str(content)\r\n\r\n # Set the content length\r\n self.contentLength = len(self.content)\r\n\r\n # Set the correct frame message type\r\n self.mesgType = MULTIPLEXER_DATA_FORWARD", "def beginMessageFrame(self, length):", "def create_frame_curve(self):\n self.frame_curve = pm.curve(\n d=1,\n p=[(-0.5, 0.5, 0),\n (0.5, 0.5, 0),\n (0.5, -0.5, 0),\n (-0.5, -0.5, 0),\n (-0.5, 0.5, 0)],\n k=[0, 1, 2, 3, 4]\n )\n self.store_node(self.frame_curve)", "def create_between(self, name, flow_number, 
source, destination, number_of_frames=None, duration=None):\n\n if source.iptype != destination.iptype:\n raise RuntimeError(\"Source and destination devices do not have the\"\n \"same IP configuration!\"\n \"IPv4 vs IPv6\")\n\n if number_of_frames is None:\n duration_s = duration.total_seconds()\n number_of_frames = int(math.ceil(duration_s * 1e9 / self.interframegap_ns))\n\n udp_src = 4096 + flow_number\n udp_dest = 4096 + flow_number\n\n # Collect the basic addressing info for the Tx side.\n # VLAN id handled lower in the code.\n src_ip = source.ip\n src_mac = source.bbport.Layer2EthIIGet().MacGet()\n\n source_l3 = source.bbport.Layer3IPv4Get()\n if source.iptype == 6:\n source_l3 = source.bbport.Layer3IPv6Get()\n\n dst_ip = destination.ip\n\n logging.info(\"Resolving destination MAC for %s\", dst_ip)\n dst_mac = source_l3.Resolve(dst_ip)\n\n frame_dst_ip = destination.ip\n frame_dst_port = udp_dest\n filter_dst_ip = destination.ip\n filter_dst_port = udp_dest\n\n if source.nat and destination.nat:\n raise RuntimeError(\"Cannot resolve traffic between multiple NAT ports\")\n\n if source.nat:\n # no need to resolve here, since we only trigger on\n # destination parameters\n pass\n\n if destination.nat:\n logging.info(\"Resolving NAT parameters\")\n # destination port is behind a NAT, probably need to 'poke' a hole\n frame_dst_ip, frame_dst_port = NATResolver.resolve(\n wan_device=source, private_device=destination,\n udp_src_port=udp_src, udp_dst_port=udp_dest\n )\n\n logging.info(\"Resolving destination MAC for %s\", frame_dst_ip)\n dst_mac = source_l3.Resolve(dst_ip)\n\n stream = source.bbport.TxStreamAdd()\n stream.NumberOfFramesSet(number_of_frames)\n stream.InterFrameGapSet(self.interframegap_ns)\n\n frame = stream.FrameAdd()\n\n frame_overhead = 42\n # IPv6 header is larger than an IPv4 header\n if source.iptype == 6:\n frame_overhead = 62\n\n payload = 'a' * (self.frame_size - frame_overhead)\n\n from scapy.layers.inet import UDP, Ether\n from scapy.all import Raw, Dot1Q\n\n # A stream will always send the packet just as configured.\n # When the Tx ByteBlower port has a VLAN, we need to add it\n # to frame to be sent.\n # The following 5 lines are the only difference compared\n # to the basic IPv4 example.\n scapy_frame = Ether(src=src_mac, dst=dst_mac)\n\n for vlan_id in source.vlans:\n scapy_frame /= Dot1Q(vlan=vlan_id)\n\n if source.iptype == 4:\n from scapy.layers.inet import IP\n scapy_frame /= IP(src=src_ip, dst=frame_dst_ip)\n else:\n from scapy.layers.inet6 import IPv6\n scapy_frame /= IPv6(src=src_ip, dst=frame_dst_ip)\n\n scapy_frame /= UDP(dport=frame_dst_port, sport=udp_src)\n scapy_frame /= Raw(payload.encode('ascii', 'strict'))\n\n logging.debug('Created frame %s', repr(scapy_frame))\n\n frame_content = bytearray(bytes(scapy_frame))\n hexbytes = ''.join((format(b, \"02x\") for b in frame_content))\n frame.BytesSet(hexbytes)\n\n # create a trigger to count the number of received frames.\n # Similar to the stream we will need to make a slight modification\n # for the Vlan layer.\n trigger = destination.bbport.RxTriggerBasicAdd()\n\n # The BPF filter on a trigger is promiscuous: it will be applied to all\n # traffic that arrives at the Physical interface.\n #\n # When we expect to receive packets with a VLAN, we need to add\n # this element to the filter.\n filter_elements = []\n for vlan in destination.vlans:\n filter_elements.append(\"vlan %d\" % vlan)\n\n # normal filter:\n if destination.iptype == 4:\n filter_elements.append(\"ip dst %s\" % filter_dst_ip)\n else:\n 
filter_elements.append(\"ip6 dst %s\" % filter_dst_ip)\n\n filter_elements.append(\"udp port %d\" % filter_dst_port)\n bpf_filter = ' and '.join(filter_elements)\n trigger.FilterSet(bpf_filter)\n\n return UdpFlow(name, stream, trigger)", "def CreateAdHocNetwork(self, essid, channel, ip, enctype, key, encused,\n ics):\n self.wifi.CreateAdHocNetwork(essid, channel, ip, enctype, key, encused,\n ics)", "def _pack(self):\n xtraH = struct.pack(\n self.PACKAGING_FORMAT,\n self.qubit_id,\n self.remote_app_id,\n self.remote_node,\n self.cmdLength,\n self.remote_port,\n self.step,\n 0,\n )\n return xtraH", "def show_frame(self, frame):\n self._i2c_write(_FRAME_REGISTER, frame, bank=_CONFIG_BANK)", "def createSendMailFrame(self, empireDict):\n self.destroyTempFrames()\n self.sendMailInfo = anwp.gui.sendmailinfo.SendMailInfoFrame(self, self.game.app, empireDict)\n self.tempFrames.append(self.sendMailInfo)", "def create_packet(id, seq, data_size):\n\n # Random sequence of characters.\n payload = ''\n for k in range(data_size):\n payload += chr(random.randint(65, 65+25))\n\n # Create ICMP echo packet.\n echo = dpkt.icmp.ICMP.Echo()\n echo.id = id\n echo.seq = seq\n echo.data = payload\n\n icmp = dpkt.icmp.ICMP()\n icmp.type = dpkt.icmp.ICMP_ECHO\n icmp.data = echo\n\n # Return data packet as string representation.\n packet = str(icmp)\n\n # Done.\n return (payload, packet)", "def create_dgp_contract(self):\n contract_data = self.node.createcontract(\"6060604052601e6003556000600460006101000a81548160ff02191690831515021790555060d8600555341561003457600080fd5b5b613183806100446000396000f30060606040523615610126576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680630c83ebac1461012b578063153417471461019757806319971cbd146101fa5780631ec28e0f1461022657806327e357461461025d57806330a79873146102865780633a32306c146102e95780634364725c146103575780634afb4f111461038e5780634cc0e2bc146103fc5780635f302e8b1461043e5780636b102c49146104825780636fb81cbb146104d35780637b993bf3146104e8578063850d9758146105395780638a5a9d07146105b2578063aff125f6146105e9578063bec171e51461064c578063bf5f1e83146106ba578063e9944a81146106fc578063f769ac481461078d578063f9f51401146107f0575b600080fd5b341561013657600080fd5b6101556004808035906020019091908035906020019091905050610872565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156101a257600080fd5b6101b86004808035906020019091905050610928565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561020557600080fd5b6102246004808035906020019091908035906020019091905050610976565b005b341561023157600080fd5b6102476004808035906020019091905050610f95565b6040518082815260200191505060405180910390f35b341561026857600080fd5b610270610fed565b6040518082815260200191505060405180910390f35b341561029157600080fd5b6102a76004808035906020019091905050610ffa565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156102f457600080fd5b61034160048080359060200190820180359060200190808060200260200160405190810160405280939291908181526020018383602002808284378201915050505050509190505061103a565b6040518082815260200191505060405180910390f35b341561036257600080fd5b61037860048080359060200190919050506110a9565b6040518082815260200191505060405180910390f35b341561039957600080fd5b6103e6600480803590602001908201803590602001908080602002602001604051908101604052809392919081815260200183836020028082
8437820191505050505050919050506110db565b6040518082815260200191505060405180910390f35b341561040757600080fd5b61043c600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091908035906020019091905050611172565b005b341561044957600080fd5b61046860048080359060200190919080359060200190919050506118b0565b604051808215151515815260200191505060405180910390f35b341561048d57600080fd5b6104b9600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050611977565b604051808215151515815260200191505060405180910390f35b34156104de57600080fd5b6104e6611a1d565b005b34156104f357600080fd5b61051f600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050611ab9565b604051808215151515815260200191505060405180910390f35b341561054457600080fd5b61055a6004808035906020019091905050611b5f565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b8381101561059e5780820151818401525b602081019050610582565b505050509050019250505060405180910390f35b34156105bd57600080fd5b6105d36004808035906020019091905050611ca9565b6040518082815260200191505060405180910390f35b34156105f457600080fd5b61060a6004808035906020019091905050611cd7565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561065757600080fd5b6106a4600480803590602001908201803590602001908080602002602001604051908101604052809392919081815260200183836020028082843782019150505050505091905050611d17565b6040518082815260200191505060405180910390f35b34156106c557600080fd5b6106fa600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091908035906020019091905050611dae565b005b341561070757600080fd5b610773600480803573ffffffffffffffffffffffffffffffffffffffff169060200190919080359060200190820180359060200190808060200260200160405190810160405280939291908181526020018383602002808284378201915050505050509190505061285f565b604051808215151515815260200191505060405180910390f35b341561079857600080fd5b6107ae60048080359060200190919050506128de565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156107fb57600080fd5b61081a6004808035906020019091908035906020019091905050612a18565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b8381101561085e5780820151818401525b602081019050610842565b505050509050019250505060405180910390f35b600060018311806108835750600282115b1561088d57600080fd5b60008314156108d7576006600001600083815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050610922565b6001831415610921576006600201600083815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050610922565b5b92915050565b6000808281548110151561093857fe5b906000526020600020906002020160005b5060010160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1690505b919050565b61097f33611977565b151561098a57600080fd5b600281111561099857600080fd5b60008114806109a75750600281145b8015610a405750610a3d6001805480602002602001604051908101604052809291908181526020018280548015610a3357602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190600101908083116109e9575b505050505061103a565b82115b15610a4a57600080fd5b600181148015610ae75750610ae46002805480602002602001604051908101604052809291908181526020018280548015610ada57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673fffffffffffffffffffffffffffffffff
fffffff1681526020019060010190808311610a90575b505050505061103a565b82115b15610af157600080fd5b6000811415610b0d57816009600001541415610b0c57600080fd5b5b6001811415610b2957816009600101541415610b2857600080fd5b5b6002811415610b4557816009600201541415610b4457600080fd5b5b6006600101600082815260200190815260200160002060000160009054906101000a900460ff161515610c875760016006600101600083815260200190815260200160002060000160006101000a81548160ff02191690831515021790555081600660010160008381526020019081526020016000206002018190555043600660010160008381526020019081526020016000206003018190555060006006600101600083815260200190815260200160002060010181610c069190613046565b50600660010160008281526020019081526020016000206001018054806001018281610c329190613072565b916000526020600020900160005b33909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555050610e0e565b600554600660010160008381526020019081526020016000206003015443031115610cba57610cb581612c4a565b610f90565b816006600101600083815260200190815260200160002060020154141515610ce157600080fd5b610d883360066001016000848152602001908152602001600020600101805480602002602001604051908101604052809291908181526020018280548015610d7e57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311610d34575b505050505061285f565b15610d9257600080fd5b600660010160008281526020019081526020016000206001018054806001018281610dbd9190613072565b916000526020600020900160005b33909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550505b600960020154610eba60066001016000848152602001908152602001600020600101805480602002602001604051908101604052809291908181526020018280548015610eb057602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311610e66575b5050505050611d17565b101515610f8f576000811480610ed05750600181145b15610ee057610edf6002612ce5565b5b6000811415610f0d5760066001016000828152602001908152602001600020600201546009600001819055505b6002811415610f2b57610f206000612ce5565b610f2a6001612ce5565b5b6001811415610f585760066001016000828152602001908152602001600020600201546009600101819055505b6002811415610f855760066001016000828152602001908152602001600020600201546009600201819055505b610f8e81612c4a565b5b5b5b5050565b60006002821115610fa557600080fd5b6000821415610fbb576009600001549050610fe8565b6001821415610fd1576009600101549050610fe8565b6002821415610fe7576009600201549050610fe8565b5b919050565b6000808054905090505b90565b60028181548110151561100957fe5b906000526020600020900160005b915054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6000806000809050600091505b835182101561109e576000848381518110151561106057fe5b9060200190602002015173ffffffffffffffffffffffffffffffffffffffff161415156110905780806001019150505b5b8180600101925050611047565b8092505b5050919050565b600060028211156110b957600080fd5b600660010160008381526020019081526020016000206002015490505b919050565b6000806000809050600091505b8351821015611167576000848381518110151561110157fe5b9060200190602002015173ffffffffffffffffffffffffffffffffffffffff161415801561114b575061114a848381518110151561113b57fe5b90602001906020020151611ab9565b5b156111595780806001019150505b5b81806001019250506110e8565b8092505b5050919050565b60008061117e33611977565b151561118957600080fd5b60008473ffffffffffffffffffffffffffffffffffffffff1614156111ad57600080fd5b60018311156111bb
57600080fd5b600083141561128b57611253600180548060200260200160405190810160405280929190818152602001828054801561124957602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190600101908083116111ff575b505050505061103a565b915060096000015482148061126c575060096002015482145b1561127657600080fd5b61127f84611977565b151561128a57600080fd5b5b600183141561134957600960010154611329600280548060200260200160405190810160405280929190818152602001828054801561131f57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190600101908083116112d5575b505050505061103a565b141561133457600080fd5b61133d84611ab9565b151561134857600080fd5b5b6006600201600084815260200190815260200160002060000160009054906101000a900460ff1615156114c55760016006600201600085815260200190815260200160002060000160006101000a81548160ff021916908315150217905550836006600201600085815260200190815260200160002060020160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550436006600201600085815260200190815260200160002060030181905550600060066002016000858152602001908152602001600020600101816114449190613046565b506006600201600084815260200190815260200160002060010180548060010182816114709190613072565b916000526020600020900160005b33909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555050611698565b6005546006600201600085815260200190815260200160002060030154430311156114f8576114f383612dba565b6118a9565b8373ffffffffffffffffffffffffffffffffffffffff166006600201600085815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614151561156b57600080fd5b611612336006600201600086815260200190815260200160002060010180548060200260200160405190810160405280929190818152602001828054801561160857602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190600101908083116115be575b505050505061285f565b1561161c57600080fd5b6006600201600084815260200190815260200160002060010180548060010182816116479190613072565b916000526020600020900160005b33909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550505b6009600201546117446006600201600086815260200190815260200160002060010180548060200260200160405190810160405280929190818152602001828054801561173a57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190600101908083116116f0575b5050505050611d17565b1015156118a85760008314801561179957506117986006600201600085815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16611977565b5b156117e2576117e1836006600201600086815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16612e8f565b5b600183148015611830575061182f6006600201600085815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16611ab9565b5b1561187957611878836006600201600086815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16612e8f565b5b600090505b600381101561189e5761189081612ce5565b5b808060010191505061187e565b6118a783612dba565
b5b5b5b50505050565b600060028311806118c15750600282115b156118cb57600080fd5b6000831415611902576006600001600083815260200190815260200160002060000160009054906101000a900460ff169050611971565b6001831415611939576006600101600083815260200190815260200160002060000160009054906101000a900460ff169050611971565b6002831415611970576006600201600083815260200190815260200160002060000160009054906101000a900460ff169050611971565b5b92915050565b600080600090505b600180549050811015611a12578273ffffffffffffffffffffffffffffffffffffffff166001828154811015156119b257fe5b906000526020600020900160005b9054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff161415611a045760019150611a17565b5b808060010191505061197f565b600091505b50919050565b600460009054906101000a900460ff1615611a3757600080fd5b60018054806001018281611a4b9190613072565b916000526020600020900160005b33909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506001600460006101000a81548160ff0219169083151502179055505b565b600080600090505b600280549050811015611b54578273ffffffffffffffffffffffffffffffffffffffff16600282815481101515611af457fe5b906000526020600020900160005b9054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff161415611b465760019150611b59565b5b8080600101915050611ac1565b600091505b50919050565b611b6761309e565b6001821115611b7557600080fd5b6000821415611c0c576001805480602002602001604051908101604052809291908181526020018280548015611c0057602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311611bb6575b50505050509050611ca4565b6001821415611ca3576002805480602002602001604051908101604052809291908181526020018280548015611c9757602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311611c4d575b50505050509050611ca4565b5b919050565b60008082815481101515611cb957fe5b906000526020600020906002020160005b506000015490505b919050565b600181815481101515611ce657fe5b906000526020600020900160005b915054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6000806000809050600091505b8351821015611da35760008483815181101515611d3d57fe5b9060200190602002015173ffffffffffffffffffffffffffffffffffffffff1614158015611d875750611d868483815181101515611d7757fe5b90602001906020020151611977565b5b15611d955780806001019150505b5b8180600101925050611d24565b8092505b5050919050565b611db733611977565b158015611dca5750611dc833611ab9565b155b15611dd457600080fd5b600081148015611e745750600354611e716001805480602002602001604051908101604052809291908181526020018280548015611e6757602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311611e1d575b505050505061103a565b10155b15611e7e57600080fd5b600181148015611f1e5750600354611f1b6002805480602002602001604051908101604052809291908181526020018280548015611f1157602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311611ec7575b505050505061103a565b10155b15611f2857600080fd5b60008273ffffffffffffffffffffffffffffffffffffffff161415611f4c57600080fd5b6002811115611f5a57600080fd5b6000811480611f695750600181145b8015611f8a5750611f7982611977565b80611f895750611f8882611ab9565b5b5b15611f9457600080fd5b60066000
01600082815260200190815260200160002060000160009054906101000a900460ff16151561212357611fca33611ab9565b15611fd457600080fd5b60016006600001600083815260200190815260200160002060000160006101000a81548160ff021916908315150217905550816006600001600083815260200190815260200160002060020160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550436006600001600083815260200190815260200160002060030181905550600060066000016000838152602001908152602001600020600101816120a29190613046565b506006600001600082815260200190815260200160002060010180548060010182816120ce9190613072565b916000526020600020900160005b33909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506122f6565b6005546006600001600083815260200190815260200160002060030154430311156121565761215181612ce5565b61285a565b8173ffffffffffffffffffffffffffffffffffffffff166006600001600083815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff161415156121c957600080fd5b612270336006600001600084815260200190815260200160002060010180548060200260200160405190810160405280929190818152602001828054801561226657602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001906001019080831161221c575b505050505061285f565b1561227a57600080fd5b6006600001600082815260200190815260200160002060010180548060010182816122a59190613072565b916000526020600020900160005b33909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550505b60008114806123055750600181145b156125ab576009600201546123b6600660000160008481526020019081526020016000206001018054806020026020016040519081016040528092919081815260200182805480156123ac57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311612362575b5050505050611d17565b1015156125aa576123ff6006600001600083815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16611977565b8061244857506124476006600001600083815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16611ab9565b5b1561245257600080fd5b60008114156124f9576001805480600101828161246f9190613072565b916000526020600020900160005b6006600001600085815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550505b60018114156125a057600280548060010182816125169190613072565b916000526020600020900160005b6006600001600085815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550505b6125a981612ce5565b5b5b6002811415612859576009600001546126606006600001600084815260200190815260200160002060010180548060200260200160405190810160405280929190818152602001828054801561265657602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001906001019080831161260c575b5050505050611d17565b10158015612718575060096001015461271560066000016000848152602001908152602001600020600
10180548060200260200160405190810160405280929190818152602001828054801561270b57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190600101908083116126c1575b50505050506110db565b10155b15612858576000808054905011801561275f575060014301600060016000805490500381548110151561274757fe5b906000526020600020906002020160005b5060000154145b1561276957600080fd5b6000805480600101828161277d91906130b2565b916000526020600020906002020160005b60408051908101604052806001430181526020016006600001600087815260200190815260200160002060020160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681525090919091506000820151816000015560208201518160010160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555050505061285781612ce5565b5b5b5b5b5050565b600080600090505b82518110156128d2578373ffffffffffffffffffffffffffffffffffffffff16838281518110151561289557fe5b9060200190602002015173ffffffffffffffffffffffffffffffffffffffff1614156128c457600191506128d7565b5b8080600101915050612867565b600091505b5092915050565b6000806000808054905014156128f75760009150612a12565b60016000805490500390505b6000811115612994578260008281548110151561291c57fe5b906000526020600020906002020160005b50600001541115156129855760008181548110151561294857fe5b906000526020600020906002020160005b5060010160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169150612a12565b5b808060019003915050612903565b826000808154811015156129a457fe5b906000526020600020906002020160005b5060000154111515612a0d576000808154811015156129d057fe5b906000526020600020906002020160005b5060010160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169150612a12565b600091505b50919050565b612a2061309e565b6002831180612a2f5750600282115b15612a3957600080fd5b6000831415612ae75760066000016000838152602001908152602001600020600101805480602002602001604051908101604052809291908181526020018280548015612adb57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311612a91575b50505050509050612c44565b6001831415612b955760066001016000838152602001908152602001600020600101805480602002602001604051908101604052809291908181526020018280548015612b8957602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311612b3f575b50505050509050612c44565b6002831415612c435760066002016000838152602001908152602001600020600101805480602002602001604051908101604052809291908181526020018280548015612c3757602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311612bed575b50505050509050612c44565b5b92915050565b6000600660010160008381526020019081526020016000206002018190555060006006600101600083815260200190815260200160002060010181612c8f9190613046565b506000600660010160008381526020019081526020016000206003018190555060006006600101600083815260200190815260200160002060000160006101000a81548160ff0219169083151502179055505b50565b60006006600001600083815260200190815260200160002060020160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555060006006600001600083815260200190815260200160002060010181612d649190613046565b5060006006600001600083815260200190815260200160
00206003018190555060006006600001600083815260200190815260200160002060000160006101000a81548160ff0219169083151502179055505b50565b60006006600201600083815260200190815260200160002060020160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555060006006600201600083815260200190815260200160002060010181612e399190613046565b506000600660020160008381526020019081526020016000206003018190555060006006600201600083815260200190815260200160002060000160006101000a81548160ff0219169083151502179055505b50565b600080831415612f6857600090505b600180549050811015612f67578173ffffffffffffffffffffffffffffffffffffffff16600182815481101515612ed157fe5b906000526020600020900160005b9054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff161415612f5957600181815481101515612f2957fe5b906000526020600020900160005b6101000a81549073ffffffffffffffffffffffffffffffffffffffff02191690555b5b8080600101915050612e9e565b5b600183141561304057600090505b60028054905081101561303f578173ffffffffffffffffffffffffffffffffffffffff16600282815481101515612fa957fe5b906000526020600020900160005b9054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614156130315760028181548110151561300157fe5b906000526020600020900160005b6101000a81549073ffffffffffffffffffffffffffffffffffffffff02191690555b5b8080600101915050612f76565b5b5b505050565b81548183558181151161306d5781836000526020600020918201910161306c91906130e4565b5b505050565b8154818355818115116130995781836000526020600020918201910161309891906130e4565b5b505050565b602060405190810160405280600081525090565b8154818355818115116130df576002028160020283600052602060002091820191016130de9190613109565b5b505050565b61310691905b808211156131025760008160009055506001016130ea565b5090565b90565b61315491905b80821115613150576000808201600090556001820160006101000a81549073ffffffffffffffffffffffffffffffffffffffff02191690555060020161310f565b5090565b905600a165627a7a723058203193cc570fd198d6b9da1b2fcac2a6332e140446e820454c1ac4467f811341e30029\", 4000000)\n self.contract_address = contract_data['address']\n self.node.generate(1)", "def create(openstack_resource):\n # Update port config before create port\n _update_port_config(openstack_resource.config)\n\n # Create port\n created_resource = openstack_resource.create()\n ipv4_list, ipv6_list = _get_fixed_ips_from_port(created_resource)\n fixed_ips = ipv4_list + ipv6_list\n _export_ips_to_port_instance(ipv4_list, ipv6_list)\n\n # Handle runtime properties\n update_runtime_properties(\n {\n RESOURCE_ID: created_resource.id,\n 'fixed_ips': fixed_ips,\n 'mac_address': created_resource.mac_address,\n 'allowed_address_pairs': created_resource.allowed_address_pairs,\n }\n )", "def u_frame(self, function):\n byte = NO_FUNC\n function = function.lower()\n if \"con\" in function:\n if \"test\" in function:\n byte = TESTFR_CON\n if \"stop\" in function:\n byte = STOPDT_CON\n if \"start\" in function:\n byte = STARTDT_CON\n if \"act\" in function:\n if \"test\" in function:\n byte = TESTFR_ACT\n if \"stop\" in function:\n byte = STOPDT_ACT\n if \"start\" in function:\n byte = STARTDT_ACT\n if byte == NO_FUNC:\n print(\"Warning: U-Frame was made without an active function.\")\n return struct.pack('<2BH', byte, 0x00, 0x00)", "def pack(self):\n if self.originalOriginate is None:\n os, on = divmod(self.originateNanos,1000000000)\n else:\n os, on = self.originalOriginate\n rs, rn = divmod(self.receiveNanos, 1000000000)\n ts, tn = divmod(self.transmitNanos, 1000000000)\n msg 
= struct.pack(WCMessage.STRUCT_FMT, 0, self.msgtype, self.precision, 0, self.maxFreqError, os, on, rs, rn, ts, tn) \n return msg", "def craft_tcp_packet_with_options(pkt: dpkt.ethernet.Ethernet, opts: list) -> dpkt.ethernet.Ethernet:\n if isinstance(pkt, dpkt.ethernet.Ethernet) and isinstance(pkt.data.data, dpkt.tcp.TCP):\n pkt.data.data.opts = tcp_option_payload_creation(opts)\n pkt.data.data.off = 5 + int(len(pkt.data.data.opts) / 4)\n pkt.data = tcp_fix_checksum(pkt.data)\n return pkt\n return b''", "def frame_packet(message):\n if message in one_char_packets:\n return message\n return \"$%s#%02x\" % (message, checksum(message))", "def __init__(self, host, device=\"gpib0,17\", raise_on_err=1, timeout=180000,device_name=\"HP 4156A\"):\n\t\tvxi_11.vxi_11_connection.__init__(self, host=host, device=device, raise_on_err=raise_on_err, timeout=timeout, device_name=device_name)\n\t\tself.write(\":FORM:DATA ASCii\")\n\t\tpass", "def output_generator(pkt):\r\n ethe_header = pkt[0]\r\n ip_header = pkt[1]\r\n protocol = pkt[1][7]\r\n data_header = pkt[2]\r\n ethe_prefix = \"ETHER: \"\r\n ip_prefix = \"IP: \"\r\n tcp_prefix = \"TCP: \"\r\n udp_prefix = \"UDP: \"\r\n icmp_prefix = \"ICMP: \"\r\n # print ether header information\r\n print(\"\\n\" + ethe_prefix + \"----- Ether Header -----\")\r\n print(ethe_prefix)\r\n print(ethe_prefix + \"Packet size = \" + str(ethe_header[0]) + \" bytes\")\r\n print(ethe_prefix + \"Destination = \" + str(ethe_header[1]))\r\n print(ethe_prefix + \"Source = \" + str(ethe_header[2]))\r\n print(ethe_prefix + \"Ethertype = \" + str(ethe_header[3]) + \" (IP)\")\r\n print(ethe_prefix)\r\n\r\n print(ip_prefix + \"----- IP Header -----\")\r\n print(ip_prefix)\r\n print(ip_prefix + \"Version = \" + str(ip_header[0]))\r\n print(ip_prefix + \"Header length = \" + str(4 * int(ip_header[1])) + \" bytes\")\r\n print(ip_prefix + \"Type of service = 0x\" + str(ip_header[2]))\r\n if str(ip_header[2]) == \"00\":\r\n print(ip_prefix + \"\\txxx. .... = 0 (precedence)\")\r\n print(ip_prefix + \"\\t...0 .... = normal delay\")\r\n print(ip_prefix + \"\\t.... 0... = normal throughput\")\r\n print(ip_prefix + \"\\t.... .0.. = normal reliability\")\r\n print(ip_prefix + \"Total length = \" + str(ip_header[3]) + \" bytes\")\r\n print(ip_prefix + \"Identification = \" + str(ip_header[4]))\r\n print(ip_prefix + \"Flags = 0x\" + str(ip_header[5]))\r\n flag = str(format(int(ip_header[5][0]), '04b'))\r\n if flag[0] == \"0\":\r\n print(ip_prefix + \"\\t0... ... = Reserved bit: Not set\")\r\n else:\r\n print(ip_prefix + \"\\t1... ... = Reserved bit: set\")\r\n if flag[1] == \"0\":\r\n print(ip_prefix + \"\\t.0.. ... = Don't fragment: Not set\")\r\n else:\r\n print(ip_prefix + \"\\t.1.. ... = Don't fragment: set\")\r\n if flag[2] == \"0\":\r\n print(ip_prefix + \"\\t..0. ... = More fragments: Not set\")\r\n else:\r\n print(ip_prefix + \"\\t..1. ... 
= More fragments: set\")\r\n flag_offset = str((int(ip_header[5][2:3])))\r\n print(ip_prefix + \"Fragment offset = \" + flag_offset + \" bytes\")\r\n print(ip_prefix + \"Time to live = \" + str(ip_header[6]) + \" seconds/hops\")\r\n if protocol == 1:\r\n print(ip_prefix + \"Protocol = \" + str(protocol) + \" (ICMP)\")\r\n if protocol == 17:\r\n print(ip_prefix + \"Protocol = \" + str(protocol) + \" (UDP)\")\r\n if protocol == 6:\r\n print(ip_prefix + \"Protocol = \" + str(protocol) + \" (TCP)\")\r\n print(ip_prefix + \"Header checksum = \" + str(ip_header[8]))\r\n print(ip_prefix + \"Source address = \" + str(ip_header[9]))\r\n print(ip_prefix + \"Destination address = \" + str(ip_header[10]))\r\n if ip_header[11] == \"\":\r\n print(ip_prefix + \"No options\")\r\n else:\r\n print(ip_prefix + \"Options: \" + ip_header[11])\r\n print(ip_prefix)\r\n\r\n if protocol == 1:\r\n print(icmp_prefix + \"----- ICMP Header -----\")\r\n print(icmp_prefix)\r\n if str(data_header[0]) == \"8\":\r\n print(icmp_prefix + \"Type = \" + str(data_header[0]) + \" (Echo request)\")\r\n elif str(data_header[0]) == \"0\":\r\n print(icmp_prefix + \"Type = \" + str(data_header[0]) + \" (Echo reply)\")\r\n else:\r\n print(icmp_prefix + \"Type = \" + str(data_header[0]))\r\n print(icmp_prefix + \"Code = \" + str(data_header[1]))\r\n print(icmp_prefix + \"Checksum = \" + str(data_header[2]))\r\n print(icmp_prefix)\r\n\r\n elif protocol == 6:\r\n print(tcp_prefix + \"----- TCP Header -----\")\r\n print(tcp_prefix)\r\n print(tcp_prefix + \"Source port = \" + str(data_header[0]))\r\n print(tcp_prefix + \"Destination port = \" + str(data_header[1]))\r\n print(tcp_prefix + \"Sequence number = \" + str(data_header[2]))\r\n print(tcp_prefix + \"Acknowledgement number = \" + str(data_header[3]))\r\n print(tcp_prefix + \"Data offset = \" + str(data_header[4]) + \" bytes\")\r\n flag = str(data_header[5])\r\n print(tcp_prefix + \"\\tReserved: Not set\")\r\n print(tcp_prefix + \"\\tNonce: Not set\")\r\n if flag[0] == \"0\":\r\n print(tcp_prefix + \"\\tCWR: Not set\")\r\n else:\r\n print(tcp_prefix + \"\\tCWR: Set\")\r\n if flag[1] == \"0\":\r\n print(tcp_prefix + \"\\tECN-Echo : No set\")\r\n else:\r\n print(tcp_prefix + \"\\tECN-Echo: Set\")\r\n if flag[2] == \"0\":\r\n print(tcp_prefix + \"\\tUrgent: Not set\")\r\n else:\r\n print(tcp_prefix + \"\\tUrgent: Set\")\r\n if flag[3] == \"0\":\r\n print(tcp_prefix + \"\\tAcknowledgment: No set\")\r\n else:\r\n print(tcp_prefix + \"\\tAcknowledgment: Set\")\r\n if flag[4] == \"0\":\r\n print(tcp_prefix + \"\\tPush: No set\")\r\n else:\r\n print(tcp_prefix + \"\\tPush: Set\")\r\n if flag[5] == \"0\":\r\n print(tcp_prefix + \"\\tReset: No set\")\r\n else:\r\n print(tcp_prefix + \"\\tReset: Set\")\r\n if flag[6] == \"0\":\r\n print(tcp_prefix + \"\\tSyn: No set\")\r\n else:\r\n print(tcp_prefix + \"\\tSyn: Set\")\r\n if flag[7] == \"0\":\r\n print(tcp_prefix + \"\\tFin: No set\")\r\n else:\r\n print(tcp_prefix + \"\\tFin: Set\")\r\n print(tcp_prefix + \"Window = \" + str(data_header[6]))\r\n print(tcp_prefix + \"Checksum 0x= \" + str(data_header[7]))\r\n print(tcp_prefix + \"Urgent pointers = \" + str(data_header[8]))\r\n if data_header[9] != 0:\r\n print(tcp_prefix + \"Options\")\r\n else:\r\n print(tcp_prefix + \"No options\")\r\n print(tcp_prefix)\r\n\r\n elif protocol == 17:\r\n print(udp_prefix + \"----- UDP Header -----\")\r\n print(udp_prefix)\r\n print(udp_prefix + \"Source port = \" + str(data_header[0]))\r\n print(udp_prefix + \"Destination port = \" + str(data_header[1]))\r\n 
print(udp_prefix + \"Length = \" + str(data_header[2]))\r\n print(udp_prefix + \"Checksum = \" + str(data_header[3]))\r\n print(udp_prefix)", "def create_apdu(self, frame, asdu_type, sequence, cause_of_transmission, common_address, message, ssn = 0, rsn = 0, originator_address = 0):\n apci = self.wrap_frame(frame, ssn, rsn)\n if type(apci) is str:\n return apci\n asdu = self.wrap_asdu(asdu_type, sequence, cause_of_transmission, common_address, message, originator_address)\n if type(asdu) is str:\n return asdu\n return apci + asdu", "def generate_ethmac(peripheral, **kwargs):\n buf = kwargs['buffer']()\n\n result = \"\"\"\nethmac: Network.LiteX_Ethernet @ {{\n sysbus <{}, +0x100>;\n sysbus new Bus.BusMultiRegistration {{ address: {};\n size: {};\n region: \"buffer\" }}\n}}\n\"\"\".format(peripheral['address'], buf['address'], buf['size'])\n\n if 'interrupt' in peripheral['constants']:\n result += ' -> cpu@{}\\n'.format(\n peripheral['constants']['interrupt'])\n\n return result", "def setUp(self):\n\n serial_times = {295: '1971-07-31T01:24:11.754',\n 296: '1971-07-31T01:24:36.970',\n 297: '1971-07-31T01:25:02.243',\n 298: '1971-07-31T01:25:27.457',\n 299: '1971-07-31T01:25:52.669',\n 300: '1971-07-31T01:26:17.923'}\n self.serials = ['APOLLO15/METRIC/{}'.format(i) for i in serial_times.values()]\n\n\n x = list(range(5))\n y = list(range(5))\n pid = [0,0,1,1,1]\n idx = pid\n serials = [self.serials[0], self.serials[1], self.serials[2],\n self.serials[2], self.serials[3]]\n\n\n columns = ['x', 'y', 'idx', 'pid', 'nid']\n self.data_length = 5\n\n data = [x,y, idx, pid, serials]\n\n self.creation_time = strftime(\"%Y-%m-%d %H:%M:%S\", gmtime())\n cnet = C(data, index=columns).T\n\n io_controlnetwork.to_isis('test.net', cnet, mode='wb', targetname='Moon')\n\n self.header_message_size = 85\n self.point_start_byte = 65621", "def prepare_packet(msg_parts, nonce=None, add_time=True):\n if not isinstance(msg_parts, list):\n msg_parts = [msg_parts, \"\", \"\"]\n else:\n while len(msg_parts) < 3:\n msg_parts.append(\"\")\n for ind, mp in enumerate(msg_parts):\n if not isinstance(mp, str):\n msg_parts[ind] = str(mp)\n has_ts = c.TRUE_STR if add_time else c.FALSE_STR\n has_nonce = c.TRUE_STR if nonce is not None else c.FALSE_STR\n eofp = str(len(msg_parts[0])).zfill(5)\n eosp = str(len(msg_parts[0] + msg_parts[1])).zfill(5)\n header = has_ts + has_nonce + eofp + eosp\n res_msg = header + \"\".join(msg_parts) + (nonce if nonce is not None else \"\")\n res_msg += PacketOrganiser.get_new_timestamp() if add_time else \"\"\n return res_msg", "def __init__(self):\n super().__init__(message_type=cp_type_enum.CP_ACK)", "def _send_frame(self, dest, data):\n self._log.debug(\"write {} to {}\".format(len(data), dest)) \n # send to endpoint\n self._conn.sendto(data, (dest,0))", "def gen_frame():\n while True:\n frame = camera_stream()\n yield (b'--frame\\r\\n'\n b'Content-Type: image/png\\r\\n\\r\\n' + frame + b'\\r\\n') # concate frame one by one and show result", "def create_instance(c_instance):\n return AumPC40(c_instance)", "def __frame_tx(self,data):\n\n if self._spy_frame_tx is not None:\n self._spy_frame_tx(data)\n\n data=self.__pad(data)\n\n if len(data) < self.other_bufferlen:\n self.com.tx(data)\n else:\n chunks = (len(data)-1) // self.other_bufferlen\n #print(\"__frame_tx: %d full chunks + last\"%chunks,flush=True)\n for i in range(0,chunks):\n self.com.tx(data[i*self.other_bufferlen:(i+1)*self.other_bufferlen])\n self.com.rx_ack()\n self.com.tx(data[chunks*self.other_bufferlen:])\n #print(\"__frame_tx 
done\",flush=True)", "def create(self):\n\t\treturn handle_to_object(call_sdk_function('PrlPortFwd_Create'))", "def create_magic_packet(macaddress: str) -> bytes:\n if len(macaddress) == 17:\n sep = macaddress[2]\n macaddress = macaddress.replace(sep, \"\")\n elif len(macaddress) == 14:\n sep = macaddress[4]\n macaddress = macaddress.replace(sep, \"\")\n if len(macaddress) != 12:\n raise ValueError(\"Incorrect MAC address format\")\n return bytes.fromhex(\"F\" * 12 + macaddress * 16)", "def rx_beacon_packet(self): \n self.beacon.make_packet()\n rx_packet = self.beacon.tx_packet()\n rx_time = np.float128('%.20f'%(time.time()))\n if self.DEBUG:\n print 'rx_time: ', repr(rx_time)\n\n self.data.set_timestamp_base(rx_time)\n self.data.set_beacon_packet(rx_packet)", "def __init__(self, channel_number, body_size, props):\n Frame.__init__(self, spec.FRAME_HEADER, channel_number)\n self.body_size = body_size\n self.properties = props", "def malloc_jitframe(self, frame_info):\n frame = JITFRAME.allocate(frame_info)\n self.frames.append(frame)\n return frame", "def __init__(self, source, ip='localhost', port=12345):\n self.ip = ip\n self.port = port\n self.frame = 1\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.frame_buffer = PriorityQueue()\n self.capture_thread = Thread(target=self.capture_video)\n self.sending_thread = Thread(target=self.send_video)\n self.capture_thread.setDaemon(True)\n self.sending_thread.setDaemon(True)\n self.capturing = False\n self.source = source\n self.addr = (self.ip, self.port)\n self.encode_param = [1, 90]#[int(cv2.IMWRITE_JPEG_QUALITY), 90]", "def testFramepack1(self):\n # Check bad frame generation:\n frame = stomper.Frame()\n\n def bad():\n frame.cmd = 'SOME UNNOWN CMD'\n\n self.assertRaises(stomper.FrameError, bad)\n\n # Generate a MESSAGE frame:\n frame = stomper.Frame()\n frame.cmd = 'MESSAGE'\n frame.headers['destination'] = '/queue/a'\n frame.headers['message-id'] = 'card_data'\n frame.body = \"hello queue a\"\n result = frame.pack()\n\n# print \"\\n-- result \" + \"----\" * 10\n# pprint.pprint(result)\n# print\n\n # Try bad message unpack catching:\n bad_frame = stomper.Frame()\n self.assertRaises(stomper.FrameError, bad_frame.unpack, None)\n self.assertRaises(stomper.FrameError, bad_frame.unpack, '')\n\n # Try to read the generated frame back in\n # and then check the variables are set up\n # correctly:\n frame2 = stomper.Frame()\n frame2.unpack(result)\n\n self.assertEqual(frame2.cmd, 'MESSAGE')\n self.assertEqual(frame2.headers['destination'], '/queue/a')\n self.assertEqual(frame2.headers['message-id'], 'card_data')\n self.assertEqual(frame2.body, 'hello queue a')\n result = frame2.pack()\n\n correct = \"MESSAGE\\ndestination:/queue/a\\nmessage-id:card_data\\n\\nhello queue a\\x00\\n\"\n\n# print \"result: \"\n# pprint.pprint(result)\n# print\n# print \"correct: \"\n# pprint.pprint(correct)\n# print\n#\n self.assertEqual(result, correct)\n\n result = stomper.unpack_frame(result)\n\n self.assertEqual(result['cmd'], 'MESSAGE')\n self.assertEqual(result['headers']['destination'], '/queue/a')\n self.assertEqual(result['headers']['message-id'], 'card_data')\n self.assertEqual(result['body'], 'hello queue a')", "def create_stream(cls, packet_sizes, packet_count=test_packet_count):\n for i in range(0, packet_count):\n info = cls.create_packet_info(cls.src_if, cls.src_if)\n payload = cls.info_to_payload(info)\n p = (\n Ether(dst=cls.src_if.local_mac, src=cls.src_if.remote_mac)\n / IPv6(src=cls.src_if.remote_ip6, dst=cls.dst_if.remote_ip6)\n 
/ UDP(sport=1234, dport=5678)\n / Raw(payload)\n )\n size = packet_sizes[(i // 2) % len(packet_sizes)]\n cls.extend_packet(p, size, cls.padding)\n info.data = p", "def diagram(self, with_lines=False, with_captions=False):\n out = []\n\n row = 0\n col = 0\n byte_counter = 0\n\n if with_captions:\n out.append(\"1: Version 2: IHL 3: DSCP 4: ECN 5: Total Length\\n\")\n out.append(\"6: Identification 7: Flags 8: Fragment Offset\\n\")\n out.append(\"9: Time To Live 10: Protocol 11: Header Checksum\\n\")\n out.append(\"12: Source IP 13: Destination IP\\n\")\n\n if len(self.payload) > 0:\n out.append(\"14: Payload\\n\")\n\n out.append(\"\\n\")\n\n hb = len(self.header_bytes())\n\n for byte in bytes(self):\n if with_lines:\n # Version, IHL, DSCP, ECN and Total Length\n if col == 0 and row == 0:\n out.append(\" 1 2 3 4 5\\n\")\n out.append(\"┌┴─┐ ┌┴─┐ ┌┴────┐├┐ \")\n out.append(\"┌┴──────────────────┐\\n\")\n\n # Identification, Flags and Fragment Offset\n if col == 0 and row == 1:\n out.append(\" 6 7 8\\n\")\n out.append(\"┌┴──────────────────┐ \")\n out.append(\"┌┴┐┌┴───────────────┐\\n\")\n\n # Time To Live, Protocol and Header Checksum\n if col == 0 and row == 2:\n out.append(\" 9 10 11\\n\")\n out.append(\"┌┴──────┐ ┌┴──────┐ \")\n out.append(\"┌┴──────────────────┐\\n\")\n\n # Source IP\n if col == 0 and row == 3:\n out.append(\" 12\\n\")\n out.append(\"┌┴──────────────────────\")\n out.append(\"────────────────────┐\\n\")\n\n # Destination IP\n if col == 0 and row == 4:\n out.append(\" 13\\n\")\n out.append(\"┌┴──────────────────────\")\n out.append(\"────────────────────┐\\n\")\n\n # Payload\n if byte_counter == hb:\n out.append(\" 14\\n\")\n out.append(\"┌┴──────────────────────\")\n out.append(\"────────────────────┐\\n\")\n\n byte = bin(byte)[2:].rjust(8, \"0\")\n first = byte[:4]\n last = byte[4:]\n\n out.append(first)\n out.append(\" \")\n out.append(last)\n\n if col == 3:\n out.append(\"\\n\")\n col = 0\n row += 1\n else:\n out.append(\" \")\n col += 1\n\n byte_counter += 1\n\n return \"\".join(out).strip()", "def render(self):\n fmt = 'B' + 'B' * len(self.frame)\n self.sendPacket(6, struct.pack(fmt, self.start_code, *self.frame))", "def encode(self) -> bytes:\n\n # unsigned char dmac[6];\n # unsigned char smac[6];\n # uint16_t ethertype;\n # unsigned char payload[];\n\n t = struct.pack(\"H\", socket.htons(self.typ))\n return self.dmac + self.smac + t + self.payload", "def make_xfer_config(hostname, identfile, user='root'):\n xc = \"\"\n xc += \"Host %s\\n\" % (hostname)\n xc += \"Hostname %s\\n\" % (hostname)\n xc += \"User %s\\n\" % (user)\n xc += \"IdentityFile %s\\n\" % (identfile)\n xc += \"Compression yes\\n\"\n xc += \"StrictHostKeyChecking no\\n\"\n xc += \"UserKnownHostsFile /dev/null\\n\"\n return xc", "def __init__(self, ip, x_len, u_len):\n self._x_fmt = '>' + x_len * 'd'\n self._u_fmt = '>' + u_len * 'd'\n self._buf_size = x_len * 8 # 8 bytes for each double\n self._port = 9095 # fixed in Simulink model\n self._ip = ip\n self._soc = None" ]
[ "0.58488214", "0.5700761", "0.56956196", "0.56287456", "0.56241447", "0.56202054", "0.54626226", "0.5419994", "0.5410478", "0.53642374", "0.53623325", "0.531552", "0.52997607", "0.526246", "0.5247345", "0.5247104", "0.5242202", "0.5209782", "0.5174515", "0.5075918", "0.50341254", "0.5015512", "0.49719715", "0.4962528", "0.49543566", "0.49539757", "0.49517596", "0.49474245", "0.49429342", "0.4928377", "0.49266142", "0.4924996", "0.48903644", "0.48783627", "0.48748076", "0.48542944", "0.48534247", "0.48466522", "0.48423514", "0.4838213", "0.48321033", "0.4820578", "0.48088792", "0.48074025", "0.47933525", "0.4775019", "0.47517687", "0.47508386", "0.4737", "0.47302538", "0.47269845", "0.47206497", "0.47204232", "0.47189537", "0.47176653", "0.46889958", "0.46798566", "0.46796718", "0.46707", "0.4662057", "0.4662012", "0.46593958", "0.46400288", "0.4629382", "0.46174955", "0.4615896", "0.461101", "0.4606873", "0.4604455", "0.4602163", "0.4596299", "0.45894682", "0.45842195", "0.45792633", "0.45780355", "0.45759085", "0.4570358", "0.4562171", "0.4561905", "0.45609406", "0.45524287", "0.45477706", "0.4543517", "0.4538673", "0.45244467", "0.45208585", "0.4514979", "0.45128813", "0.45097408", "0.4497431", "0.4492101", "0.44894826", "0.44854832", "0.4479096", "0.4477963", "0.44741315", "0.44733858", "0.44716102", "0.44670716", "0.4466133" ]
0.6318154
0
Computes the pickup_features feature group. To restrict features to a time range, pass in ts_column, start_date, and/or end_date as kwargs.
def pickup_features_fn(df, ts_column, start_date, end_date): df = filter_df_by_ts( df, ts_column, start_date, end_date ) pickupzip_features = ( df.groupBy( "pickup_zip", window("tpep_pickup_datetime", "1 hour", "15 minutes") ) # 1 hour window, sliding every 15 minutes .agg( mean("fare_amount").alias("mean_fare_window_1h_pickup_zip"), count("*").alias("count_trips_window_1h_pickup_zip"), ) .select( col("pickup_zip").alias("zip"), unix_timestamp(col("window.end")).alias("ts").cast(IntegerType()), partition_id(to_timestamp(col("window.end"))).alias("yyyy_mm"), col("mean_fare_window_1h_pickup_zip").cast(FloatType()), col("count_trips_window_1h_pickup_zip").cast(IntegerType()), ) ) return pickupzip_features
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dropoff_features_fn(df, ts_column, start_date, end_date):\n df = filter_df_by_ts(\n df, ts_column, start_date, end_date\n )\n dropoffzip_features = (\n df.groupBy(\"dropoff_zip\", window(\"tpep_dropoff_datetime\", \"30 minute\"))\n .agg(count(\"*\").alias(\"count_trips_window_30m_dropoff_zip\"))\n .select(\n col(\"dropoff_zip\").alias(\"zip\"),\n unix_timestamp(col(\"window.end\")).alias(\"ts\").cast(IntegerType()),\n partition_id(to_timestamp(col(\"window.end\"))).alias(\"yyyy_mm\"),\n col(\"count_trips_window_30m_dropoff_zip\").cast(IntegerType()),\n is_weekend(col(\"window.end\")).alias(\"dropoff_is_weekend\"),\n )\n )\n return dropoffzip_features", "def get_date_features(gt_ids=[], gt_masks=None, gt_shifts=None, first_year=None):\n # If particular arguments aren't lists, replace with repeating iterators\n if not isinstance(gt_masks, list):\n gt_masks = itertools.repeat(gt_masks)\n if not isinstance(gt_shifts, list):\n gt_shifts = itertools.repeat(gt_shifts)\n\n # Add each ground truth feature to dataframe\n df = None\n for gt_id, gt_mask, gt_shift in zip(gt_ids, gt_masks, gt_shifts):\n print \"Getting {}_shift{}\".format(gt_id, gt_shift)\n t = time.time()\n # Load ground truth data\n gt = get_ground_truth(gt_id, gt_mask, gt_shift)\n # Discard years prior to first_year\n gt = year_slice(gt, first_year = first_year)\n # If lat, lon columns exist, pivot to wide format\n if 'lat' in gt.columns and 'lon' in gt.columns:\n if gt_shift == None:\n measurement_variable = get_measurement_variable(gt_id)\n else:\n measurement_variable = get_measurement_variable(gt_id)+'_shift'+str(gt_shift)\n gt = pd.pivot_table(gt, values=measurement_variable, index='start_date',\n columns=['lat', 'lon']).reset_index()\n gt = pd.DataFrame(gt.to_records())\n gt.drop(\"index\", axis=1, inplace=True)\n # Rename columns to start_date and precip_(27.0,261.0), etc.\n gt.rename(columns={gt.columns[0]: 'start_date'}, inplace=True)\n gt.rename(columns=lambda x: x.replace('(',\n measurement_variable +\n '_('), inplace=True)\n # Use outer merge to include union of start_date values across all features\n # combinations across all features\n df = df_merge(df, gt, on=\"start_date\")\n print \"Elapsed: {}s\".format(time.time() - t)\n\n return df", "def _create_ts_features(df, tscol):\r\n df = copy.deepcopy(df)\r\n dt_adds = []\r\n try:\r\n df[tscol+'_hour'] = df[tscol].dt.hour.fillna(0).astype(int)\r\n df[tscol+'_minute'] = df[tscol].dt.minute.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_hour')\r\n dt_adds.append(tscol+'_minute')\r\n except:\r\n print(' Error in creating hour-second derived features. 
Continuing...')\r\n try:\r\n df[tscol+'_dayofweek'] = df[tscol].dt.dayofweek.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_dayofweek')\r\n if tscol+'_hour' in dt_adds:\r\n DAYS = dict(zip(range(7),['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']))\r\n df[tscol+'_dayofweek'] = df[tscol+'_dayofweek'].map(DAYS)\r\n df.loc[:,tscol+'_dayofweek_hour_cross'] = df[tscol+'_dayofweek'] +\" \"+ df[tscol+'_hour'].astype(str)\r\n dt_adds.append(tscol+'_dayofweek_hour_cross')\r\n df[tscol+'_quarter'] = df[tscol].dt.quarter.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_quarter')\r\n df[tscol+'_month'] = df[tscol].dt.month.fillna(0).astype(int)\r\n MONTHS = dict(zip(range(1,13),['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul',\r\n 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']))\r\n df[tscol+'_month'] = df[tscol+'_month'].map(MONTHS)\r\n dt_adds.append(tscol+'_month')\r\n #### Add some features for months ########################################\r\n festives = ['Oct','Nov','Dec']\r\n name_col = tscol+\"_is_festive\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in festives else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n summer = ['Jun','Jul','Aug']\r\n name_col = tscol+\"_is_summer\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in summer else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n winter = ['Dec','Jan','Feb']\r\n name_col = tscol+\"_is_winter\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in winter else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n cold = ['Oct','Nov','Dec','Jan','Feb','Mar']\r\n name_col = tscol+\"_is_cold\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in cold else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n warm = ['Apr','May','Jun','Jul','Aug','Sep']\r\n name_col = tscol+\"_is_warm\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in warm else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n #########################################################################\r\n if tscol+'_dayofweek' in dt_adds:\r\n df.loc[:,tscol+'_month_dayofweek_cross'] = df[tscol+'_month'] +\" \"+ df[tscol+'_dayofweek']\r\n dt_adds.append(tscol+'_month_dayofweek_cross')\r\n df[tscol+'_year'] = df[tscol].dt.year.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_year')\r\n today = date.today()\r\n df[tscol+'_age_in_years'] = today.year - df[tscol].dt.year.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_age_in_years')\r\n df[tscol+'_dayofyear'] = df[tscol].dt.dayofyear.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_dayofyear')\r\n df[tscol+'_dayofmonth'] = df[tscol].dt.day.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_dayofmonth')\r\n df[tscol+'_weekofyear'] = df[tscol].dt.weekofyear.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_weekofyear')\r\n weekends = (df[tscol+'_dayofweek'] == 'Sat') | (df[tscol+'_dayofweek'] == 'Sun')\r\n df[tscol+'_typeofday'] = 'weekday'\r\n df.loc[weekends, tscol+'_typeofday'] = 'weekend'\r\n dt_adds.append(tscol+'_typeofday')\r\n if tscol+'_typeofday' in dt_adds:\r\n df.loc[:,tscol+'_month_typeofday_cross'] = df[tscol+'_month'] +\" \"+ df[tscol+'_typeofday']\r\n dt_adds.append(tscol+'_month_typeofday_cross')\r\n except:\r\n print(' Error in creating date time derived features. 
Continuing...')\r\n print(' created %d columns from time series %s column' %(len(dt_adds),tscol))\r\n return df, dt_adds", "def FE_start_end_date_time_features(smalldf, startTime, endTime, splitter_date_string=\"/\",splitter_hour_string=\":\"):\r\n smalldf = smalldf.copy()\r\n add_cols = []\r\n date_time_variable_flag = False\r\n if smalldf[startTime].dtype in ['datetime64[ns]','datetime16[ns]','datetime32[ns]']:\r\n print('%s variable is a date-time variable' %startTime)\r\n date_time_variable_flag = True\r\n if date_time_variable_flag:\r\n view_days = 'processing'+startTime+'_elapsed_days'\r\n smalldf[view_days] = (smalldf[endTime] - smalldf[startTime]).astype('timedelta64[s]')/(60*60*24)\r\n smalldf[view_days] = smalldf[view_days].astype(int)\r\n add_cols.append(view_days)\r\n view_time = 'processing'+startTime+'_elapsed_time'\r\n smalldf[view_time] = (smalldf[endTime] - smalldf[startTime]).astype('timedelta64[s]').values\r\n add_cols.append(view_time)\r\n else:\r\n start_date = 'processing'+startTime+'_start_date'\r\n smalldf[start_date] = smalldf[startTime].map(lambda x: x.split(\" \")[0])\r\n add_cols.append(start_date) \r\n try:\r\n start_time = 'processing'+startTime+'_start_time'\r\n smalldf[start_time] = smalldf[startTime].map(lambda x: x.split(\" \")[1])\r\n add_cols.append(start_time)\r\n except:\r\n ### there is no hour-minutes part of this date time stamp field. You can just skip it if it is not there\r\n pass\r\n end_date = 'processing'+endTime+'_end_date'\r\n smalldf[end_date] = smalldf[endTime].map(lambda x: x.split(\" \")[0])\r\n add_cols.append(end_date)\r\n try:\r\n end_time = 'processing'+endTime+'_end_time'\r\n smalldf[end_time] = smalldf[endTime].map(lambda x: x.split(\" \")[1])\r\n add_cols.append(end_time)\r\n except:\r\n ### there is no hour-minutes part of this date time stamp field. You can just skip it if it is not there\r\n pass\r\n view_days = 'processing'+startTime+'_elapsed_days'\r\n smalldf[view_days] = (pd.to_datetime(smalldf[end_date]) - pd.to_datetime(smalldf[start_date])).values.astype(int)\r\n add_cols.append(view_days)\r\n try:\r\n view_time = 'processing'+startTime+'_elapsed_time'\r\n smalldf[view_time] = (pd.to_datetime(smalldf[end_time]) - pd.to_datetime(smalldf[start_time])).astype('timedelta64[s]').values\r\n add_cols.append(view_time)\r\n except:\r\n ### In some date time fields this gives an error so skip it in that case\r\n pass\r\n #### The reason we chose endTime here is that startTime is usually taken care of by another library. So better to do this alone.\r\n year = 'processing'+endTime+'_end_year'\r\n smalldf[year] = smalldf[end_date].map(lambda x: str(x).split(splitter_date_string)[0]).values\r\n add_cols.append(year)\r\n #### The reason we chose endTime here is that startTime is usually taken care of by another library. So better to do this alone.\r\n month = 'processing'+endTime+'_end_month'\r\n smalldf[month] = smalldf[end_date].map(lambda x: str(x).split(splitter_date_string)[1]).values\r\n add_cols.append(month)\r\n try:\r\n #### The reason we chose endTime here is that startTime is usually taken care of by another library. So better to do this alone.\r\n daynum = 'processing'+endTime+'_end_day_number'\r\n smalldf[daynum] = smalldf[end_date].map(lambda x: str(x).split(splitter_date_string)[2]).values\r\n add_cols.append(daynum)\r\n except:\r\n ### In some date time fields the day number is not there. 
If not, just skip it ####\r\n pass\r\n #### In some date time fields, the hour and minute is not there, so skip it in that case if it errors!\r\n try:\r\n start_hour = 'processing'+startTime+'_start_hour'\r\n smalldf[start_hour] = smalldf[start_time].map(lambda x: str(x).split(splitter_hour_string)[0]).values\r\n add_cols.append(start_hour)\r\n start_min = 'processing'+startTime+'_start_hour'\r\n smalldf[start_min] = smalldf[start_time].map(lambda x: str(x).split(splitter_hour_string)[1]).values\r\n add_cols.append(start_min)\r\n except:\r\n ### If it errors, skip it\r\n pass\r\n #### Check if there is a weekday and weekends in date time columns using endTime only\r\n weekday_num = 'processing'+endTime+'_end_weekday_number'\r\n smalldf[weekday_num] = pd.to_datetime(smalldf[end_date]).dt.weekday.values\r\n add_cols.append(weekday_num)\r\n weekend = 'processing'+endTime+'_end_weekend_flag'\r\n smalldf[weekend] = smalldf[weekday_num].map(lambda x: 1 if x in[5,6] else 0)\r\n add_cols.append(weekend)\r\n #### If everything works well, there should be 13 new columns added by module. All the best!\r\n print('%d columns added using start date=%s and end date=%s processing...' %(len(add_cols),startTime,endTime))\r\n return smalldf", "def create_feature_based_on_spent_by_timestamp(data):\n utils.save_log('{0} :: {1}'.format(\n create_feature_based_on_spent_by_timestamp.__module__,\n create_feature_based_on_spent_by_timestamp.__name__))\n\n data = data.withColumn('RatioValueSpentByWeekOfYear',\n (data['Value'] / data['TransactionWeekOfYear']))\n data = data.withColumn('RatioValueSpentByDayOfWeek',\n (data['Value'] / data['TransactionDayOfWeek']))\n data = data.withColumn('RatioValueSpentByDayOfYear',\n (data['Value'] / data['TransactionDayOfYear']))\n\n update_list_features(\"numerical\", ['RatioValueSpentByWeekOfYear',\n 'RatioValueSpentByDayOfWeek',\n 'RatioValueSpentByDayOfYear'])\n\n return data", "def create_features(energy_data, label=None):\n energy_data['date'] = energy_data.index\n energy_data['hour'] = energy_data['Datetime'].dt.hour\n energy_data['dayofweek'] = energy_data['Datetime'].dt.dayofweek\n energy_data['month'] = energy_data['Datetime'].dt.month\n energy_data['quarter'] = energy_data['Datetime'].dt.quarter\n energy_data['year'] = energy_data['Datetime'].dt.year\n energy_data['dayofyear'] = energy_data['Datetime'].dt.dayofyear\n energy_data['dayofmonth'] = energy_data['Datetime'].dt.day\n energy_data['weekofyear'] = energy_data['Datetime'].dt.weekofyear\n energy_data['pjme_2_hrs_lag'] = energy_data['PJME_MW'].shift(2)\n energy_data['pjme_4_hrs_lag'] = energy_data['PJME_MW'].shift(4)\n energy_data['pjme_8_hrs_lag'] = energy_data['PJME_MW'].shift(8)\n energy_data['pjme_12_hrs_lag'] = energy_data['PJME_MW'].shift(12)\n energy_data['pjme_24_hrs_lag'] = energy_data['PJME_MW'].shift(24)\n energy_data['pjme_4_hrs_mean'] = energy_data['PJME_MW'].rolling(window=4).mean()\n energy_data['pjme_8_hrs_mean'] = energy_data['PJME_MW'].rolling(window=8).mean()\n energy_data['pjme_12_hrs_mean'] = energy_data['PJME_MW'].rolling(window=12).mean()\n energy_data['pjme_24_hrs_mean'] = energy_data['PJME_MW'].rolling(window=24).mean()\n energy_data['pjme_4_hrs_std'] = energy_data['PJME_MW'].rolling(window=4).std()\n energy_data['pjme_8_hrs_std'] = energy_data['PJME_MW'].rolling(window=8).std()\n energy_data['pjme_12_hrs_std'] = energy_data['PJME_MW'].rolling(window=12).std()\n energy_data['pjme_24_hrs_std'] = energy_data['PJME_MW'].rolling(window=24).std()\n energy_data['pjme_4_hrs_max'] = 
energy_data['PJME_MW'].rolling(window=4).max()\n energy_data['pjme_8_hrs_max'] = energy_data['PJME_MW'].rolling(window=8).max()\n energy_data['pjme_12_hrs_max'] = energy_data['PJME_MW'].rolling(window=12).max()\n energy_data['pjme_24_hrs_max'] = energy_data['PJME_MW'].rolling(window=24).max()\n energy_data['pjme_4_hrs_min'] = energy_data['PJME_MW'].rolling(window=4).min()\n energy_data['pjme_8_hrs_min'] = energy_data['PJME_MW'].rolling(window=8).min()\n energy_data['pjme_12_hrs_min'] = energy_data['PJME_MW'].rolling(window=12).min()\n energy_data['pjme_24_hrs_min'] = energy_data['PJME_MW'].rolling(window=24).min()\n\n features = energy_data[['hour', 'dayofweek', 'quarter', 'month', 'year',\n 'dayofyear', 'dayofmonth', 'weekofyear', 'pjme_2_hrs_lag', 'pjme_4_hrs_lag',\n 'pjme_8_hrs_lag', 'pjme_12_hrs_lag', 'pjme_24_hrs_lag', 'pjme_4_hrs_mean',\n \"pjme_8_hrs_mean\", \"pjme_12_hrs_mean\", \"pjme_24_hrs_mean\", \"pjme_4_hrs_std\",\n \"pjme_8_hrs_std\", \"pjme_12_hrs_std\", \"pjme_24_hrs_std\",\n \"pjme_4_hrs_max\", \"pjme_8_hrs_max\", \"pjme_12_hrs_max\", \"pjme_24_hrs_max\",\n \"pjme_4_hrs_min\", \"pjme_8_hrs_min\", \"pjme_12_hrs_min\", \"pjme_24_hrs_min\"]]\n if label:\n label = energy_data[label]\n return features, label\n return features", "def getFeature(df, start, end):\n\n return [df[start:end].mean(),\n df[start:end].std(),\n df[start:end].skew(),\n df[start:end].kurt(),\n df[start:end].quantile(0.25),\n df[start:end].quantile(0.75),\n df[start:end].quantile(0.90),\n df[start:end].quantile(0.15),\n df[start:end].median(),\n df[start:end].mad(),\n df[start:end].sem(),\n df[start:end].var(),\n df[start:end].autocorr(1),\n df[start:end].autocorr(2),\n df[start:end].autocorr(3),\n df[start:end].autocorr(4),\n df[start:end].autocorr(5),\n np.append(df[start:end].mode(), -1)[0]\n ]", "def _extract_features(self, row):\n ncep_data = self.ncep_data\n ncep_sfc_data = self.ncep_sfc_data\n date = row['date']\n features = dict(row)\n #reduce the dimensions of ncep_data(xarray dataset) by fixing coordinates(lon,lat)\n #and then convert it to dataframe\n ncep_data = ncep_data[date.year] \\\n .sel(lon=row['longitude'], lat=row['latitude'], method='nearest') \\\n .to_dask_dataframe() \\\n .compute() \\\n .set_index(['level','time'])\n #reduce the dimensions of ncep_sfc_data(xarray dataset) by fixing coordinates(lon,lat)\n #and then convert it to dataframe\n ncep_sfc_data = ncep_sfc_data[date.year] \\\n .sel(lon=row['longitude'], lat=row['latitude'], method='nearest') \\\n .to_dask_dataframe() \\\n .compute() \\\n .set_index(['time'])\n\n for level in self.levels:\n #features at different pressure level\n point = ncep_data.loc[level]\n p1w = point.rolling(7).mean() # 1 Week mean\n p2w = point.rolling(14).mean() # 2 Week mean\n p3w = point.rolling(21).mean() # 3 Week mean\n # \n v0w = point.loc[date]\n v1w = p1w.loc[date]\n v2w = p2w.loc[date]\n v3w = p3w.loc[date]\n #\n for data_var in self.ncep_data_vars:\n features[\"{0}_0w_lvl_{1}\".format(data_var,level)] = v0w[data_var]\n features[\"{0}_1w_lvl_{1}\".format(data_var,level)] = v1w[data_var]\n features[\"{0}_2w_lvl_{1}\".format(data_var,level)] = v2w[data_var]\n features[\"{0}_3w_lvl_{1}\".format(data_var,level)] = v3w[data_var]\n #features at surface level\n point = ncep_sfc_data\n p1w = point.rolling(7).mean() # 1 Week mean\n p2w = point.rolling(14).mean() # 2 Week mean\n p3w = point.rolling(21).mean() # 3 Week mean\n # \n v0w = point.loc[date]\n v1w = p1w.loc[date]\n v2w = p2w.loc[date]\n v3w = p3w.loc[date]\n #\n for data_var in 
self.ncep_sfc_data_vars:\n features[\"{0}_0w\".format(data_var)] = v0w[data_var]\n features[\"{0}_1w\".format(data_var)] = v1w[data_var]\n features[\"{0}_2w\".format(data_var)] = v2w[data_var]\n features[\"{0}_3w\".format(data_var)] = v3w[data_var] \n\n return features", "def compute_features_one_round(\n train_base_df,\n train_delta_df,\n test_df,\n df_config,\n feature_config_list,\n feature_map,\n filter_by_month,\n compute_load_ratio=False,\n):\n\n train_round_df = pd.concat([train_base_df, train_delta_df])\n max_train_timestamp = train_round_df[df_config[\"time_col_name\"]].max()\n max_test_timestamp = test_df[df_config[\"time_col_name\"]].max()\n train_test_diff = max_test_timestamp - max_train_timestamp\n max_horizon = ceil(train_test_diff.days * 24 + train_test_diff.seconds / 3600)\n train_features, feature_pipeline = compute_training_features(\n train_round_df, df_config, feature_config_list, feature_map, max_horizon,\n )\n\n test_features = compute_testing_features(test_df, feature_pipeline, feature_config_list, train_round_df)\n\n if compute_load_ratio:\n rolling_window_args = LOAD_RATIO_CONFIG[\"same_day_of_week_rolling_args\"]\n previous_years_lag_args = LOAD_RATIO_CONFIG[\"same_week_of_year_lag_args\"]\n same_week_day_hour_rolling_featurizer = SameDayOfWeekRollingWindowFeaturizer(\n df_config, input_col_names=df_config[\"target_col_name\"], max_horizon=max_horizon, **rolling_window_args\n )\n train_df_with_recent_load = same_week_day_hour_rolling_featurizer.transform(train_round_df)\n same_week_day_hour_rolling_featurizer.train_df = train_round_df\n test_df_with_recent_load = same_week_day_hour_rolling_featurizer.transform(test_df)\n\n time_col_name = df_config[\"time_col_name\"]\n ts_id_col_names = df_config[\"ts_id_col_names\"]\n keep_col_names = [time_col_name]\n if ts_id_col_names is not None:\n if isinstance(ts_id_col_names, list):\n keep_col_names = keep_col_names + ts_id_col_names\n else:\n keep_col_names.append(ts_id_col_names)\n lag_df_list = []\n start_week = rolling_window_args[\"start_week\"]\n end_week = start_week + rolling_window_args[\"agg_count\"]\n for i in range(start_week, end_week):\n col_old = df_config[\"target_col_name\"] + \"_\" + rolling_window_args[\"output_col_suffix\"] + \"_\" + str(i)\n col_new = col_old + \"_\" + previous_years_lag_args[\"output_col_suffix\"]\n col_ratio = \"recent_load_ratio_\" + str(i)\n\n same_week_day_hour_lag_featurizer = SameWeekOfYearLagFeaturizer(\n df_config,\n input_col_names=col_old,\n train_df=train_df_with_recent_load,\n max_horizon=max_horizon,\n **previous_years_lag_args\n )\n\n lag_df = same_week_day_hour_lag_featurizer.transform(test_df_with_recent_load)\n lag_df[col_ratio] = lag_df[col_old] / lag_df[col_new]\n lag_df_list.append(lag_df[keep_col_names + [col_ratio]].copy())\n\n test_features = reduce(\n lambda left, right: pd.merge(left, right, on=keep_col_names), [test_features] + lag_df_list,\n )\n\n if filter_by_month:\n test_month = test_features[\"month_of_year\"].values[0]\n train_features = train_features.loc[train_features[\"month_of_year\"] == test_month,].copy()\n\n train_features.dropna(inplace=True)\n\n return train_features, test_features", "def build_shape_data(self, start=None, end=None):\n # If start and end are None, then set them to be min/max of self.df_demand\n if start is None:\n start = self.df_demand['date'].min()\n if end is None:\n end = self.df_demand['date'].max()\n print(f\"date range for shape data is from {start} to {end}\")\n # Extract part of df_demand that is within start and 
end\n df_sub = self.df_demand[(self.df_demand['date'] >= start) & (self.df_demand['date'] <= end)]\n assert df_sub['date'].min() >= start\n assert df_sub['date'].max() <= end\n num_days = len(pd.date_range(iso8601.parse_date(start), iso8601.parse_date(end), freq='d'))\n print(f\"number of days is {num_days}\")\n # When finding variance and mean, add in missing days as 0s\n # Obtain the counts for each lat/lng region\n counts = df_sub.groupby(['left_lng', 'right_lng', 'lower_lat', 'upper_lat']).size().reset_index(name='counts')\n # Group demand data by lat/lng region and average across other cols\n df = df_sub.groupby(['left_lng', 'right_lng', 'lower_lat', 'upper_lat'])[['avail_count', 'avail_mins', 'trips', 'prob_scooter_avail', 'adj_trips']].mean().reset_index()\n df = df.merge(counts, on=['left_lng', 'right_lng', 'lower_lat', 'upper_lat'])\n # print(df.head())\n # Modify averages by multiplying each by count and divide by num_days\n vars = ['avail_count', 'avail_mins', 'trips', 'prob_scooter_avail', 'adj_trips']\n for var in vars:\n df[var] = df[var]*df['counts']/num_days\n # print(df.head())\n # Calculate the variance for prob_scooter_avail\n probVariance = df_sub.groupby(['left_lng', 'right_lng', 'lower_lat', 'upper_lat']).apply(lambda x: ((x['prob_scooter_avail'] - (x['prob_scooter_avail'].sum()/num_days))**2).sum()/(num_days-1)).reset_index(name='prob_scooter_avail')\n # print(probVariance.head())\n df['prob_scooter_avail_var'] = probVariance['prob_scooter_avail']\n # Check to see if there are any Nan values\n print(f\"Nan values in df? {df.isnull().values.any()}\")\n # print(df.head())\n # For each var col, create corresponding color columns (log and unlog)\n # Also create the factors list that get passed into self.create_rectangle_lst\n factors = [('avail_count', 'decimal'), ('avail_mins', 'decimal'),\n ('trips', 'decimal'), ('prob_scooter_avail', 'percent'), ('adj_trips', 'decimal')]\n i = 0\n original_len = len(factors)\n while i < original_len:\n name, type = factors[i]\n # print(f\"name={name}, type={type}\")\n # Create color column\n df = self.map_values_to_color(df, name)\n # If type is not percent than create log version\n if type != 'percent':\n df = self.create_log_column(df, name)\n factors.append(('log_'+name, type))\n i += 1\n # Deal with estimated demand and unmet demand\n # Filter out rows where prob_scooter_avail sig diff from 0\n sigDiffIdx = df.apply(lambda x: utils.sig_diff_from_zero(x['prob_scooter_avail'], x['prob_scooter_avail_var']), axis=1)\n # print(sigDiffIdx.head())\n df_sig_diff = df[sigDiffIdx]\n # Calculate estimated demand and unmet demand\n df_sig_diff = self.calculate_demand(df_sig_diff)\n # print(df_sig_diff.head())\n # Create color column and log column for unmet demand\n df_sig_diff = self.map_values_to_color(df_sig_diff, 'unmet_demand')\n df_sig_diff = self.map_values_to_color(df_sig_diff, 'estimated_demand')\n df_sig_diff = self.create_log_column(df_sig_diff, 'unmet_demand')\n factors.extend([('estimated_demand', 'decimal'), ('unmet_demand', 'decimal'), ('log_unmet_demand', 'decimal')])\n # Fill in the colors for the grid cells that aren't significantly different\n df_not_sig_diff = df[~sigDiffIdx]\n # print(df_not_sig_diff.head())\n df = pd.concat([df_sig_diff, df_not_sig_diff])\n # df.to_csv('../../../data_files/20210427_estimatedDemand.csv', index=False)\n # Create Rectangle information\n rectangles = self.create_rectangle_lst(df, factors)\n return rectangles, start, end", "def create_date_features(df = None, date = None):\n #TODO", "def 
get_lat_lon_date_features(gt_ids=[], gt_masks=None, gt_shifts=None,\n forecast_ids=[], forecast_masks=None, forecast_shifts=None,\n anom_ids=[], anom_masks=None, anom_shifts=None,\n first_year = None):\n # If particular arguments aren't lists, replace with repeating iterators\n if not isinstance(gt_masks, list):\n gt_masks = itertools.repeat(gt_masks)\n if not isinstance(gt_shifts, list):\n gt_shifts = itertools.repeat(gt_shifts)\n if not isinstance(forecast_masks, list):\n forecast_masks = itertools.repeat(forecast_masks)\n if not isinstance(forecast_shifts, list):\n forecast_shifts = itertools.repeat(forecast_shifts)\n if not isinstance(anom_masks, list):\n anom_masks = itertools.repeat(anom_masks)\n if not isinstance(anom_shifts, list):\n anom_shifts = itertools.repeat(anom_shifts)\n\n # Define canonical name for target start date column\n date_col = \"start_date\"\n # Add each ground truth feature to dataframe\n df = None\n for gt_id, gt_mask, gt_shift in zip(gt_ids, gt_masks, gt_shifts):\n print \"Getting {}_shift{}\".format(gt_id, gt_shift)\n t = time.time()\n # Load ground truth data\n gt = get_ground_truth(gt_id, gt_mask, shift=gt_shift)\n # Discard years prior to first_year\n gt = year_slice(gt, first_year = first_year)\n # Use outer merge to include union of (lat,lon,date_col)\n # combinations across all features\n df = df_merge(df, gt)\n print \"Elapsed: {}s\".format(time.time() - t)\n\n # Add each forecast feature to dataframe\n for forecast_id, forecast_mask, forecast_shift in zip(forecast_ids,\n forecast_masks,\n forecast_shifts):\n print \"Getting {}_shift{}\".format(forecast_id, forecast_shift)\n t = time.time()\n # Load forecast with years >= first_year\n forecast = get_forecast(forecast_id, forecast_mask, shift=forecast_shift)\n # Rename target start date column to \"start_date\"\n fcst_date_col = get_target_start_date_col(forecast_id)\n forecast.rename(columns={fcst_date_col: date_col}, inplace=True)\n # Discard years prior to first_year\n forecast = year_slice(forecast, first_year = first_year)\n # Use outer merge to include union of (lat,lon,date_col)\n # combinations across all features\n df = df_merge(df, forecast)\n print \"Elapsed: {}s\".format(time.time() - t)\n\n # Add anomaly features and climatology last so that climatology\n # is produced for all previously added start dates\n for anom_id, anom_mask, anom_shift in zip(anom_ids, anom_masks, anom_shifts):\n print \"Getting {}_shift{} with anomalies\".format(anom_id, anom_shift)\n t = time.time()\n # Check if ground truth column already exists\n gt_col = get_measurement_variable(anom_id, shift=anom_shift)\n if df is None or gt_col not in df.columns:\n # Add masked ground truth data if absent\n gt = get_ground_truth(anom_id, anom_mask, shift=anom_shift)\n # Discard years prior to first_year\n gt = year_slice(gt, first_year = first_year)\n # Use outer merge to include union of (lat,lon,date_col)\n # combinations across all features\n df = df_merge(df, gt)\n\n # Load masked ground truth data climatology\n climatology = get_climatology(anom_id, anom_mask, anom_shift)\n # Merge climatology into dataset\n df = pd.merge(df, climatology[[gt_col]],\n left_on=['lat', 'lon', df[date_col].dt.month,\n df[date_col].dt.day],\n right_on=[climatology.lat, climatology.lon,\n climatology[date_col].dt.month,\n climatology[date_col].dt.day],\n how='left', suffixes=('', '_clim'))\n clim_col = gt_col+\"_clim\"\n # Compute ground-truth anomalies\n anom_col = gt_col+\"_anom\"\n df[anom_col] = df[gt_col] - df[clim_col]\n print \"Elapsed: 
{}s\".format(time.time() - t)\n\n return df", "def __feature_set__(self):\r\n import numpy as np\r\n import datetime\r\n import time\r\n cols_norm = [col for col in self.columns]\r\n cols_lower = [col.lower() for col in self.columns]\r\n fields = []\r\n features = []\r\n date_fields = []\r\n _geom_types = {\r\n arcgis.geometry._types.Point : \"esriGeometryPoint\",\r\n arcgis.geometry._types.Polyline : \"esriGeometryPolyline\",\r\n arcgis.geometry._types.MultiPoint : \"esriGeometryMultipoint\",\r\n arcgis.geometry._types.Polygon : \"esriGeometryPolygon\"\r\n }\r\n if self.sr is None:\r\n sr = {'wkid' : 4326}\r\n else:\r\n sr = self.sr\r\n fs = {\r\n \"objectIdFieldName\" : \"\",\r\n \"globalIdFieldName\" : \"\",\r\n \"displayFieldName\" : \"\",\r\n \"geometryType\" : _geom_types[type(self.geometry[self.geometry.first_valid_index()])],\r\n \"spatialReference\" : sr,\r\n \"fields\" : [],\r\n \"features\" : []\r\n }\r\n if 'objectid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('objectid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('objectid')]\r\n elif 'fid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('fid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('fid')]\r\n elif 'oid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('oid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('oid')]\r\n else:\r\n self['OBJECTID'] = list(range(1, self.shape[0] + 1))\r\n res = self.__feature_set__\r\n del self['OBJECTID']\r\n return res\r\n if 'objectIdFieldName' in fs:\r\n fields.append({\r\n \"name\" : fs['objectIdFieldName'],\r\n \"type\" : \"esriFieldTypeOID\",\r\n \"alias\" : fs['objectIdFieldName']\r\n })\r\n cols_norm.pop(cols_norm.index(fs['objectIdFieldName']))\r\n if 'globalIdFieldName' in fs and len(fs['globalIdFieldName']) > 0:\r\n fields.append({\r\n \"name\" : fs['globalIdFieldName'],\r\n \"type\" : \"esriFieldTypeGlobalID\",\r\n \"alias\" : fs['globalIdFieldName']\r\n })\r\n cols_norm.pop(cols_norm.index(fs['globalIdFieldName']))\r\n elif 'globalIdFieldName' in fs and \\\r\n len(fs['globalIdFieldName']) == 0:\r\n del fs['globalIdFieldName']\r\n if self._geometry_column_name in cols_norm:\r\n cols_norm.pop(cols_norm.index(self._geometry_column_name))\r\n for col in cols_norm:\r\n try:\r\n idx = self[col].first_valid_index()\r\n col_val = self[col].loc[idx]\r\n except:\r\n col_val = \"\"\r\n if isinstance(col_val, (str, np.str)):\r\n l = self[col].str.len().max()\r\n if str(l) == 'nan':\r\n l = 255\r\n\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeString\",\r\n \"length\" : int(l),\r\n \"alias\" : col\r\n })\r\n if fs['displayFieldName'] == \"\":\r\n fs['displayFieldName'] = col\r\n elif isinstance(col_val, (datetime.datetime,\r\n pd.Timestamp,\r\n np.datetime64,\r\n pd.datetime)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeDate\",\r\n \"alias\" : col\r\n })\r\n date_fields.append(col)\r\n elif isinstance(col_val, (np.int32, np.int16, np.int8)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeSmallInteger\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (int, np.int, np.int64)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeInteger\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (float, np.float64)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeDouble\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (np.float32)):\r\n 
fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeSingle\",\r\n \"alias\" : col\r\n })\r\n fs['fields'] = fields\r\n for row in self.to_dict('records'):\r\n geom = {}\r\n if self._geometry_column_name in row:\r\n geom = row[self._geometry_column_name]\r\n del row[self._geometry_column_name]\r\n for f in date_fields:\r\n try:\r\n row[f] = int(row[f].to_pydatetime().timestamp() * 1000)\r\n except:\r\n row[f] = None\r\n features.append(\r\n {\r\n \"geometry\" : dict(geom),\r\n \"attributes\" : row\r\n }\r\n )\r\n del row\r\n del geom\r\n fs['features'] = features\r\n return fs", "def generate_features(df):\n return np.array([np.array(xi) for xi in pd.to_datetime(df).apply(lambda x: [x.year, x.month, x.day, x.hour, x.minute, x.second, x.weekday()])])", "def featuretest(self, args):\n db_engine = create_engine(self.root.db_url)\n feature_config = yaml.load(args.feature_config_file)\n\n FeatureGenerator(db_engine, 'features_test').create_features_before_imputation(\n feature_aggregation_config=feature_config,\n feature_dates=[args.as_of_date]\n )\n logging.info('Features created for feature_config %s and date %s', feature_config, args.as_of_date)", "def new_features(df):\n print(\"Add new features ...\")\n # distinguish Spring, Fall and pregnant females (don't care about juvenilles/unknown)\n df[\"gender_plus\"] = df[\"Gender\"]\n df.loc[df.Gravid, \"gender_plus\"] = \"f_gra\"\n\n df[\"gender_seasons\"] = df[\"Gender\"]\n df.loc[df.Gravid, \"gender_seasons\"] = \"f_gra\"\n\n # add features\n df[\"Age_To_Weight\"] = df[\"Annuli\"] / df[\"Weight\"]\n\n # Calcuate Number of recaptures\n df_captures = df[[\"ID\", \"Date\"]].groupby(\"ID\").count()\n df_captures.columns = [\"recapture_count\"]\n df_captures.reset_index(inplace=True)\n df = pd.merge(df, df_captures, how=\"outer\", on=\"ID\")\n\n # recalculate annuli\n df_min = pd.pivot_table(\n df[df.Annuli > 0],\n values=[\"Date\", \"Annuli\"],\n index=[\"ID\"],\n aggfunc={\"Date\": min, \"Annuli\": min},\n )\n df_min.columns = [\"annuli_min\", \"date_min\"]\n df_min.reset_index(inplace=True)\n\n df = pd.merge(df, df_min, how=\"outer\", on=\"ID\")\n df[\"year\"] = df.Date.map(lambda x: x.year)\n df[\"year_min\"] = df.date_min.map(lambda x: x.year)\n df[\"Annuli_orig\"] = df.Annuli\n df.Annuli = df.year - df.year_min + df.annuli_min\n df.Annuli = np.nan_to_num(df.Annuli)\n df[\"Annuli\"] = pd.to_numeric(df[\"Annuli\"], downcast=\"integer\")\n\n # Annuli Buckets\n buckets = 5\n interval = int(df[\"Annuli\"].max() / buckets)\n buckets = [i for i in range(0, df[\"Annuli\"].max() + interval, interval)]\n labels = [\"'{0} - {1}'\".format(i, i + interval) for i in buckets]\n df[\"Annuli_Group\"] = pd.cut(\n df.Annuli, buckets, labels=labels[:-1], include_lowest=True\n )\n\n return df", "def create_features_using_groupby(training, entity, feature, avg=True, minimum=True, maximum=True):\n\n entity_col = 'offer_id' if entity == 'portfolio' else 'person'\n\n groupby = training.groupby(entity_col)[feature]\n\n features, col_name = [], []\n if avg:\n features.append(groupby.mean())\n col_name.append('avg_'+feature)\n if minimum:\n features.append(groupby.min())\n col_name.append('min_'+feature)\n if maximum:\n features.append(groupby.max())\n col_name.append('max_'+feature)\n\n feature_df = pd.concat(features, axis=1)\n feature_df.columns = [col + '_' + entity for col in col_name]\n\n return feature_df", "def at(self, time_slices):\n\n if self.base is not None:\n return self.base.at(time_slices)\n\n if isinstance(time_slices, TimeSlice):\n 
time_slices = [time_slices]\n\n # join the time slice values\n timed_data = pd.DataFrame(columns=self.data.columns)\n\n # make the new data\n for slice_t in time_slices:\n slice_index = (slice_t.time <= self.data.index) & (\n self.data.index < slice_t.time + slice_t.duration\n )\n timed_data.loc[slice_t.time] = self.aggregate(\n self.data[slice_index], axis=0\n )\n\n # return the new feature object\n return Feature(\n data=timed_data,\n aggregate=self.aggregate,\n base=self,\n time_slices=time_slices,\n )", "def postprocess_features(self, featurelist):\n \n ##: To overwrite the time of features that are in a clause\n for feature in featurelist:\n if feature.inClause() or self.is_in_clause(feature.getStartPos(), feature.getSentNum()):\n feature = self.assign_feature_time_with_references(feature, self.timeReferences, feature.getStartPos(), True)\n \n ##: To set time of features after death to none. Currently disabled.\n# deathDates = []\n# for feature in featurelist:\n# if 'Death' in [tg[1] for tg in feature.getTags()]:\n# dt = feature.getDateTime()\n# if dt and feature.getTlink().getTimexes()[0].getType()!='VIRTUAL': ##: only original date counts\n# deathDates.append(dt)\n# \n# if feature.getType()=='CAUSE_OF_DEATH':\n# feature.setTlink(None)\n# \n# if deathDates:\n# deathDate = min(deathDates)\n# for feature in featurelist: \n# dt = feature.getDateTime()\n# if dt and dt>deathDate:\n# feature.setTlink(None)\n \n ##: Remove time from features in the blockout range, \n ##: e.g., A 34 years old male with{ history of leg pain }who on ....\n for feature in featurelist:\n posStart = feature.getStartPos()\n posEnd = feature.getEndPos()\n for r in self.blockout_range:\n if (posStart>r[0] and posStart<r[1]) or (posEnd>r[0] and posEnd<r[1]):\n timex = feature.getTimex()\n if timex:\n tpos = timex.getStartPos()\n if tpos>=r[0] and tpos<=r[1]:\n continue\n \n feature.setTlink(None)\n \n return featurelist", "def features(self, mask=None, propnames=None):\n\t\t\n\t\t# See if we have a cached result\n\t\tif self._features:\n\t\t\treturn self._features\n\t\t\n\t\tresult = {'type': 'FeatureCollection', 'features':[]}\n\t\tfeatures = []\n\t\t\t\t\t\t\t\t\t\t\n\t\t# We can dealt with grid type collections first\n\t\tif self.featuretype in ['Grid', 'GridSeries']:\n\t\t\t\n\t\t\t# Get center point latitudes and longitudes\n\t\t\tlatitudes = self.latitudes\n\t\t\tlongitudes = self.longitudes\n\t\t\tshape = latitudes.shape\n\t\t\t\n\t\t\t# How do we slice the data to get grid point values?\n\t\t\tindex = 0\n\t\t\tfor dim in self.variable.dimensions:\n\t\t\t\tprint dim, dim.length, len(self.times)\n\t\t\t\tif dim.length == shape[0]:\n\t\t\t\t\ty_index = index\n\t\t\t\tif dim.length == shape[1]:\n\t\t\t\t\tx_index = index\n\t\t\t\tif dim.length == len(self.times):\n\t\t\t\t\tt_index = index\n\t\t\t\tindex += 1\n\t\t\t\n\t\t\t\n\t\t\t# Create the initial slices with indices defaulting to 0\n\t\t\tslices = [0]*len(self.variable.dimensions)\n\t\t\tslices[t_index] = slice(0,len(self.times))\n\n\t\t\t\t\t\t\n\t\t\t# Create corner point latitude longitude arrays\n\t\t\tcorner_lats = numpy.zeros((shape[0]+1, shape[1]+1))\n\t\t\tcorner_lons = numpy.zeros((shape[0]+1, shape[1]+1))\n\t\t\t\t\t\t\n\t\t\t# Step through all the interior grid points\n\t\t\tfor y in range(1, shape[0]):\n\t\t\t\tfor x in range(1, shape[1]):\n\t\t\t\t\tcorner_lats[y,x] = (latitudes[y, x-1] + latitudes[y,x] + latitudes[y-1,x-1] + latitudes[y-1,x])/4\n\t\t\t\t\tcorner_lons[y,x] = (longitudes[y, x-1] + longitudes[y,x] + longitudes[y-1,x-1] + 
longitudes[y-1,x])/4\n\t\t\t\t\t\n\t\t\t# Left boundary\n\t\t\tx = 0\n\t\t\tfor y in range(1,shape[0]):\n\t\t\t\ttmp_lat = (latitudes[y,x] + latitudes[y-1,x])/2\n\t\t\t\ttmp_lon = (longitudes[y,x] + longitudes[y-1,x])/2\n\t\t\t\tcorner_lats[y,x] = tmp_lat - (corner_lats[y,x+1] - tmp_lat)\n\t\t\t\tcorner_lons[y,x] = tmp_lon - (corner_lons[y,x+1] - tmp_lon)\n\n\n\t\t\t# Right boundary\n\t\t\tx = shape[1]\n\t\t\tfor y in range(1,shape[0]):\n\t\t\t\ttmp_lat = (latitudes[y,x-1] + latitudes[y-1,x-1])/2\n\t\t\t\ttmp_lon = (longitudes[y,x-1] + longitudes[y-1,x-1])/2\n\t\t\t\tcorner_lats[y,x] = tmp_lat - (corner_lats[y,x-1] - tmp_lat)\n\t\t\t\tcorner_lons[y,x] = tmp_lon - (corner_lons[y,x-1] - tmp_lon)\n\n\n\t\t\t# Bottom boundary\n\t\t\ty = 0\n\t\t\tfor x in range(1,shape[1]):\n\t\t\t\ttmp_lat = (latitudes[y,x] + latitudes[y,x-1])/2\n\t\t\t\ttmp_lon = (longitudes[y,x] + longitudes[y,x-1])/2\n\t\t\t\tcorner_lats[y,x] = tmp_lat - (corner_lats[y+1,x] - tmp_lat)\n\t\t\t\tcorner_lons[y,x] = tmp_lon - (corner_lons[y+1,x] - tmp_lon)\n\n\t\t\t# Top boundary\n\t\t\ty = shape[0]\n\t\t\tfor x in range(1,shape[1]):\n\t\t\t\ttmp_lat = (latitudes[y-1,x] + latitudes[y-1,x-1])/2\n\t\t\t\ttmp_lon = (longitudes[y-1,x] + longitudes[y-1,x-1])/2\n\t\t\t\tcorner_lats[y,x] = tmp_lat - (corner_lats[y-1,x] - tmp_lat)\n\t\t\t\tcorner_lons[y,x] = tmp_lon - (corner_lons[y-1,x] - tmp_lon)\n\t\t\t\n\t\t\t# Corners\n\t\t\tcorner_lats[0,0] = latitudes[0,0] - (corner_lats[1,1] - latitudes[0,0])\n\t\t\tcorner_lats[0,shape[1]] = latitudes[0,shape[1]-1] - (corner_lats[1,shape[1]-1] - latitudes[0,shape[1]-1])\n\t\t\tcorner_lats[shape[0],0] = latitudes[shape[0]-1,0] + (latitudes[shape[0]-1,0] - corner_lats[shape[0]-1,1])\n\t\t\tcorner_lats[shape[0],shape[1]] = latitudes[shape[0]-1,shape[1]-1] + (latitudes[shape[0]-1,shape[1]-1] - corner_lats[shape[0]-1,shape[1]-1])\n\n\t\t\tcorner_lons[0,0] = longitudes[0,0] - (corner_lons[1,1] - longitudes[0,0])\n\t\t\tcorner_lons[0,shape[1]] = longitudes[0,shape[1]-1] + (longitudes[0,shape[1]-1] - corner_lons[1,shape[1]-1])\n\t\t\tcorner_lons[shape[0],0] = longitudes[shape[0]-1,0] - (corner_lons[shape[0]-1,1] - longitudes[shape[0]-1,0])\n\t\t\tcorner_lons[shape[0],shape[1]] = longitudes[shape[0]-1,shape[1]-1] + (longitudes[shape[0]-1,shape[1]-1] - corner_lons[shape[0]-1,shape[1]-1])\n\n\n#\t\t\tprint corner_lats\n\n\t\t\t# Now create all polygons\n\t\t\tfor y in range(0, shape[0]):\n\t\t\t\tfor x in range(0, shape[1]):\n\n\t\t\t\t\t# Configure the slices\n\t\t\t\t\tslices[x_index] = slice(x,x+1)\n\t\t\t\t\tslices[y_index] = slice(y,y+1)\n\n\t\t\t\t\t# Check if we are masking and if this point is masked\n\t\t\t\t\tmasked = False\n\n\t\t\t\t\tif mask:\n\t\t\t\t\t\tif mask[y, x] < 0.5:\n\t\t\t\t\t\t\tmasked = True\n\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\tif not masked:\n\n\t\t\t\t\t\tvertices = []\n\t\t\t\t\t\tvertices.append([corner_lons[y, x], corner_lats[y,x]])\n\t\t\t\t\t\tvertices.append([corner_lons[y+1, x], corner_lats[y+1,x]])\n\t\t\t\t\t\tvertices.append([corner_lons[y+1, x+1], corner_lats[y+1,x+1]])\n\t\t\t\t\t\tvertices.append([corner_lons[y, x+1], corner_lats[y,x+1]])\n\t\t\t\t\t\tvertices.append([corner_lons[y, x], corner_lats[y,x]])\t\t\t\t\n\n\t\t\t\t\t\t# Create the basic feature\n\t\t\t\t\t\tfeature = {'type': 'Feature', 'properties':{'id':x + y * shape[1]}, 'geometry': {'type': 'Polygon', 'coordinates': [vertices]}}\n\t\t\t\t\t\t\n\t\t\t\t\t\t# Now add the data\t\t\t\t\t\n\t\t\t\t\t\t#data = self.variable[slices].flatten()\n\t\t\t\t\t\t\n\t\t\t\t\t\t# If we have property names then extract 
data for each name\n\t\t\t\t\t\tif propnames:\n\t\t\t\t\t\t\tfor name in propnames:\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\tfeature['properties']['value'] = self.variable[slices].flatten()[1]\n\t#\t\t\t\t\t\t\tprint self.variable[slices]\n\t\t\t\t\t\t\t\t#feature['properties']['value'] = self.variable[slices].flatten()[propnames.index(name)]\n\t\t\t\t\t\t\n\t\t\t\t\t\t# else just set property 'value' to the first value of the flattened data array\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tpass\n\t\t\t\t\t\t\t\t#feature['properties']['value'] = float(self.variable[slices].flatten()[1])\n\t\t\t\t\t\t\n\t\t\t\t\t\t#print feature['properties']\n\t\t\t\t\t\t#, 'value':float(values[y,x])\n\t\t\t\t\t\tfeatures.append(feature)\n\t\t\t\t\t\n\t\t\tresult['features'] = features\n\t\t\t\t\t\t\n#\t\t\toutfile = open('test.json', 'w')\n#\t\t\toutfile.write(simplejson.dumps(result))\n#\t\t\toutfile.close()\n\t\t\t\n\t\t\n\t\t# Point type feature sets next\n\t\telif self.featuretype in ['Point', 'PointSeries']:\n\t\t\t\n\t\t\tresult = {'type': 'FeatureCollection', 'features':[]}\n\t\t\tfeatures = []\n\t\t\t\n\t\t\tlongitudes = self.longitudes\n\t\t\tlatitudes = self.latitudes\n\t\t\t\n\t\t\tcount = len(longitudes)\n\t\t\tfor fid in range(0,count):\n\t\t\t\tfeature = {'type':'Feature', 'properties':{'_id':fid}, 'geometry': {'type':'Point', 'coordinates': [float(longitudes[fid]), float(latitudes[fid])]}}\n\n\t\t\t\t# Add related variables to properties\n\t\t\t\tfor key in self.coordinates_mapping:\n\t\t\t\t\tif key in self.variable.group.variables and key not in ['latitude', 'longitude']:\n\t\t\t\t\t\tif self.coordinates_mapping[key]['map'] == self.coordinates_mapping['latitude']['map']:\n\t\t\t\t\t\t\tfeature['properties'][key] = self.variable.group.variables[key][fid]\n\t\t\t\t\t\t\t\n\t\t\t\tfeatures.append(feature)\n\t\t\t\t\n\t\t\tresult['features'] = features\n\n\t\t\t\n\t\telse:\n\t\t\treturn None\n\n\t\t# Cache result\n\t\tif not self._features:\n\t\t\tself._features = result\n\t\t\t\n\t\treturn result", "def _filter_temporal(self, start_date: str, end_date: str) -> 'ImageCollection':\n process_id = 'filter_daterange'\n args = {\n 'imagery': self.graph,\n 'extent': [start_date, end_date]\n }\n\n return self.graph_add_process(process_id, args)", "def add_features(df):\n \n assert df.columns.str.contains(\"query|value|keyword|ranking|timestamp|geo\").all(), \"Add features failed. 
\\\n Missing one of [query, value, keyword, ranking, timestamp, geo]\"\n \n # feature engineering: totals and normalize\n grouped = df.groupby(['ranking']).value # group values by ranking\n df['value_total'] = grouped.transform('sum') # total sum \n df['value_normalized'] = (df.value-grouped.transform('min'))/(grouped.transform('max')-grouped.transform('min')) # normalize \n df['value_normalized_total'] = df.groupby(['ranking']).value_normalized.transform('sum') # total sum of normalized values \n df['date'] = pd.to_datetime(df.query_timestamp).dtd\n \n return df", "def samples_timesteps_features(dataframe, columns, start_date, timesteps=72, \n steps_ahead=24, window_days=100, train_percent=80.):\n \n def overlap_windows(dataset, timesteps, steps_ahead):\n \"\"\" Create overlaping window of time-series data\n \n Parameters\n ----------\n dataset: pd.DataFrame\n time-series pandas dataset\n timesteps: int\n number of time steps from the past for creating output arrays\n steps_ahead: int\n number of time steps into the future for making predictions\n \n Returns\n -------\n X, y: np.array\n input and output 3-d arrays of overlaping time windows\n \"\"\"\n X = []; y = []\n \n start = 0\n for i in range(len(dataset)):\n # Define the end of the input sequence\n in_end = start + timesteps\n out_end = in_end + steps_ahead\n # Ensure that there is enough data\n if out_end <= len(dataset):\n X.append(dataset[start:in_end, :])\n # First column holds load values\n y.append(dataset[in_end:out_end, 0])\n # Move along one time step\n start += 1\n \n # Convert list to np.array\n X = np.asarray(X)\n y = np.asarray(y)\n \n return X, y\n\n\n data = dataframe.copy()\n \n if window_days*24 > data.values.shape[0]:\n raise ValueError('Variable window_days has too large value: {}*24h = {} > {}, which is more than there is data!'.format(window_days, window_days*24, \n data.values.shape[0]))\n \n # Training period\n # ---------------\n train_percent = train_percent/100.\n st = pd.to_datetime(start_date) # start date\n et = st + dt.timedelta(days=int(train_percent*window_days)) # end date\n train = data.loc[st:et].values\n \n # Standardize and transform training data set\n mean_std_values = {}\n for i, column in enumerate(columns):\n # Calculate mean and standard deviation only\n # from the training data set values\n mu = train[:,i].mean() # axis=0\n sd = train[:,i].std()\n mean_std_values[column] = (mu, sd)\n # Standardize training data\n train[:,i] = (train[:,i] - mu)/sd\n \n # Create overlapping windows with training data\n X_train, y_train = overlap_windows(train, timesteps, steps_ahead)\n \n # Testing / Validation period\n # ---------------------------\n sv = et \n ev = sv + dt.timedelta(days=int((1-train_percent)*window_days)+1)\n test = data.loc[sv:ev].values\n \n # Transform testing/validation data set\n for i, column in enumerate(columns):\n # Use mean and standard deviation from the\n # training data set\n mu = mean_std_values[column][0]\n sd = mean_std_values[column][1]\n # Standardize test data\n test[:,i] = (test[:,i] - mu)/sd\n \n # Create overlaping windows with test data\n X_test, y_test = overlap_windows(test, timesteps, steps_ahead)\n \n return mean_std_values, X_train, y_train, X_test, y_test", "def select_features(self):\r\n \r\n features_list = list(self.feed_data.columns.values)\r\n features_list.remove(\"min_time\")\r\n thisrace = self.config.race_to_predict\r\n\r\n #if never ran race before, don't include these variables in feature\r\n #selection, they're just 0's anyway\r\n if 
self.config.first_time_running_race == True:\r\n unuseable_columns = [('min_time', thisrace),('std', thisrace),('num_races', thisrace),\r\n ('rainfall', thisrace),\r\n ('temp', thisrace),\r\n ('wind', thisrace),\r\n ('metersup', thisrace), \r\n 'sex_W']\r\n else:\r\n #drop this column...probs should have removed it earlier. \r\n unuseable_columns = ['sex_W']\r\n #print(features_list)\r\n for element in unuseable_columns:\r\n features_list.remove(element)\r\n data_with_all_feats = self.feed_data.drop(unuseable_columns,axis=1)\r\n colstodrop = features_list\r\n thiscols = []\r\n data_with_current_feats = data_with_all_feats.drop(features_list,axis=1)\r\n checkfit=100.0\r\n scores = []\r\n dropped_cols = []\r\n loopgain =True\r\n #mymod = RandomForestRegressor(n_estimators=80, oob_score = True, max_depth=10,\r\n # min_samples_split = 25, criterion='mse')\r\n thisloopfeatures_list = features_list\r\n curcols = data_with_current_feats.columns\r\n countgain=0\r\n #print(\"cc\",curcols)\r\n while loopgain == True:\r\n thisloopscore=100.0\r\n for fet in thisloopfeatures_list:\r\n data_with_current_feats[fet] = data_with_all_feats[fet]\r\n etrain=data_with_current_feats.sample(frac=0.8,random_state=200)\r\n etest=data_with_current_feats.drop(etrain.index)\r\n y = etrain.pop('min_time')\r\n ytest = etest.pop('min_time')\r\n #print(y)\r\n model = RandomForestRegressor(n_estimators=80, oob_score = True, max_depth=15,\r\n min_samples_split = 12, criterion='mse')\r\n model.fit(etrain,y)\r\n\r\n PRED = model.predict(etrain)\r\n predscore = self.mean_absolute_percentage_error(y,PRED)#= r2_score(y,PRED)\r\n oobs = self.mean_absolute_percentage_error(y,model.oob_prediction_)\r\n scores.append(oobs)\r\n if ((thisloopscore - oobs) > 0.0):\r\n thisloopscore = oobs\r\n fetwinner = fet\r\n data_with_current_feats.drop(fet,axis=1,inplace=True)\r\n etrain.drop(fet,axis=1,inplace=True)\r\n\r\n data_with_current_feats[fetwinner] = data_with_all_feats[fetwinner]\r\n etrain=data_with_current_feats.sample(frac=0.8,random_state=200)\r\n etest=data_with_current_feats.drop(etrain.index)\r\n y = etrain.pop('min_time')\r\n ytest = etest.pop('min_time')\r\n #print(y)\r\n model = RandomForestRegressor(n_estimators=80, oob_score = True, max_depth=30,\r\n min_samples_split = 12,min_samples_leaf =7, criterion='mse')\r\n model.fit(etrain,y)\r\n\r\n PRED = model.predict(etrain)\r\n predscore = self.mean_absolute_percentage_error(y,PRED)#= r2_score(y,PRED)\r\n #print(fetwinner,predscore)\r\n oobs = self.mean_absolute_percentage_error(y,model.oob_prediction_)\r\n scores.append(oobs)\r\n #print(fetwinner,\"~\",oobs)\r\n thisloopfeatures_list.remove(fetwinner)\r\n if ((checkfit-oobs)>0.0001):\r\n checkfit = oobs\r\n curcols = data_with_current_feats.columns\r\n #print(curcols)\r\n else:\r\n break\r\n\r\n\r\n self.final_df = self.feed_data[data_with_current_feats.columns]\r\n self.Xtrain=self.final_df.sample(frac=0.8,random_state=200)\r\n self.Xtest=self.final_df.drop(self.Xtrain.index)#\r\n self.ytrain = self.Xtrain.pop('min_time')\r\n self.ytest = self.Xtest.pop('min_time')\r\n self.model= RandomForestRegressor(n_estimators=80, oob_score = True, max_depth=30,\r\n min_samples_split = 12,min_samples_leaf =7, criterion='mse')\r\n self.model.fit(self.Xtrain,self.ytrain)\r\n #print(y)\r\n return", "def create_features(df,rsi_window = 14,macd_feat = [12,26,9]):\n df.dropna(inplace=True)\n ## day and month\n df['Date'] = pd.to_datetime(df['Date'])\n df['Month'] = df['Date'].dt.month\n df['dayowk'] = df['Date'].dt.dayofweek\n df = 
pd.get_dummies(data = df,columns = ['Month','dayowk'])\n \n ##Previos n-day pct_changes\n df['1day_pct'] = df['Adj Close'].pct_change()\n df['2day_pct'] = df['Adj Close'].pct_change(periods = 2)\n df['3day_pct'] = df['Adj Close'].pct_change(periods = 3)\n df['4day_pct'] = df['Adj Close'].pct_change(periods = 4)\n df['5day_pct'] = df['Adj Close'].pct_change(periods = 5)\n df['7day_pct'] = df['Adj Close'].pct_change(periods = 7)\n \n ##Cumulative sum of 1day_pct\n df['1day_pct_cs'] = df['Adj Close'].pct_change().cumsum()\n \n ##EWMA of 7, 50 and 200 days\n df['ewma_7'] = df['Adj Close'].ewm(span=7).mean()/df['Adj Close']\n df['ewma_50'] = df['Adj Close'].ewm(span=50).mean()/df['Adj Close']\n df['ewma_200'] = df['Adj Close'].ewm(span=200).mean()/df['Adj Close']\n ## Golden Cross vs Death Cross etc.\n #df['7g(50&200)'] = (df['ewma_7'] > df['ewma_50']) & (df['ewma_7'] > df['ewma_200'])\n #df['7l(50&200)'] = (df['ewma_7'] < df['ewma_50']) & (df['ewma_7'] < df['ewma_200'])\n #df['7g50'] = (df['ewma_7'] > df['ewma_50']) & (df['ewma_7'] < df['ewma_200'])\n #df['7g200'] = (df['ewma_7'] < df['ewma_50']) & (df['ewma_7'] > df['ewma_200'])\n \n ##RSI and MACD\n df = RSI(df,14)\n df = MACD_mod(df,nl=macd_feat[0],nh=macd_feat[1],nsig=macd_feat[2])\n \n df['day_var'] = (df['High'] - df['Low'])/df['Close']## Days variance\n df['open_close'] = (df['Open'] - df['Close'])/df['Close'] ## Days Open-Close\n df['high_close'] = (df['High'] - df['Close'])/df['Close'] ##Days High-Close\n df['open_prev_close'] = (df['Open'] - df['Close'].shift(1))/df['Close'] ## Days open - Previos Dyas Close\n \n ##Classification target\n df['target'] = round((np.sign(df['1day_pct']).shift(-1)+1)/2) ## Target for classification\n #df['1_day_target'] = df['Adj Close'].shift(-1) - df['Adj Close'] ## Target for Regression\n #df['target2'] = round((np.sign(df['1day_pct']).shift(-1)+1)/2)## Will the price go up intra-day\n \n ## IS the stock Overbought or Oversold based on RSI?\n df['RSI_overbought'] = df['RSI']>70\n df['RSI_oversold'] = df['RSI']<30\n \n \n #df.drop(['Open','High','Low','Close'],axis=1,inplace=True)\n# df = df.dropna()\n \n #df = df.reset_index(drop=True)\n \n ## Calculating how large the previos hot and cold streaks were\n f = 0\n df['prev_hot_streak'] = np.zeros(df.shape[0])\n for i in range(df.shape[0]-1):\n if df['target'][i] ==1:\n f += 1\n if df['target'][i+1] ==0:\n df['prev_hot_streak'][i+1] = f\n f = 0\n for i in range(1,df.shape[0]):\n #print(i)\n if df['prev_hot_streak'][i]==0:\n df['prev_hot_streak'][i]=df['prev_hot_streak'][i-1]\n \n \n df['prev_cold_streak'] = np.zeros(df.shape[0])\n for i in range(df.shape[0]-1):\n if df['target'][i] ==0:\n f += 1\n if df['target'][i+1] ==1:\n df['prev_cold_streak'][i+1] = f\n f = 0\n\n for i in range(1,df.shape[0]):\n #print(i)\n if df['prev_cold_streak'][i]==0:\n df['prev_cold_streak'][i] = df['prev_cold_streak'][i-1]\n \n ## Calculating current hot and cold streaks\n df['current_hot_streak'] = np.zeros(df.shape[0])\n df['current_cold_streak'] = np.zeros(df.shape[0])\n fhot=0\n fcold=0\n for i in range(df.shape[0]):\n if df['target'][i]==1:\n fhot += 1\n fcold = 0\n df['current_hot_streak'][i] = fhot\n elif df['target'][i]==0:\n fcold += 1\n fhot = 0\n df['current_cold_streak'][i] = fcold\n \n df['prev_hot_streak'] = df['prev_hot_streak'].shift(1)\n df['prev_cold_streak'] = df['prev_cold_streak'].shift(1)\n df['current_hot_streak'] = df['current_hot_streak'].shift(1)\n df['current_cold_streak'] = df['current_cold_streak'].shift(1)\n \n ## Combinations of previos 
streaks\n df['prev_current_hot'] = df['prev_hot_streak'] - df['current_hot_streak']\n df['prev_current_cold'] = df['prev_cold_streak'] - df['current_cold_streak']\n df['current_hot_prev_cold'] = df['current_hot_streak'] - df['prev_cold_streak']\n df['current_cold_prev_hot'] = df['current_cold_streak'] - df['prev_hot_streak']\n \n ##Calculating days since max\n current_max = df['Adj Close'][0]\n df['days_from_max'] = np.zeros(df.shape[0])\n df['pct_from_max'] = np.zeros(df.shape[0])\n #print('blah')\n for i in range(1,df.shape[0]):\n if df['Adj Close'][i] > current_max:\n current_max = df['Adj Close'][i]\n # print(current_max)\n else:\n df['days_from_max'][i] = df['days_from_max'][i-1]+1\n df['pct_from_max'][i] = (df['Adj Close'][i]-current_max)/current_max\n #print(df['days_from_max'][i])\n \n \n \n df.dropna(inplace=True)\n df = df.reset_index(drop=True)\n return df", "def dataset_extract_features_from_date(dataset,date_feature): \n dataset['dayofmonth'] = dataset[date_feature].dt.day\n dataset['dayofyear'] = dataset[date_feature].dt.dayofyear \n dataset['dayofweek'] = dataset[date_feature].dt.dayofweek\n dataset['month'] = dataset[date_feature].dt.month\n dataset['year'] = dataset[date_feature].dt.year\n dataset['weekofyear'] = dataset[date_feature].dt.weekofyear\n dataset['is_month_start'] = (dataset[date_feature].dt.is_month_start).astype(int)\n dataset['is_month_end'] = (dataset[date_feature].dt.is_month_end).astype(int)\n return dataset", "def add_features(df_in, rolling_win_size=15):\n cols =['Turbine_ID', 'Date', 'TTF', '60_days', 'Component']\n other_cols = []\n for i in df_in.columns:\n if i not in cols:\n other_cols.append(i)\n all_cols = cols + other_cols\n\n df_in = df_in[all_cols]\n\n sensor_cols = []\n for i in df_in.columns[5:]:\n sensor_cols.append(i)\n\n sensor_av_cols = [nm+'_av' for nm in sensor_cols]\n sensor_sd_cols = [nm+'_sd' for nm in sensor_cols]\n\n df_out = pd.DataFrame()\n\n ws = rolling_win_size\n\n #calculate rolling stats for each engine id\n\n for m_id in pd.unique(df_in.Turbine_ID):\n\n # get a subset for each engine sensors\n df_engine = df_in[df_in['Turbine_ID'] == m_id]\n df_sub = df_engine[sensor_cols]\n\n # get rolling mean for the subset\n av = df_sub.rolling(ws, min_periods=1).mean()\n av.columns = sensor_av_cols\n\n # get the rolling standard deviation for the subset\n sd = df_sub.rolling(ws, min_periods=1).std().fillna(0)\n sd.columns = sensor_sd_cols\n\n # combine the two new subset dataframes columns to the engine subset\n new_ftrs = pd.concat([df_engine,av,sd], axis=1)\n\n # add the new features rows to the output dataframe\n df_out = pd.concat([df_out,new_ftrs])\n df_out = df_out.sort_values(by=['Turbine_ID', 'Date'] )\n return df_out", "def add_datepart(\n cls,\n df: pd.DataFrame,\n field_name: str,\n frequency: str,\n prefix: str = None,\n drop: bool = True,\n ) -> Tuple[pd.DataFrame, List[str]]:\n field = df[field_name]\n prefix = (re.sub(\"[Dd]ate$\", \"\", field_name) if prefix is None else prefix) + \"_\"\n attr = cls.time_features_from_frequency_str(frequency)\n added_features = []\n for n in attr:\n if n == \"Week\":\n continue\n df[prefix + n] = getattr(field.dt, n.lower())\n added_features.append(prefix + n)\n # Pandas removed `dt.week` in v1.1.10\n if \"Week\" in attr:\n week = field.dt.isocalendar().week if hasattr(field.dt, \"isocalendar\") else field.dt.week\n df.insert(3, prefix + \"Week\", week)\n added_features.append(prefix + \"Week\")\n # TODO Not adding Elapsed by default. 
Need to route it through config\n # mask = ~field.isna()\n # df[prefix + \"Elapsed\"] = np.where(\n # mask, field.values.astype(np.int64) // 10 ** 9, None\n # )\n # added_features.append(prefix + \"Elapsed\")\n if drop:\n df.drop(field_name, axis=1, inplace=True)\n\n # Removing features woth zero variations\n # for col in added_features:\n # if len(df[col].unique()) == 1:\n # df.drop(columns=col, inplace=True)\n # added_features.remove(col)\n return df, added_features", "def feature_list(user_id: str, session: str, tap_feature: str, task_name: str, window: DataFrame):\n if window.shape[0] == 0:\n return None\n #Add user ID, session, task name\n features = [user_id, session, task_name]\n\n #Add orientation\n orientation = mode(window['Phone_orientation_accel'])\n features.append(orientation)\n\n #Add tap type\n features.append(tap_feature)\n\n lead_file = 'Accelerometer.csv'\n\n time_col = x_columns[lead_file]\n\n before_start = window[window[tap_feature] == 4].index[0]\n during_start = window[window[tap_feature] == 2].index[0]\n after_start = window[window[tap_feature] == 3].index[0] + 1\n after_end = window[window[tap_feature] == 5].index[0]\n\n before = window.loc[before_start : during_start]\n during = window.loc[during_start : after_start]\n after = window.loc[after_start : after_end + 1]\n\n if during.shape[0] < 2:\n # If there were none or one measurements during the tap,\n # add the closest ones\n during = window[during_start - 1 : after_start + 1]\n\n for file_name in file_names:\n for y in y_columns[file_name]:\n\n # Feature 1: Mean during\n mean_during = mean(during[y])\n\n # Feature 2: SD during\n sd_during = sd(during[y])\n\n # Feature 3: Difference before/after\n mean_before = mean(before[y])\n mean_after = mean(after[y])\n difference_before_after = mean_after - mean_before\n\n # Feature 4: Net change from tap\n net_change_due_to_tap = mean_during - mean_before\n\n # Feature 5: Maximal change from tap\n max_tap = max(during[y])\n max_change = max_tap - mean_before\n\n # Feature 6: Restoration time\n avgDiffs = []\n for j in range(after[y].shape[0]):\n subsequentValues = after[y].iloc[j:]\n subsequentDistances = subsequentValues.map(lambda x: abs(x - mean_before))\n averageDistance = mean(subsequentDistances)\n avgDiffs.append(averageDistance)\n time_of_earliest_restoration = min(avgDiffs)\n restoration_time = time_of_earliest_restoration - during[time_col].iloc[-1]\n\n # Feature 7: Normalized duration\n t_before_center = (before[time_col].iloc[0] + before[time_col].iloc[-1]) / 2 \n t_after_center = (after[time_col].iloc[0] + after[time_col].iloc[-1]) / 2\n normalized_duration = (t_after_center - t_before_center) / (mean_after - mean_before)\n \n # Feature 8: Ndormalized duration max\n t_max_in_tap = during[during[y] == max_tap][time_col].iloc[0]\n normalized_duration_max = (t_after_center - t_max_in_tap) / (mean_after - max_tap)\n\n\n features += [mean_during, sd_during, difference_before_after,\n net_change_due_to_tap, max_change, restoration_time,\n normalized_duration, normalized_duration_max]\n\n if random.choice(range(100))== 0:\n plot_tap('Plots/Project/' + session, before, during, after, time_col)\n \n return features", "def compute_features(ctx, input_file, output_file):\n kwargs = {ctx.args[i][2:]: ctx.args[i+1].strip('\"') for i in range(0, len(ctx.args), 2)}\n output_file = os.path.abspath(output_file)\n click.echo(\"Init feature set computation\")\n executor = FeatureSetPreparer.build(verbose=True, violate=True, independent=True, session_file=None, 
location_mapping_file = None, orientation_fix_file=None, ws=12800, ss=12800, threshold=0.2, subwins=4, skip_post=True, **kwargs)\n click.echo(\"Compute feautures\")\n result = executor(input_file)\n if not os.path.exists(os.path.dirname(output_file)):\n click.echo(\"Create output folder if not exists\")\n os.makedirs(os.path.dirname(output_file))\n click.echo(\"Save feature set to: \" + output_file)\n result.to_csv(output_file, index=False, float_format='%.6f')\n click.echo(\"Saved\")", "def getTimePointFeatures(self):\r\n\r\n def quarterToFeature():\r\n quarter = np.asarray([[0] * 4])\r\n if self.month in [12, 1, 2]:\r\n quarter[:, 0] = 1\r\n elif self.month in [3, 4, 5]:\r\n quarter[:, 1] = 1\r\n elif self.month in [6, 7, 8]:\r\n quarter[:, 2] = 1\r\n else:\r\n quarter[:, 3] = 1\r\n return quarter\r\n\r\n # Mon=0 tue=1 wed=2 thu=3 sun=6\r\n def dayToFeature(day):\r\n feature = np.asarray([[0] * 3])\r\n if day == 0 or day == 4:\r\n # Day is Mon or Fri\r\n feature[:, 0] = 1\r\n elif 0 < day < 4:\r\n # Day is Tue, Wed, Thu\r\n feature[:, 1] = 1\r\n else:\r\n # Weekend\r\n feature[:, 2] = 1\r\n return feature\r\n\r\n # Can split time of day as night and 4 halves\r\n def timeToFeature(time):\r\n feature = np.asarray([[0] * 17])\r\n if time >= 22 or time <= 5:\r\n feature[:, 0] = 1\r\n else:\r\n feature[:, time - 5] = 1\r\n return feature\r\n\r\n return np.concatenate((timeToFeature(self.hour).flatten(),\r\n dayToFeature(self.weekDay).flatten(),\r\n quarterToFeature().flatten()))", "def aggregateFunctions(fnPointFeatures, start_date, end_date, out_dir):\n\n downloadStreamflowFromGeoJson(fnPointFeatures=fnPointFeatures, target_dir=out_dir,\n startDT=start_date, endDT=end_date)\n\n dat = format_streamflows(out_dir)\n fname = out_dir + '/pd_streamflow.csv'\n\n dat.to_csv(fname)", "def generate_features(df):\n df_new = pd.DataFrame()\n \n # 6 original features\n df_new['open'] = df['open']\n df_new['open_1'] = df['open'].shift(1)\n df_new['close_1'] = df['close'].shift(1)\n df_new['high_1'] = df['high'].shift(1)\n df_new['low_1'] = df['low'].shift(1)\n df_new['volume_1'] = df['volume'].shift(1)\n \n # 50 original features\n # average price\n df_new['avg_price_5'] = df['close'].rolling(window=5).mean().shift(1)\n df_new['avg_price_30'] = df['close'].rolling(window=21).mean().shift(1)\n df_new['avg_price_90'] = df['close'].rolling(window=63).mean().shift(1)\n df_new['avg_price_365'] = df['close'].rolling(window=252).mean().shift(1)\n \n # average price ratio\n df_new['ratio_avg_price_5_30'] = df_new['avg_price_5'] / df_new['avg_price_30']\n df_new['ratio_avg_price_905_'] = df_new['avg_price_5'] / df_new['avg_price_90']\n df_new['ratio_avg_price_5_365'] = df_new['avg_price_5'] / df_new['avg_price_365']\n df_new['ratio_avg_price_30_90'] = df_new['avg_price_30'] / df_new['avg_price_90']\n df_new['ratio_avg_price_30_365'] = df_new['avg_price_30'] / df_new['avg_price_365']\n df_new['ratio_avg_price_90_365'] = df_new['avg_price_90'] / df_new['avg_price_365'] \n \n \n # average volume\n df_new['avg_volume_5'] = df['volume'].rolling(window=5).mean().shift(1)\n df_new['avg_volume_30'] = df['volume'].rolling(window=21).mean().shift(1)\n df_new['avg_volume_90'] = df['volume'].rolling(window=63).mean().shift(1)\n df_new['avg_volume_365'] = df['volume'].rolling(window=252).mean().shift(1)\n \n #average volume ratio\n df_new['ratio_avg_volume_5_30'] = df_new['avg_volume_5'] / df_new['avg_volume_30']\n df_new['ratio_avg_volumee_5_90'] = df_new['avg_volume_5'] / df_new['avg_volume_90'] \n 
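# editorial note (assumption from surrounding code): the pairwise ratios above and below compare short- and long-horizon rolling volume averages; a value above 1 means recent trading volume is running above its longer-term norm\n    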
df_new['ratio_avg_volume_5_365'] = df_new['avg_volume_5'] / df_new['avg_volume_365']\n df_new['ratio_avg_volume_30_90'] = df_new['avg_volume_30'] / df_new['avg_volume_90']\n df_new['ratio_avg_volume_30_365'] = df_new['avg_volume_30'] / df_new['avg_volume_365']\n df_new['ratio_avg_volume_90_365'] = df_new['avg_volume_90'] / df_new['avg_volume_365'] \n \n \n # standard deviation of prices\n df_new['std_price_5'] = df['close'].rolling(window=5).std().shift(1)\n df_new['std_price_30'] = df['close'].rolling(window=21).std().shift(1)\n df_new['std_price_90'] = df['close'].rolling(window=63).std().shift(1) \n df_new['std_price_365'] = df['close'].rolling(window=252).std().shift(1)\n \n # standard deviation ratio of prices \n df_new['ratio_std_price_5_30'] = df_new['std_price_5'] / df_new['std_price_30']\n df_new['ratio_std_price_5_90'] = df_new['std_price_5'] / df_new['std_price_90']\n df_new['ratio_std_price_5_365'] = df_new['std_price_5'] / df_new['std_price_365']\n df_new['ratio_std_price_30_90'] = df_new['std_price_30'] / df_new['std_price_90'] \n df_new['ratio_std_price_30_365'] = df_new['std_price_30'] / df_new['std_price_365'] \n df_new['ratio_std_price_90_365'] = df_new['std_price_90'] / df_new['std_price_365'] \n \n \n # standard deviation of volumes\n df_new['std_volume_5'] = df['volume'].rolling(window=5).std().shift(1)\n df_new['std_volume_30'] = df['volume'].rolling(window=21).std().shift(1)\n df_new['std_volume_90'] = df['volume'].rolling(window=63).std().shift(1)\n df_new['std_volume_365'] = df['volume'].rolling(window=252).std().shift(1)\n \n #standard deviation ratio of volumes\n df_new['ratio_std_volume_5_30'] = df_new['std_volume_5'] / df_new['std_volume_30']\n df_new['ratio_std_volume_5_90'] = df_new['std_volume_5'] / df_new['std_volume_90']\n df_new['ratio_std_volume_5_365'] = df_new['std_volume_5'] / df_new['std_volume_365'] \n df_new['ratio_std_volume_30_90'] = df_new['std_volume_30'] / df_new['std_volume_90']\n df_new['ratio_std_volume_30_365'] = df_new['std_volume_30'] / df_new['std_volume_365']\n df_new['ratio_std_volume_90_365'] = df_new['std_volume_90'] / df_new['std_volume_365'] \n \n # return\n df_new['return_1'] = ((df['close'] - df['close'].shift(1)) / df['close'].shift(1)).shift(1)\n df_new['return_5'] = ((df['close'] - df['close'].shift(5)) / df['close'].shift(5)).shift(1)\n df_new['return_30'] = ((df['close'] - df['close'].shift(21)) / df['close'].shift(21)).shift(1)\n df_new['return_90'] = ((df['close'] - df['close'].shift(63)) / df['close'].shift(63)).shift(1) \n df_new['return_365'] = ((df['close'] - df['close'].shift(252)) / df['close'].shift(252)).shift(1)\n \n #average of return\n df_new['moving_avg_5'] = df_new['return_1'].rolling(window=5).mean()\n df_new['moving_avg_30'] = df_new['return_1'].rolling(window=21).mean()\n df_new['moving_avg_90'] = df_new['return_1'].rolling(window=63).mean()\n df_new['moving_avg_365'] = df_new['return_1'].rolling(window=252).mean()\n \n # the target\n df_new['close'] = df['close']\n df_new = df_new.dropna(axis=0)\n return df_new", "def get_geneset_features(geneset_fn, chrom, start=None, stop=None):\n if start and stop:\n region = '%s:%s-%s' % (chrom, start, stop)\n else:\n region = chrom\n return etl.fromgff3(geneset_fn, region=region)", "def feature_engineer_ts(self, month=12):\n st_data_dt = self.get_st_data_dt()\n end_data_dt = self.get_end_data_dt()\n date_list = pd.date_range(*(pd.to_datetime([st_data_dt, end_data_dt]) + pd.offsets.MonthEnd()), freq='M').to_list()\n population = self.get_population()\n 
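# editorial note (assumption from surrounding code): the loop below joins up to 'month' previous monthly is_raw partitions onto each population snapshot per customer id (column suffixes _1, _2, ...) before the rolling statistics are computed\n        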
is_raw_partition = self.get_is_raw_partition()\n# Lag 2 months\n all_data = []\n# join past is_raw columns\n for d in date_list:\n \n population_partition = population[population['ft_data_dt'] == d] \n old_date = d - relativedelta(months=month)\n y = old_date.year\n m = old_date.month\n day = calendar.monthrange(y, m)[1]\n old_date = date(y, m, day)\n old_date = max(old_date, st_data_dt)\n date_list_join = pd.date_range(*(pd.to_datetime([old_date, d]) + pd.offsets.MonthEnd()), freq='M').to_list()\n date_list_join.reverse()\n for index, date_join in enumerate(date_list_join):\n if date_join.strftime(\"%Y-%m-%d\") not in is_raw_partition.keys():\n continue\n \n tmp_is_raw_partition = is_raw_partition[date_join.strftime(\"%Y-%m-%d\")]\n \n rename_col = [c for c in list(tmp_is_raw_partition.columns) if c not in ['idd', 'ft_data_dt']]\n new_col = [c+'_'+str(index+1) for c in rename_col]\n name_dict = dict(list(zip(rename_col, new_col)))\n tmp_is_raw_partition = tmp_is_raw_partition.rename(columns = name_dict)\n population_partition = population_partition.merge(tmp_is_raw_partition.drop(columns=['ft_data_dt']), on=['idd'], how='left')\n all_data.append(population_partition)\n ts_df = pd.concat(all_data)\n threshold_null = len(ts_df.columns) - 4\n ts_df = ts_df[ts_df.isnull().sum(axis=1) < threshold_null]\n \n def sum_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_sum_'+str(duration)+'mth'\n tmp_df = df[col_list].sum(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def mean_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_avg_'+str(duration)+'mth'\n tmp_df = df[col_list].mean(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def std_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_std_'+str(duration)+'mth'\n tmp_df = df[col_list].std(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def med_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_med_'+str(duration)+'mth'\n tmp_df = df[col_list].std(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def min_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_min_'+str(duration)+'mth'\n tmp_df = df[col_list].min(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def max_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_max_'+str(duration)+'mth'\n tmp_df = df[col_list].max(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def q1_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_q1_'+str(duration)+'mth'\n tmp_df = df[col_list].quantile(q=0.25, axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def q3_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_q3_'+str(duration)+'mth'\n tmp_df = df[col_list].quantile(q=0.75, axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def last_ts(self, df, feature):\n ft_name = feature+ '_last'\n tmp_df = df[feature+'_'+str(1)].to_frame(name=ft_name)\n return tmp_df\n \n ts_duration = [1, 3, 6, 9, 12]\n feature_list = self.get_is_raw_col()\n df = ts_df[['idd', 'ft_data_dt']]\n# Time Series Features\n for duration in ts_duration:\n for col in feature_list:\n col_list = [col+'_'+str(i) for i in range(1, duration+1)]\n df = pd.concat([df\\\n , sum_ts(self, ts_df, col_list, col, duration)\\\n , mean_ts(self, ts_df, col_list, col, duration)\\\n , med_ts(self, ts_df, col_list, col, duration)\\\n , q1_ts(self, ts_df, col_list, col, duration)\\\n , q3_ts(self, ts_df, col_list, col, duration)\\\n , min_ts(self, ts_df, col_list, col, duration)\\\n , max_ts(self, ts_df, col_list, 
col, duration)]\n , axis=1)\n self.set_all_data(df)", "def calculate_timebase_features(self, X: pd.DataFrame) -> pd.DataFrame:\n X = self._add_lagged_features(X, [1, 3, 7, 14, 21, 365])\n\n X = self._add_rolling(X, 'mean', [5, 50])\n X = self._add_rolling(X, 'min', [5, 50])\n X = self._add_rolling(X, 'max', [5, 50])\n\n return X", "def _extract_features(self, ti, tf):\n makedir(self.featdir)\n\n # number of windows in feature request\n Nw = int(np.floor(((tf-ti)/self.dt)/(self.iw-self.io)))\n\n # features to compute\n cfp = ComprehensiveFCParameters()\n if self.compute_only_features:\n cfp = dict([(k, cfp[k]) for k in cfp.keys() if k in self.compute_only_features])\n else:\n # drop features if relevant\n _ = [cfp.pop(df) for df in self.drop_features if df in list(cfp.keys())]\n\n # check if feature matrix already exists and what it contains\n if os.path.isfile(self.featfile):\n t = pd.to_datetime(pd.read_csv(self.featfile, index_col=0, parse_dates=['time'], usecols=['time'], infer_datetime_format=True).index.values)\n ti0,tf0 = t[0],t[-1]\n Nw0 = len(t)\n hds = pd.read_csv(self.featfile, index_col=0, nrows=1)\n hds = list(set([hd.split('__')[1] for hd in hds]))\n\n # option 1, expand rows\n pad_left = int((ti0-ti)/self.dto)# if ti < ti0 else 0\n pad_right = int(((ti+(Nw-1)*self.dto)-tf0)/self.dto)# if tf > tf0 else 0\n i0 = abs(pad_left) if pad_left<0 else 0\n i1 = Nw0 + max([pad_left,0]) + pad_right\n \n # option 2, expand columns\n existing_cols = set(hds) # these features already calculated, in file\n new_cols = set(cfp.keys()) - existing_cols # these features to be added\n more_cols = bool(new_cols)\n all_cols = existing_cols|new_cols\n cfp = ComprehensiveFCParameters()\n cfp = dict([(k, cfp[k]) for k in cfp.keys() if k in all_cols])\n\n # option 3, expand both\n if any([more_cols, pad_left > 0, pad_right > 0]) and self.update_feature_matrix:\n fm = pd.read_csv(self.featfile, index_col=0, parse_dates=['time'], infer_datetime_format=True)\n if more_cols:\n # expand columns now\n df0, wd = self._construct_windows(Nw0, ti0)\n cfp0 = ComprehensiveFCParameters()\n cfp0 = dict([(k, cfp0[k]) for k in cfp0.keys() if k in new_cols])\n fm2 = extract_features(df0, column_id='id', n_jobs=self.n_jobs, default_fc_parameters=cfp0, impute_function=impute)\n fm2.index = pd.Series(wd)\n \n fm = pd.concat([fm,fm2], axis=1, sort=False)\n\n # check if updates required because training period expanded\n # expanded earlier\n if pad_left > 0:\n df, wd = self._construct_windows(Nw, ti, i1=pad_left)\n fm2 = extract_features(df, column_id='id', n_jobs=self.n_jobs, default_fc_parameters=cfp, impute_function=impute)\n fm2.index = pd.Series(wd)\n fm = pd.concat([fm2,fm], sort=False)\n # expanded later\n if pad_right > 0:\n df, wd = self._construct_windows(Nw, ti, i0=Nw - pad_right)\n fm2 = extract_features(df, column_id='id', n_jobs=self.n_jobs, default_fc_parameters=cfp, impute_function=impute)\n fm2.index = pd.Series(wd)\n fm = pd.concat([fm,fm2], sort=False)\n \n # write updated file output\n fm.to_csv(self.featfile, index=True, index_label='time')\n # trim output\n fm = fm.iloc[i0:i1] \n else:\n # read relevant part of matrix\n fm = pd.read_csv(self.featfile, index_col=0, parse_dates=['time'], infer_datetime_format=True, header=0, skiprows=range(1,i0+1), nrows=i1-i0)\n else:\n # create feature matrix from scratch \n df, wd = self._construct_windows(Nw, ti)\n fm = extract_features(df, column_id='id', n_jobs=self.n_jobs, default_fc_parameters=cfp, impute_function=impute)\n fm.index = pd.Series(wd)\n 
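# editorial note (assumption from surrounding code): persist the feature matrix computed from scratch so later calls can read and extend it instead of recomputing every window\n            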
fm.to_csv(self.featfile, index=True, index_label='time')\n \n ys = pd.DataFrame(self._get_label(fm.index.values), columns=['label'], index=fm.index)\n return fm, ys", "def data_transform_timeFeature(self):\n #-------------------------------------------------------------------------\n # All new features are built into separate dataframes \n # and each of them are dumped into a separate file.\n #-------------------------------------------------------------------------\n self.strprint(\"self.df_invoice_line : \"+str(self.df_invoice_line.shape))\n \n self._dict_timeFeature_encoder, df_customers_timeFeature \\\n = p5_util.time_list_feature_build(self.df_invoice_line\\\n , self._list_new_feature, dict_encoder = self._dict_timeFeature_encoder\\\n ,is_verbose=self.is_verbose)\n \n #-------------------------------------------------------------------------\n # New time features are aggregated into a single dataframe.\n # Values are scaled.\n #-------------------------------------------------------------------------\n df_customers_timeFeature, self._std_scaler_timeFeature \\\n = p5_util.time_list_feature_restore(self._list_new_feature \\\n , std_scale = self._std_scaler_timeFeature\\\n , df_timeFeature = df_customers_timeFeature, is_verbose = self.is_verbose)\n\n self.strprint(\"df_customers_timeFeature : \"+str(df_customers_timeFeature.shape))\n \n #-------------------------------------------------------------------------\n # Dimension reduction thanks to PCA\n #-------------------------------------------------------------------------\n n_dim=30\n root_name = 'time_pca_'\n # Column CustomerID is used into df_pca_reduce\n df_customers_timeFeature['CustomerID'] = df_customers_timeFeature.index\n \n df_customers_timeFeature, pca_timeFeature \\\n = p5_util.df_pca_reduce(df_customers_timeFeature, n_dim, root_name\\\n , p_is_scale=False, pca = self._pca_timeFeature)\n\n self.strprint(df_customers_timeFeature.shape)\n \n if self._pca_timeFeature is None:\n #----------------------------------------------------------------------\n # Data-model is in built process with part of data-set.\n #----------------------------------------------------------------------\n self._pca_timeFeature = pca_timeFeature\n p5_util.object_dump(df_customers_timeFeature\\\n , self._df_customers_timeFeature_fileName)\n else:\n #----------------------------------------------------------------------\n # Data-model is already built and this method is called \n # for a customer classification.\n #----------------------------------------------------------------------\n self._df_customers_timeFeature = df_customers_timeFeature.copy()\n return", "def compute_features(\n train_dir, test_dir, output_dir, df_config, feature_config_list, filter_by_month=True, compute_load_ratio=False,\n):\n time_col_name = df_config[\"time_col_name\"]\n\n output_train_dir = os.path.join(output_dir, \"train\")\n output_test_dir = os.path.join(output_dir, \"test\")\n if not os.path.isdir(output_train_dir):\n os.mkdir(output_train_dir)\n if not os.path.isdir(output_test_dir):\n os.mkdir(output_test_dir)\n\n train_base_df = pd.read_csv(os.path.join(train_dir, TRAIN_BASE_FILE), parse_dates=[time_col_name])\n\n for i in range(1, NUM_ROUND + 1):\n train_file = os.path.join(train_dir, TRAIN_FILE_PREFIX + str(i) + \".csv\")\n test_file = os.path.join(test_dir, TEST_FILE_PREFIX + str(i) + \".csv\")\n\n train_delta_df = pd.read_csv(train_file, parse_dates=[time_col_name])\n test_round_df = pd.read_csv(test_file, parse_dates=[time_col_name])\n\n train_all_features, 
test_all_features = compute_features_one_round(\n train_base_df,\n train_delta_df,\n test_round_df,\n df_config,\n feature_config_list,\n FEATURE_MAP,\n filter_by_month,\n compute_load_ratio,\n )\n\n train_output_file = os.path.join(output_dir, \"train\", TRAIN_FILE_PREFIX + str(i) + \".csv\")\n test_output_file = os.path.join(output_dir, \"test\", TEST_FILE_PREFIX + str(i) + \".csv\")\n\n train_all_features.to_csv(train_output_file, index=False)\n test_all_features.to_csv(test_output_file, index=False)\n\n print(\"Round {}\".format(i))\n print(\"Training data size: {}\".format(train_all_features.shape))\n print(\"Testing data size: {}\".format(test_all_features.shape))\n print(\"Minimum training timestamp: {}\".format(min(train_all_features[time_col_name])))\n print(\"Maximum training timestamp: {}\".format(max(train_all_features[time_col_name])))\n print(\"Minimum testing timestamp: {}\".format(min(test_all_features[time_col_name])))\n print(\"Maximum testing timestamp: {}\".format(max(test_all_features[time_col_name])))\n print(\"\")", "def process_data(self):\n timer_start = time.time()\n # ensure self.df_events and self.df_locations are not None\n if self.df_events is None or self.df_locations is None:\n print(\"Missing data: either df_events or df_locations is None\")\n return\n # set start and end based on self.df_events if not already set\n if not self.start:\n self.start = self.df_events['event_time'].min()\n if not self.end:\n self.end = self.df_events['event_time'].max()\n print(f\"date range for events data is from {self.start} to {self.end}\")\n # create Grid object before processing any data\n grid = self.compute_grid_cells(self.df_locations)\n # clean and combine events and locations data\n df_data = self.combine_events_and_locations(grid)\n print(df_data.shape)\n # df_data.to_csv('../../../data_files/20210506_cleanedInputDataCumSum.csv', index=False)\n # df_data = pd.read_csv('../../../data_files/20210415_cleanedInputDataAprilCumSum.csv')\n # process data within grid class\n df_processed = grid.process_data(df_data, 'weekly')\n # df_processed = self.calculate_demand(df_processed)\n # df_processed.to_csv('../../../data_files/20210506_processedGridCellData.csv')\n # set df_demand to be df_processed\n df_processed.reset_index(inplace=True)\n df_processed = df_processed.astype({'date': 'str', 'avail_count': 'float', 'avail_mins': 'float', 'prob_scooter_avail': 'float', 'trips': 'float', 'adj_trips': 'float'})\n # make sure dates are within start and end dates\n start_date = str(iso8601.parse_date(self.start).date())\n end_date = str(iso8601.parse_date(self.end).date())\n df_processed = df_processed[(df_processed['date'] >= start_date) & (df_processed['date'] <= end_date)]\n self.set_demand(df_processed)\n timer_end = time.time()\n print('Elapsed time to process data:', (timer_end - timer_start)/60.0, 'minutes')", "def dataframe_features(df, db):\n def generator():\n for gene_id in df.index:\n yield asinterval(db[gene_id])\n\n return pybedtools.BedTool(generator())", "def features_past_generation(features_creation_function,\n days,\n feature_names_prefix,\n data,\n indices):\n matches_outcomes=[]\n for i,match_indice in enumerate(indices):\n match=data.iloc[match_indice,:]\n past_matches=data[(data.Date<match.Date)&(data.Date>=match.Date-datetime.timedelta(days=days))]\n match_features_outcome_1=features_creation_function(1,match,past_matches)\n match_features_outcome_2=features_creation_function(2,match,past_matches)\n matches_outcomes.append(match_features_outcome_1)\n 
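# editorial note (assumption from surrounding code): each match contributes one feature row per possible outcome, so both perspectives (outcome 1 and outcome 2) appear in the training frame\n        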
matches_outcomes.append(match_features_outcome_2)\n if i%100==0:\n print(str(i)+\"/\"+str(len(indices))+\" matches treated. \"+ features_creation_function.__name__ + str(days))\n train=pd.DataFrame(matches_outcomes)\n train.columns=[feature_names_prefix + \"_\" + str(days) +\"_\" +str(i) for i in range(len(train.columns))]\n \n \n \n return train", "def _extract_features_for_peak_estimation(self, groups=(), tail=0.4, skip_if_shorter_than=6, **kwargs):\n assert 0 < tail < 1, \"Tail is a fraction, it should be between 0 and 1\"\n assert 0 < skip_if_shorter_than, \"skip_if_shorter_than should be a positive int\"\n\n groups = self.df[self.col_group].unique() if len(groups) == 0 else groups\n features = pd.DataFrame({self.col_group: groups})\n\n for idx, row in features.iterrows():\n group = row[self.col_group]\n df_loc = self.df[self.df[self.col_group] == group]\n\n length = len(df_loc[self.col_t])\n if length < skip_if_shorter_than:\n continue\n\n model_full = HuberRegressor()\n\n x_full = df_loc[self.col_t].to_numpy().reshape((-1, 1))\n y_full = df_loc[self.col_obs].to_numpy()\n model_full.fit(x_full, y_full)\n slope_full = model_full.coef_[0]\n\n tail_len = int(tail * length)\n\n x_tail = df_loc[self.col_t].to_numpy()[-tail_len:].reshape((-1, 1))\n y_tail = df_loc[self.col_obs].to_numpy()[-tail_len:]\n x_head = df_loc[self.col_t].to_numpy()[:-tail_len].reshape((-1, 1))\n y_head = df_loc[self.col_obs].to_numpy()[:-tail_len]\n\n r2_full_score = model_full.score(x_full, y_full)\n r2_head_score = model_full.score(x_head, y_head)\n r2_tail_score = model_full.score(x_tail, y_tail)\n\n model_head = HuberRegressor()\n model_head.fit(x_head, y_head)\n slope_head = model_head.coef_[0]\n\n model_tail = HuberRegressor()\n model_tail.fit(x_tail, y_tail)\n slope_tail = model_tail.coef_[0]\n features.at[idx, \"R2_full\"] = r2_full_score\n features.at[idx, \"R2_head\"] = r2_head_score\n features.at[idx, \"R2_tail\"] = r2_tail_score\n features.at[idx, \"R2_tail_own\"] = model_tail.score(x_tail, y_tail)\n features.at[idx, \"slope_full\"] = slope_full\n features.at[idx, \"slope_head\"] = slope_head\n features.at[idx, \"slope_tail\"] = slope_tail\n\n y_pred_full = model_full.predict(x_full)\n self._statistics[\"linear_r2\"][group] = r2_full_score\n self._statistics[\"linear_rmse\"][group] = np.linalg.norm(np.exp(y_full) - np.exp(y_pred_full))**2\n self._statistics[\"linear_slope\"][group] = slope_full\n\n fraction_below_score = np.mean(model_full.predict(x_tail) > y_tail)\n weights = np.array([1 / (1 + i) ** 2 for i in range(1, tail_len + 1)][::-1])\n weighted_fraction_below_score = np.dot(weights, model_full.predict(x_tail) > y_tail)\n features.at[idx, \"fraction_below\"] = fraction_below_score\n features.at[idx, \"weighted_fraction_below\"] = weighted_fraction_below_score\n\n return features.dropna()", "def get_features_from_segment_raw(seg_raw_df, feature_func_dict):\n # parse input\n if type(feature_func_dict) == str: # it's a json filename\n import json\n feature_func_str = open(feature_func_dict).read()\n feature_func_dict = json.loads(feature_func_str)\n print \"===========start computing features=================\"\n print \"===========feature function dictionary==============\"\n print feature_func_dict\n grouped = seg_raw_df.groupby(s_info.segment_col)\n # parse feature function dictionary\n result = {}\n for feature_name in feature_func_dict:\n print \"==========compute \" + feature_name + \"================\"\n feature = feature_func_dict[feature_name]\n if len(feature['paras']) == 0: # no 
parameter need to be set, easiest case\n # find out the function\n func_name = feature['handler']\n if hasattr(np, func_name):\n func = getattr(np, func_name)\n elif hasattr(sp_stats, func_name):\n func = getattr(sp_stats, func_name)\n elif hasattr(s_feature, func_name):\n func = getattr(s_feature, func_name)\n else:\n func = func_name\n # prepare columns\n temp = grouped[feature['apply']].aggregate(func)\n result[feature_name] = temp\n else: # has parameters, will compute column one by one\n paras = feature['paras']\n print paras\n # find out the function\n func_name = feature['handler']\n if hasattr(s_feature, func_name):\n func = getattr(s_feature, func_name)\n elif hasattr(np, func_name):\n func = getattr(np, func_name)\n else:\n print func_name + \" can't be found, ignore this feature\"\n continue\n # iterate over columns\n temp = {}\n c = 0\n for col in feature['apply']:\n if paras.has_key('with'): # need another column\n paras['another'] = grouped[paras['with'][c]].copy(True)\n temp[col] = grouped[col].aggregate(func, paras)\n c += 1\n # construct DataFrame\n result[feature_name] = pd.DataFrame(temp)\n print \"Inf values: %s\" % np.any(np.isinf(result[feature_name]))\n print \"NaN values: %s\" % np.any(np.isnan(result[feature_name]))\n feature_raw_df = pd.concat(result, axis=1)\n # feature_raw_df = feature_raw_df.reset_index(drop=True)\n return feature_raw_df", "def FE_create_time_series_features(dft, ts_column, ts_adds_in=[]):\r\n dtf = copy.deepcopy(dft)\r\n reset_index = False\r\n try:\r\n # ts_column = None assumes that that index is the time series index\r\n reset_index = False\r\n if ts_column is None:\r\n reset_index = True\r\n ts_column = dtf.index.name\r\n dtf = dtf.reset_index()\r\n\r\n ### In some extreme cases, date time vars are not processed yet and hence we must fill missing values here!\r\n null_nums = dtf[ts_column].isnull().sum()\r\n if null_nums > 0:\r\n # missing_flag = True\r\n new_missing_col = ts_column + '_Missing_Flag'\r\n dtf[new_missing_col] = 0\r\n dtf.loc[dtf[ts_column].isnull(),new_missing_col]=1\r\n dtf[ts_column].fillna(method='ffill', inplace=True)\r\n print(' adding %s column due to missing values in data' %new_missing_col)\r\n if dtf[dtf[ts_column].isnull()].shape[0] > 0:\r\n dtf[ts_column].fillna(method='bfill', inplace=True)\r\n\r\n if dtf[ts_column].dtype == float:\r\n dtf[ts_column] = dtf[ts_column].astype(int)\r\n\r\n ### if we have already found that it was a date time var, then leave it as it is. Thats good enough!\r\n items = dtf[ts_column].apply(str).apply(len).values\r\n #### In some extreme cases,\r\n if all(items[0] == item for item in items):\r\n if items[0] == 4:\r\n ### If it is just a year variable alone, you should leave it as just a year!\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column],format='%Y')\r\n ts_adds = []\r\n else:\r\n ### if it is not a year alone, then convert it into a date time variable\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column], infer_datetime_format=True)\r\n ### this is where you create the time series features #####\r\n dtf, ts_adds = _create_ts_features(df=dtf, tscol=ts_column)\r\n else:\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column], infer_datetime_format=True)\r\n ### this is where you create the time series features #####\r\n dtf, ts_adds = _create_ts_features(df=dtf, tscol=ts_column)\r\n if not ts_adds_in:\r\n ts_adds_copy = dtf[ts_adds].select_dtypes(include='number').columns.tolist()\r\n ### drop those columns where all rows are same i.e. 
zero variance ####\r\n for col in ts_adds_copy:\r\n if dtf[col].std() == 0:\r\n dtf.drop(col, axis=1, inplace=True)\r\n print(' dropping column due to zero variance in %s column' %col)\r\n ts_adds.remove(col)\r\n else:\r\n rem_cols = left_subtract(dtf.columns.tolist(), ts_adds_in)\r\n dtf = dtf[rem_cols+ts_adds_in]\r\n\r\n # If you had reset the index earlier, set it back before returning\r\n # to make it consistent with the dataframe that was sent as input\r\n if reset_index:\r\n dtf = dtf.set_index(ts_column)\r\n elif ts_column in dtf.columns:\r\n dtf.drop(ts_column, axis=1, inplace=True)\r\n else:\r\n pass\r\n except Exception as e:\r\n print(e)\r\n print('Error in Processing %s column for date time features. Continuing...' %ts_column)\r\n return dtf, ts_adds", "def get_features_and_target(self, trades_features: pd.DataFrame, trades_target: pd.DataFrame) -> pd.DataFrame:\n \n sf_groups = trades_features.drop_duplicates(subset=['sf_account_id', 'trade_date', 'sku']).groupby('sf_account_id')\n\n # calculate features\n feature_dfs = []\n if 'product_name' in self.feature_categories:\n feature_dfs += [sf_groups.product_name.value_counts().unstack().notnull()]\n if 'product_category' in self.feature_categories:\n feature_dfs += [sf_groups.product_category.value_counts().unstack().notnull()]\n if 'reporting_channel' in self.feature_categories:\n feature_dfs += [sf_groups.sub_reporting_channel.value_counts().unstack().notnull()]\n if 'recency' in self.feature_categories:\n feature_dfs += [(trades_features.trade_date_dt.max()-sf_groups.trade_date_dt.max()).dt.days.to_frame().rename(columns={'trade_date_dt':'recency'})]\n if 'frequency' in self.feature_categories:\n feature_dfs += [sf_groups.product_name.count().to_frame().rename(columns={'product_name':'frequency'})]\n if 'total_spend' in self.feature_categories:\n feature_dfs += [sf_groups.cost_float.sum().to_frame().rename(columns={'cost_float':'total_spend'})]\n\n # concat features\n customer_df = pd.concat(feature_dfs, axis=1, sort=False) # outer join on index\n\n # add target variable\n for target_variable in self.target_variables:\n if (trades_target.product_name == target_variable).any():\n customer_df['target_'+target_variable] = trades_target.groupby(['sf_account_id', 'product_name']).trade_date.any().unstack()[target_variable]\n else:\n customer_df['target_'+target_variable] = False\n\n # remove customers with no purchases before cut off\n customer_df = customer_df[customer_df[customer_df.columns[customer_df.columns != 'target']].any(axis=1)]\n\n # replace nans with False\n customer_df.fillna(False, inplace=True)\n\n return customer_df", "def generate_features(self):\n\n # For each STFT timebin, divide data into three bins and get mean power\n data_array = np.array([])\n bl_array = np.array([])\n\n for trial in range(self.data_stft_norm.shape[-1]): # Each trial\n for tbin in range(self.data_stft_norm.shape[-2]): # Each timebin\n for ch in range(self.data_stft_norm.shape[0]):\n data_array = np.append(data_array,[\n np.mean(self.data_stft_norm[ch, :2, tbin, trial]),\n np.mean(self.data_stft_norm[ch, 3:8, tbin, trial]),\n np.mean(self.data_stft_norm[ch, 9:27, tbin, trial])])\n\n data_array = np.reshape(data_array, (-1, 18))\n\n for trial in range(self.bl_stft_norm.shape[-1]): # Each trial\n for tbin in range(self.bl_stft_norm.shape[-2]): # Each timebin\n for ch in range(self.bl_stft_norm.shape[0]):\n bl_array = np.append(bl_array, [\n np.mean(self.bl_stft_norm[ch, :2, tbin, trial]),\n np.mean(self.bl_stft_norm[ch, 3:8, tbin, trial]),\n 
np.mean(self.bl_stft_norm[ch, 9:27, tbin, trial])])\n bl_array = np.reshape(bl_array, (-1, 18))\n\n X = np.append(data_array, bl_array, axis=0)\n y = np.append(np.ones(data_array.shape[0]), np.zeros(bl_array.shape[0]))\n\n return X, y", "def filter_dataframe(df, start_date_dt, end_date_dt):\n\n dff = df \n # df[\n # (df[\"timestamp\"].dt.date >= dt.date(start_date_dt.year, start_date_dt.month, start_date_dt.day))\n # & (df[\"timestamp\"].dt.date <= dt.date(end_date_dt.year, end_date_dt.month, end_date_dt.day))\n # ]\n # if (lat_min != -90) or (lat_max != 90):\n # dff = dff[\n # (dff[\"lat\"] >= lat_min)\n # & (dff[\"lat\"] <= lat_max)\n # ]\n # if (lon_min != -90) or (lon_max != 90):\n # dff = dff[\n # (dff[\"lon\"] >= lon_min)\n # & (dff[\"lon\"] <= lon_max)\n # ]\n\n return dff", "def create_features_from_transaction_timestamp(data):\n utils.save_log('{0} :: {1}'.format(\n create_features_from_transaction_timestamp.__module__,\n create_features_from_transaction_timestamp.__name__))\n\n data = data.withColumn('TransactionHour',\n hour(data[config.feature_column_timestamp]))\n data = data.withColumn('TransactionDayOfWeek',\n dayofweek(data[config.feature_column_timestamp]))\n data = data.withColumn('TransactionDayOfYear',\n dayofyear(data[config.feature_column_timestamp]))\n data = data.withColumn('TransactionWeekOfYear',\n weekofyear(data[config.feature_column_timestamp]))\n\n data = data.withColumn('WeekAction',\n when(col('TransactionWeekOfYear').\n between(50, 52), 1).\n otherwise(0))\n\n update_list_features(\"numerical\", ['TransactionHour',\n 'TransactionDayOfWeek',\n 'TransactionDayOfYear',\n 'TransactionWeekOfYear',\n 'WeekAction'])\n\n return data", "def get_cdf_data(self):\n df = self.df_events.copy()\n df['event_time'] = df['event_time'].apply(self.parse_time_stamp) # convert strings to datetime objects\n # only get the rows with event_type_reason == \"user_pick_up\" and event_time between 6 am and 10 pm\n # also make sure dates are between the start and end period\n df = df[(df['event_type_reason'] == \"user_pick_up\") & (df['event_time'] >= iso8601.parse_date(self.start)) & (df['event_time'] <= iso8601.parse_date(self.end))]\n df['date'] = df['event_time'].apply(self.get_date).astype(str) # get date part of datetime object\n df['minute'] = df['event_time'].apply(self.get_minutes).astype(float)\n # consider only trips that began with operating hours\n df = df[(df['minute'] >= (6*60)) & (df['minute'] < (22*60))]\n return df[['date', 'minute']].reset_index(drop=True)", "def engineer_features(dataframe, holiday_dates, columns, time_lags=24, \n steps_ahead=1, drop_nan_rows=True):\n \n # Make a copy of the original dataframe\n data = dataframe[columns].copy()\n \n # Features engineering\n for col in data.columns:\n for i in range(1, time_lags+1):\n # Shift data by lag of 1 to time_lags (default: 24) hours\n data[col+'_{:d}h'.format(i)] = data[col].shift(periods=i) # time-lag\n data[col+'_diff'] = data[col].diff() # first-difference\n data[col+'_week'] = data[col].shift(periods=24*7) # previous week\n \n # Hour-of-day indicators with cyclical transform\n dayhour_ind = data.index.hour\n data['hr_sin'] = np.sin(dayhour_ind*(2.*np.pi/24))\n data['hr_cos'] = np.cos(dayhour_ind*(2.*np.pi/24))\n \n # Day-of-week indicators with cyclical transform\n weekday_ind = data.index.weekday\n data['week_sin'] = np.sin(weekday_ind*(2.*np.pi/7))\n data['week_cos'] = np.cos(weekday_ind*(2.*np.pi/7))\n\n # Weekend as a binary indicator\n data['weekend'] = np.asarray([0 if ind <= 4 else 1 for ind in 
weekday_ind])\n\n # Month indicators with cyclical transform\n month_ind = data.index.month\n data['mnth_sin'] = np.sin((month_ind-1)*(2.*np.pi/12))\n data['mnth_cos'] = np.cos((month_ind-1)*(2.*np.pi/12))\n \n # Holidays as a binary indicator\n data['holidays'] = 0\n for holiday, date in holiday_dates.items():\n if date[1] is None:\n # Single day\n data.loc[date[0], 'holidays'] = 1\n else:\n # Date range\n data.loc[date[0]:date[1], 'holidays'] = 1\n \n # Forecast horizont\n if steps_ahead == 1:\n # Single-step forecasting\n data['Load+0h'] = data['Load'].values\n else:\n # Multi-step forecasting\n for i in range(steps_ahead):\n data['Load'+'+{:d}h'.format(i)] = data['Load'].shift(-i)\n del data['Load']\n \n if drop_nan_rows:\n # Drop rows with NaN values\n data.dropna(inplace=True)\n \n return data", "def create_lag_features_with_time_feature(df = None, cols = None, time = None, n = 5, fillna = True):\n # assert\n assert(df is not None and cols is not None)\n # set attributes\n cols_to_rename = cols\n print(cols_to_rename)\n if type(n) == list:\n shift_range = n\n elif type(n) == int:\n shift_range = range(1, n+1)\n else:\n print(\"type of n is flase, set it to default: 5\")\n shift_range = range(1, 6)\n # try to get the new features\n for month_shift in tqdm(shift_range):\n train_shift = df.copy()\n train_shift[time] = train_shift[time] + month_shift\n foo = lambda x: '{}_lag_{}'.format(x, month_shift) if x in cols_to_rename else x\n train_shift = train_shift.rename(columns=foo)\n df = pd.merge(all_data, train_shift, on=index_cols, how='left').fillna(0)\n del train_shift\n gc.collect()\n return df", "def example_staypoints():\n p1 = Point(8.5067847, 47.4)\n p2 = Point(8.5067847, 47.5)\n p3 = Point(8.5067847, 47.6)\n p4 = Point(8.5067847, 47.7)\n\n t1 = pd.Timestamp(\"1971-01-01 00:00:00\", tz=\"utc\")\n t2 = pd.Timestamp(\"1971-01-01 05:00:00\", tz=\"utc\")\n t3 = pd.Timestamp(\"1971-01-02 07:00:00\", tz=\"utc\")\n t4 = pd.Timestamp(\"1971-01-02 08:00:00\", tz=\"utc\")\n t5 = pd.Timestamp(\"1971-01-02 09:00:00\", tz=\"utc\")\n t6 = pd.Timestamp(\"1971-01-02 10:00:00\", tz=\"utc\")\n\n list_dict = [\n {\"id\": 1, \"user_id\": 0, \"started_at\": t1, \"finished_at\": t2, \"geom\": p1},\n {\"id\": 5, \"user_id\": 0, \"started_at\": t2, \"finished_at\": t3, \"geom\": p2},\n {\"id\": 2, \"user_id\": 0, \"started_at\": t3, \"finished_at\": t4, \"geom\": p3},\n {\"id\": 6, \"user_id\": 0, \"started_at\": t4, \"finished_at\": t5, \"geom\": p2},\n {\"id\": 15, \"user_id\": 0, \"started_at\": t5, \"finished_at\": t6, \"geom\": p1},\n {\"id\": 7, \"user_id\": 1, \"started_at\": t3, \"finished_at\": t4, \"geom\": p4},\n {\"id\": 80, \"user_id\": 1, \"started_at\": t4, \"finished_at\": t5, \"geom\": p2},\n {\"id\": 3, \"user_id\": 1, \"started_at\": t5, \"finished_at\": t6, \"geom\": p2},\n ]\n sp = gpd.GeoDataFrame(data=list_dict, geometry=\"geom\", crs=\"EPSG:4326\")\n sp = sp.set_index(\"id\")\n sp.as_staypoints\n return sp", "def add_features2(df_in, rolling_win_size=15):\n cols_to_drop =['TTF','60_days','Turbine_ID', 'Date','Component','Component_sd','Component_av']\n\n for i in cols_to_drop:\n if i in df_in.columns:\n df_in = df_in.drop(columns=i)\n else:\n pass\n\n sensor_cols = []\n for i in df_in.columns:\n sensor_cols.append(i)\n\n sensor_av_cols = [nm+'_av' for nm in sensor_cols]\n sensor_sd_cols = [nm+'_sd' for nm in sensor_cols]\n\n df_out = pd.DataFrame()\n\n ws = rolling_win_size\n\n #calculate rolling stats for each engine id\n\n for m_id in pd.unique(df_in.Turbine_ID):\n\n # get a 
subset for each engine sensors\n df_engine = df_in[df_in['Turbine_ID'] == m_id]\n df_sub = df_engine[sensor_cols]\n\n # get rolling mean for the subset\n av = df_sub.rolling(ws, min_periods=1).mean()\n av.columns = sensor_av_cols\n\n # get the rolling standard deviation for the subset\n sd = df_sub.rolling(ws, min_periods=1).std().fillna(0)\n sd.columns = sensor_sd_cols\n\n # combine the two new subset dataframes columns to the engine subset\n new_ftrs = pd.concat([df_engine,av,sd], axis=1)\n\n # add the new features rows to the output dataframe\n df_out = pd.concat([df_out,new_ftrs])\n df_out = df_out.sort_values(by=['Turbine_ID', 'Date'] )\n return df_out", "def add_features(df_in, rolling_win_size,columns_to_treat):\n \n av_cols = [nm+'__av' for nm in columns_to_treat]\n sd_cols = [nm+'__sd' for nm in columns_to_treat]\n min_cols =[nm+'__min' for nm in columns_to_treat]\n max_cols =[nm+ '__max' for nm in columns_to_treat]\n \n df_out = pd.DataFrame()\n \n ws = rolling_win_size\n \n #calculate rolling stats for each engine (engine.id)\n \n for m_id in pd.unique(df_in['id.engine.id']):\n \n # get a subset for each engine sensors\n df_engine = df_in[df_in['id.engine.id'] == m_id]\n df_sub = df_engine[columns_to_treat]\n\n \n # get rolling mean for the subset\n av = df_sub.rolling(ws, min_periods=1).mean()\n av.columns = av_cols\n \n # get the rolling standard deviation for the subset\n sd = df_sub.rolling(ws, min_periods=1).std().fillna(0)\n sd.columns = sd_cols\n\n # get rolling rolling max for the subset\n max = df_sub.rolling(ws, min_periods=1).max()\n max.columns = max_cols\n \n # get the rolling standard deviation for the subset\n min = df_sub.rolling(ws, min_periods=1).min().fillna(0)\n min.columns = min_cols\n \n # combine the two new subset dataframes columns to the engine subset\n new_ftrs = pd.concat([df_engine,av,sd,min,max], axis=1)\n \n # add the new features rows to the output dataframe\n df_out = pd.concat([df_out,new_ftrs])\n \n return df_out", "def create_feature_names_list(df):\n features = ['date_block_num', 'shop_id', 'item_id', 'Year', 'Month', 'shop_type_1',\n 'shop_type_2', 'shop_city_type', 'shop_city', 'item_category_id',\n 'item_category_main', 'is_category_digital', 'is_category_ps_related', 'item_price_avg',\n 'when_first_sold',\n 'number_of_mondays', 'number_of_saturdays', 'number_of_sundays', 'number_of_days_in_month']\n lag_cols = [x for x in df.columns if 'lag' in x]\n features = features + lag_cols\n\n return features", "def gen_features(log_file_path: str, out_path: str):\n raise RuntimeError(\"Feature extraction is not supported yet in AutoScheduler dialect\")", "def generate_features(self, df):\n df = df.reset_index()\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n return extract_features(df, column_id=\"id\", impute_function=impute,\n default_fc_parameters=self.extraction_settings)", "def create_in_range_lst(source_features: list, user_zip_code: str, radius: int,\n active_set: set, second_dose: bool,\n provider_filter: list) -> list:\n in_range_locs = []\n user_coords = ZIP_MAP_DICT[user_zip_code][0]\n for loc in source_features:\n if loc['properties']['appointments_available'] and\\\n loc['properties']['provider_brand'].lower() in active_set and\\\n (loc['properties']['appointments_available_all_doses'] is True or\n loc['properties']['appointments_available_2nd_dose_only'] is\n second_dose) and \\\n (len(provider_filter) == 0 or\n loc['properties']['provider_brand_name'] in provider_filter):\n # format [latitude, longitude]\n 
loc_coord = loc['geometry']['coordinates'][::-1]\n if loc_coord != [None, None]:\n if earth_distance(loc_coord[0], loc_coord[1],\n user_coords[0], user_coords[1]) <= radius:\n in_range_locs.append(loc)\n return in_range_locs", "def create_features_table():\n\n table_name = 'features'\n sql_query = f\"\"\"CREATE OR REPLACE TABLE `{GCP_PROJECT}.{BQ_TEMP_DATASET}.{table_name}`\n AS\n SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'point' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.points` \n UNION ALL\n SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'line' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.lines`\n UNION ALL\n SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'multilinestring' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.multilinestrings`\n UNION ALL\n SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'multipolygon' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.multipolygons`\n UNION ALL\n SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'other_relation' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.other_relations` \n \"\"\"\n query_job = bq.query(sql_query)", "def derive_features(self):\n\n temp = int(self.stop_id)\n\n while temp not in self.stops_latlon.keys():\n if temp < 7692:\n temp += 1\n else:\n while temp not in self.stops_latlon.keys():\n temp -= 1\n\n self.latitude = self.stops_latlon[temp][0]\n self.longitude = self.stops_latlon[temp][1]\n\n self.distance_centre = FormatInput.haversine(self.latitude, self.longitude)\n\n self.cluster = FormatInput.map_stop_to_cluster(self.cluster_map, self.stop_id)\n\n self.holiday = FormatInput.add_holiday(self.date)", "def _create_feature_group(\n data: pd.DataFrame,\n layer_name: str,\n lat_column: str,\n long_column: str,\n icon_column: Optional[str],\n icon_map: IconMapper,\n popup_cols: List[str],\n tooltip_cols: List[str],\n def_layer_color: str,\n use_marker_cluster: bool = True,\n) -> folium.FeatureGroup:\n feature_group = folium.FeatureGroup(name=layer_name)\n if use_marker_cluster:\n container = MarkerCluster(name=layer_name)\n container.add_to(feature_group)\n else:\n container = feature_group\n data.apply(\n lambda row: folium.Marker(\n location=(row[lat_column], row[long_column]),\n tooltip=_create_marker_text(row, tooltip_cols),\n popup=_create_marker_text(row, popup_cols),\n icon=_create_mapped_icon(row, icon_column, icon_map, def_layer_color),\n ).add_to(feature_group),\n axis=1,\n )\n return feature_group", "def month_lag_distribution(source_df, field=\"month_lag\", path=path.path, nrows=None):\n _log.info(\"Creating features from {}\".format(field))\n prefix = source_df.split(\"_\")[0]\n source_df = \"{}/{}\".format(path, source_df)\n\n _log.info(\"Reading from {}\".format(source_df))\n try:\n df = pd.read_csv(source_df, usecols=[\"card_id\", field], nrows=nrows)\n _log.info(\"Successfully read from {}\".format(source_df))\n except Exception as e:\n _log.exception(e)\n\n _log.info(\"Computing distribution of month lag\")\n func_to_be_applied = [min, max, pd.Series.nunique]\n func_to_be_applied_dummy = [max, np.mean]\n rename_dict = create_rename_dict(prefix, field, func_to_be_applied)\n rename_dict_dummy = create_rename_dict(prefix, \"dummy\", func_to_be_applied_dummy)\n\n df[\"dummy\"] = 1\n df_features = 
df.groupby(\"card_id\").agg({field:func_to_be_applied}).reset_index()\n df_features = pd.concat([pd.DataFrame(df_features[\"card_id\"]), df_features[field]], axis=1, sort=False)\n\n _log.info(\"Renaming columns: {}\".format(rename_dict))\n df_features.rename(columns=rename_dict, inplace=True)\n\n _log.info(\"Computing time in month between transactions\")\n df_freq = (df.groupby([\"card_id\", field]).agg({\"dummy\": np.sum}).reset_index().groupby(\"card_id\")\n .agg({\"dummy\": func_to_be_applied_dummy}).reset_index())\n df_freq = pd.concat([pd.DataFrame(df_freq[\"card_id\"]), df_freq[\"dummy\"]], axis=1, sort=False)\n df_freq.rename(columns=rename_dict_dummy, inplace=True)\n\n _log.info(\"Creating final df\")\n df_features = df_features.merge(df_freq, how=\"inner\", on=\"card_id\")\n return df_features", "def make_time_features(ts, index=None, epoch=None, epoch_span=None):\n # input validation\n try:\n if len(ts) == 1:\n _singleton = True\n elif len(ts) > 1:\n _singleton = False\n elif len(ts) < 1:\n raise ValueError(\"must pass non-empty iterable of timestamps\")\n except TypeError:\n return make_time_features([ts], index=index, epoch=epoch, epoch_span=epoch_span)\n\n if not isinstance(ts, pd.DatetimeIndex):\n ts = pd.Series(0, index=ts).index\n if not isinstance(ts, pd.DatetimeIndex):\n raise ValueError(\"must pass non-empty iterable of timestamps\")\n\n if index is None:\n index = pd.RangeIndex(len(ts))\n if epoch is None:\n epoch = min(ts)\n if epoch_span is None:\n epoch_span = float((end - epoch).total_seconds())\n\n time_features = {}\n start = min(ts)\n end = max(ts)\n\n # Major US holidays\n NewYearsDay = pd.tseries.holiday.Holiday('New Years Day', month=1, day=1)\n MemorialDay = pd.tseries.holiday.Holiday('Memorial Day', month=6, day=1, offset=pd.DateOffset(weekday=MO(-1)))\n IndependenceDay = pd.tseries.holiday.Holiday('Independence Day', month=7, day=4)\n LaborDay = pd.tseries.holiday.Holiday('Labor Day', month=9, day=1, offset=pd.DateOffset(weekday=MO(1)))\n ThanksgivingDay = pd.tseries.holiday.Holiday('Thanksgiving Day', month=11, day=1, offset=pd.DateOffset(weekday=TH(4)))\n ChristmasDay = pd.tseries.holiday.Holiday('Christmas Day', month=12, day=25)\n holidays = \\\n NewYearsDay.dates(start.date(), end.date()).tolist() +\\\n MemorialDay.dates(start.date(), end.date()).tolist() +\\\n IndependenceDay.dates(start.date(), end.date()).tolist() +\\\n LaborDay.dates(start.date(), end.date()).tolist() +\\\n ThanksgivingDay.dates(start.date(), end.date()).tolist() +\\\n ChristmasDay.dates(start.date(), end.date()).tolist()\n holidays = set([h.date() for h in holidays])\n\n # projections onto unit circle\n time_features['day_cos'] = np.cos((ts.hour * 3600 + ts.minute * 60 + ts.second) * 2 * np.pi / 86400.)\n time_features['day_sin'] = np.sin((ts.hour * 3600 + ts.minute * 60 + ts.second) * 2 * np.pi / 86400.)\n time_features['week_cos'] = np.cos(ts.dayofweek * 2 * np.pi / 7.)\n time_features['week_sin'] = np.sin(ts.dayofweek * 2 * np.pi / 7.)\n time_features['year_cos'] = np.cos(ts.dayofyear * 2 * np.pi / 365.)\n time_features['year_sin'] = np.sin(ts.dayofyear * 2 * np.pi / 365.)\n # linear march through time\n time_features['epoch'] = (ts - epoch).total_seconds() / epoch_span\n # workday indicator\n time_features['workday'] = [int(weekday < 5 and date not in holidays) for weekday, date in zip(ts.weekday, ts.date)]\n\n if _singleton:\n return {k: v[0] for k, v in time_features.iteritems()}\n else:\n return pd.DataFrame(time_features, index=index)", "def engineer_features(\n df_org: 
pd.DataFrame,\n start_features: list | None = None,\n units: dict | None = None,\n max_steps: int = 3,\n transformations: list | tuple = (\"1/\", \"exp\", \"log\", \"abs\", \"sqrt\", \"^2\", \"^3\"),\n verbose: int = 0,\n) -> tuple[pd.DataFrame, dict]:\n # initialize the feature pool with columns from the dataframe\n if not start_features:\n start_features = df_org.columns\n else:\n for c in start_features:\n if c not in df_org.columns:\n raise ValueError(f\"[feateng] start feature {c} not in df_org.columns\")\n feature_pool = {c: sympy.symbols(colnames2symbols(c, i), real=True) for i, c in enumerate(start_features)} # type: ignore\n if max_steps < 1:\n if verbose > 0:\n logging.warning(\"[feateng] no features generated for max_steps < 1.\")\n return df_org, feature_pool\n # get a copy of the dataframe - this is where all the features will be added\n df = pd.DataFrame(df_org.copy(), dtype=np.float32)\n\n compiled_func_transformations = None\n compiled_func_transforms_cond = None\n compiled_func_combinations = None\n\n def compile_func_transform(name: str, ft: Callable, plus_1: bool = False):\n def _abs(x):\n return np.abs(x)\n\n # create temporary variable expression and apply it to precomputed feature\n t = sympy.symbols(\"t\")\n expr_temp = ft(t + 1) if plus_1 else ft(t)\n fn = _abs if name == \"abs\" else lambdify(t, expr_temp)\n return nb.njit(fn)\n\n def apply_transformations(features_list: list) -> tuple[list, set]:\n # feature transformations\n func_transform = {\n \"exp\": lambda x: sympy.exp(x),\n \"exp-\": lambda x: sympy.exp(-x),\n \"log\": lambda x: sympy.log(x),\n \"abs\": lambda x: sympy.Abs(x),\n \"sqrt\": lambda x: sympy.sqrt(x),\n \"sin\": lambda x: sympy.sin(x),\n \"cos\": lambda x: sympy.cos(x),\n \"2^\": lambda x: 2**x,\n \"^2\": lambda x: x**2,\n \"^3\": lambda x: x**3,\n \"1+\": lambda x: 1 + x,\n \"1-\": lambda x: 1 - x,\n \"1/\": lambda x: 1 / x,\n }\n func_transform_units = {\n \"exp\": lambda x: np.exp(x),\n \"exp-\": lambda x: np.exp(-x),\n \"log\": lambda x: np.log(x),\n \"abs\": lambda x: np.abs(x),\n \"sqrt\": lambda x: np.sqrt(x),\n \"sin\": lambda x: np.sin(x),\n \"cos\": lambda x: np.cos(x),\n \"2^\": lambda x: np.exp(x),\n \"^2\": lambda x: x**2,\n \"^3\": lambda x: x**3,\n \"1+\": lambda x: 1 + x,\n \"1-\": lambda x: 1 - x,\n \"1/\": lambda x: 1 / x,\n }\n # conditions on the original features that have to be met to apply the transformation\n func_transform_cond = {\n \"exp\": lambda x: np.all(x < 10),\n \"exp-\": lambda x: np.all(-x < 10),\n \"log\": lambda x: np.all(x >= 0),\n \"abs\": lambda x: np.any(x < 0),\n \"sqrt\": lambda x: np.all(x >= 0),\n \"sin\": lambda x: True,\n \"cos\": lambda x: True,\n \"2^\": lambda x: np.all(x < 50),\n \"^2\": lambda x: np.all(np.abs(x) < 1000000),\n \"^3\": lambda x: np.all(np.abs(x) < 10000),\n \"1+\": lambda x: True,\n \"1-\": lambda x: True,\n \"1/\": lambda x: np.all(x != 0),\n }\n # apply transformations to the features in the given features list\n # modifies global variables df and feature_pool!\n nonlocal df, feature_pool, units\n nonlocal compiled_func_transformations, compiled_func_transforms_cond\n\n if compiled_func_transformations is None:\n compiled_func_transformations = {k: compile_func_transform(k, v) for k, v in func_transform.items()}\n compiled_func_transformations[\"log_plus_1\"] = compile_func_transform(\"log\", func_transform[\"log\"], plus_1=True)\n\n compiled_func_transforms_cond = {x[0]: nb.njit(x[1]) for x in func_transform_cond.items()}\n\n # returns a list of new features that were 
generated\n new_features: list[str] = []\n uncorr_features = set()\n # store all new features in a preallocated numpy array before adding it to the dataframe\n feat_array = np.zeros((df.shape[0], len(features_list) * len(transformations)), dtype=np.float32)\n cat_features = {feat for feat in features_list if len(df[feat].unique()) <= 2}\n func_transform_cond_cache = {} # Cache for compiled_func_transforms_cond checks\n for i, feat in enumerate(features_list):\n if verbose and not i % 100:\n print(f\"[feateng] {i:15}/{len(features_list):15} features transformed\", end=\"\\r\")\n for ft in transformations:\n # (don't compute transformations on categorical features)\n if feat in cat_features:\n continue\n # check if transformation is valid for particular feature (i.e. given actual numerical values)\n cache_key = (ft, feat)\n if cache_key not in func_transform_cond_cache:\n func_transform_cond_cache[cache_key] = compiled_func_transforms_cond[ft](df[feat].to_numpy())\n if func_transform_cond_cache[cache_key]:\n # get the expression (based on the primary features)\n expr = func_transform[ft](feature_pool[feat])\n expr_name = str(expr)\n # we're simplifying expressions, so we might already have that one\n if expr_name not in feature_pool:\n # if we're given units, check if the operation is legal\n if units:\n try:\n units[expr_name] = func_transform_units[ft](units[feat])\n units[expr_name].__dict__[\"_magnitude\"] = 1.0\n except (pint.DimensionalityError, pint.OffsetUnitCalculusError):\n continue\n feature_pool[expr_name] = expr\n if expr == \"log\" and np.any(df[feat] < 1):\n f = compiled_func_transformations[\"log_plus_1\"]\n else:\n f = compiled_func_transformations[ft]\n new_feat = np.array(f(df[feat].to_numpy()), dtype=np.float32)\n # near 0 variance test - sometimes all that's left is \"e\"\n if np.isfinite(new_feat).all() and np.var(new_feat) > 1e-10:\n corr = abs(np.corrcoef(new_feat, df[feat])[0, 1])\n if corr < 1.0:\n feat_array[:, len(new_features)] = new_feat\n new_features.append(expr_name)\n # correlation test: don't include features that are basically the same as the original features\n # but we only filter them out at the end, since they still might help in other steps!\n if corr < 0.95:\n uncorr_features.add(expr_name)\n if verbose > 0:\n logging.info(\n f\"[feateng] Generated {len(new_features)} transformed features from {len(features_list)} original features - done.\",\n )\n df = df.join(pd.DataFrame(feat_array[:, : len(new_features)], columns=new_features, index=df.index, dtype=np.float32))\n return new_features, uncorr_features\n\n def compile_func_combinations(func_combinations: dict) -> dict:\n d = {}\n for fc in func_combinations:\n s, t = sympy.symbols(\"s t\")\n expr_temp = func_combinations[fc](s, t)\n fn = lambdify((s, t), expr_temp)\n vect = nb.vectorize([\"float32(float32, float32)\"], nopython=True)\n d[fc] = vect(fn)\n return d\n\n def get_feature_combinations(feature_tuples: list) -> tuple[list, set]:\n # new features as combinations of two other features\n func_combinations = {\n \"x+y\": lambda x, y: x + y,\n \"x*y\": lambda x, y: x * y,\n \"x-y\": lambda x, y: x - y,\n \"y-x\": lambda x, y: y - x,\n }\n # get all feature combinations for the given feature tuples\n # modifies global variables df and feature_pool!\n nonlocal df, feature_pool, units, compiled_func_combinations\n\n if compiled_func_combinations is None:\n compiled_func_combinations = compile_func_combinations(func_combinations)\n\n # only compute all combinations if there are more transformations 
applied afterwards\n # additions at the highest level are sorted out later anyways\n combinations = [\"x*y\"] if steps == max_steps else list(func_combinations.keys())\n # returns a list of new features that were generated\n new_features: list[str] = []\n uncorr_features = set()\n # store all new features in a preallocated numpy array before adding it to the dataframe\n feat_array = np.zeros((df.shape[0], len(feature_tuples) * len(combinations)), dtype=np.float32)\n for i, (feat1, feat2) in enumerate(feature_tuples):\n if verbose and not i % 100:\n print(f\"[feateng] {i:15}/{len(feature_tuples):15} feature tuples combined\", end=\"\\r\")\n for fc in combinations:\n expr = func_combinations[fc](feature_pool[feat1], feature_pool[feat2])\n expr_name = str(expr)\n if expr_name not in feature_pool:\n # if we're given units, check if the operation is legal\n if units:\n try:\n units[expr_name] = func_combinations[fc](units[feat1], units[feat2])\n units[expr_name].__dict__[\"_magnitude\"] = 1.0\n except (pint.DimensionalityError, pint.OffsetUnitCalculusError):\n continue\n feature_pool[expr_name] = expr\n f = compiled_func_combinations[fc]\n new_feat = np.array(f(df[feat1].to_numpy(), df[feat2].to_numpy()), dtype=np.float32)\n # near 0 variance test - sometimes all that's left is \"e\"\n if np.isfinite(new_feat).all() and np.var(new_feat) > 1e-10:\n corr = max(abs(np.corrcoef(new_feat, df[feat1])[0, 1]), abs(np.corrcoef(new_feat, df[feat2])[0, 1]))\n if corr < 1.0:\n feat_array[:, len(new_features)] = new_feat\n new_features.append(expr_name)\n # correlation test: don't include features that are basically the same as the original features\n # but we only filter them out at the end, since they still might help in other steps!\n if corr < 0.95:\n uncorr_features.add(expr_name)\n if verbose > 0:\n logging.info(\n f\"[feateng] Generated {len(new_features)} feature combinations from {len(feature_tuples)} original feature tuples - done.\",\n )\n df = df.join(pd.DataFrame(feat_array[:, : len(new_features)], columns=new_features, index=df.index, dtype=np.float32))\n return new_features, uncorr_features\n\n # get transformations of initial features\n steps = 1\n if verbose > 0:\n logging.info(\"[feateng] Step 1: transformation of original features\")\n original_features = list(feature_pool.keys())\n uncorr_features = set(feature_pool.keys())\n temp_new, temp_uncorr = apply_transformations(original_features)\n original_features.extend(temp_new)\n uncorr_features.update(temp_uncorr)\n steps += 1\n # get combinations of first feature set\n if steps <= max_steps:\n if verbose > 0:\n logging.info(\"[feateng] Step 2: first combination of features\")\n new_features, temp_uncorr = get_feature_combinations(list(combinations(original_features, 2)))\n uncorr_features.update(temp_uncorr)\n steps += 1\n while steps <= max_steps:\n # apply transformations on these new features\n if verbose > 0:\n logging.info(f\"[feateng] Step {steps}: transformation of new features\")\n temp_new, temp_uncorr = apply_transformations(new_features)\n new_features.extend(temp_new)\n uncorr_features.update(temp_uncorr)\n steps += 1\n # get combinations of old and new features\n if steps <= max_steps:\n if verbose > 0:\n logging.info(f\"[feateng] Step {steps}: combining old and new features\")\n new_new_features, temp_uncorr = get_feature_combinations(list(product(original_features, new_features)))\n uncorr_features.update(temp_uncorr)\n steps += 1\n # and combinations of new features within themselves\n if steps <= max_steps:\n if verbose 
> 0:\n logging.info(f\"[feateng] Step {steps}: combining new features\")\n temp_new, temp_uncorr = get_feature_combinations(list(combinations(new_features, 2)))\n new_new_features.extend(temp_new)\n uncorr_features.update(temp_uncorr)\n steps += 1\n # update old and new features and repeat\n original_features.extend(new_features)\n new_features = new_new_features\n\n # sort out all features that are just additions on the highest level or correlated with more basic features\n if verbose > 0:\n logging.info(f\"[feateng] Generated altogether {len(feature_pool) - len(start_features)} new features in {max_steps} steps\") # type: ignore\n logging.info(\"[feateng] Removing correlated features, as well as additions at the highest level\")\n feature_pool = {\n c: feature_pool[c] for c in feature_pool if c in uncorr_features and feature_pool[c].func != sympy.core.add.Add\n }\n cols = [\n c for c in list(df.columns) if c in feature_pool and c not in df_org.columns\n ] # categorical cols not in feature_pool\n if cols:\n # check for correlated features again; this time with the start features\n corrs = dict(\n zip(\n cols,\n np.max(\n np.abs(\n np.dot(StandardScaler().fit_transform(df[cols]).T, StandardScaler().fit_transform(df_org))\n / df_org.shape[0],\n ),\n axis=1,\n ),\n ),\n )\n cols = [c for c in cols if corrs[c] < 0.9]\n cols = list(df_org.columns) + cols\n if verbose > 0:\n logging.info(f\"[feateng] Generated a total of {len(feature_pool) - len(start_features)} additional features\") # type: ignore\n return df[cols], feature_pool", "def fes_date_filter(start_date='1900-01-01', stop_date='2100-01-01',\n constraint='overlaps'):\n if constraint == 'overlaps':\n propertyname = 'apiso:TempExtent_begin'\n start = fes.PropertyIsLessThanOrEqualTo(propertyname=propertyname,\n literal=stop_date)\n propertyname = 'apiso:TempExtent_end'\n stop = fes.PropertyIsGreaterThanOrEqualTo(propertyname=propertyname,\n literal=start_date)\n elif constraint == 'within':\n propertyname = 'apiso:TempExtent_begin'\n start = fes.PropertyIsGreaterThanOrEqualTo(propertyname=propertyname,\n literal=start_date)\n propertyname = 'apiso:TempExtent_end'\n stop = fes.PropertyIsLessThanOrEqualTo(propertyname=propertyname,\n literal=stop_date)\n return start, stop", "def make_features(self, x_hits, y_hits, dow, lagged_hits, pf_age, pf_si, pf_network, pf_gender, page_ix, pf_price_cat,\n page_popularity, quarter_autocorr):\n # Split day of week to train and test\n x_dow, y_dow = tf.split(dow, [self.train_window, self.predict_window], axis=0)\n\n # Normalize hits\n mean = tf.reduce_mean(x_hits)\n std = tf.sqrt(tf.reduce_mean(tf.squared_difference(x_hits, mean)))\n norm_x_hits = (x_hits - mean) / std\n norm_y_hits = (y_hits - mean) / std\n norm_lagged_hits = (lagged_hits - mean) / std\n\n # Split lagged hits to train and test\n x_lagged, y_lagged = tf.split(norm_lagged_hits, [self.train_window, self.predict_window], axis=0)\n\n # Combine all page features into single tensor\n stacked_features = tf.stack([page_popularity, quarter_autocorr])\n flat_ucdoc_features = tf.concat([pf_age, pf_si, pf_network, pf_gender, pf_price_cat, stacked_features], axis=0) #pf_region\n ucdoc_features = tf.expand_dims(flat_ucdoc_features, 0)\n\n # Train features\n x_features = tf.concat([\n # [n_days] -> [n_days, 1]\n tf.expand_dims(norm_x_hits, -1),\n x_dow,\n x_lagged,\n # Stretch ucdoc_features to all training days\n # [1, features] -> [n_days, features]\n tf.tile(ucdoc_features, [self.train_window, 1])\n ], axis=1)\n\n # Test features\n y_features = 
tf.concat([\n # [n_days] -> [n_days, 1]\n y_dow,\n y_lagged,\n # Stretch ucdoc_features to all testing days\n # [1, features] -> [n_days, features]\n tf.tile(ucdoc_features, [self.predict_window, 1])\n ], axis=1)\n\n return x_hits, x_features, norm_x_hits, x_lagged, y_hits, y_features, norm_y_hits, mean, std, flat_ucdoc_features, page_ix", "def add_features(df_in, rolling_win_size=15):\n\n sensor_cols = []\n index = df_in.columns.get_loc('TTF')\n for i in df_in.columns[2:index]:\n sensor_cols.append(i)\n\n sensor_av_cols = [nm+'_av' for nm in sensor_cols]\n sensor_sd_cols = [nm+'_sd' for nm in sensor_cols]\n\n df_out = pd.DataFrame()\n\n ws = rolling_win_size\n\n #calculate rolling stats for each engine id\n\n for m_id in pd.unique(df_in.Turbine_ID):\n\n # get a subset for each engine sensors\n df_engine = df_in[df_in['Turbine_ID'] == m_id]\n df_sub = df_engine[sensor_cols]\n\n # get rolling mean for the subset\n av = df_sub.rolling(ws, min_periods=1).mean()\n av.columns = sensor_av_cols\n\n # get the rolling standard deviation for the subset\n sd = df_sub.rolling(ws, min_periods=1).std().fillna(0)\n sd.columns = sensor_sd_cols\n\n # combine the two new subset dataframes columns to the engine subset\n new_ftrs = pd.concat([df_engine,av,sd], axis=1)\n\n # add the new features rows to the output dataframe\n df_out = pd.concat([df_out,new_ftrs])\n df_out = df_out.sort_values(by=['Turbine_ID', 'Date'] )\n return df_out", "def keyword_based_date_range_selection(self, keyword,keyword_value, aggfunc={},date_column=None, date_column_format=\"%Y-%m-%d %H:%M:%S\", custom=[],grouping_colums=[],where=None):\n expected_interval_for_aggregation_in_seconds = 0\n # working code with converion of date limits commenting the below section for the testing of pivot tables and grouper below this section\n # need to use reg exp but there is problem with separating kewa_value ex:10min should be separated as 10 min\n # if keyword == 'custom':\n # print(\"Currently not supported\")\n # exit()\n #\n # elif 'min' in keyword:\n # expected_seconds = 60\n # expected_interval_for_aggregation_in_seconds = expected_seconds*keyword_value\n # elif 'hour' in keyword:\n # expected_seconds = 60*60\n # expected_interval_for_aggregation_in_seconds = expected_seconds*keyword_value\n # elif 'day' in keyword:\n # expected_seconds = 60*60*24\n # expected_interval_for_aggregation_in_seconds = expected_seconds*keyword_value\n # elif 'week' in keyword:\n # expected_seconds = 60*60*24*7\n # expected_interval_for_aggregation_in_seconds = expected_seconds*keyword_value\n # elif 'month' in keyword:\n # expected_seconds = 60*60*24*30\n # expected_interval_for_aggregation_in_seconds = expected_seconds*keyword_value\n\n\n\n #uniquify the date column from the dataframe\n\n\n\n # #now get the min_interval_in_seconds of the user\n # min_seconds = self.get_min_interval_in_seconds(date_column=date_column,format_of_date=date_column_format)\n #\n # print(\"the minimum interval seconds is\", min_seconds)\n # print(\"expected_interval_for_aggregation_in_seconds\", expected_interval_for_aggregation_in_seconds)\n # #compare the min_seconds and expected_interval_for_aggregation_in_seconds if min_seconds is greated than expected_inteval then as for now its error result_df.\n #\n # if expected_interval_for_aggregation_in_seconds > min_seconds:\n # #calculating the range to split the dataframe\n # range = int(expected_interval_for_aggregation_in_seconds/min_seconds)\n # #split the dataframr into multipldf based on range\n # splited_dfs = 
self.split_df_to_many(range)\n #\n # date_value = []\n # aggregation_value = []\n # #here we get splited df according to range\n # for df in splited_dfs:\n # print(\"splited dfs \",df)\n # value_df = df.iloc[:,value_column]\n # # print(\"the value list is \",value_df)\n # aggregation = Aggregator()\n # #apply aggregation on each chucnk of divrded dataframe\n # aggregation_result = aggregation.many_to_one(func,value_df)\n # d = self.df.iloc[:,date_column]\n # date_name = d.name\n # print(\"the date name\",date_name)\n # #append the first vale o date field into date_value list\n # date_value.append(df[date_name].iloc[0])\n # #append the result of aggregation class into aggregation_value list\n # aggregation_value.append(aggregation_result)\n # d = self.df.iloc[:,date_column]\n # date_name = d.name\n # v = self.df.iloc[:,value_column]\n # value_name = v.name\n #\n # #generate the dict from both date_value list and aggregation_value list\n # frame = {date_name:date_value,value_name:aggregation_value}\n # #create a result dataframe\n # result_df = pd.DataFrame(frame)\n # print(\"the results dataframe is \", result_df)\n #\n # print(\"the expected range is\",range)\n #\n # else:\n # print(\"-F- the interval range supporting is not found\")\n # exit()\n\n # todo\n # use self.df\n #print(self.df.iloc[0:range,1])\n # resulted_array = []\n # for v in self.df.iloc[0:range,value_column]:\n # resulted_array.append(v)\n #\n #\n # agg = Aggregator()\n # return agg.many_to_one(func, resulted_array)\n\n\n # craeting the below section for the testing of pivot table and grouper methods.\n df = self.df\n if aggfunc:\n if len(aggfunc)>0:\n\n for column, value in aggfunc.items():\n # print(\"the converting column name is\", column)\n try:\n df[column] = df[column].astype(float)\n except:\n result_df=\"Error\"\n\n\n # print(\"the converted column name is\",df.dtypes)\n #Todo should convert the numerical columns to numbered datatype]\n #for testing purpose e manually converted it\n\n\n # print(\"the keyword is \",keyword)\n # print(\"the date column is \",date_column)\n # print(\"the grouping_colums is \",grouping_colums)\n # print(\"the value column is \",value_column)\n # print(\"the aggrigation function is \",aggfunc)\n # print(\"in project query frequency\",keyword)\n if keyword:\n\n if keyword == 'custom':\n # print(\"Currently not supported\")\n exit()\n\n elif 'min' in keyword:\n expected_freq = 'M'\n # print(\"the date column is \",date_column)\n if where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of 
query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n\n elif where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n\n elif where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif not where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n\n\n elif 'hour' in keyword:\n expected_freq = 'H'\n # print(\"the date column is \",date_column)\n if where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n\n elif where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n\n elif where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif not where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif 'week' in keyword:\n expected_freq = 
'W'\n # print(\"the date column is \",date_column)\n if where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n\n elif where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n\n elif where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif not where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n\n elif 'day' in keyword:\n expected_freq = 'D'\n # print(\"the date column is \",date_column)\n if where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n 
result_df=\"Error\"\n\n elif where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n\n elif where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif not where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif 'month' in keyword:\n expected_freq = 'M'\n # print(\"the date column is \",date_column)\n if where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n\n elif where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n\n elif where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif not where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif 'year' in keyword:\n expected_freq = 'Y'\n # print(\"year just grouping\",grouping_colums)\n # print(\"the date column is \",date_column)\n 
if where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n\n elif where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n\n elif where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif not where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif 'quarterly' in keyword:\n expected_freq = 'Q'\n # print(\"the date column is \",date_column)\n if where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n \n try:\n result_df = df.pivot_table(index= grouping_colums,columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n result_df = result_df.stack().reset_index()\n except:\n result_df=\"Error\"\n elif where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and not grouping_colums:\n try:\n result_df = df.pivot_table(columns =pd.Grouper(freq=expected_freq,key=date_column),fill_value=0,aggfunc=aggfunc,)\n # print(\"new type of query\",result_df)\n pv_df = result_df.transpose()\n result_df = pv_df.reset_index()\n except:\n result_df=\"Error\"\n\n elif where and grouping_colums and 
not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n try:\n # print(\"year just grouping\")\n grouping_colums.append(date_column)\n grouped_df =df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n\n elif where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n elif not where and expected_freq:\n try:\n # print(\"only frequency\")\n s_df = df.groupby(pd.Grouper(freq=expected_freq,key=date_column))\n result_df = pd.DataFrame(s_df.size().reset_index(name = \"Count\"))\n \n except:\n result_df=\"Error\"\n else:\n print(\"else in project query\")\n if where and aggfunc and grouping_colums :\n result_df = df.pivot_table(index= grouping_colums ,aggfunc=aggfunc)\n # print(\"the df without time grouper frequency and arregation\",result_df)\n result_df = result_df.reset_index()\n \n try:\n result_df = df.pivot_table(index= grouping_colums ,aggfunc=aggfunc)\n # print(\"the df without time grouper frequency and arregation\",result_df)\n result_df = result_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n result_df = df.pivot_table(index= grouping_colums ,aggfunc=aggfunc)\n print(\"the df without time grouper frequency and arregation\",result_df)\n result_df = result_df.reset_index()\n print(\"after reset index\",result_df)\n \n try:\n result_df = df.pivot_table(index= grouping_colums ,aggfunc=aggfunc)\n print(\"the df without time grouper frequency and arregation\",result_df)\n result_df = result_df.reset_index()\n print(\"after reset index\",result_df)\n except:\n result_df=\"Error\"\n elif where and grouping_colums and not aggfunc:\n grouped_df = df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n try:\n grouped_df = df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n grouped_df = df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n try:\n grouped_df = df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n \n elif aggfunc and not grouping_colums:\n print(\"its agrigation with no grouping\")\n try:\n result_df=\"Error\"\n except:\n result_df=\"Error\"\n \n \n \n \n else:\n if where and aggfunc and grouping_colums :\n \n \n try:\n result_df = df.pivot_table(index= grouping_colums ,aggfunc=aggfunc)\n # print(\"the df without time grouper frequency and arregation\",result_df)\n result_df = result_df.reset_index()\n except:\n result_df=\"Error\"\n elif not where and aggfunc and grouping_colums :\n \n \n try:\n result_df = df.pivot_table(index= grouping_colums ,aggfunc=aggfunc)\n print(\"the df without time grouper frequency and arregation\",result_df)\n result_df = result_df.reset_index()\n print(\"after reset index\",result_df)\n except:\n result_df=\"Error\"\n elif where and grouping_colums and not aggfunc:\n \n try:\n 
grouped_df = df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n elif not where and grouping_colums and not aggfunc:\n \n try:\n grouped_df = df.groupby(grouping_colums)\n result_df = pd.DataFrame(grouped_df.size().reset_index(name = \"Count\"))\n except:\n result_df=\"Error\"\n \n elif where and aggfunc and not grouping_colums:\n \n try:\n result_df=\"Error\"\n except:\n result_df=\"Error\"\n elif not where and aggfunc and not grouping_colums:\n \n try:\n result_df=\"Error\"\n except:\n result_df=\"Error\"\n # print(\"the result data head\", result_df)\n # print(\"the grouper column is \",grouping_colums)\n # print(\"the resulted dataframe is from the pivot table\",result_df)\n return result_df", "def select_annotation_by_ts(csv_data, lbound=None, rbound=None, by=None):\n if by==None:\n if not lbound:\n lbound = csv_data[st_col].iloc[0] # iloc is faster than head() or tail()\n if not rbound:\n rbound = csv_data[et_col].iloc[-1]\n # start_flags = np.array(csv_data[et_col].apply(lambda x: x>lbound)) ## Note it's too slow\n flags = (csv_data[et_col] > lbound) & (csv_data[st_col] < rbound)\n # end_flags = np.array(csv_data[st_col].apply(lambda x:x<rbound)) ## Note it's too slow\n subset_annotation_data = csv_data[flags]\n # subset_annotation_data = subset_annotation_data.reset_index(drop=True) ## Don't reset index\n subset_annotation_data[st_col].iloc[0] = max(lbound,subset_annotation_data[st_col].iloc[0])\n subset_annotation_data[et_col].iloc[-1] = min(rbound,subset_annotation_data[et_col].iloc[-1])\n else:\n groupby_annotation = csv_data.groupby(by)\n subset_group_datas = []\n for group_name, group_data in groupby_annotation:\n if lbound == None:\n lbound = group_data[st_col].iloc[0]\n if rbound == None:\n rbound = group_data[et_col].iloc[-1]\n # start_flags = np.array(group_data[et_col].apply(lambda x: x>lbound)) ## Note it's too slow\n start_flags = group_data[et_col] > lbound\n # end_flags = np.array(group_data[st_col].apply(lambda x:x<rbound)) ## Note it's too slow\n end_flags = group_data[st_col] < rbound\n subset_group_data = group_data[np.logical_and(start_flags,end_flags)]\n subset_group_data[st_col].iloc[0] = max(lbound,subset_group_data[st_col].iloc[0])\n subset_group_data[et_col].iloc[-1] = min(rbound,subset_group_data[et_col].iloc[-1])\n # subset_group_data = subset_group_data.reset_index(drop=True) ## Don't reset index\n subset_group_datas.append(subset_group_data)\n subset_annotation_data = annotation_data_consolidator(subset_group_datas)\n return subset_annotation_data", "def time_split_dataset(df, train_start_date, train_end_date, holdout_end_date, date_col):\n\n train_set = df.copy()[\n (df[date_col] >= train_start_date) & (df[date_col] <= train_end_date)]\n\n test_set = df.copy()[\n (df[date_col] > train_end_date) & (df[date_col] <= holdout_end_date)]\n\n return train_set, test_set", "def extract_features(time_series, window):\n if not tsd_common.is_standard_time_series(time_series, window):\n # add your report of this error here...\n\n return []\n\n # spilt time_series\n split_time_series = tsd_common.split_time_series(time_series, window)\n # nomalize time_series\n normalized_split_time_series = tsd_common.normalize_time_series(split_time_series)\n max_min_normalized_time_series = tsd_common.normalize_time_series_by_max_min(split_time_series)\n s_features = statistical_features.get_statistical_features(normalized_split_time_series[4])\n f_features = 
fitting_features.get_fitting_features(normalized_split_time_series)\n c_features = classification_features.get_classification_features(max_min_normalized_time_series)\n # combine features with types\n features = s_features + f_features + c_features\n return features", "def feature_extraction(self) -> None:\n # Add the hour, minute, and x column to the data\n self.df_poly[\"hour\"] = self.df_poly[\"time\"].apply(lambda y: y.hour)\n self.df_poly[\"minute\"] = self.df_poly[\"time\"].apply(lambda y: y.minute)\n self.df_poly[\"x\"] = self.df_poly[\"hour\"] * 60 + self.df_poly[\"minute\"]\n\n # Empty list to hold the feature names\n poly_feature_names = []\n\n # Add the poly columns to the df_poly\n for degree in [0, 1, 2, 3, 4, 5]:\n self.df_poly = poly(self.df_poly, degree)\n poly_feature_names.append(\"poly_\" + str(degree))\n\n # filterout + - inf, nan\n self.df_poly = self.df_poly[\n ~self.df_poly.isin([np.nan, np.inf, -np.inf]).any(1)\n ]\n\n # Save the poly feature name\n self.poly_feature_names = poly_feature_names\n feature_names = []\n\n #########################################################################################\n train_index_poly = self.df_poly[\n ~self.df_poly.isin([np.nan, np.inf, -np.inf]).any(1)\n ].index\n X_train_poly, y_train_poly = (\n self.df_poly[self.poly_feature_names].loc[train_index_poly],\n self.df_poly[\"y\"].loc[train_index_poly],\n )\n\n # Build the Polynomial Regression Model\n lin_reg = LinearRegression()\n lin_reg.fit(X_train_poly, y_train_poly)\n self.poly_model = lin_reg\n y_train_season = lin_reg.predict(X_train_poly)\n self.y_train_season_obj = y_train_season\n #########################################################################################\n\n for n in [10, 15, 20, 25, 30]:\n self.df = MOM(self.df, n)\n feature_names.append(\"MOM_\" + str(n))\n for n in [10, 15, 20, 25, 30]:\n self.df = ROC(self.df, n)\n feature_names.append(\"ROC_\" + str(n))\n for n in [1, 2, 3, 4, 5]:\n self.df = LAG(self.df, n)\n feature_names.append(\"LAG_\" + str(n))\n for n in [10, 20, 30]:\n self.df = MA(self.df, n)\n feature_names.append(\"MA_\" + str(n))\n\n self.df = self.df[\n ~self.df.isin([np.nan, np.inf, -np.inf]).any(1)\n ] # filterout + - inf, nan\n self.feature_names = feature_names", "def create_interpolated_turnstile_data(\n start_date: datetime,\n end_date: datetime = None,\n group_by: List[str] = ['UNIT', 'SCP'],\n frequency: str = '1H') -> pd.DataFrame:\n\n if not set(group_by).issubset(['STATION', 'LINENAME', 'UNIT', 'SCP']):\n raise Exception(\"Unsupported group by keys: \" + str(group_by))\n\n\n raw = download_turnstile_data(start_date, end_date)\n raw['date'] = pd.to_datetime(raw.DATE)\n raw = raw[(raw.date <= (end_date + timedelta(1))) & (raw.date >= (start_date - timedelta(1)))]\n raw.drop('date',axis=1,inplace=True)\n\n interpolated = _interpolate(_process_raw_data(raw, group_by), group_by, frequency)\n end_date = end_date or interpolated.index.max()\n return interpolated[interpolated.index.to_series().between(\n start_date, end_date)] .drop(columns=[\"entry_diffs\", \"exit_diffs\"])", "def extract_features(self) -> DataFrameLike:\n # return already calculated features if stored in state\n if self._final_features:\n return self._finalize_features()\n\n # initialization: generation 0 features are neighborhood features\n features = self.graph.get_neighborhood_features()\n self._update(features)\n\n for generation in range(1, self.max_generations):\n\n self.generation_count = generation\n self._feature_group_thresh = generation\n\n features = 
self._get_next_features()\n self._update(features)\n\n # stop if an iteration results in no features retained\n if not self._final_features[generation]:\n break\n\n return self._finalize_features()", "def df_customers_features_build(self):\n\n df_customers_rfm = self._df_customers_rfm.copy()\n df_customers_timeFeature = self._df_customers_timeFeature.copy()\n df_customers_nlp = self._df_customers_pca_nlp.copy()\n\n #-------------------------------------------------------------------------\n # Dataframe are aggregated; note that indexes are customerID.\n #-------------------------------------------------------------------------\n df_customers = pd.DataFrame()\n\n df_customers = pd.concat([df_customers,df_customers_rfm], axis=1)\n\n df_customers = pd.concat([df_customers,df_customers_timeFeature]\\\n , join='inner', axis=1)\n\n df_customers = pd.concat([df_customers,df_customers_nlp]\\\n , join='inner', axis=1)\n \n self.strprint(\"All features : \"+str(df_customers.shape))\n self._df_customers = df_customers.copy()\n return", "def calendartsfeatures(self, filename: str, country: str,\n events: Optional[Union[str, Dict[str, List[str]]]] = None,\n unique_id_column: str = 'unique_id',\n ds_column: str = 'ds',\n y_column: str = 'y') -> Dict:\n query = dict(\n s3_args=dict(\n s3_url=f's3://{self.bucket_name}',\n ),\n args = dict(\n filename=filename,\n country=country,\n unique_id_column=unique_id_column,\n ds_column=ds_column,\n y_column=y_column,\n )\n )\n\n if events is not None:\n if isinstance(events, str):\n query['args']['events'] = events\n else:\n str_events = [f'{key}={\",\".join(value)}' for key, value in events.items()]\n str_events = '/'.join(str_events)\n query['args']['events'] = str_events\n\n resp = requests.post(f'{self.invoke_url}/calendartsfeatures',\n headers={'x-api-key': self.api_key},\n data=json.dumps(query))\n\n return self._parse_response(resp.text)", "def populate_features(self):\n # AssetFeatureValue types\n satellite_feature_value = AssetFeatureValue.Standard.FUND_TYPE_SATELLITE.get_object()\n core_feature_value = AssetFeatureValue.Standard.FUND_TYPE_CORE.get_object()\n\n logger.info('Populating features for ticker %s' % self)\n r_feat = self.get_region_feature_value()\n ac_feat = self.get_asset_class_feature_value()\n curr_feat = self.get_currency_feature_value()\n at_feat = self.get_asset_type_feature_value()\n self.features.clear()\n self.features.add(r_feat, ac_feat, curr_feat, at_feat)\n if self.ethical:\n self.features.add(AssetFeatureValue.Standard.SRI_OTHER.get_object())\n self.features.add(core_feature_value if self.etf else satellite_feature_value)", "def get_fact_time_filtered(self, category, selected_option, \\\r\n start_tmstmp, \\\r\n end_tmstmp):\r\n try:\r\n conn = self.create_connection()\r\n query = \"\"\"WITH sub_category_lookup AS (\r\n\t\t\t\t\t\t\t\tSELECT id \r\n\t\t\t\t\t\t\t\tFROM categories \r\n\t\t\t\t\t\t\t\tWHERE category = '%s' \r\n\t\t\t\t\t\t\t\tAND sub_category = '%s')\t\r\n\t\t\t\t\t SELECT date_time,\r\n\t\t\t\t\t\t\t data \r\n\t\t\t\t FROM fact\r\n\t\t\t\t\t WHERE category_id = (select id FROM sub_category_lookup) \r\n\t\t\t\t\t AND (date_time>= '%s' AND date_time<'%s'\t)\t\t\t\t \r\n\t\t\t\t\t ORDER BY date_time ;\"\"\"%(category, selected_option, \\\r\n start_tmstmp, end_tmstmp)\r\n \r\n data_frame = pd.read_sql(query, conn)\r\n print(query)\r\n conn.close()\r\n except (psycopg2.Error, ValueError):\r\n print(\"Error occured at get_fact_time_filtered, check connection or query\")\r\n return data_frame", "def datetime_columns(df, 
feature):\r\n df['day'] = pd.to_datetime(df[feature]).dt.day\r\n df['month'] = pd.to_datetime(df[feature]).dt.month\r\n df['year'] = pd.to_datetime(df[feature]).dt.year\r\n return df", "def add_features(self, other_features, on=\"time_exchange\"):\n self.data = self.data.join(other_features, on=on).dropna()", "def datetime_features(\n s: pd.Series, result: Optional[pd.DataFrame] = None\n) -> pd.DataFrame:\n result = date_features(s, result)\n return time_features(s, result)", "def _filter_features(\n record_batch: pa.RecordBatch,\n feature_allowlist: List[types.FeatureName]) -> pa.RecordBatch:\n columns_to_select = []\n column_names_to_select = []\n for feature_name in feature_allowlist:\n col = arrow_util.get_column(record_batch, feature_name, missing_ok=True)\n if col is None:\n continue\n columns_to_select.append(col)\n column_names_to_select.append(feature_name)\n return pa.RecordBatch.from_arrays(columns_to_select, column_names_to_select)", "def create_new_features(self):\n train = self.train\n \n train['is_context'] = train['context_type'].isin(CONTEXT_TYPE_TEST)\n train['is_context_flow'] = train['listen_type'] * train['is_context']\n \n train['is_listened_context'] = train['is_listened'] * train['is_context']\n train['is_listened_flow'] = train['is_listened'] * train['listen_type']\n train['is_listened_context_flow'] = train['is_listened'] * train['is_context_flow']\n \n for feature in self.categorize_features:\n gby_feat = train.groupby(feature)\n new_features(train, gby_feat, feature, feature in self.listen_type_features, self.context_features, self.flow_features, self.fillna)\n \n # Variable combinations\n for feat1 in self.combo_features1:\n for feat2 in self.combo_features2:\n gby_feat = train.groupby([feat1, feat2])\n name = feat1 + '_' + feat2\n new_features(train, gby_feat, name, feat1 in self.listen_type_features, self.context_features, self.flow_features, self.fillna)", "def calculate_data_range(self, data: pd.DataFrame, ste_data: dict = {}) -> None:\n\n data_column = data.columns.to_numpy()\n\n if len(data_column) > 1:\n raise ValueError(\n f\"Data must only have one column with performance measures\"\n )\n\n # Initialize data range as empty object\n self.data_range = defaultdict(dict)\n\n # Get data range over scenario and STE data if not provided as input\n for task in self.unique_tasks:\n # Get feature range for each task\n task_min = np.nanmin(data.loc[task])\n task_max = np.nanmax(data.loc[task])\n\n if ste_data.get(task):\n x_ste = np.concatenate(\n [\n ste_data_df[ste_data_df[\"block_type\"] == \"train\"][\n self.perf_measure\n ].to_numpy()\n for ste_data_df in ste_data.get(task)\n ]\n )\n self.data_range[task][\"min\"] = min(task_min, np.nanmin(x_ste))\n self.data_range[task][\"max\"] = max(task_max, np.nanmax(x_ste))\n else:\n self.data_range[task][\"min\"] = task_min\n self.data_range[task][\"max\"] = task_max\n\n self.run_min = min([val[\"min\"] for val in self.data_range.values()])\n self.run_max = max([val[\"max\"] for val in self.data_range.values()])", "def compute_patterns(self, column_name, bounds, start_date=None,\n end_date=None):\n self.check_for_column(column_name)\n # If start_date or end_date is None, we use the smallest and largest\n # datetime in the data respectively\n if start_date is None:\n start_date = self.data.index.min()\n\n if end_date is None:\n end_date = self.data.index.max()\n\n all_data = self.data[start_date:end_date]\n\n # These constitute the boundaries between low, medium and high\n # derivatives\n lower, upper = 
dp.get_derivative_bounds(all_data[column_name], bounds)\n\n # compute patterns\n pattern_dictionary = {} # maps patterns to lists of dates with pattern\n date_pattern_dict = {} # maps datetimes to patterns\n for dt in all_data.index:\n # We skip any missing data, leaving it NaN in the final dataframe\n if np.isnan(all_data[column_name][dt]):\n continue\n\n pattern = dp.create_pattern(dt, all_data,\n column_name, (lower, upper))\n date_pattern_dict[dt] = pattern\n\n date_patterns = pd.Series(date_pattern_dict)\n self.data[column_name + '_patterns'] = date_patterns[self.data.index]", "def choose_group(df, time_step=None, base=0, interval=None, gage=None, m=None, h=None, wet=False): \n if time_step is not None:\n resample_kwargs = get_resample_kwargs(df)\n if wet:\n resample_kwargs.update({'how':'sum'})\n df = df.resample(time_step, base=base, **resample_kwargs)\n \n date_time = get_index(df, 'date_time')[1]\n a, RG = get_index(df, 'RG')\n \n # Choose along gage axis\n if gage is None:\n df = df.mean(axis=a)\n else:\n try:\n df = df.loc[:,gage]\n except: \n df = df.loc[:,:,gage]\n try:\n a, RG = get_index(df, index='RG')\n except:\n pass\n \n # Group along time axis\n if interval is 'seasonal':\n if h is not None:\n gb = df.groupby(date_time.hour)\n if type(h) is list or type(h) is tuple:\n df = pd.concat([gb.get_group(n) for n in h])\n else:\n df = gb.get_group(h)\n date_time = get_index(df, 'date_time')[1]\n gb = df.groupby(date_time.month)\n if m is not None:\n try:\n gb = [(m, gb.get_group(m))]\n except:\n gb = [(month, gb.get_group(month)) for month in m]\n \n elif interval is 'diurnal': \n if m is not None:\n gb = df.groupby(date_time.month)\n if type(m) is list or type(m) is tuple:\n df = pd.concat([gb.get_group(n) for n in m])\n else:\n df = gb.get_group(m)\n date_time = get_index(df, 'date_time')[1]\n gb = df.groupby(date_time.hour)\n if h is not None:\n try:\n gb = [(h, gb.get_group(h))]\n except:\n gb = [(hour, gb.get_group(hour)) for hour in h]\n \n else:\n gb = [('all',df)]\n\n return gb", "def construct_training_data_query(self, operation='training'):\n # FUTURE: make dollar return target/features dynamic\n if self.feature_minutes_list == None or self.trade_window_list == None:\n raise Exception(\"To construct training data query, the optional feature_minutes_list and trade_window_list attributes must be set!\")\n \n feature_col_list = []\n target_col_list = []\n base_ctes_list = []\n feature_cte_list = []\n final_col_list = []\n interaction_features_list = []\n join_conditions_list = []\n\n # Limit rows returned when pulling scoring features\n limit_where_clause = ''\n limit_clause = ''\n if operation == 'scoring':\n limit_minutes = max(self.feature_minutes_list) + 10\n limit_clause = f'LIMIT {limit_minutes}'\n # trying to move away from the where clause - limits are faster\n limit_trade_minute = (time.time() / 60) - limit_minutes - (5*60) \n limit_where_clause = f'AND trade_minute > {limit_trade_minute}'\n elif self.training_period is not None:\n limit_minutes = self.training_period + max(self.feature_minutes_list)\n limit_clause = f'LIMIT {limit_minutes}'\n print(f\"Training data query being limited to the first {limit_minutes} minutes. 
Training period plus {max(self.feature_minutes_list)} (max feature interval)\")\n # trying to move away from the where clause - limits are faster\n limit_trade_minute = (time.time() / 60) - self.training_period - (5*60)\n limit_where_clause = f'AND trade_minute > {limit_trade_minute}'\n\n\n for pair_type, coin_pair in self.coin_pair_dict.items():\n \"\"\"\n pair_type: 'alt', 'target'\n \"\"\"\n base_features_list = []\n base_ctes_list.append(f\"\"\"\n {pair_type}_{coin_pair}_end_orderbook AS (\n SELECT trade_minute - 1 AS lag_trade_minute, * \n FROM binance.orderbook\n WHERE coin_pair = '{coin_pair}'\n ORDER BY trade_minute DESC \n {limit_clause}\n ),\n {pair_type}_{coin_pair}_beg_orderbook AS (\n SELECT * \n FROM binance.orderbook\n WHERE coin_pair = '{coin_pair}'\n ORDER BY trade_minute DESC \n {limit_clause}\n ),\n {pair_type}_{coin_pair}_candlesticks AS (\n SELECT *\n FROM binance.candledicks c\n WHERE coin_pair = '{coin_pair}'\n ORDER BY trade_minute DESC \n {limit_clause}\n )\"\"\")\n # Base target variable features\n if pair_type == 'target':\n base_features_list.append(f\"\"\"\n c.close_datetime AS {coin_pair}_trade_close_datetime\n , extract(isodow from c.close_datetime) as trade_day_of_week\n , date_part('hour', c.close_datetime) as trade_hour\n , c.close_datetime::date - current_date as days_old\n \"\"\")\n final_col_list.append(f\"\"\"\n {coin_pair}_trade_close_datetime\n , trade_day_of_week\n , trade_hour\n , days_old\n \"\"\")\n feature_col_list.extend(['trade_day_of_week', 'trade_hour', 'days_old'])\n # Base features\n base_features_list.append(f\"\"\"\n c.trade_minute AS {coin_pair}_trade_minute\n , quote_asset_volume as {coin_pair}_quote_asset_volume\n , taker_sell_volume_percentage * 100 AS {coin_pair}_taker_sell_volume_perc_of_total\n , trade_count as {coin_pair}_trade_count\n , o_end.bids_cum_50000_weighted_avg - o_beg.bids_cum_50000_weighted_avg AS {coin_pair}_crnt_interval_bids_50000_price_diff\n , o_end.bids_cum_50000_weighted_avg - o_end.asks_cum_50000_weighted_avg AS {coin_pair}_crnt_interval_bids_v_asks_50000_price_diff \n , o_end.bids_cum_50000_weighted_std - o_beg.bids_cum_50000_weighted_std AS {coin_pair}_crnt_interval_bids_50000_std_diff\n , o_end.bids_cum_50000_weighted_std - o_end.asks_cum_50000_weighted_std AS {coin_pair}_crnt_interval_bids_v_asks_50000_std_diff\n , o_end.bids_cum_50000_weighted_std / (o_end.bids_cum_50000_weighted_std + o_end.asks_cum_50000_weighted_std) AS {coin_pair}_crnt_bids_50000_std_perc_of_total\n , o_end.bids_cum_200000_weighted_std / (o_end.bids_cum_200000_weighted_std + o_end.asks_cum_200000_weighted_std) AS {coin_pair}_crnt_bids_200000_std_perc_of_total\n , (o_end.bids_cum_200000_weighted_std / (o_end.bids_cum_200000_weighted_std + o_end.asks_cum_200000_weighted_std) \n + LEAD(o_end.bids_cum_200000_weighted_std, 1) OVER (ORDER BY c.trade_minute DESC) / (LEAD(o_end.bids_cum_200000_weighted_std, 1) OVER (ORDER BY c.trade_minute DESC) + LEAD(o_end.asks_cum_200000_weighted_std, 1) OVER (ORDER BY c.trade_minute DESC)) \n + LEAD(o_end.bids_cum_200000_weighted_std, 2) OVER (ORDER BY c.trade_minute DESC) / (LEAD(o_end.bids_cum_200000_weighted_std, 2) OVER (ORDER BY c.trade_minute DESC) + LEAD(o_end.asks_cum_200000_weighted_std, 2) OVER (ORDER BY c.trade_minute DESC))\n + LEAD(o_end.bids_cum_200000_weighted_std, 3) OVER (ORDER BY c.trade_minute DESC) / (LEAD(o_end.bids_cum_200000_weighted_std, 3) OVER (ORDER BY c.trade_minute DESC) + LEAD(o_end.asks_cum_200000_weighted_std, 3) OVER (ORDER BY c.trade_minute DESC))\n + 
LEAD(o_end.bids_cum_200000_weighted_std, 4) OVER (ORDER BY c.trade_minute DESC) / (LEAD(o_end.bids_cum_200000_weighted_std, 4) OVER (ORDER BY c.trade_minute DESC) + LEAD(o_end.asks_cum_200000_weighted_std, 4) OVER (ORDER BY c.trade_minute DESC))\n ) / 5 AS {coin_pair}_bids_200000_std_perc_of_total_avg\n \"\"\")\n final_col_list.append(f\"\"\"\n {coin_pair}_trade_minute\n , {coin_pair}_quote_asset_volume\n , {coin_pair}_taker_sell_volume_perc_of_total\n , {coin_pair}_trade_count\n , {coin_pair}_crnt_interval_bids_50000_price_diff\n , {coin_pair}_crnt_interval_bids_v_asks_50000_price_diff\n , {coin_pair}_crnt_interval_bids_50000_std_diff\n , {coin_pair}_crnt_interval_bids_v_asks_50000_std_diff\n , {coin_pair}_crnt_bids_50000_std_perc_of_total\n , {coin_pair}_crnt_bids_200000_std_perc_of_total\n , {coin_pair}_bids_200000_std_perc_of_total_avg\n \"\"\")\n feature_col_list.extend([\n f'{coin_pair}_quote_asset_volume'\n , f'{coin_pair}_taker_sell_volume_perc_of_total'\n , f'{coin_pair}_trade_count'\n , f'{coin_pair}_crnt_interval_bids_50000_price_diff'\n , f'{coin_pair}_crnt_interval_bids_v_asks_50000_price_diff'\n , f'{coin_pair}_crnt_interval_bids_50000_std_diff'\n , f'{coin_pair}_crnt_interval_bids_v_asks_50000_std_diff'\n , f'{coin_pair}_crnt_bids_50000_std_perc_of_total'\n , f'{coin_pair}_crnt_bids_200000_std_perc_of_total'\n , f'{coin_pair}_bids_200000_std_perc_of_total_avg'\n ])\n \n # Lag features for every interval configured at runtime\n for interval in self.feature_minutes_list:\n interval_list = []\n base_features_list.append(f\"\"\"\n ((quote_asset_volume - LEAD(quote_asset_volume, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(quote_asset_volume, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_quote_asset_volume_perc_chg\n , ((taker_sell_volume_percentage - LEAD(taker_sell_volume_percentage, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(taker_sell_volume_percentage, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_taker_sell_volume_perc_of_total_chg\n , ((trade_count::float - LEAD(trade_count::float, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(trade_count::float, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_trade_count_perc_chg\n , ((o_end.bids_cum_50000_weighted_avg - LEAD(o_end.bids_cum_50000_weighted_avg, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(o_end.bids_cum_50000_weighted_avg, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_bids_50000_perc_chg\n , ((o_end.bids_cum_50000_weighted_std - LEAD(o_end.bids_cum_50000_weighted_std, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(o_end.bids_cum_50000_weighted_std, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_bids_50000_std_chg\n \"\"\")\n final_col_list.append(f\"\"\"\n prev_{interval}_{coin_pair}_quote_asset_volume_perc_chg\n , prev_{interval}_{coin_pair}_taker_sell_volume_perc_of_total_chg\n , prev_{interval}_{coin_pair}_trade_count_perc_chg\n , prev_{interval}_{coin_pair}_bids_50000_perc_chg\n , prev_{interval}_{coin_pair}_bids_50000_std_chg\n \"\"\") \n feature_col_list.extend([\n f'prev_{interval}_{coin_pair}_quote_asset_volume_perc_chg'\n ,f'prev_{interval}_{coin_pair}_taker_sell_volume_perc_of_total_chg'\n ,f'prev_{interval}_{coin_pair}_trade_count_perc_chg'\n ,f'prev_{interval}_{coin_pair}_bids_50000_perc_chg'\n ,f'prev_{interval}_{coin_pair}_bids_50000_std_chg'\n 
])\n \n if pair_type == 'target':\n for target in self.trade_window_list:\n base_features_list.append(f\"\"\"((LAG({self.target_coin}_bids_cum_5000_weighted_avg, {target}) OVER (ORDER BY {self.target_coin}_trade_minute DESC) - {self.target_coin}_asks_cum_5000_weighted_avg) / {self.target_coin}_asks_cum_5000_weighted_avg * 100) AS futr_{target}_askbid_cum_5000_weighted_avg_perc_chg\"\"\")\n # experiment with predicting return starting at minute 1 instead of minute 0 to account for our scoring->trade delay.\n #base_features_list.append(f\"\"\"((LAG({self.target_coin}_bids_cum_5000_weighted_avg, {target}) OVER (ORDER BY {self.target_coin}_trade_minute DESC) - LAG({self.target_coin}_asks_cum_5000_weighted_avg, 1) OVER (ORDER BY {self.target_coin}_trade_minute DESC)) / LAG({self.target_coin}_asks_cum_5000_weighted_avg, 1) OVER (ORDER BY {self.target_coin}_trade_minute DESC) * 100) AS futr_{target}_askbid_cum_5000_weighted_avg_perc_chg\"\"\")\n final_col_list.append(f'futr_{target}_askbid_cum_5000_weighted_avg_perc_chg') \n target_col_list.append(f'futr_{target}_askbid_cum_5000_weighted_avg_perc_chg')\n\n # Coin level CTE \n feature_cte_list.append(f\"\"\"\n {pair_type}_{coin_pair}_features AS (\n SELECT {','.join(base_features_list)}\n FROM {pair_type}_{coin_pair}_candlesticks c \n INNER JOIN {pair_type}_{coin_pair}_beg_orderbook o_beg ON o_beg.coin_pair = c.coin_pair AND o_beg.trade_minute = c.trade_minute \n INNER JOIN {pair_type}_{coin_pair}_end_orderbook o_end ON o_end.coin_pair = c.coin_pair AND o_end.lag_trade_minute = c.trade_minute\n )\"\"\")\n\n # Interaction features for alt coins (base usdt)\n interaction_features = ''\n if pair_type == 'alt':\n interaction_features_list.append(f\"\"\"AVG(({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) OVER (PARTITION BY {self.target_coin}_coin_partition ORDER BY {self.target_coin}_trade_minute ASC ROWS 5 PRECEDING) \n - (({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) AS avg_5_{coin_pair}_bid_ask_average_price_interaction\"\"\")\n interaction_features_list.append(f\"\"\"AVG(({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) OVER (PARTITION BY {self.target_coin}_coin_partition ORDER BY {self.target_coin}_trade_minute ASC ROWS 10 PRECEDING) \n - (({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) AS avg_10_{coin_pair}_bid_ask_average_price_interaction\"\"\")\n interaction_features_list.append(f\"\"\"AVG(({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) OVER (PARTITION BY {self.target_coin}_coin_partition ORDER BY {self.target_coin}_trade_minute ASC ROWS 20 PRECEDING) \n - (({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) AS avg_20_{coin_pair}_bid_ask_average_price_interaction\"\"\")\n feature_col_list.extend([f'avg_5_{coin_pair}_bid_ask_average_price_interaction',f'avg_10_{coin_pair}_bid_ask_average_price_interaction',f'avg_20_{coin_pair}_bid_ask_average_price_interaction'])\n interaction_features = ','.join(interaction_features_list)\n interaction_features = ',' + interaction_features\n\n # Join conditions\n if pair_type == 'target':\n join_conditions_list.append(f\"\"\"{pair_type}_{coin_pair}_features\"\"\") \n else:\n 
join_conditions_list.append(f\"\"\"{pair_type}_{coin_pair}_features ON target_{self.target_coin}_features.{self.target_coin}_trade_minute = {pair_type}_{coin_pair}_features.{coin_pair}_trade_minute\"\"\")\n\n base_ctes = ','.join(base_ctes_list)\n feature_ctes = ','.join(feature_cte_list)\n feature_ctes = ',' + feature_ctes\n final_cols = ','.join(final_col_list)\n join_conditions = ' LEFT JOIN '.join(join_conditions_list)\n\n query_template = f\"\"\"WITH {base_ctes}\n {feature_ctes}\n SELECT {final_cols}\n {interaction_features}\n FROM {join_conditions}\n ORDER BY {self.target_coin}_trade_minute {'DESC' if operation == 'scoring' else 'ASC'}\n {'LIMIT 1' if operation == 'scoring' else ''}\"\"\" # LIMIT SCORING DATA - NOT ALL DATA IS RELEVANT TO CURRENT\n\n return query_template, feature_col_list, target_col_list", "def combine_features(df, lag_fea, lags, window_size, used_columns):\n lagged_fea = lagged_features(df[lag_fea], lags)\n moving_avg = moving_averages(df[lag_fea], 2, window_size)\n fea_all = pd.concat([df[used_columns], lagged_fea, moving_avg], axis=1)\n return fea_all", "def get_feature_vector(user_id: str, session: str) -> DataFrame:\n\n #Find the time windows during which the reader is doing the desired task\n activity_data = read_file(user_id, session, 'Activity.csv')\n task_number = mode(activity_data['TaskID'])\n task_name = task_names[(task_number - 1) % len(task_names)]\n tap_windows = get_tap_events(user_id, session)\n data = get_user_session_data(user_id, session)\n add_magnitude_columns(data)\n add_columns_for_taps(data, tap_windows)\n mark_tap_start_and_end(data, delta_in_ms = 200)\n\n column_names = get_feature_names()\n\n #A feature vector for each tap, to be filled in subsequently:\n featureVectors = pd.DataFrame(columns = column_names)\n\n for tap_file in tap_file_names:\n tap_feature = tap_file_to_feature_name[tap_file]\n print(tap_feature)\n window_start_indices = data[data[tap_feature] == 4].index\n window_end_indices = data[data[tap_feature] == 5].index\n if len(window_start_indices) == 0:\n continue\n \n for i in range(len(window_start_indices)):\n start, end = window_start_indices[i], window_end_indices[i]\n window_of_interest = data[start : end + 1]\n features = feature_list(user_id, session, tap_feature, task_name, window_of_interest)\n if features != None:\n featureVectors.loc[featureVectors.shape[0]] = features\n \n return featureVectors", "def add_time_features(df_kek):\n df = pd.DataFrame([])\n df['hour'] = df_kek['OrderedDate'].dt.hour\n df['dow'] = df_kek['OrderedDate'].dt.dayofweek\n df['weekend'] = (df['dow'] >= 6) | (df_kek['OrderedDate'] == '2020-02-22') | (\n df_kek['OrderedDate'] == '2020-02-24') | (df_kek['OrderedDate'] == '2020-03-09') | (\n df_kek['OrderedDate'] >= '2020-03-30') | (df_kek['OrderedDate'] == '2020-03-07')\n return df", "def merge_additional_features(df):\n col = [\"hour\",\"day\" ,\"dayofweek\", \"month\" , \"interval\" , \"season\", \"time_of_day\"]\n additional_featues = pd.DataFrame(data = [features_from_timestamp(i) for i in df.index ],columns=col).set_index(df.index)\n data = df.merge(additional_featues,on=\"dt\")\n data.sort_index(inplace=True) #make sure data is sorted by date\n\n return data", "def get_features(self, ti=None, tf=None, n_jobs=1, drop_features=[], compute_only_features=[]):\n # initialise training interval\n self.drop_features = drop_features\n self.compute_only_features = compute_only_features\n self.n_jobs = n_jobs\n ti = self.ti_model if ti is None else datetimeify(ti)\n tf = self.tf_model if tf is 
None else datetimeify(tf)\n return self._load_data(ti, tf)", "def at(self, time_slices):\n new_features = FeatureCollection()\n for key in self.keys():\n new_features[key] = self[key].at(time_slices)\n return new_features", "def modify_datetime_train(df):\n\n df['pickup_hour'] = pd.to_datetime(df['pickup_datetime']).dt.hour\n\n df['dropoff_hour'] = pd.to_datetime(df['dropoff_datetime']).dt.hour\n\n df['pickup_minute'] = pd.to_datetime(df['pickup_datetime']).dt.minute\n\n df['dropoff_minute'] = pd.to_datetime(df['dropoff_datetime']).dt.minute\n\n df['pickup_hour_sin'], df['pickup_hour_cos'] = convert_time_sin_cos(df, 'pickup_hour')\n\n df['dropoff_hour_sin'], df['dropoff_hour_cos'] = convert_time_sin_cos(df, 'dropoff_hour')\n\n #split datetime between dates and time\n #using normalize even though it gives us 0:00 time, but the resulting column is a datetime object,\n #which allows us to further process for day of week\n df['pickup_date'] = pd.to_datetime(df['pickup_datetime']).dt.date\n\n df['dropoff_date'] = pd.to_datetime(df['dropoff_datetime']).dt.date\n\n #create day of the week for both pickup date and dropoff dates\n df['pickup_day'] = pd.to_datetime(df['pickup_datetime']).dt.weekday\n\n df['dropoff_day'] = pd.to_datetime(df['dropoff_datetime']).dt.weekday\n\n #get week of year to capture effects of holidays\n df['pickup_weekofyear'] = pd.to_datetime(df['pickup_datetime']).dt.weekofyear\n\n df[\"month\"] = pd.to_datetime(df['pickup_datetime']).dt.month\n\n df[\"year\"] = pd.to_datetime(df['pickup_datetime']).dt.year\n #one hot encode day of the week for both pickup and dropoff\n df = pd.get_dummies(df, columns=['pickup_day', 'dropoff_day'])\n\n return df", "def _apply_filters(self, df):\n df = df[(df['Date'] >= self.start_date) &\n (df['Date'] <= self.end_date)]\n return df", "def add_time_features(self, year=False, month=False, week=True, tod=True, dow=True):\n\n var_to_expand = []\n\n if self.preprocessed_data.empty:\n data = self.original_data\n else:\n data = self.preprocessed_data\n\n if year:\n data[\"year\"] = data.index.year\n var_to_expand.append(\"year\")\n if month:\n data[\"month\"] = data.index.month\n var_to_expand.append(\"month\")\n if week:\n data[\"week\"] = data.index.week\n var_to_expand.append(\"week\")\n if tod:\n data[\"tod\"] = data.index.hour\n var_to_expand.append(\"tod\")\n if dow:\n data[\"dow\"] = data.index.weekday\n var_to_expand.append(\"dow\")\n\n # One-hot encode the time features\n for var in var_to_expand:\n \n add_var = pd.get_dummies(data[var], prefix=var, drop_first=True)\n \n # Add all the columns to the model data\n data = data.join(add_var)\n\n # Drop the original column that was expanded\n data.drop(columns=[var], inplace=True)\n\n self.preprocessed_data = data", "def calc_temps(start_date, end_date):\n \n return session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)). 
filter(Measurement.date >= start_date).filter(Measurement.date <= end_date).all()", "def init_features(\n\tattributes,\n\tcategories,\n\tcities,\n\tstates,\n\tworkdays,\n\tstart_time,\n\tend_time,\n\tdefault_value=DEFAULT_VAL_IF_NOT_EXIST,\n\tfeature_separator=FEATURE_SEPARATOR\n\t):\n\ttffeatures = list()\n\tvalfeatures = dict()\n\texistfeatures = list()\n\n\t# categories are exist feature\n\tfor category in categories:\n\t\tfeature_name = CATEGORIES + feature_separator + category\n\t\texistfeatures.append(feature_name)\n\n\t# cities & states are val feature\n\tcities.add(DEFAULT_VAL_IF_NOT_EXIST)\n\tvalfeatures[CITY] = cities\n\n\tstates.add(DEFAULT_VAL_IF_NOT_EXIST)\n\tvalfeatures[STATE] = states\n\n\t# workdays are exist feature\n\tfor day in workdays:\n\t\tfeature_name = WORKDAYS + feature_separator + day\n\t\texistfeatures.append(feature_name)\n\n\t# day_start, day_end are val features\n\ttime_list = init_24_hours()\n\ttime_list.append(DEFAULT_VAL_IF_NOT_EXIST)\n\tfor day_start in start_time:\n\t\tvalfeatures[day_start] = time_list\n\n\tfor day_end in end_time:\n\t\tvalfeatures[day_end] = time_list\n\n\t# attributes can be val features or true false features\n\thandle_attributes_features(tffeatures, valfeatures, attributes)\n\treturn tffeatures, valfeatures, existfeatures", "def add_feature(layer, branchID, segs, lines, lon, lat, Ttime, density, Initial_loc, solubility, flows, concentration, water_level, dist): \r\n ctr=0\r\n for i in range(len(lines)):\r\n ctr+=1\r\n point = osgeo.ogr.Geometry(osgeo.ogr.wkbPoint)\r\n # Add points individually to the line\r\n #xy = lines[i]\r\n \r\n #line.AddPoint_2D(xy[0][0],xy[0][1])\r\n #line.AddPoint_2D(xy[1][0],xy[1][1])\r\n point.AddPoint(lon[i], lat[i])\r\n # Update the feature with the line data\r\n featureIndex = ctr\r\n feature = osgeo.ogr.Feature(layerDefinition)\r\n #feature.SetStyleString(\"PEN(c:r,w:5px)\") \r\n feature.SetGeometry(point)\r\n feature.SetFID(featureIndex)\r\n feature.SetGeometryDirectly(point)\r\n \r\n # Set the attribute table\r\n feature.SetField('BranchID', int(branchID[i])) \r\n feature.SetField('SegID', int(segs[i])) # convert to int() is necessary, osgeo cannot recognize numpy int32 type\r\n feature.SetField('Lon', \"{:.3f}\".format(lon[i]))\r\n feature.SetField('Lat', \"{:.3f}\".format(lat[i]))\r\n #feature.SetField('Lon_east', \"{:.3f}\".format(eastlon[i]))\r\n #feature.SetField('Lat_east', \"{:.3f}\".format(eastlat[i]))\r\n feature.SetField('T (day)', int(Ttime[i]))\r\n feature.SetField('Density', density[i])\r\n feature.SetField('Initial', Initial_loc[i])\r\n feature.SetField('Solubility', solubility[i])\r\n feature.SetField('Flow', flows[i])\r\n feature.SetField('C (mg/L)', concentration[i])\r\n feature.SetField('WSE (ft)', water_level[i])\r\n feature.SetField('D (ft)', dist[i])\r\n \r\n layer.CreateFeature(feature)" ]
[ "0.6709174", "0.59639823", "0.5956674", "0.57902575", "0.5713756", "0.5689175", "0.5615991", "0.5600183", "0.5599253", "0.5595833", "0.55765986", "0.55623573", "0.5488219", "0.5388274", "0.53792185", "0.5378838", "0.5333837", "0.53081375", "0.53008443", "0.5289871", "0.5253744", "0.5248897", "0.52153873", "0.5212669", "0.51900107", "0.5179637", "0.5173532", "0.5166036", "0.5159631", "0.5148492", "0.5139258", "0.5137223", "0.5136631", "0.51309586", "0.511885", "0.5118465", "0.5094621", "0.5081301", "0.5049808", "0.5038754", "0.5037396", "0.5037162", "0.50309914", "0.5030767", "0.5029698", "0.5027214", "0.5026378", "0.5024824", "0.50220007", "0.49750066", "0.4966066", "0.4963956", "0.49507403", "0.4945541", "0.4944021", "0.49374628", "0.4931082", "0.49291357", "0.49277616", "0.49249446", "0.4924645", "0.49180272", "0.49165928", "0.49143675", "0.48794535", "0.48680574", "0.48561317", "0.48506102", "0.48465118", "0.48464498", "0.4842543", "0.48375666", "0.4834293", "0.4833273", "0.48123616", "0.4810304", "0.48074242", "0.48062772", "0.48035735", "0.47786188", "0.47749364", "0.47734183", "0.47727448", "0.47677368", "0.47639778", "0.47611052", "0.47598073", "0.47591874", "0.4750644", "0.4750381", "0.47458383", "0.4743436", "0.474031", "0.47320974", "0.47243923", "0.47130907", "0.47102678", "0.47095412", "0.46952286", "0.4684444" ]
0.7756553
0
Computes the dropoff_features feature group. To restrict features to a time range, pass in ts_column, start_date, and/or end_date as kwargs.
def dropoff_features_fn(df, ts_column, start_date, end_date):
    df = filter_df_by_ts(
        df, ts_column, start_date, end_date
    )
    dropoffzip_features = (
        df.groupBy("dropoff_zip", window("tpep_dropoff_datetime", "30 minute"))
        .agg(count("*").alias("count_trips_window_30m_dropoff_zip"))
        .select(
            col("dropoff_zip").alias("zip"),
            unix_timestamp(col("window.end")).alias("ts").cast(IntegerType()),
            partition_id(to_timestamp(col("window.end"))).alias("yyyy_mm"),
            col("count_trips_window_30m_dropoff_zip").cast(IntegerType()),
            is_weekend(col("window.end")).alias("dropoff_is_weekend"),
        )
    )
    return dropoffzip_features
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pickup_features_fn(df, ts_column, start_date, end_date):\n df = filter_df_by_ts(\n df, ts_column, start_date, end_date\n )\n pickupzip_features = (\n df.groupBy(\n \"pickup_zip\", window(\"tpep_pickup_datetime\", \"1 hour\", \"15 minutes\")\n ) # 1 hour window, sliding every 15 minutes\n .agg(\n mean(\"fare_amount\").alias(\"mean_fare_window_1h_pickup_zip\"),\n count(\"*\").alias(\"count_trips_window_1h_pickup_zip\"),\n )\n .select(\n col(\"pickup_zip\").alias(\"zip\"),\n unix_timestamp(col(\"window.end\")).alias(\"ts\").cast(IntegerType()),\n partition_id(to_timestamp(col(\"window.end\"))).alias(\"yyyy_mm\"),\n col(\"mean_fare_window_1h_pickup_zip\").cast(FloatType()),\n col(\"count_trips_window_1h_pickup_zip\").cast(IntegerType()),\n )\n )\n return pickupzip_features", "def get_date_features(gt_ids=[], gt_masks=None, gt_shifts=None, first_year=None):\n # If particular arguments aren't lists, replace with repeating iterators\n if not isinstance(gt_masks, list):\n gt_masks = itertools.repeat(gt_masks)\n if not isinstance(gt_shifts, list):\n gt_shifts = itertools.repeat(gt_shifts)\n\n # Add each ground truth feature to dataframe\n df = None\n for gt_id, gt_mask, gt_shift in zip(gt_ids, gt_masks, gt_shifts):\n print \"Getting {}_shift{}\".format(gt_id, gt_shift)\n t = time.time()\n # Load ground truth data\n gt = get_ground_truth(gt_id, gt_mask, gt_shift)\n # Discard years prior to first_year\n gt = year_slice(gt, first_year = first_year)\n # If lat, lon columns exist, pivot to wide format\n if 'lat' in gt.columns and 'lon' in gt.columns:\n if gt_shift == None:\n measurement_variable = get_measurement_variable(gt_id)\n else:\n measurement_variable = get_measurement_variable(gt_id)+'_shift'+str(gt_shift)\n gt = pd.pivot_table(gt, values=measurement_variable, index='start_date',\n columns=['lat', 'lon']).reset_index()\n gt = pd.DataFrame(gt.to_records())\n gt.drop(\"index\", axis=1, inplace=True)\n # Rename columns to start_date and precip_(27.0,261.0), etc.\n gt.rename(columns={gt.columns[0]: 'start_date'}, inplace=True)\n gt.rename(columns=lambda x: x.replace('(',\n measurement_variable +\n '_('), inplace=True)\n # Use outer merge to include union of start_date values across all features\n # combinations across all features\n df = df_merge(df, gt, on=\"start_date\")\n print \"Elapsed: {}s\".format(time.time() - t)\n\n return df", "def create_features(energy_data, label=None):\n energy_data['date'] = energy_data.index\n energy_data['hour'] = energy_data['Datetime'].dt.hour\n energy_data['dayofweek'] = energy_data['Datetime'].dt.dayofweek\n energy_data['month'] = energy_data['Datetime'].dt.month\n energy_data['quarter'] = energy_data['Datetime'].dt.quarter\n energy_data['year'] = energy_data['Datetime'].dt.year\n energy_data['dayofyear'] = energy_data['Datetime'].dt.dayofyear\n energy_data['dayofmonth'] = energy_data['Datetime'].dt.day\n energy_data['weekofyear'] = energy_data['Datetime'].dt.weekofyear\n energy_data['pjme_2_hrs_lag'] = energy_data['PJME_MW'].shift(2)\n energy_data['pjme_4_hrs_lag'] = energy_data['PJME_MW'].shift(4)\n energy_data['pjme_8_hrs_lag'] = energy_data['PJME_MW'].shift(8)\n energy_data['pjme_12_hrs_lag'] = energy_data['PJME_MW'].shift(12)\n energy_data['pjme_24_hrs_lag'] = energy_data['PJME_MW'].shift(24)\n energy_data['pjme_4_hrs_mean'] = energy_data['PJME_MW'].rolling(window=4).mean()\n energy_data['pjme_8_hrs_mean'] = energy_data['PJME_MW'].rolling(window=8).mean()\n energy_data['pjme_12_hrs_mean'] = energy_data['PJME_MW'].rolling(window=12).mean()\n 
energy_data['pjme_24_hrs_mean'] = energy_data['PJME_MW'].rolling(window=24).mean()\n energy_data['pjme_4_hrs_std'] = energy_data['PJME_MW'].rolling(window=4).std()\n energy_data['pjme_8_hrs_std'] = energy_data['PJME_MW'].rolling(window=8).std()\n energy_data['pjme_12_hrs_std'] = energy_data['PJME_MW'].rolling(window=12).std()\n energy_data['pjme_24_hrs_std'] = energy_data['PJME_MW'].rolling(window=24).std()\n energy_data['pjme_4_hrs_max'] = energy_data['PJME_MW'].rolling(window=4).max()\n energy_data['pjme_8_hrs_max'] = energy_data['PJME_MW'].rolling(window=8).max()\n energy_data['pjme_12_hrs_max'] = energy_data['PJME_MW'].rolling(window=12).max()\n energy_data['pjme_24_hrs_max'] = energy_data['PJME_MW'].rolling(window=24).max()\n energy_data['pjme_4_hrs_min'] = energy_data['PJME_MW'].rolling(window=4).min()\n energy_data['pjme_8_hrs_min'] = energy_data['PJME_MW'].rolling(window=8).min()\n energy_data['pjme_12_hrs_min'] = energy_data['PJME_MW'].rolling(window=12).min()\n energy_data['pjme_24_hrs_min'] = energy_data['PJME_MW'].rolling(window=24).min()\n\n features = energy_data[['hour', 'dayofweek', 'quarter', 'month', 'year',\n 'dayofyear', 'dayofmonth', 'weekofyear', 'pjme_2_hrs_lag', 'pjme_4_hrs_lag',\n 'pjme_8_hrs_lag', 'pjme_12_hrs_lag', 'pjme_24_hrs_lag', 'pjme_4_hrs_mean',\n \"pjme_8_hrs_mean\", \"pjme_12_hrs_mean\", \"pjme_24_hrs_mean\", \"pjme_4_hrs_std\",\n \"pjme_8_hrs_std\", \"pjme_12_hrs_std\", \"pjme_24_hrs_std\",\n \"pjme_4_hrs_max\", \"pjme_8_hrs_max\", \"pjme_12_hrs_max\", \"pjme_24_hrs_max\",\n \"pjme_4_hrs_min\", \"pjme_8_hrs_min\", \"pjme_12_hrs_min\", \"pjme_24_hrs_min\"]]\n if label:\n label = energy_data[label]\n return features, label\n return features", "def getFeature(df, start, end):\n\n return [df[start:end].mean(),\n df[start:end].std(),\n df[start:end].skew(),\n df[start:end].kurt(),\n df[start:end].quantile(0.25),\n df[start:end].quantile(0.75),\n df[start:end].quantile(0.90),\n df[start:end].quantile(0.15),\n df[start:end].median(),\n df[start:end].mad(),\n df[start:end].sem(),\n df[start:end].var(),\n df[start:end].autocorr(1),\n df[start:end].autocorr(2),\n df[start:end].autocorr(3),\n df[start:end].autocorr(4),\n df[start:end].autocorr(5),\n np.append(df[start:end].mode(), -1)[0]\n ]", "def compute_features_one_round(\n train_base_df,\n train_delta_df,\n test_df,\n df_config,\n feature_config_list,\n feature_map,\n filter_by_month,\n compute_load_ratio=False,\n):\n\n train_round_df = pd.concat([train_base_df, train_delta_df])\n max_train_timestamp = train_round_df[df_config[\"time_col_name\"]].max()\n max_test_timestamp = test_df[df_config[\"time_col_name\"]].max()\n train_test_diff = max_test_timestamp - max_train_timestamp\n max_horizon = ceil(train_test_diff.days * 24 + train_test_diff.seconds / 3600)\n train_features, feature_pipeline = compute_training_features(\n train_round_df, df_config, feature_config_list, feature_map, max_horizon,\n )\n\n test_features = compute_testing_features(test_df, feature_pipeline, feature_config_list, train_round_df)\n\n if compute_load_ratio:\n rolling_window_args = LOAD_RATIO_CONFIG[\"same_day_of_week_rolling_args\"]\n previous_years_lag_args = LOAD_RATIO_CONFIG[\"same_week_of_year_lag_args\"]\n same_week_day_hour_rolling_featurizer = SameDayOfWeekRollingWindowFeaturizer(\n df_config, input_col_names=df_config[\"target_col_name\"], max_horizon=max_horizon, **rolling_window_args\n )\n train_df_with_recent_load = same_week_day_hour_rolling_featurizer.transform(train_round_df)\n 
same_week_day_hour_rolling_featurizer.train_df = train_round_df\n test_df_with_recent_load = same_week_day_hour_rolling_featurizer.transform(test_df)\n\n time_col_name = df_config[\"time_col_name\"]\n ts_id_col_names = df_config[\"ts_id_col_names\"]\n keep_col_names = [time_col_name]\n if ts_id_col_names is not None:\n if isinstance(ts_id_col_names, list):\n keep_col_names = keep_col_names + ts_id_col_names\n else:\n keep_col_names.append(ts_id_col_names)\n lag_df_list = []\n start_week = rolling_window_args[\"start_week\"]\n end_week = start_week + rolling_window_args[\"agg_count\"]\n for i in range(start_week, end_week):\n col_old = df_config[\"target_col_name\"] + \"_\" + rolling_window_args[\"output_col_suffix\"] + \"_\" + str(i)\n col_new = col_old + \"_\" + previous_years_lag_args[\"output_col_suffix\"]\n col_ratio = \"recent_load_ratio_\" + str(i)\n\n same_week_day_hour_lag_featurizer = SameWeekOfYearLagFeaturizer(\n df_config,\n input_col_names=col_old,\n train_df=train_df_with_recent_load,\n max_horizon=max_horizon,\n **previous_years_lag_args\n )\n\n lag_df = same_week_day_hour_lag_featurizer.transform(test_df_with_recent_load)\n lag_df[col_ratio] = lag_df[col_old] / lag_df[col_new]\n lag_df_list.append(lag_df[keep_col_names + [col_ratio]].copy())\n\n test_features = reduce(\n lambda left, right: pd.merge(left, right, on=keep_col_names), [test_features] + lag_df_list,\n )\n\n if filter_by_month:\n test_month = test_features[\"month_of_year\"].values[0]\n train_features = train_features.loc[train_features[\"month_of_year\"] == test_month,].copy()\n\n train_features.dropna(inplace=True)\n\n return train_features, test_features", "def _create_ts_features(df, tscol):\r\n df = copy.deepcopy(df)\r\n dt_adds = []\r\n try:\r\n df[tscol+'_hour'] = df[tscol].dt.hour.fillna(0).astype(int)\r\n df[tscol+'_minute'] = df[tscol].dt.minute.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_hour')\r\n dt_adds.append(tscol+'_minute')\r\n except:\r\n print(' Error in creating hour-second derived features. 
Continuing...')\r\n try:\r\n df[tscol+'_dayofweek'] = df[tscol].dt.dayofweek.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_dayofweek')\r\n if tscol+'_hour' in dt_adds:\r\n DAYS = dict(zip(range(7),['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']))\r\n df[tscol+'_dayofweek'] = df[tscol+'_dayofweek'].map(DAYS)\r\n df.loc[:,tscol+'_dayofweek_hour_cross'] = df[tscol+'_dayofweek'] +\" \"+ df[tscol+'_hour'].astype(str)\r\n dt_adds.append(tscol+'_dayofweek_hour_cross')\r\n df[tscol+'_quarter'] = df[tscol].dt.quarter.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_quarter')\r\n df[tscol+'_month'] = df[tscol].dt.month.fillna(0).astype(int)\r\n MONTHS = dict(zip(range(1,13),['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul',\r\n 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']))\r\n df[tscol+'_month'] = df[tscol+'_month'].map(MONTHS)\r\n dt_adds.append(tscol+'_month')\r\n #### Add some features for months ########################################\r\n festives = ['Oct','Nov','Dec']\r\n name_col = tscol+\"_is_festive\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in festives else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n summer = ['Jun','Jul','Aug']\r\n name_col = tscol+\"_is_summer\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in summer else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n winter = ['Dec','Jan','Feb']\r\n name_col = tscol+\"_is_winter\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in winter else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n cold = ['Oct','Nov','Dec','Jan','Feb','Mar']\r\n name_col = tscol+\"_is_cold\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in cold else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n warm = ['Apr','May','Jun','Jul','Aug','Sep']\r\n name_col = tscol+\"_is_warm\"\r\n df[name_col] = 0\r\n df[name_col] = df[tscol+'_month'].map(lambda x: 1 if x in warm else 0).values\r\n df[name_col].fillna(0,inplace=True)\r\n dt_adds.append(name_col)\r\n #########################################################################\r\n if tscol+'_dayofweek' in dt_adds:\r\n df.loc[:,tscol+'_month_dayofweek_cross'] = df[tscol+'_month'] +\" \"+ df[tscol+'_dayofweek']\r\n dt_adds.append(tscol+'_month_dayofweek_cross')\r\n df[tscol+'_year'] = df[tscol].dt.year.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_year')\r\n today = date.today()\r\n df[tscol+'_age_in_years'] = today.year - df[tscol].dt.year.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_age_in_years')\r\n df[tscol+'_dayofyear'] = df[tscol].dt.dayofyear.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_dayofyear')\r\n df[tscol+'_dayofmonth'] = df[tscol].dt.day.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_dayofmonth')\r\n df[tscol+'_weekofyear'] = df[tscol].dt.weekofyear.fillna(0).astype(int)\r\n dt_adds.append(tscol+'_weekofyear')\r\n weekends = (df[tscol+'_dayofweek'] == 'Sat') | (df[tscol+'_dayofweek'] == 'Sun')\r\n df[tscol+'_typeofday'] = 'weekday'\r\n df.loc[weekends, tscol+'_typeofday'] = 'weekend'\r\n dt_adds.append(tscol+'_typeofday')\r\n if tscol+'_typeofday' in dt_adds:\r\n df.loc[:,tscol+'_month_typeofday_cross'] = df[tscol+'_month'] +\" \"+ df[tscol+'_typeofday']\r\n dt_adds.append(tscol+'_month_typeofday_cross')\r\n except:\r\n print(' Error in creating date time derived features. 
Continuing...')\r\n print(' created %d columns from time series %s column' %(len(dt_adds),tscol))\r\n return df, dt_adds", "def create_date_features(df = None, date = None):\n #TODO", "def _drop_features(self, X, drop_features):\n self.drop_features = drop_features\n if len(self.drop_features) != 0:\n cfp = ComprehensiveFCParameters()\n df2 = []\n for df in self.drop_features:\n if df in X.columns:\n df2.append(df) # exact match\n else:\n if df in cfp.keys() or df in ['fft_coefficient_hann']:\n df = '*__{:s}__*'.format(df) # feature calculator\n # wildcard match\n df2 += [col for col in X.columns if fnmatch(col, df)] \n X = X.drop(columns=df2)\n return X", "def filter_dataframe(df, start_date_dt, end_date_dt):\n\n dff = df \n # df[\n # (df[\"timestamp\"].dt.date >= dt.date(start_date_dt.year, start_date_dt.month, start_date_dt.day))\n # & (df[\"timestamp\"].dt.date <= dt.date(end_date_dt.year, end_date_dt.month, end_date_dt.day))\n # ]\n # if (lat_min != -90) or (lat_max != 90):\n # dff = dff[\n # (dff[\"lat\"] >= lat_min)\n # & (dff[\"lat\"] <= lat_max)\n # ]\n # if (lon_min != -90) or (lon_max != 90):\n # dff = dff[\n # (dff[\"lon\"] >= lon_min)\n # & (dff[\"lon\"] <= lon_max)\n # ]\n\n return dff", "def create_features(df,rsi_window = 14,macd_feat = [12,26,9]):\n df.dropna(inplace=True)\n ## day and month\n df['Date'] = pd.to_datetime(df['Date'])\n df['Month'] = df['Date'].dt.month\n df['dayowk'] = df['Date'].dt.dayofweek\n df = pd.get_dummies(data = df,columns = ['Month','dayowk'])\n \n ##Previos n-day pct_changes\n df['1day_pct'] = df['Adj Close'].pct_change()\n df['2day_pct'] = df['Adj Close'].pct_change(periods = 2)\n df['3day_pct'] = df['Adj Close'].pct_change(periods = 3)\n df['4day_pct'] = df['Adj Close'].pct_change(periods = 4)\n df['5day_pct'] = df['Adj Close'].pct_change(periods = 5)\n df['7day_pct'] = df['Adj Close'].pct_change(periods = 7)\n \n ##Cumulative sum of 1day_pct\n df['1day_pct_cs'] = df['Adj Close'].pct_change().cumsum()\n \n ##EWMA of 7, 50 and 200 days\n df['ewma_7'] = df['Adj Close'].ewm(span=7).mean()/df['Adj Close']\n df['ewma_50'] = df['Adj Close'].ewm(span=50).mean()/df['Adj Close']\n df['ewma_200'] = df['Adj Close'].ewm(span=200).mean()/df['Adj Close']\n ## Golden Cross vs Death Cross etc.\n #df['7g(50&200)'] = (df['ewma_7'] > df['ewma_50']) & (df['ewma_7'] > df['ewma_200'])\n #df['7l(50&200)'] = (df['ewma_7'] < df['ewma_50']) & (df['ewma_7'] < df['ewma_200'])\n #df['7g50'] = (df['ewma_7'] > df['ewma_50']) & (df['ewma_7'] < df['ewma_200'])\n #df['7g200'] = (df['ewma_7'] < df['ewma_50']) & (df['ewma_7'] > df['ewma_200'])\n \n ##RSI and MACD\n df = RSI(df,14)\n df = MACD_mod(df,nl=macd_feat[0],nh=macd_feat[1],nsig=macd_feat[2])\n \n df['day_var'] = (df['High'] - df['Low'])/df['Close']## Days variance\n df['open_close'] = (df['Open'] - df['Close'])/df['Close'] ## Days Open-Close\n df['high_close'] = (df['High'] - df['Close'])/df['Close'] ##Days High-Close\n df['open_prev_close'] = (df['Open'] - df['Close'].shift(1))/df['Close'] ## Days open - Previos Dyas Close\n \n ##Classification target\n df['target'] = round((np.sign(df['1day_pct']).shift(-1)+1)/2) ## Target for classification\n #df['1_day_target'] = df['Adj Close'].shift(-1) - df['Adj Close'] ## Target for Regression\n #df['target2'] = round((np.sign(df['1day_pct']).shift(-1)+1)/2)## Will the price go up intra-day\n \n ## IS the stock Overbought or Oversold based on RSI?\n df['RSI_overbought'] = df['RSI']>70\n df['RSI_oversold'] = df['RSI']<30\n \n \n 
#df.drop(['Open','High','Low','Close'],axis=1,inplace=True)\n# df = df.dropna()\n \n #df = df.reset_index(drop=True)\n \n ## Calculating how large the previos hot and cold streaks were\n f = 0\n df['prev_hot_streak'] = np.zeros(df.shape[0])\n for i in range(df.shape[0]-1):\n if df['target'][i] ==1:\n f += 1\n if df['target'][i+1] ==0:\n df['prev_hot_streak'][i+1] = f\n f = 0\n for i in range(1,df.shape[0]):\n #print(i)\n if df['prev_hot_streak'][i]==0:\n df['prev_hot_streak'][i]=df['prev_hot_streak'][i-1]\n \n \n df['prev_cold_streak'] = np.zeros(df.shape[0])\n for i in range(df.shape[0]-1):\n if df['target'][i] ==0:\n f += 1\n if df['target'][i+1] ==1:\n df['prev_cold_streak'][i+1] = f\n f = 0\n\n for i in range(1,df.shape[0]):\n #print(i)\n if df['prev_cold_streak'][i]==0:\n df['prev_cold_streak'][i] = df['prev_cold_streak'][i-1]\n \n ## Calculating current hot and cold streaks\n df['current_hot_streak'] = np.zeros(df.shape[0])\n df['current_cold_streak'] = np.zeros(df.shape[0])\n fhot=0\n fcold=0\n for i in range(df.shape[0]):\n if df['target'][i]==1:\n fhot += 1\n fcold = 0\n df['current_hot_streak'][i] = fhot\n elif df['target'][i]==0:\n fcold += 1\n fhot = 0\n df['current_cold_streak'][i] = fcold\n \n df['prev_hot_streak'] = df['prev_hot_streak'].shift(1)\n df['prev_cold_streak'] = df['prev_cold_streak'].shift(1)\n df['current_hot_streak'] = df['current_hot_streak'].shift(1)\n df['current_cold_streak'] = df['current_cold_streak'].shift(1)\n \n ## Combinations of previos streaks\n df['prev_current_hot'] = df['prev_hot_streak'] - df['current_hot_streak']\n df['prev_current_cold'] = df['prev_cold_streak'] - df['current_cold_streak']\n df['current_hot_prev_cold'] = df['current_hot_streak'] - df['prev_cold_streak']\n df['current_cold_prev_hot'] = df['current_cold_streak'] - df['prev_hot_streak']\n \n ##Calculating days since max\n current_max = df['Adj Close'][0]\n df['days_from_max'] = np.zeros(df.shape[0])\n df['pct_from_max'] = np.zeros(df.shape[0])\n #print('blah')\n for i in range(1,df.shape[0]):\n if df['Adj Close'][i] > current_max:\n current_max = df['Adj Close'][i]\n # print(current_max)\n else:\n df['days_from_max'][i] = df['days_from_max'][i-1]+1\n df['pct_from_max'][i] = (df['Adj Close'][i]-current_max)/current_max\n #print(df['days_from_max'][i])\n \n \n \n df.dropna(inplace=True)\n df = df.reset_index(drop=True)\n return df", "def postprocess_features(self, featurelist):\n \n ##: To overwrite the time of features that are in a clause\n for feature in featurelist:\n if feature.inClause() or self.is_in_clause(feature.getStartPos(), feature.getSentNum()):\n feature = self.assign_feature_time_with_references(feature, self.timeReferences, feature.getStartPos(), True)\n \n ##: To set time of features after death to none. 
Currently disabled.\n# deathDates = []\n# for feature in featurelist:\n# if 'Death' in [tg[1] for tg in feature.getTags()]:\n# dt = feature.getDateTime()\n# if dt and feature.getTlink().getTimexes()[0].getType()!='VIRTUAL': ##: only original date counts\n# deathDates.append(dt)\n# \n# if feature.getType()=='CAUSE_OF_DEATH':\n# feature.setTlink(None)\n# \n# if deathDates:\n# deathDate = min(deathDates)\n# for feature in featurelist: \n# dt = feature.getDateTime()\n# if dt and dt>deathDate:\n# feature.setTlink(None)\n \n ##: Remove time from features in the blockout range, \n ##: e.g., A 34 years old male with{ history of leg pain }who on ....\n for feature in featurelist:\n posStart = feature.getStartPos()\n posEnd = feature.getEndPos()\n for r in self.blockout_range:\n if (posStart>r[0] and posStart<r[1]) or (posEnd>r[0] and posEnd<r[1]):\n timex = feature.getTimex()\n if timex:\n tpos = timex.getStartPos()\n if tpos>=r[0] and tpos<=r[1]:\n continue\n \n feature.setTlink(None)\n \n return featurelist", "def samples_timesteps_features(dataframe, columns, start_date, timesteps=72, \n steps_ahead=24, window_days=100, train_percent=80.):\n \n def overlap_windows(dataset, timesteps, steps_ahead):\n \"\"\" Create overlaping window of time-series data\n \n Parameters\n ----------\n dataset: pd.DataFrame\n time-series pandas dataset\n timesteps: int\n number of time steps from the past for creating output arrays\n steps_ahead: int\n number of time steps into the future for making predictions\n \n Returns\n -------\n X, y: np.array\n input and output 3-d arrays of overlaping time windows\n \"\"\"\n X = []; y = []\n \n start = 0\n for i in range(len(dataset)):\n # Define the end of the input sequence\n in_end = start + timesteps\n out_end = in_end + steps_ahead\n # Ensure that there is enough data\n if out_end <= len(dataset):\n X.append(dataset[start:in_end, :])\n # First column holds load values\n y.append(dataset[in_end:out_end, 0])\n # Move along one time step\n start += 1\n \n # Convert list to np.array\n X = np.asarray(X)\n y = np.asarray(y)\n \n return X, y\n\n\n data = dataframe.copy()\n \n if window_days*24 > data.values.shape[0]:\n raise ValueError('Variable window_days has too large value: {}*24h = {} > {}, which is more than there is data!'.format(window_days, window_days*24, \n data.values.shape[0]))\n \n # Training period\n # ---------------\n train_percent = train_percent/100.\n st = pd.to_datetime(start_date) # start date\n et = st + dt.timedelta(days=int(train_percent*window_days)) # end date\n train = data.loc[st:et].values\n \n # Standardize and transform training data set\n mean_std_values = {}\n for i, column in enumerate(columns):\n # Calculate mean and standard deviation only\n # from the training data set values\n mu = train[:,i].mean() # axis=0\n sd = train[:,i].std()\n mean_std_values[column] = (mu, sd)\n # Standardize training data\n train[:,i] = (train[:,i] - mu)/sd\n \n # Create overlapping windows with training data\n X_train, y_train = overlap_windows(train, timesteps, steps_ahead)\n \n # Testing / Validation period\n # ---------------------------\n sv = et \n ev = sv + dt.timedelta(days=int((1-train_percent)*window_days)+1)\n test = data.loc[sv:ev].values\n \n # Transform testing/validation data set\n for i, column in enumerate(columns):\n # Use mean and standard deviation from the\n # training data set\n mu = mean_std_values[column][0]\n sd = mean_std_values[column][1]\n # Standardize test data\n test[:,i] = (test[:,i] - mu)/sd\n \n # Create overlaping windows with 
test data\n X_test, y_test = overlap_windows(test, timesteps, steps_ahead)\n \n return mean_std_values, X_train, y_train, X_test, y_test", "def FE_start_end_date_time_features(smalldf, startTime, endTime, splitter_date_string=\"/\",splitter_hour_string=\":\"):\r\n smalldf = smalldf.copy()\r\n add_cols = []\r\n date_time_variable_flag = False\r\n if smalldf[startTime].dtype in ['datetime64[ns]','datetime16[ns]','datetime32[ns]']:\r\n print('%s variable is a date-time variable' %startTime)\r\n date_time_variable_flag = True\r\n if date_time_variable_flag:\r\n view_days = 'processing'+startTime+'_elapsed_days'\r\n smalldf[view_days] = (smalldf[endTime] - smalldf[startTime]).astype('timedelta64[s]')/(60*60*24)\r\n smalldf[view_days] = smalldf[view_days].astype(int)\r\n add_cols.append(view_days)\r\n view_time = 'processing'+startTime+'_elapsed_time'\r\n smalldf[view_time] = (smalldf[endTime] - smalldf[startTime]).astype('timedelta64[s]').values\r\n add_cols.append(view_time)\r\n else:\r\n start_date = 'processing'+startTime+'_start_date'\r\n smalldf[start_date] = smalldf[startTime].map(lambda x: x.split(\" \")[0])\r\n add_cols.append(start_date) \r\n try:\r\n start_time = 'processing'+startTime+'_start_time'\r\n smalldf[start_time] = smalldf[startTime].map(lambda x: x.split(\" \")[1])\r\n add_cols.append(start_time)\r\n except:\r\n ### there is no hour-minutes part of this date time stamp field. You can just skip it if it is not there\r\n pass\r\n end_date = 'processing'+endTime+'_end_date'\r\n smalldf[end_date] = smalldf[endTime].map(lambda x: x.split(\" \")[0])\r\n add_cols.append(end_date)\r\n try:\r\n end_time = 'processing'+endTime+'_end_time'\r\n smalldf[end_time] = smalldf[endTime].map(lambda x: x.split(\" \")[1])\r\n add_cols.append(end_time)\r\n except:\r\n ### there is no hour-minutes part of this date time stamp field. You can just skip it if it is not there\r\n pass\r\n view_days = 'processing'+startTime+'_elapsed_days'\r\n smalldf[view_days] = (pd.to_datetime(smalldf[end_date]) - pd.to_datetime(smalldf[start_date])).values.astype(int)\r\n add_cols.append(view_days)\r\n try:\r\n view_time = 'processing'+startTime+'_elapsed_time'\r\n smalldf[view_time] = (pd.to_datetime(smalldf[end_time]) - pd.to_datetime(smalldf[start_time])).astype('timedelta64[s]').values\r\n add_cols.append(view_time)\r\n except:\r\n ### In some date time fields this gives an error so skip it in that case\r\n pass\r\n #### The reason we chose endTime here is that startTime is usually taken care of by another library. So better to do this alone.\r\n year = 'processing'+endTime+'_end_year'\r\n smalldf[year] = smalldf[end_date].map(lambda x: str(x).split(splitter_date_string)[0]).values\r\n add_cols.append(year)\r\n #### The reason we chose endTime here is that startTime is usually taken care of by another library. So better to do this alone.\r\n month = 'processing'+endTime+'_end_month'\r\n smalldf[month] = smalldf[end_date].map(lambda x: str(x).split(splitter_date_string)[1]).values\r\n add_cols.append(month)\r\n try:\r\n #### The reason we chose endTime here is that startTime is usually taken care of by another library. So better to do this alone.\r\n daynum = 'processing'+endTime+'_end_day_number'\r\n smalldf[daynum] = smalldf[end_date].map(lambda x: str(x).split(splitter_date_string)[2]).values\r\n add_cols.append(daynum)\r\n except:\r\n ### In some date time fields the day number is not there. 
If not, just skip it ####\r\n pass\r\n #### In some date time fields, the hour and minute is not there, so skip it in that case if it errors!\r\n try:\r\n start_hour = 'processing'+startTime+'_start_hour'\r\n smalldf[start_hour] = smalldf[start_time].map(lambda x: str(x).split(splitter_hour_string)[0]).values\r\n add_cols.append(start_hour)\r\n start_min = 'processing'+startTime+'_start_hour'\r\n smalldf[start_min] = smalldf[start_time].map(lambda x: str(x).split(splitter_hour_string)[1]).values\r\n add_cols.append(start_min)\r\n except:\r\n ### If it errors, skip it\r\n pass\r\n #### Check if there is a weekday and weekends in date time columns using endTime only\r\n weekday_num = 'processing'+endTime+'_end_weekday_number'\r\n smalldf[weekday_num] = pd.to_datetime(smalldf[end_date]).dt.weekday.values\r\n add_cols.append(weekday_num)\r\n weekend = 'processing'+endTime+'_end_weekend_flag'\r\n smalldf[weekend] = smalldf[weekday_num].map(lambda x: 1 if x in[5,6] else 0)\r\n add_cols.append(weekend)\r\n #### If everything works well, there should be 13 new columns added by module. All the best!\r\n print('%d columns added using start date=%s and end date=%s processing...' %(len(add_cols),startTime,endTime))\r\n return smalldf", "def add_datepart(\n cls,\n df: pd.DataFrame,\n field_name: str,\n frequency: str,\n prefix: str = None,\n drop: bool = True,\n ) -> Tuple[pd.DataFrame, List[str]]:\n field = df[field_name]\n prefix = (re.sub(\"[Dd]ate$\", \"\", field_name) if prefix is None else prefix) + \"_\"\n attr = cls.time_features_from_frequency_str(frequency)\n added_features = []\n for n in attr:\n if n == \"Week\":\n continue\n df[prefix + n] = getattr(field.dt, n.lower())\n added_features.append(prefix + n)\n # Pandas removed `dt.week` in v1.1.10\n if \"Week\" in attr:\n week = field.dt.isocalendar().week if hasattr(field.dt, \"isocalendar\") else field.dt.week\n df.insert(3, prefix + \"Week\", week)\n added_features.append(prefix + \"Week\")\n # TODO Not adding Elapsed by default. 
Need to route it through config\n # mask = ~field.isna()\n # df[prefix + \"Elapsed\"] = np.where(\n # mask, field.values.astype(np.int64) // 10 ** 9, None\n # )\n # added_features.append(prefix + \"Elapsed\")\n if drop:\n df.drop(field_name, axis=1, inplace=True)\n\n # Removing features woth zero variations\n # for col in added_features:\n # if len(df[col].unique()) == 1:\n # df.drop(columns=col, inplace=True)\n # added_features.remove(col)\n return df, added_features", "def time_split_dataset(df, train_start_date, train_end_date, holdout_end_date, date_col):\n\n train_set = df.copy()[\n (df[date_col] >= train_start_date) & (df[date_col] <= train_end_date)]\n\n test_set = df.copy()[\n (df[date_col] > train_end_date) & (df[date_col] <= holdout_end_date)]\n\n return train_set, test_set", "def create_feature_based_on_spent_by_timestamp(data):\n utils.save_log('{0} :: {1}'.format(\n create_feature_based_on_spent_by_timestamp.__module__,\n create_feature_based_on_spent_by_timestamp.__name__))\n\n data = data.withColumn('RatioValueSpentByWeekOfYear',\n (data['Value'] / data['TransactionWeekOfYear']))\n data = data.withColumn('RatioValueSpentByDayOfWeek',\n (data['Value'] / data['TransactionDayOfWeek']))\n data = data.withColumn('RatioValueSpentByDayOfYear',\n (data['Value'] / data['TransactionDayOfYear']))\n\n update_list_features(\"numerical\", ['RatioValueSpentByWeekOfYear',\n 'RatioValueSpentByDayOfWeek',\n 'RatioValueSpentByDayOfYear'])\n\n return data", "def dataset_extract_features_from_date(dataset,date_feature): \n dataset['dayofmonth'] = dataset[date_feature].dt.day\n dataset['dayofyear'] = dataset[date_feature].dt.dayofyear \n dataset['dayofweek'] = dataset[date_feature].dt.dayofweek\n dataset['month'] = dataset[date_feature].dt.month\n dataset['year'] = dataset[date_feature].dt.year\n dataset['weekofyear'] = dataset[date_feature].dt.weekofyear\n dataset['is_month_start'] = (dataset[date_feature].dt.is_month_start).astype(int)\n dataset['is_month_end'] = (dataset[date_feature].dt.is_month_end).astype(int)\n return dataset", "def get_lat_lon_date_features(gt_ids=[], gt_masks=None, gt_shifts=None,\n forecast_ids=[], forecast_masks=None, forecast_shifts=None,\n anom_ids=[], anom_masks=None, anom_shifts=None,\n first_year = None):\n # If particular arguments aren't lists, replace with repeating iterators\n if not isinstance(gt_masks, list):\n gt_masks = itertools.repeat(gt_masks)\n if not isinstance(gt_shifts, list):\n gt_shifts = itertools.repeat(gt_shifts)\n if not isinstance(forecast_masks, list):\n forecast_masks = itertools.repeat(forecast_masks)\n if not isinstance(forecast_shifts, list):\n forecast_shifts = itertools.repeat(forecast_shifts)\n if not isinstance(anom_masks, list):\n anom_masks = itertools.repeat(anom_masks)\n if not isinstance(anom_shifts, list):\n anom_shifts = itertools.repeat(anom_shifts)\n\n # Define canonical name for target start date column\n date_col = \"start_date\"\n # Add each ground truth feature to dataframe\n df = None\n for gt_id, gt_mask, gt_shift in zip(gt_ids, gt_masks, gt_shifts):\n print \"Getting {}_shift{}\".format(gt_id, gt_shift)\n t = time.time()\n # Load ground truth data\n gt = get_ground_truth(gt_id, gt_mask, shift=gt_shift)\n # Discard years prior to first_year\n gt = year_slice(gt, first_year = first_year)\n # Use outer merge to include union of (lat,lon,date_col)\n # combinations across all features\n df = df_merge(df, gt)\n print \"Elapsed: {}s\".format(time.time() - t)\n\n # Add each forecast feature to dataframe\n for forecast_id, 
forecast_mask, forecast_shift in zip(forecast_ids,\n forecast_masks,\n forecast_shifts):\n print \"Getting {}_shift{}\".format(forecast_id, forecast_shift)\n t = time.time()\n # Load forecast with years >= first_year\n forecast = get_forecast(forecast_id, forecast_mask, shift=forecast_shift)\n # Rename target start date column to \"start_date\"\n fcst_date_col = get_target_start_date_col(forecast_id)\n forecast.rename(columns={fcst_date_col: date_col}, inplace=True)\n # Discard years prior to first_year\n forecast = year_slice(forecast, first_year = first_year)\n # Use outer merge to include union of (lat,lon,date_col)\n # combinations across all features\n df = df_merge(df, forecast)\n print \"Elapsed: {}s\".format(time.time() - t)\n\n # Add anomaly features and climatology last so that climatology\n # is produced for all previously added start dates\n for anom_id, anom_mask, anom_shift in zip(anom_ids, anom_masks, anom_shifts):\n print \"Getting {}_shift{} with anomalies\".format(anom_id, anom_shift)\n t = time.time()\n # Check if ground truth column already exists\n gt_col = get_measurement_variable(anom_id, shift=anom_shift)\n if df is None or gt_col not in df.columns:\n # Add masked ground truth data if absent\n gt = get_ground_truth(anom_id, anom_mask, shift=anom_shift)\n # Discard years prior to first_year\n gt = year_slice(gt, first_year = first_year)\n # Use outer merge to include union of (lat,lon,date_col)\n # combinations across all features\n df = df_merge(df, gt)\n\n # Load masked ground truth data climatology\n climatology = get_climatology(anom_id, anom_mask, anom_shift)\n # Merge climatology into dataset\n df = pd.merge(df, climatology[[gt_col]],\n left_on=['lat', 'lon', df[date_col].dt.month,\n df[date_col].dt.day],\n right_on=[climatology.lat, climatology.lon,\n climatology[date_col].dt.month,\n climatology[date_col].dt.day],\n how='left', suffixes=('', '_clim'))\n clim_col = gt_col+\"_clim\"\n # Compute ground-truth anomalies\n anom_col = gt_col+\"_anom\"\n df[anom_col] = df[gt_col] - df[clim_col]\n print \"Elapsed: {}s\".format(time.time() - t)\n\n return df", "def month_lag_distribution(source_df, field=\"month_lag\", path=path.path, nrows=None):\n _log.info(\"Creating features from {}\".format(field))\n prefix = source_df.split(\"_\")[0]\n source_df = \"{}/{}\".format(path, source_df)\n\n _log.info(\"Reading from {}\".format(source_df))\n try:\n df = pd.read_csv(source_df, usecols=[\"card_id\", field], nrows=nrows)\n _log.info(\"Successfully read from {}\".format(source_df))\n except Exception as e:\n _log.exception(e)\n\n _log.info(\"Computing distribution of month lag\")\n func_to_be_applied = [min, max, pd.Series.nunique]\n func_to_be_applied_dummy = [max, np.mean]\n rename_dict = create_rename_dict(prefix, field, func_to_be_applied)\n rename_dict_dummy = create_rename_dict(prefix, \"dummy\", func_to_be_applied_dummy)\n\n df[\"dummy\"] = 1\n df_features = df.groupby(\"card_id\").agg({field:func_to_be_applied}).reset_index()\n df_features = pd.concat([pd.DataFrame(df_features[\"card_id\"]), df_features[field]], axis=1, sort=False)\n\n _log.info(\"Renaming columns: {}\".format(rename_dict))\n df_features.rename(columns=rename_dict, inplace=True)\n\n _log.info(\"Computing time in month between transactions\")\n df_freq = (df.groupby([\"card_id\", field]).agg({\"dummy\": np.sum}).reset_index().groupby(\"card_id\")\n .agg({\"dummy\": func_to_be_applied_dummy}).reset_index())\n df_freq = pd.concat([pd.DataFrame(df_freq[\"card_id\"]), df_freq[\"dummy\"]], axis=1, 
sort=False)\n df_freq.rename(columns=rename_dict_dummy, inplace=True)\n\n _log.info(\"Creating final df\")\n df_features = df_features.merge(df_freq, how=\"inner\", on=\"card_id\")\n return df_features", "def list_feature_drop(self):\n \n list_to_drop = list()\n list_not_in_df = list()\n \n #-------------------------------------------------------------------------\n # Columns are checked to be into df_invoice_line dataframe\n #-------------------------------------------------------------------------\n for col in self._list_feature_to_drop:\n if col in self.df_invoice_line.columns:\n list_to_drop.append(col)\n else:\n list_not_in_df.append(col)\n \n if 0 == len(list_to_drop):\n self.strprint(\"\\n*** ERROR : no element in list belonging to dataframe!\")\n else:\n if len(self._list_feature_to_drop) != len(list_to_drop):\n self.strprint(\"\\n*** WARNING : followings features do not belong to \\\n dataframe : {}\".format(list_not_in_df))\n else:\n pass\n list_col_keep \\\n = [col for col in self.df_invoice_line.columns \\\n if col not in list_to_drop]\n s\n self.df_invoice_line = self.df_invoice_line[list_col_keep]\n return", "def feature_engineer_ts(self, month=12):\n st_data_dt = self.get_st_data_dt()\n end_data_dt = self.get_end_data_dt()\n date_list = pd.date_range(*(pd.to_datetime([st_data_dt, end_data_dt]) + pd.offsets.MonthEnd()), freq='M').to_list()\n population = self.get_population()\n is_raw_partition = self.get_is_raw_partition()\n# Lag 2 months\n all_data = []\n# join past is_raw columns\n for d in date_list:\n \n population_partition = population[population['ft_data_dt'] == d] \n old_date = d - relativedelta(months=month)\n y = old_date.year\n m = old_date.month\n day = calendar.monthrange(y, m)[1]\n old_date = date(y, m, day)\n old_date = max(old_date, st_data_dt)\n date_list_join = pd.date_range(*(pd.to_datetime([old_date, d]) + pd.offsets.MonthEnd()), freq='M').to_list()\n date_list_join.reverse()\n for index, date_join in enumerate(date_list_join):\n if date_join.strftime(\"%Y-%m-%d\") not in is_raw_partition.keys():\n continue\n \n tmp_is_raw_partition = is_raw_partition[date_join.strftime(\"%Y-%m-%d\")]\n \n rename_col = [c for c in list(tmp_is_raw_partition.columns) if c not in ['idd', 'ft_data_dt']]\n new_col = [c+'_'+str(index+1) for c in rename_col]\n name_dict = dict(list(zip(rename_col, new_col)))\n tmp_is_raw_partition = tmp_is_raw_partition.rename(columns = name_dict)\n population_partition = population_partition.merge(tmp_is_raw_partition.drop(columns=['ft_data_dt']), on=['idd'], how='left')\n all_data.append(population_partition)\n ts_df = pd.concat(all_data)\n threshold_null = len(ts_df.columns) - 4\n ts_df = ts_df[ts_df.isnull().sum(axis=1) < threshold_null]\n \n def sum_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_sum_'+str(duration)+'mth'\n tmp_df = df[col_list].sum(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def mean_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_avg_'+str(duration)+'mth'\n tmp_df = df[col_list].mean(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def std_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_std_'+str(duration)+'mth'\n tmp_df = df[col_list].std(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def med_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_med_'+str(duration)+'mth'\n tmp_df = df[col_list].std(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def min_ts(self, df, col_list, feature, duration):\n ft_name = feature+ 
'_min_'+str(duration)+'mth'\n tmp_df = df[col_list].min(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def max_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_max_'+str(duration)+'mth'\n tmp_df = df[col_list].max(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def q1_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_q1_'+str(duration)+'mth'\n tmp_df = df[col_list].quantile(q=0.25, axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def q3_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_q3_'+str(duration)+'mth'\n tmp_df = df[col_list].quantile(q=0.75, axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def last_ts(self, df, feature):\n ft_name = feature+ '_last'\n tmp_df = df[feature+'_'+str(1)].to_frame(name=ft_name)\n return tmp_df\n \n ts_duration = [1, 3, 6, 9, 12]\n feature_list = self.get_is_raw_col()\n df = ts_df[['idd', 'ft_data_dt']]\n# Time Series Features\n for duration in ts_duration:\n for col in feature_list:\n col_list = [col+'_'+str(i) for i in range(1, duration+1)]\n df = pd.concat([df\\\n , sum_ts(self, ts_df, col_list, col, duration)\\\n , mean_ts(self, ts_df, col_list, col, duration)\\\n , med_ts(self, ts_df, col_list, col, duration)\\\n , q1_ts(self, ts_df, col_list, col, duration)\\\n , q3_ts(self, ts_df, col_list, col, duration)\\\n , min_ts(self, ts_df, col_list, col, duration)\\\n , max_ts(self, ts_df, col_list, col, duration)]\n , axis=1)\n self.set_all_data(df)", "def build_shape_data(self, start=None, end=None):\n # If start and end are None, then set them to be min/max of self.df_demand\n if start is None:\n start = self.df_demand['date'].min()\n if end is None:\n end = self.df_demand['date'].max()\n print(f\"date range for shape data is from {start} to {end}\")\n # Extract part of df_demand that is within start and end\n df_sub = self.df_demand[(self.df_demand['date'] >= start) & (self.df_demand['date'] <= end)]\n assert df_sub['date'].min() >= start\n assert df_sub['date'].max() <= end\n num_days = len(pd.date_range(iso8601.parse_date(start), iso8601.parse_date(end), freq='d'))\n print(f\"number of days is {num_days}\")\n # When finding variance and mean, add in missing days as 0s\n # Obtain the counts for each lat/lng region\n counts = df_sub.groupby(['left_lng', 'right_lng', 'lower_lat', 'upper_lat']).size().reset_index(name='counts')\n # Group demand data by lat/lng region and average across other cols\n df = df_sub.groupby(['left_lng', 'right_lng', 'lower_lat', 'upper_lat'])[['avail_count', 'avail_mins', 'trips', 'prob_scooter_avail', 'adj_trips']].mean().reset_index()\n df = df.merge(counts, on=['left_lng', 'right_lng', 'lower_lat', 'upper_lat'])\n # print(df.head())\n # Modify averages by multiplying each by count and divide by num_days\n vars = ['avail_count', 'avail_mins', 'trips', 'prob_scooter_avail', 'adj_trips']\n for var in vars:\n df[var] = df[var]*df['counts']/num_days\n # print(df.head())\n # Calculate the variance for prob_scooter_avail\n probVariance = df_sub.groupby(['left_lng', 'right_lng', 'lower_lat', 'upper_lat']).apply(lambda x: ((x['prob_scooter_avail'] - (x['prob_scooter_avail'].sum()/num_days))**2).sum()/(num_days-1)).reset_index(name='prob_scooter_avail')\n # print(probVariance.head())\n df['prob_scooter_avail_var'] = probVariance['prob_scooter_avail']\n # Check to see if there are any Nan values\n print(f\"Nan values in df? 
{df.isnull().values.any()}\")\n # print(df.head())\n # For each var col, create corresponding color columns (log and unlog)\n # Also create the factors list that get passed into self.create_rectangle_lst\n factors = [('avail_count', 'decimal'), ('avail_mins', 'decimal'),\n ('trips', 'decimal'), ('prob_scooter_avail', 'percent'), ('adj_trips', 'decimal')]\n i = 0\n original_len = len(factors)\n while i < original_len:\n name, type = factors[i]\n # print(f\"name={name}, type={type}\")\n # Create color column\n df = self.map_values_to_color(df, name)\n # If type is not percent than create log version\n if type != 'percent':\n df = self.create_log_column(df, name)\n factors.append(('log_'+name, type))\n i += 1\n # Deal with estimated demand and unmet demand\n # Filter out rows where prob_scooter_avail sig diff from 0\n sigDiffIdx = df.apply(lambda x: utils.sig_diff_from_zero(x['prob_scooter_avail'], x['prob_scooter_avail_var']), axis=1)\n # print(sigDiffIdx.head())\n df_sig_diff = df[sigDiffIdx]\n # Calculate estimated demand and unmet demand\n df_sig_diff = self.calculate_demand(df_sig_diff)\n # print(df_sig_diff.head())\n # Create color column and log column for unmet demand\n df_sig_diff = self.map_values_to_color(df_sig_diff, 'unmet_demand')\n df_sig_diff = self.map_values_to_color(df_sig_diff, 'estimated_demand')\n df_sig_diff = self.create_log_column(df_sig_diff, 'unmet_demand')\n factors.extend([('estimated_demand', 'decimal'), ('unmet_demand', 'decimal'), ('log_unmet_demand', 'decimal')])\n # Fill in the colors for the grid cells that aren't significantly different\n df_not_sig_diff = df[~sigDiffIdx]\n # print(df_not_sig_diff.head())\n df = pd.concat([df_sig_diff, df_not_sig_diff])\n # df.to_csv('../../../data_files/20210427_estimatedDemand.csv', index=False)\n # Create Rectangle information\n rectangles = self.create_rectangle_lst(df, factors)\n return rectangles, start, end", "def create_lag_features_with_time_feature(df = None, cols = None, time = None, n = 5, fillna = True):\n # assert\n assert(df is not None and cols is not None)\n # set attributes\n cols_to_rename = cols\n print(cols_to_rename)\n if type(n) == list:\n shift_range = n\n elif type(n) == int:\n shift_range = range(1, n+1)\n else:\n print(\"type of n is flase, set it to default: 5\")\n shift_range = range(1, 6)\n # try to get the new features\n for month_shift in tqdm(shift_range):\n train_shift = df.copy()\n train_shift[time] = train_shift[time] + month_shift\n foo = lambda x: '{}_lag_{}'.format(x, month_shift) if x in cols_to_rename else x\n train_shift = train_shift.rename(columns=foo)\n df = pd.merge(all_data, train_shift, on=index_cols, how='left').fillna(0)\n del train_shift\n gc.collect()\n return df", "def create_feature_names_list(df):\n features = ['date_block_num', 'shop_id', 'item_id', 'Year', 'Month', 'shop_type_1',\n 'shop_type_2', 'shop_city_type', 'shop_city', 'item_category_id',\n 'item_category_main', 'is_category_digital', 'is_category_ps_related', 'item_price_avg',\n 'when_first_sold',\n 'number_of_mondays', 'number_of_saturdays', 'number_of_sundays', 'number_of_days_in_month']\n lag_cols = [x for x in df.columns if 'lag' in x]\n features = features + lag_cols\n\n return features", "def FE_create_time_series_features(dft, ts_column, ts_adds_in=[]):\r\n dtf = copy.deepcopy(dft)\r\n reset_index = False\r\n try:\r\n # ts_column = None assumes that that index is the time series index\r\n reset_index = False\r\n if ts_column is None:\r\n reset_index = True\r\n ts_column = dtf.index.name\r\n dtf = 
dtf.reset_index()\r\n\r\n ### In some extreme cases, date time vars are not processed yet and hence we must fill missing values here!\r\n null_nums = dtf[ts_column].isnull().sum()\r\n if null_nums > 0:\r\n # missing_flag = True\r\n new_missing_col = ts_column + '_Missing_Flag'\r\n dtf[new_missing_col] = 0\r\n dtf.loc[dtf[ts_column].isnull(),new_missing_col]=1\r\n dtf[ts_column].fillna(method='ffill', inplace=True)\r\n print(' adding %s column due to missing values in data' %new_missing_col)\r\n if dtf[dtf[ts_column].isnull()].shape[0] > 0:\r\n dtf[ts_column].fillna(method='bfill', inplace=True)\r\n\r\n if dtf[ts_column].dtype == float:\r\n dtf[ts_column] = dtf[ts_column].astype(int)\r\n\r\n ### if we have already found that it was a date time var, then leave it as it is. Thats good enough!\r\n items = dtf[ts_column].apply(str).apply(len).values\r\n #### In some extreme cases,\r\n if all(items[0] == item for item in items):\r\n if items[0] == 4:\r\n ### If it is just a year variable alone, you should leave it as just a year!\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column],format='%Y')\r\n ts_adds = []\r\n else:\r\n ### if it is not a year alone, then convert it into a date time variable\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column], infer_datetime_format=True)\r\n ### this is where you create the time series features #####\r\n dtf, ts_adds = _create_ts_features(df=dtf, tscol=ts_column)\r\n else:\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column], infer_datetime_format=True)\r\n ### this is where you create the time series features #####\r\n dtf, ts_adds = _create_ts_features(df=dtf, tscol=ts_column)\r\n if not ts_adds_in:\r\n ts_adds_copy = dtf[ts_adds].select_dtypes(include='number').columns.tolist()\r\n ### drop those columns where all rows are same i.e. zero variance ####\r\n for col in ts_adds_copy:\r\n if dtf[col].std() == 0:\r\n dtf.drop(col, axis=1, inplace=True)\r\n print(' dropping column due to zero variance in %s column' %col)\r\n ts_adds.remove(col)\r\n else:\r\n rem_cols = left_subtract(dtf.columns.tolist(), ts_adds_in)\r\n dtf = dtf[rem_cols+ts_adds_in]\r\n\r\n # If you had reset the index earlier, set it back before returning\r\n # to make it consistent with the dataframe that was sent as input\r\n if reset_index:\r\n dtf = dtf.set_index(ts_column)\r\n elif ts_column in dtf.columns:\r\n dtf.drop(ts_column, axis=1, inplace=True)\r\n else:\r\n pass\r\n except Exception as e:\r\n print(e)\r\n print('Error in Processing %s column for date time features. Continuing...' 
%ts_column)\r\n return dtf, ts_adds", "def feature_list(user_id: str, session: str, tap_feature: str, task_name: str, window: DataFrame):\n if window.shape[0] == 0:\n return None\n #Add user ID, session, task name\n features = [user_id, session, task_name]\n\n #Add orientation\n orientation = mode(window['Phone_orientation_accel'])\n features.append(orientation)\n\n #Add tap type\n features.append(tap_feature)\n\n lead_file = 'Accelerometer.csv'\n\n time_col = x_columns[lead_file]\n\n before_start = window[window[tap_feature] == 4].index[0]\n during_start = window[window[tap_feature] == 2].index[0]\n after_start = window[window[tap_feature] == 3].index[0] + 1\n after_end = window[window[tap_feature] == 5].index[0]\n\n before = window.loc[before_start : during_start]\n during = window.loc[during_start : after_start]\n after = window.loc[after_start : after_end + 1]\n\n if during.shape[0] < 2:\n # If there were none or one measurements during the tap,\n # add the closest ones\n during = window[during_start - 1 : after_start + 1]\n\n for file_name in file_names:\n for y in y_columns[file_name]:\n\n # Feature 1: Mean during\n mean_during = mean(during[y])\n\n # Feature 2: SD during\n sd_during = sd(during[y])\n\n # Feature 3: Difference before/after\n mean_before = mean(before[y])\n mean_after = mean(after[y])\n difference_before_after = mean_after - mean_before\n\n # Feature 4: Net change from tap\n net_change_due_to_tap = mean_during - mean_before\n\n # Feature 5: Maximal change from tap\n max_tap = max(during[y])\n max_change = max_tap - mean_before\n\n # Feature 6: Restoration time\n avgDiffs = []\n for j in range(after[y].shape[0]):\n subsequentValues = after[y].iloc[j:]\n subsequentDistances = subsequentValues.map(lambda x: abs(x - mean_before))\n averageDistance = mean(subsequentDistances)\n avgDiffs.append(averageDistance)\n time_of_earliest_restoration = min(avgDiffs)\n restoration_time = time_of_earliest_restoration - during[time_col].iloc[-1]\n\n # Feature 7: Normalized duration\n t_before_center = (before[time_col].iloc[0] + before[time_col].iloc[-1]) / 2 \n t_after_center = (after[time_col].iloc[0] + after[time_col].iloc[-1]) / 2\n normalized_duration = (t_after_center - t_before_center) / (mean_after - mean_before)\n \n # Feature 8: Ndormalized duration max\n t_max_in_tap = during[during[y] == max_tap][time_col].iloc[0]\n normalized_duration_max = (t_after_center - t_max_in_tap) / (mean_after - max_tap)\n\n\n features += [mean_during, sd_during, difference_before_after,\n net_change_due_to_tap, max_change, restoration_time,\n normalized_duration, normalized_duration_max]\n\n if random.choice(range(100))== 0:\n plot_tap('Plots/Project/' + session, before, during, after, time_col)\n \n return features", "def generate_features(df, suffix = '_diff_', step=1, relevant_features=[], ignore_columns=[]):\n # cols = self.get_active_columns(df, ignore_columns)\n cols = relevant_features\n deltas = {}\n for c in cols:\n deltas['%s%s'% (c, suffix)] = subtract_from_prev_val(df, c, step=step)\n df_new = pd.DataFrame(deltas)\n return df_new", "def dataframe_features(df, db):\n def generator():\n for gene_id in df.index:\n yield asinterval(db[gene_id])\n\n return pybedtools.BedTool(generator())", "def _extract_features_for_peak_estimation(self, groups=(), tail=0.4, skip_if_shorter_than=6, **kwargs):\n assert 0 < tail < 1, \"Tail is a fraction, it should be between 0 and 1\"\n assert 0 < skip_if_shorter_than, \"skip_if_shorter_than should be a positive int\"\n\n groups = 
self.df[self.col_group].unique() if len(groups) == 0 else groups\n features = pd.DataFrame({self.col_group: groups})\n\n for idx, row in features.iterrows():\n group = row[self.col_group]\n df_loc = self.df[self.df[self.col_group] == group]\n\n length = len(df_loc[self.col_t])\n if length < skip_if_shorter_than:\n continue\n\n model_full = HuberRegressor()\n\n x_full = df_loc[self.col_t].to_numpy().reshape((-1, 1))\n y_full = df_loc[self.col_obs].to_numpy()\n model_full.fit(x_full, y_full)\n slope_full = model_full.coef_[0]\n\n tail_len = int(tail * length)\n\n x_tail = df_loc[self.col_t].to_numpy()[-tail_len:].reshape((-1, 1))\n y_tail = df_loc[self.col_obs].to_numpy()[-tail_len:]\n x_head = df_loc[self.col_t].to_numpy()[:-tail_len].reshape((-1, 1))\n y_head = df_loc[self.col_obs].to_numpy()[:-tail_len]\n\n r2_full_score = model_full.score(x_full, y_full)\n r2_head_score = model_full.score(x_head, y_head)\n r2_tail_score = model_full.score(x_tail, y_tail)\n\n model_head = HuberRegressor()\n model_head.fit(x_head, y_head)\n slope_head = model_head.coef_[0]\n\n model_tail = HuberRegressor()\n model_tail.fit(x_tail, y_tail)\n slope_tail = model_tail.coef_[0]\n features.at[idx, \"R2_full\"] = r2_full_score\n features.at[idx, \"R2_head\"] = r2_head_score\n features.at[idx, \"R2_tail\"] = r2_tail_score\n features.at[idx, \"R2_tail_own\"] = model_tail.score(x_tail, y_tail)\n features.at[idx, \"slope_full\"] = slope_full\n features.at[idx, \"slope_head\"] = slope_head\n features.at[idx, \"slope_tail\"] = slope_tail\n\n y_pred_full = model_full.predict(x_full)\n self._statistics[\"linear_r2\"][group] = r2_full_score\n self._statistics[\"linear_rmse\"][group] = np.linalg.norm(np.exp(y_full) - np.exp(y_pred_full))**2\n self._statistics[\"linear_slope\"][group] = slope_full\n\n fraction_below_score = np.mean(model_full.predict(x_tail) > y_tail)\n weights = np.array([1 / (1 + i) ** 2 for i in range(1, tail_len + 1)][::-1])\n weighted_fraction_below_score = np.dot(weights, model_full.predict(x_tail) > y_tail)\n features.at[idx, \"fraction_below\"] = fraction_below_score\n features.at[idx, \"weighted_fraction_below\"] = weighted_fraction_below_score\n\n return features.dropna()", "def calculate_timebase_features(self, X: pd.DataFrame) -> pd.DataFrame:\n X = self._add_lagged_features(X, [1, 3, 7, 14, 21, 365])\n\n X = self._add_rolling(X, 'mean', [5, 50])\n X = self._add_rolling(X, 'min', [5, 50])\n X = self._add_rolling(X, 'max', [5, 50])\n\n return X", "def create_features_using_groupby(training, entity, feature, avg=True, minimum=True, maximum=True):\n\n entity_col = 'offer_id' if entity == 'portfolio' else 'person'\n\n groupby = training.groupby(entity_col)[feature]\n\n features, col_name = [], []\n if avg:\n features.append(groupby.mean())\n col_name.append('avg_'+feature)\n if minimum:\n features.append(groupby.min())\n col_name.append('min_'+feature)\n if maximum:\n features.append(groupby.max())\n col_name.append('max_'+feature)\n\n feature_df = pd.concat(features, axis=1)\n feature_df.columns = [col + '_' + entity for col in col_name]\n\n return feature_df", "def add_features(df_in, rolling_win_size=15):\n cols =['Turbine_ID', 'Date', 'TTF', '60_days', 'Component']\n other_cols = []\n for i in df_in.columns:\n if i not in cols:\n other_cols.append(i)\n all_cols = cols + other_cols\n\n df_in = df_in[all_cols]\n\n sensor_cols = []\n for i in df_in.columns[5:]:\n sensor_cols.append(i)\n\n sensor_av_cols = [nm+'_av' for nm in sensor_cols]\n sensor_sd_cols = [nm+'_sd' for nm in sensor_cols]\n\n 
df_out = pd.DataFrame()\n\n ws = rolling_win_size\n\n #calculate rolling stats for each engine id\n\n for m_id in pd.unique(df_in.Turbine_ID):\n\n # get a subset for each engine sensors\n df_engine = df_in[df_in['Turbine_ID'] == m_id]\n df_sub = df_engine[sensor_cols]\n\n # get rolling mean for the subset\n av = df_sub.rolling(ws, min_periods=1).mean()\n av.columns = sensor_av_cols\n\n # get the rolling standard deviation for the subset\n sd = df_sub.rolling(ws, min_periods=1).std().fillna(0)\n sd.columns = sensor_sd_cols\n\n # combine the two new subset dataframes columns to the engine subset\n new_ftrs = pd.concat([df_engine,av,sd], axis=1)\n\n # add the new features rows to the output dataframe\n df_out = pd.concat([df_out,new_ftrs])\n df_out = df_out.sort_values(by=['Turbine_ID', 'Date'] )\n return df_out", "def _filter_temporal(self, start_date: str, end_date: str) -> 'ImageCollection':\n process_id = 'filter_daterange'\n args = {\n 'imagery': self.graph,\n 'extent': [start_date, end_date]\n }\n\n return self.graph_add_process(process_id, args)", "def generate_features(df):\n return np.array([np.array(xi) for xi in pd.to_datetime(df).apply(lambda x: [x.year, x.month, x.day, x.hour, x.minute, x.second, x.weekday()])])", "def select_features(self):\r\n \r\n features_list = list(self.feed_data.columns.values)\r\n features_list.remove(\"min_time\")\r\n thisrace = self.config.race_to_predict\r\n\r\n #if never ran race before, don't include these variables in feature\r\n #selection, they're just 0's anyway\r\n if self.config.first_time_running_race == True:\r\n unuseable_columns = [('min_time', thisrace),('std', thisrace),('num_races', thisrace),\r\n ('rainfall', thisrace),\r\n ('temp', thisrace),\r\n ('wind', thisrace),\r\n ('metersup', thisrace), \r\n 'sex_W']\r\n else:\r\n #drop this column...probs should have removed it earlier. 
\r\n unuseable_columns = ['sex_W']\r\n #print(features_list)\r\n for element in unuseable_columns:\r\n features_list.remove(element)\r\n data_with_all_feats = self.feed_data.drop(unuseable_columns,axis=1)\r\n colstodrop = features_list\r\n thiscols = []\r\n data_with_current_feats = data_with_all_feats.drop(features_list,axis=1)\r\n checkfit=100.0\r\n scores = []\r\n dropped_cols = []\r\n loopgain =True\r\n #mymod = RandomForestRegressor(n_estimators=80, oob_score = True, max_depth=10,\r\n # min_samples_split = 25, criterion='mse')\r\n thisloopfeatures_list = features_list\r\n curcols = data_with_current_feats.columns\r\n countgain=0\r\n #print(\"cc\",curcols)\r\n while loopgain == True:\r\n thisloopscore=100.0\r\n for fet in thisloopfeatures_list:\r\n data_with_current_feats[fet] = data_with_all_feats[fet]\r\n etrain=data_with_current_feats.sample(frac=0.8,random_state=200)\r\n etest=data_with_current_feats.drop(etrain.index)\r\n y = etrain.pop('min_time')\r\n ytest = etest.pop('min_time')\r\n #print(y)\r\n model = RandomForestRegressor(n_estimators=80, oob_score = True, max_depth=15,\r\n min_samples_split = 12, criterion='mse')\r\n model.fit(etrain,y)\r\n\r\n PRED = model.predict(etrain)\r\n predscore = self.mean_absolute_percentage_error(y,PRED)#= r2_score(y,PRED)\r\n oobs = self.mean_absolute_percentage_error(y,model.oob_prediction_)\r\n scores.append(oobs)\r\n if ((thisloopscore - oobs) > 0.0):\r\n thisloopscore = oobs\r\n fetwinner = fet\r\n data_with_current_feats.drop(fet,axis=1,inplace=True)\r\n etrain.drop(fet,axis=1,inplace=True)\r\n\r\n data_with_current_feats[fetwinner] = data_with_all_feats[fetwinner]\r\n etrain=data_with_current_feats.sample(frac=0.8,random_state=200)\r\n etest=data_with_current_feats.drop(etrain.index)\r\n y = etrain.pop('min_time')\r\n ytest = etest.pop('min_time')\r\n #print(y)\r\n model = RandomForestRegressor(n_estimators=80, oob_score = True, max_depth=30,\r\n min_samples_split = 12,min_samples_leaf =7, criterion='mse')\r\n model.fit(etrain,y)\r\n\r\n PRED = model.predict(etrain)\r\n predscore = self.mean_absolute_percentage_error(y,PRED)#= r2_score(y,PRED)\r\n #print(fetwinner,predscore)\r\n oobs = self.mean_absolute_percentage_error(y,model.oob_prediction_)\r\n scores.append(oobs)\r\n #print(fetwinner,\"~\",oobs)\r\n thisloopfeatures_list.remove(fetwinner)\r\n if ((checkfit-oobs)>0.0001):\r\n checkfit = oobs\r\n curcols = data_with_current_feats.columns\r\n #print(curcols)\r\n else:\r\n break\r\n\r\n\r\n self.final_df = self.feed_data[data_with_current_feats.columns]\r\n self.Xtrain=self.final_df.sample(frac=0.8,random_state=200)\r\n self.Xtest=self.final_df.drop(self.Xtrain.index)#\r\n self.ytrain = self.Xtrain.pop('min_time')\r\n self.ytest = self.Xtest.pop('min_time')\r\n self.model= RandomForestRegressor(n_estimators=80, oob_score = True, max_depth=30,\r\n min_samples_split = 12,min_samples_leaf =7, criterion='mse')\r\n self.model.fit(self.Xtrain,self.ytrain)\r\n #print(y)\r\n return", "def _filter_features(\n record_batch: pa.RecordBatch,\n feature_allowlist: List[types.FeatureName]) -> pa.RecordBatch:\n columns_to_select = []\n column_names_to_select = []\n for feature_name in feature_allowlist:\n col = arrow_util.get_column(record_batch, feature_name, missing_ok=True)\n if col is None:\n continue\n columns_to_select.append(col)\n column_names_to_select.append(feature_name)\n return pa.RecordBatch.from_arrays(columns_to_select, column_names_to_select)", "def add_features(df_in, rolling_win_size,columns_to_treat):\n \n av_cols = [nm+'__av' for 
nm in columns_to_treat]\n sd_cols = [nm+'__sd' for nm in columns_to_treat]\n min_cols =[nm+'__min' for nm in columns_to_treat]\n max_cols =[nm+ '__max' for nm in columns_to_treat]\n \n df_out = pd.DataFrame()\n \n ws = rolling_win_size\n \n #calculate rolling stats for each engine (engine.id)\n \n for m_id in pd.unique(df_in['id.engine.id']):\n \n # get a subset for each engine sensors\n df_engine = df_in[df_in['id.engine.id'] == m_id]\n df_sub = df_engine[columns_to_treat]\n\n \n # get rolling mean for the subset\n av = df_sub.rolling(ws, min_periods=1).mean()\n av.columns = av_cols\n \n # get the rolling standard deviation for the subset\n sd = df_sub.rolling(ws, min_periods=1).std().fillna(0)\n sd.columns = sd_cols\n\n # get rolling rolling max for the subset\n max = df_sub.rolling(ws, min_periods=1).max()\n max.columns = max_cols\n \n # get the rolling standard deviation for the subset\n min = df_sub.rolling(ws, min_periods=1).min().fillna(0)\n min.columns = min_cols\n \n # combine the two new subset dataframes columns to the engine subset\n new_ftrs = pd.concat([df_engine,av,sd,min,max], axis=1)\n \n # add the new features rows to the output dataframe\n df_out = pd.concat([df_out,new_ftrs])\n \n return df_out", "def datetime_columns(df, feature):\r\n df['day'] = pd.to_datetime(df[feature]).dt.day\r\n df['month'] = pd.to_datetime(df[feature]).dt.month\r\n df['year'] = pd.to_datetime(df[feature]).dt.year\r\n return df", "def get_features_and_target(self, trades_features: pd.DataFrame, trades_target: pd.DataFrame) -> pd.DataFrame:\n \n sf_groups = trades_features.drop_duplicates(subset=['sf_account_id', 'trade_date', 'sku']).groupby('sf_account_id')\n\n # calculate features\n feature_dfs = []\n if 'product_name' in self.feature_categories:\n feature_dfs += [sf_groups.product_name.value_counts().unstack().notnull()]\n if 'product_category' in self.feature_categories:\n feature_dfs += [sf_groups.product_category.value_counts().unstack().notnull()]\n if 'reporting_channel' in self.feature_categories:\n feature_dfs += [sf_groups.sub_reporting_channel.value_counts().unstack().notnull()]\n if 'recency' in self.feature_categories:\n feature_dfs += [(trades_features.trade_date_dt.max()-sf_groups.trade_date_dt.max()).dt.days.to_frame().rename(columns={'trade_date_dt':'recency'})]\n if 'frequency' in self.feature_categories:\n feature_dfs += [sf_groups.product_name.count().to_frame().rename(columns={'product_name':'frequency'})]\n if 'total_spend' in self.feature_categories:\n feature_dfs += [sf_groups.cost_float.sum().to_frame().rename(columns={'cost_float':'total_spend'})]\n\n # concat features\n customer_df = pd.concat(feature_dfs, axis=1, sort=False) # outer join on index\n\n # add target variable\n for target_variable in self.target_variables:\n if (trades_target.product_name == target_variable).any():\n customer_df['target_'+target_variable] = trades_target.groupby(['sf_account_id', 'product_name']).trade_date.any().unstack()[target_variable]\n else:\n customer_df['target_'+target_variable] = False\n\n # remove customers with no purchases before cut off\n customer_df = customer_df[customer_df[customer_df.columns[customer_df.columns != 'target']].any(axis=1)]\n\n # replace nans with False\n customer_df.fillna(False, inplace=True)\n\n return customer_df", "def add_features2(df_in, rolling_win_size=15):\n cols_to_drop =['TTF','60_days','Turbine_ID', 'Date','Component','Component_sd','Component_av']\n\n for i in cols_to_drop:\n if i in df_in.columns:\n df_in = df_in.drop(columns=i)\n else:\n 
pass\n\n sensor_cols = []\n for i in df_in.columns:\n sensor_cols.append(i)\n\n sensor_av_cols = [nm+'_av' for nm in sensor_cols]\n sensor_sd_cols = [nm+'_sd' for nm in sensor_cols]\n\n df_out = pd.DataFrame()\n\n ws = rolling_win_size\n\n #calculate rolling stats for each engine id\n\n for m_id in pd.unique(df_in.Turbine_ID):\n\n # get a subset for each engine sensors\n df_engine = df_in[df_in['Turbine_ID'] == m_id]\n df_sub = df_engine[sensor_cols]\n\n # get rolling mean for the subset\n av = df_sub.rolling(ws, min_periods=1).mean()\n av.columns = sensor_av_cols\n\n # get the rolling standard deviation for the subset\n sd = df_sub.rolling(ws, min_periods=1).std().fillna(0)\n sd.columns = sensor_sd_cols\n\n # combine the two new subset dataframes columns to the engine subset\n new_ftrs = pd.concat([df_engine,av,sd], axis=1)\n\n # add the new features rows to the output dataframe\n df_out = pd.concat([df_out,new_ftrs])\n df_out = df_out.sort_values(by=['Turbine_ID', 'Date'] )\n return df_out", "def create_shifted_orderbook_more_feature(ticker, start_date, end_date, lag_period = 5, pred_period = 7):\n # Retrieve the Nifty data from Yahoo finance:\n format = '%Y-%m-%d' # Formatting directives\n start = start_date.strftime(format)\n end = end_date.strftime(format)\n\n yf.pdr_override() # <== that's all it takes :-)\n stock_data = pdr.get_data_yahoo(ticker, start=start, end=end)\n\n # Creates stock lag\n stock_data.dropna()\n stock_lag = pd.DataFrame(data = stock_data, index=stock_data.index)\n\n stock_returns = pd.DataFrame(data = stock_data, index=stock_data.index)\n\n # Initializes dataframe values and smooths the closing price data\n stock_data_smooth = stock_data['Adj Close']\n exponential_smoothing(0.7, stock_data_smooth) #so the stock_data_smooth is smoothing\n\n\n #stock_lag['Volume'] = stock_returns['Volume'] = stock_data['Volume']\n stock_lag[\"Close\"] = stock_data_smooth #so, now the stock_lag[\"Close\"] is derive from Adj Close + smoothing.\n #print stock_lag[\"Close\"]\n\n # Sets lagging price data (previous days' price data as feature inputs)\n for i in range(0, lag_period):\n column_label = 'Lag{:d}'.format(i)\n stock_lag[column_label] = stock_lag['Close'].shift(1+i)\n\n # EMA- Momentum\n #stock_lag['EMA'] = talib.EMA(close, timeperiod = 30)\n ndays = 30\n name_EWMA = 'EWMA_' + str(ndays)\n stock_lag['EWMA_'] = EWMA(stock_lag,ndays )[name_EWMA]\n\n # Bollinger Bands\n #stock_lag['upperband'], stock_lag['middleband'], stock_lag['lowerband'] = talib.BBANDS(close, timeperiod=5, nbdevup=2, nbdevdn=2, matype=0)\n aa = BBANDS(stock_lag, ndays=30)\n stock_lag['upperband'] = aa['Upper BollingerBand']\n stock_lag['lowerband'] = aa['Lower BollingerBand']\n\n # StochK\n #stock_lag['slowk'], stock_lag['slowd'] = talib.STOCH(high, low, close, fastk_period=14, slowk_period=3, slowk_matype=0, slowd_period=3,\n # slowd_matype=0)\n n = 30\n name_slowk = 'SO%k'\n name_slowd = 'SO%d_' + str(n)\n stock_lag['slowk'] = STOK(stock_lag)[name_slowk]\n stock_lag['slowd'] = STO(stock_lag, n)[name_slowd]\n\n # MACD- Momentum\n #macd, macdsignal, stock_lag['macdhist'] = talib.MACD(close, fastperiod=12, slowperiod=26, signalperiod=9)\n n_fast = 12\n n_slow = 26\n name_macd = 'MACD_' + str(n_fast) + '_' + str(n_slow)\n name_macdsignal = 'MACDsign_' + str(n_fast) + '_' + str(n_slow)\n name_macdhist = 'MACDdiff_' + str(n_fast) + '_' + str(n_slow)\n macd = MACD(stock_lag, n_fast, n_slow)[name_macd]\n macdsignal = MACD(stock_lag, n_fast, n_slow)[name_macdsignal]\n stock_lag['macdhist'] = MACD(stock_lag, n_fast, 
n_slow)[name_macdhist]\n\n # CCI- Momentum\n #stock_lag['CCI'] = talib.CCI(high, low, close)\n stock_lag['CCI'] = CCI(stock_lag, ndays = 30)[\"CCI\"]\n #print stock_lag['CCI']\n\n # # RSI- Momentum\n # #stock_lag['RSI'] = talib.RSI(close, timeperiod=14)\n # ndays = 14\n # name_RSI = 'RSI_' + str(ndays)\n # stock_lag['RSI'] = RSI(stock_lag, n = ndays)[name_RSI]\n # #print stock_lag['RSI']\n\n\n # Chaikin- Volume\n #stock_lag['Chaikin'] = talib.ADOSC(high, low, close, volume, fastperiod=3, slowperiod=10)\n stock_lag['Chaikin'] = Chaikin(stock_lag)['Chaikin']\n #print stock_lag['Chaikin']\n\n stock_returns['Day Returns'] = stock_data['Adj Close'].pct_change() * 100\n # Sets lagging percent change data\n for i in range(0, lag_period):\n column_label = 'Lag{:d}'.format(i)\n stock_returns[column_label] = stock_lag[column_label].pct_change() * 100\n\n\n # Remove NaN's from stock lag\n print \"shape of stock_lag before dropna: \",stock_lag.shape[0]\n stock_lag = stock_lag.dropna(axis=0, how='any')\n print \"shape of stock_lag before dropna: \",stock_lag.shape[0]\n\n print \"shape of stock_returns before dropna: \",stock_returns.shape[0]\n # Adjusts stock_return data to same length as stock_lag\n stock_returns = stock_returns.tail(stock_lag.shape[0])\n print \"shape of stock_returns after dropna: \",stock_returns.shape[0]\n\n\n # Determine stock movement direction and lagging movement\n stock_movement = pd.DataFrame(index=stock_returns.index)\n stock_movement['Movement_0'] = np.sign(stock_returns['Day Returns'])\n stock_movement['Movement_0'][0] = 1\n for i in range(0, pred_period):\n column_label = 'Movement_{:d}'.format(i + 1)\n stock_movement[column_label] = stock_movement['Movement_0'].shift(i + 1)\n\n # Removes NaNs from 'stock_movement' and resizes 'stocks_returns' and 'stock_lag' accordingly\n print \"shape of stock_movement before dropna: \",stock_movement.shape[0]\n stock_movement = stock_movement.dropna(axis=0, how='any')\n print \"shape of stock_movement after dropna: \",stock_movement.shape[0]\n\n stock_returns = stock_returns[stock_returns.index <= stock_movement.index[stock_movement.index.__len__() - 1]]\n stock_returns = stock_returns.tail(stock_movement.shape[0])\n stock_lag = stock_lag[stock_lag.index <= stock_movement.index[stock_movement.index.__len__() - 1]]\n stock_lag = stock_lag.tail(stock_movement.shape[0])\n\n return stock_data, stock_returns, stock_lag, stock_movement", "def filter_features_and_samples(data: pd.DataFrame, drop_features: float = 0.1, drop_samples: float = 0.1):\n logger = get_logger()\n # check arguments\n if drop_features < 0 or drop_features >= 1:\n raise ValueError(\"Incorrect value od 'drop_feature', expected value in range [0,1)\")\n if drop_samples < 0 or drop_samples >= 1:\n raise ValueError(\"Incorrect value od 'drop_samples', expected value in range [0,1)\")\n\n before = data.shape\n # drop features if needed\n nans = pd.isna(data).values\n data.drop(data.index[np.sum(nans, axis=1) / nans.shape[1] > drop_features], axis=0, inplace=True)\n\n # drop samples if needed\n nans = pd.isna(data).values\n data.drop(data.columns[np.sum(nans, axis=0) / nans.shape[0] > drop_samples], axis=1, inplace=True)\n\n logger.info(\"Number of dropped rows/features: {}\".format(before[0] - data.shape[0]))\n logger.info(\"Number of dropped columns/samples: {}\".format(before[1] - data.shape[1]))\n logger.info(\"Data shape: {}\".format(data.values.shape))\n\n return data", "def generate_features(df):\n df_new = pd.DataFrame()\n \n # 6 original features\n df_new['open'] = 
df['open']\n    df_new['open_1'] = df['open'].shift(1)\n    df_new['close_1'] = df['close'].shift(1)\n    df_new['high_1'] = df['high'].shift(1)\n    df_new['low_1'] = df['low'].shift(1)\n    df_new['volume_1'] = df['volume'].shift(1)\n    \n    # 50 original features\n    # average price\n    df_new['avg_price_5'] = df['close'].rolling(window=5).mean().shift(1)\n    df_new['avg_price_30'] = df['close'].rolling(window=21).mean().shift(1)\n    df_new['avg_price_90'] = df['close'].rolling(window=63).mean().shift(1)\n    df_new['avg_price_365'] = df['close'].rolling(window=252).mean().shift(1)\n    \n    # average price ratio\n    df_new['ratio_avg_price_5_30'] = df_new['avg_price_5'] / df_new['avg_price_30']\n    df_new['ratio_avg_price_5_90'] = df_new['avg_price_5'] / df_new['avg_price_90']\n    df_new['ratio_avg_price_5_365'] = df_new['avg_price_5'] / df_new['avg_price_365']\n    df_new['ratio_avg_price_30_90'] = df_new['avg_price_30'] / df_new['avg_price_90']\n    df_new['ratio_avg_price_30_365'] = df_new['avg_price_30'] / df_new['avg_price_365']\n    df_new['ratio_avg_price_90_365'] = df_new['avg_price_90'] / df_new['avg_price_365']\n    \n    \n    # average volume\n    df_new['avg_volume_5'] = df['volume'].rolling(window=5).mean().shift(1)\n    df_new['avg_volume_30'] = df['volume'].rolling(window=21).mean().shift(1)\n    df_new['avg_volume_90'] = df['volume'].rolling(window=63).mean().shift(1)\n    df_new['avg_volume_365'] = df['volume'].rolling(window=252).mean().shift(1)\n    \n    #average volume ratio\n    df_new['ratio_avg_volume_5_30'] = df_new['avg_volume_5'] / df_new['avg_volume_30']\n    df_new['ratio_avg_volume_5_90'] = df_new['avg_volume_5'] / df_new['avg_volume_90']\n    df_new['ratio_avg_volume_5_365'] = df_new['avg_volume_5'] / df_new['avg_volume_365']\n    df_new['ratio_avg_volume_30_90'] = df_new['avg_volume_30'] / df_new['avg_volume_90']\n    df_new['ratio_avg_volume_30_365'] = df_new['avg_volume_30'] / df_new['avg_volume_365']\n    df_new['ratio_avg_volume_90_365'] = df_new['avg_volume_90'] / df_new['avg_volume_365']\n    \n    \n    # standard deviation of prices\n    df_new['std_price_5'] = df['close'].rolling(window=5).std().shift(1)\n    df_new['std_price_30'] = df['close'].rolling(window=21).std().shift(1)\n    df_new['std_price_90'] = df['close'].rolling(window=63).std().shift(1)\n    df_new['std_price_365'] = df['close'].rolling(window=252).std().shift(1)\n    \n    # standard deviation ratio of prices\n    df_new['ratio_std_price_5_30'] = df_new['std_price_5'] / df_new['std_price_30']\n    df_new['ratio_std_price_5_90'] = df_new['std_price_5'] / df_new['std_price_90']\n    df_new['ratio_std_price_5_365'] = df_new['std_price_5'] / df_new['std_price_365']\n    df_new['ratio_std_price_30_90'] = df_new['std_price_30'] / df_new['std_price_90']\n    df_new['ratio_std_price_30_365'] = df_new['std_price_30'] / df_new['std_price_365']\n    df_new['ratio_std_price_90_365'] = df_new['std_price_90'] / df_new['std_price_365']\n    \n    \n    # standard deviation of volumes\n    df_new['std_volume_5'] = df['volume'].rolling(window=5).std().shift(1)\n    df_new['std_volume_30'] = df['volume'].rolling(window=21).std().shift(1)\n    df_new['std_volume_90'] = df['volume'].rolling(window=63).std().shift(1)\n    df_new['std_volume_365'] = df['volume'].rolling(window=252).std().shift(1)\n    \n    #standard deviation ratio of volumes\n    df_new['ratio_std_volume_5_30'] = df_new['std_volume_5'] / df_new['std_volume_30']\n    df_new['ratio_std_volume_5_90'] = df_new['std_volume_5'] / df_new['std_volume_90']\n    df_new['ratio_std_volume_5_365'] = df_new['std_volume_5'] / df_new['std_volume_365']\n    df_new['ratio_std_volume_30_90'] = 
df_new['std_volume_30'] / df_new['std_volume_90']\n df_new['ratio_std_volume_30_365'] = df_new['std_volume_30'] / df_new['std_volume_365']\n df_new['ratio_std_volume_90_365'] = df_new['std_volume_90'] / df_new['std_volume_365'] \n \n # return\n df_new['return_1'] = ((df['close'] - df['close'].shift(1)) / df['close'].shift(1)).shift(1)\n df_new['return_5'] = ((df['close'] - df['close'].shift(5)) / df['close'].shift(5)).shift(1)\n df_new['return_30'] = ((df['close'] - df['close'].shift(21)) / df['close'].shift(21)).shift(1)\n df_new['return_90'] = ((df['close'] - df['close'].shift(63)) / df['close'].shift(63)).shift(1) \n df_new['return_365'] = ((df['close'] - df['close'].shift(252)) / df['close'].shift(252)).shift(1)\n \n #average of return\n df_new['moving_avg_5'] = df_new['return_1'].rolling(window=5).mean()\n df_new['moving_avg_30'] = df_new['return_1'].rolling(window=21).mean()\n df_new['moving_avg_90'] = df_new['return_1'].rolling(window=63).mean()\n df_new['moving_avg_365'] = df_new['return_1'].rolling(window=252).mean()\n \n # the target\n df_new['close'] = df['close']\n df_new = df_new.dropna(axis=0)\n return df_new", "def make_time_features(ts, index=None, epoch=None, epoch_span=None):\n # input validation\n try:\n if len(ts) == 1:\n _singleton = True\n elif len(ts) > 1:\n _singleton = False\n elif len(ts) < 1:\n raise ValueError(\"must pass non-empty iterable of timestamps\")\n except TypeError:\n return make_time_features([ts], index=index, epoch=epoch, epoch_span=epoch_span)\n\n if not isinstance(ts, pd.DatetimeIndex):\n ts = pd.Series(0, index=ts).index\n if not isinstance(ts, pd.DatetimeIndex):\n raise ValueError(\"must pass non-empty iterable of timestamps\")\n\n if index is None:\n index = pd.RangeIndex(len(ts))\n if epoch is None:\n epoch = min(ts)\n if epoch_span is None:\n epoch_span = float((end - epoch).total_seconds())\n\n time_features = {}\n start = min(ts)\n end = max(ts)\n\n # Major US holidays\n NewYearsDay = pd.tseries.holiday.Holiday('New Years Day', month=1, day=1)\n MemorialDay = pd.tseries.holiday.Holiday('Memorial Day', month=6, day=1, offset=pd.DateOffset(weekday=MO(-1)))\n IndependenceDay = pd.tseries.holiday.Holiday('Independence Day', month=7, day=4)\n LaborDay = pd.tseries.holiday.Holiday('Labor Day', month=9, day=1, offset=pd.DateOffset(weekday=MO(1)))\n ThanksgivingDay = pd.tseries.holiday.Holiday('Thanksgiving Day', month=11, day=1, offset=pd.DateOffset(weekday=TH(4)))\n ChristmasDay = pd.tseries.holiday.Holiday('Christmas Day', month=12, day=25)\n holidays = \\\n NewYearsDay.dates(start.date(), end.date()).tolist() +\\\n MemorialDay.dates(start.date(), end.date()).tolist() +\\\n IndependenceDay.dates(start.date(), end.date()).tolist() +\\\n LaborDay.dates(start.date(), end.date()).tolist() +\\\n ThanksgivingDay.dates(start.date(), end.date()).tolist() +\\\n ChristmasDay.dates(start.date(), end.date()).tolist()\n holidays = set([h.date() for h in holidays])\n\n # projections onto unit circle\n time_features['day_cos'] = np.cos((ts.hour * 3600 + ts.minute * 60 + ts.second) * 2 * np.pi / 86400.)\n time_features['day_sin'] = np.sin((ts.hour * 3600 + ts.minute * 60 + ts.second) * 2 * np.pi / 86400.)\n time_features['week_cos'] = np.cos(ts.dayofweek * 2 * np.pi / 7.)\n time_features['week_sin'] = np.sin(ts.dayofweek * 2 * np.pi / 7.)\n time_features['year_cos'] = np.cos(ts.dayofyear * 2 * np.pi / 365.)\n time_features['year_sin'] = np.sin(ts.dayofyear * 2 * np.pi / 365.)\n # linear march through time\n time_features['epoch'] = (ts - epoch).total_seconds() 
/ epoch_span\n # workday indicator\n time_features['workday'] = [int(weekday < 5 and date not in holidays) for weekday, date in zip(ts.weekday, ts.date)]\n\n if _singleton:\n return {k: v[0] for k, v in time_features.iteritems()}\n else:\n return pd.DataFrame(time_features, index=index)", "def compute_features(\n train_dir, test_dir, output_dir, df_config, feature_config_list, filter_by_month=True, compute_load_ratio=False,\n):\n time_col_name = df_config[\"time_col_name\"]\n\n output_train_dir = os.path.join(output_dir, \"train\")\n output_test_dir = os.path.join(output_dir, \"test\")\n if not os.path.isdir(output_train_dir):\n os.mkdir(output_train_dir)\n if not os.path.isdir(output_test_dir):\n os.mkdir(output_test_dir)\n\n train_base_df = pd.read_csv(os.path.join(train_dir, TRAIN_BASE_FILE), parse_dates=[time_col_name])\n\n for i in range(1, NUM_ROUND + 1):\n train_file = os.path.join(train_dir, TRAIN_FILE_PREFIX + str(i) + \".csv\")\n test_file = os.path.join(test_dir, TEST_FILE_PREFIX + str(i) + \".csv\")\n\n train_delta_df = pd.read_csv(train_file, parse_dates=[time_col_name])\n test_round_df = pd.read_csv(test_file, parse_dates=[time_col_name])\n\n train_all_features, test_all_features = compute_features_one_round(\n train_base_df,\n train_delta_df,\n test_round_df,\n df_config,\n feature_config_list,\n FEATURE_MAP,\n filter_by_month,\n compute_load_ratio,\n )\n\n train_output_file = os.path.join(output_dir, \"train\", TRAIN_FILE_PREFIX + str(i) + \".csv\")\n test_output_file = os.path.join(output_dir, \"test\", TEST_FILE_PREFIX + str(i) + \".csv\")\n\n train_all_features.to_csv(train_output_file, index=False)\n test_all_features.to_csv(test_output_file, index=False)\n\n print(\"Round {}\".format(i))\n print(\"Training data size: {}\".format(train_all_features.shape))\n print(\"Testing data size: {}\".format(test_all_features.shape))\n print(\"Minimum training timestamp: {}\".format(min(train_all_features[time_col_name])))\n print(\"Maximum training timestamp: {}\".format(max(train_all_features[time_col_name])))\n print(\"Minimum testing timestamp: {}\".format(min(test_all_features[time_col_name])))\n print(\"Maximum testing timestamp: {}\".format(max(test_all_features[time_col_name])))\n print(\"\")", "def engineer_features(dataframe, holiday_dates, columns, time_lags=24, \n steps_ahead=1, drop_nan_rows=True):\n \n # Make a copy of the original dataframe\n data = dataframe[columns].copy()\n \n # Features engineering\n for col in data.columns:\n for i in range(1, time_lags+1):\n # Shift data by lag of 1 to time_lags (default: 24) hours\n data[col+'_{:d}h'.format(i)] = data[col].shift(periods=i) # time-lag\n data[col+'_diff'] = data[col].diff() # first-difference\n data[col+'_week'] = data[col].shift(periods=24*7) # previous week\n \n # Hour-of-day indicators with cyclical transform\n dayhour_ind = data.index.hour\n data['hr_sin'] = np.sin(dayhour_ind*(2.*np.pi/24))\n data['hr_cos'] = np.cos(dayhour_ind*(2.*np.pi/24))\n \n # Day-of-week indicators with cyclical transform\n weekday_ind = data.index.weekday\n data['week_sin'] = np.sin(weekday_ind*(2.*np.pi/7))\n data['week_cos'] = np.cos(weekday_ind*(2.*np.pi/7))\n\n # Weekend as a binary indicator\n data['weekend'] = np.asarray([0 if ind <= 4 else 1 for ind in weekday_ind])\n\n # Month indicators with cyclical transform\n month_ind = data.index.month\n data['mnth_sin'] = np.sin((month_ind-1)*(2.*np.pi/12))\n data['mnth_cos'] = np.cos((month_ind-1)*(2.*np.pi/12))\n \n # Holidays as a binary indicator\n data['holidays'] = 0\n for 
holiday, date in holiday_dates.items():\n if date[1] is None:\n # Single day\n data.loc[date[0], 'holidays'] = 1\n else:\n # Date range\n data.loc[date[0]:date[1], 'holidays'] = 1\n \n # Forecast horizont\n if steps_ahead == 1:\n # Single-step forecasting\n data['Load+0h'] = data['Load'].values\n else:\n # Multi-step forecasting\n for i in range(steps_ahead):\n data['Load'+'+{:d}h'.format(i)] = data['Load'].shift(-i)\n del data['Load']\n \n if drop_nan_rows:\n # Drop rows with NaN values\n data.dropna(inplace=True)\n \n return data", "def featuretest(self, args):\n db_engine = create_engine(self.root.db_url)\n feature_config = yaml.load(args.feature_config_file)\n\n FeatureGenerator(db_engine, 'features_test').create_features_before_imputation(\n feature_aggregation_config=feature_config,\n feature_dates=[args.as_of_date]\n )\n logging.info('Features created for feature_config %s and date %s', feature_config, args.as_of_date)", "def extract_features(time_series, window):\n if not tsd_common.is_standard_time_series(time_series, window):\n # add your report of this error here...\n\n return []\n\n # spilt time_series\n split_time_series = tsd_common.split_time_series(time_series, window)\n # nomalize time_series\n normalized_split_time_series = tsd_common.normalize_time_series(split_time_series)\n max_min_normalized_time_series = tsd_common.normalize_time_series_by_max_min(split_time_series)\n s_features = statistical_features.get_statistical_features(normalized_split_time_series[4])\n f_features = fitting_features.get_fitting_features(normalized_split_time_series)\n c_features = classification_features.get_classification_features(max_min_normalized_time_series)\n # combine features with types\n features = s_features + f_features + c_features\n return features", "def window_filter(df, date_col, start, end):\n date_format = '%Y%m%d'\n start_date = datetime.strptime(str(start), date_format)\n end_date = datetime.strptime(str(end), date_format)\n return df[(df[date_col] >= start_date) & (df[date_col] <= end_date)]", "def clean_features(dataframe, features, target=None, fill=None):\n\n # Copy and split data frames\n X_df = dataframe[features].copy()\n if target:\n y_df = dataframe[target].copy()\n\n # Create dummy features\n dummies = DUMMY_FEATS.intersection(set(features))\n if dummies:\n X_df = one_hot(X_df, dummies)\n \n # Fill missing dummy features\n if fill:\n X_df = fill_features(X_df, fill)\n\n # Replace YearMade == 1000 with NaN\n if 'YearMade' in features:\n X_df.loc[X_df['YearMade'] == 1000, 'YearMade'] = X_df.loc[X_df['YearMade'] > 1000, 'YearMade'].median()\n\n # Parse year from datetime sold\n if 'saledate' in features:\n X_df['SaleYear'] = pd.to_datetime(X_df['saledate']).dt.year\n X_df['SaleMonth'] = pd.to_datetime(X_df['saledate']).dt.month\n X_df.drop('saledate', axis=1, inplace=True)\n\n ## All features\n # Impute NaN values with median\n X_df.fillna(X_df.median(axis=0), axis=0, inplace=True)\n\n if target:\n return X_df, y_df\n else:\n return X_df", "def fes_date_filter(start_date='1900-01-01', stop_date='2100-01-01',\n constraint='overlaps'):\n if constraint == 'overlaps':\n propertyname = 'apiso:TempExtent_begin'\n start = fes.PropertyIsLessThanOrEqualTo(propertyname=propertyname,\n literal=stop_date)\n propertyname = 'apiso:TempExtent_end'\n stop = fes.PropertyIsGreaterThanOrEqualTo(propertyname=propertyname,\n literal=start_date)\n elif constraint == 'within':\n propertyname = 'apiso:TempExtent_begin'\n start = 
fes.PropertyIsGreaterThanOrEqualTo(propertyname=propertyname,\n literal=start_date)\n propertyname = 'apiso:TempExtent_end'\n stop = fes.PropertyIsLessThanOrEqualTo(propertyname=propertyname,\n literal=stop_date)\n return start, stop", "def reduce_dfs_to_feat_subset(self, feat_subset):\n reduced_dfs = []\n\n for df in self:\n reduced_dfs.append(df.reindex(columns=feat_subset))\n\n # keep only categorical feats that are present in the feat_subset\n categorical_feats = list(set(self.categorical_feats).intersection(feat_subset))\n\n return DataCollection(reduced_dfs, self.df_names, categorical_feats)", "def get_cut_level_dataframe(start, end, level):\n entries = get_cut_dataframe(start, end)\n data = entries.copy()\n break_point = break_level(start, end, level)\n if len(break_point) != 0:\n data['datetime'] = data['to'].map(ts2date)\n\n ind = 0\n new_row = 0\n for index, row in entries.iterrows():\n if row['to'] == break_point[ind]:\n ind += 1\n elif row['from'] < break_point[ind] < row['to']:\n row = entries.iloc[[index]].copy()\n row.loc[index, 'delta'] = abs(break_point[ind]-row.loc[index, 'from'])\n row.loc[index, 'to'] = break_point[ind]\n row.loc[index, 'datetime'] = ts2date(row.loc[index, 'from'])\n new_ind = index + new_row\n data.loc[new_ind, 'delta'] = abs(break_point[ind]-data.loc[new_ind, 'to'])\n data.loc[new_ind, 'from'] = break_point[ind]\n data = pd.concat([data[:new_ind], row, data[new_ind:]])\n data = data.reset_index(drop=True)\n new_row += 1\n ind += 1\n if ind >= len(break_point):\n break\n\n data['datetime'] = data['from'].map(ts2datetime)\n data['date_agg'] = data['from'].map(lambda x: ts2str_level(x, level))\n return data", "def drop_dfcol(self, drop_list):\n self.data = self.df\n for lbl in drop_list:\n self.data = self.data.drop(lbl, axis=1)\n self.n_features = np.shape(self.data)[1]", "def add_features(df):\n \n assert df.columns.str.contains(\"query|value|keyword|ranking|timestamp|geo\").all(), \"Add features failed. 
\\\n Missing one of [query, value, keyword, ranking, timestamp, geo]\"\n \n # feature engineering: totals and normalize\n grouped = df.groupby(['ranking']).value # group values by ranking\n df['value_total'] = grouped.transform('sum') # total sum \n df['value_normalized'] = (df.value-grouped.transform('min'))/(grouped.transform('max')-grouped.transform('min')) # normalize \n df['value_normalized_total'] = df.groupby(['ranking']).value_normalized.transform('sum') # total sum of normalized values \n df['date'] = pd.to_datetime(df.query_timestamp).dtd\n \n return df", "def add_time_features(df_kek):\n df = pd.DataFrame([])\n df['hour'] = df_kek['OrderedDate'].dt.hour\n df['dow'] = df_kek['OrderedDate'].dt.dayofweek\n df['weekend'] = (df['dow'] >= 6) | (df_kek['OrderedDate'] == '2020-02-22') | (\n df_kek['OrderedDate'] == '2020-02-24') | (df_kek['OrderedDate'] == '2020-03-09') | (\n df_kek['OrderedDate'] >= '2020-03-30') | (df_kek['OrderedDate'] == '2020-03-07')\n return df", "def add_features(df_in, rolling_win_size=15):\n\n sensor_cols = []\n index = df_in.columns.get_loc('TTF')\n for i in df_in.columns[2:index]:\n sensor_cols.append(i)\n\n sensor_av_cols = [nm+'_av' for nm in sensor_cols]\n sensor_sd_cols = [nm+'_sd' for nm in sensor_cols]\n\n df_out = pd.DataFrame()\n\n ws = rolling_win_size\n\n #calculate rolling stats for each engine id\n\n for m_id in pd.unique(df_in.Turbine_ID):\n\n # get a subset for each engine sensors\n df_engine = df_in[df_in['Turbine_ID'] == m_id]\n df_sub = df_engine[sensor_cols]\n\n # get rolling mean for the subset\n av = df_sub.rolling(ws, min_periods=1).mean()\n av.columns = sensor_av_cols\n\n # get the rolling standard deviation for the subset\n sd = df_sub.rolling(ws, min_periods=1).std().fillna(0)\n sd.columns = sensor_sd_cols\n\n # combine the two new subset dataframes columns to the engine subset\n new_ftrs = pd.concat([df_engine,av,sd], axis=1)\n\n # add the new features rows to the output dataframe\n df_out = pd.concat([df_out,new_ftrs])\n df_out = df_out.sort_values(by=['Turbine_ID', 'Date'] )\n return df_out", "def new_features(df):\n print(\"Add new features ...\")\n # distinguish Spring, Fall and pregnant females (don't care about juvenilles/unknown)\n df[\"gender_plus\"] = df[\"Gender\"]\n df.loc[df.Gravid, \"gender_plus\"] = \"f_gra\"\n\n df[\"gender_seasons\"] = df[\"Gender\"]\n df.loc[df.Gravid, \"gender_seasons\"] = \"f_gra\"\n\n # add features\n df[\"Age_To_Weight\"] = df[\"Annuli\"] / df[\"Weight\"]\n\n # Calcuate Number of recaptures\n df_captures = df[[\"ID\", \"Date\"]].groupby(\"ID\").count()\n df_captures.columns = [\"recapture_count\"]\n df_captures.reset_index(inplace=True)\n df = pd.merge(df, df_captures, how=\"outer\", on=\"ID\")\n\n # recalculate annuli\n df_min = pd.pivot_table(\n df[df.Annuli > 0],\n values=[\"Date\", \"Annuli\"],\n index=[\"ID\"],\n aggfunc={\"Date\": min, \"Annuli\": min},\n )\n df_min.columns = [\"annuli_min\", \"date_min\"]\n df_min.reset_index(inplace=True)\n\n df = pd.merge(df, df_min, how=\"outer\", on=\"ID\")\n df[\"year\"] = df.Date.map(lambda x: x.year)\n df[\"year_min\"] = df.date_min.map(lambda x: x.year)\n df[\"Annuli_orig\"] = df.Annuli\n df.Annuli = df.year - df.year_min + df.annuli_min\n df.Annuli = np.nan_to_num(df.Annuli)\n df[\"Annuli\"] = pd.to_numeric(df[\"Annuli\"], downcast=\"integer\")\n\n # Annuli Buckets\n buckets = 5\n interval = int(df[\"Annuli\"].max() / buckets)\n buckets = [i for i in range(0, df[\"Annuli\"].max() + interval, interval)]\n labels = [\"'{0} - {1}'\".format(i, i + 
interval) for i in buckets]\n df[\"Annuli_Group\"] = pd.cut(\n df.Annuli, buckets, labels=labels[:-1], include_lowest=True\n )\n\n return df", "def at(self, time_slices):\n\n if self.base is not None:\n return self.base.at(time_slices)\n\n if isinstance(time_slices, TimeSlice):\n time_slices = [time_slices]\n\n # join the time slice values\n timed_data = pd.DataFrame(columns=self.data.columns)\n\n # make the new data\n for slice_t in time_slices:\n slice_index = (slice_t.time <= self.data.index) & (\n self.data.index < slice_t.time + slice_t.duration\n )\n timed_data.loc[slice_t.time] = self.aggregate(\n self.data[slice_index], axis=0\n )\n\n # return the new feature object\n return Feature(\n data=timed_data,\n aggregate=self.aggregate,\n base=self,\n time_slices=time_slices,\n )", "def add_change_features(df):\n # copy input for comparison of outputs\n df_copy = df.copy()\n\n # calculate interval change features\n df_copy[\"Duration_Start\"] = (\n df_copy[\"Schedule_Start\"] - df_copy[\"Design_Start\"]\n ).dt.days\n df_copy[\"Duration_End\"] = (\n df_copy[\"Schedule_End\"] - df_copy[\"Design_Start\"]\n ).dt.days\n df_copy[\"Schedule_Change\"] = (\n df_copy[\"Duration_End\"] - df_copy[\"Duration_Start\"]\n )\n df_copy[\"Budget_Change\"] = df_copy[\"Budget_End\"] - df_copy[\"Budget_Start\"]\n\n # define schedule change ratio\n df_copy[\"Schedule_Change_Ratio\"] = (\n df_copy[\"Schedule_Change\"] / df_copy[\"Duration_Start\"]\n )\n # define budget change ratio\n df_copy[\"Budget_Change_Ratio\"] = (\n df_copy[\"Budget_Change\"] / df_copy[\"Budget_Start\"]\n )\n\n # define project metrics\n df_copy[\"Budget_Abs_Per_Error\"] = (\n df_copy[\"Budget_Start\"] - df_copy[\"Budget_End\"]\n ).abs() / df_copy[\"Budget_End\"]\n\n df_copy[\"Budget_Rel_Per_Error\"] = (\n df_copy[\"Budget_Start\"] - df_copy[\"Budget_End\"]\n ).abs() / df_copy[\"Budget_Start\"]\n\n df_copy[\"Duration_End_Ratio\"] = (\n df_copy[\"Duration_End\"] / df_copy[\"Duration_Start\"]\n )\n df_copy[\"Budget_End_Ratio\"] = (\n df_copy[\"Budget_End\"] / df_copy[\"Budget_Start\"]\n )\n\n # previously titled 'Mark Metric'\n df_copy[\"Duration_Ratio_Inv\"] = (\n df_copy[\"Duration_Start\"] / df_copy[\"Duration_End\"]\n ) - 1\n df_copy[\"Budget_Ratio_Inv\"] = (\n df_copy[\"Budget_Start\"] / df_copy[\"Budget_End\"]\n ) - 1\n\n return df_copy", "def _add_lagged_features(self, X: pd.DataFrame, lags: list) -> pd.DataFrame:\n for l in lags:\n X[f'sales_lag_{l + self.shift_days}'] = (X[['id', 'sales', 'd']]\n .groupby('id')['sales']\n .transform(lambda x: x.shift(l + self.shift_days))\n .fillna(0))\n return X", "def downsample(self, target, **kwargs):\n df_ds = downsample(\n self, sampling_freq=self.sampling_freq, target=target, **kwargs\n ).__finalize__(self)\n df_ds.sampling_freq = target\n\n if self.features is not None:\n ds_features = downsample(\n self.features, sampling_freq=self.sampling_freq, target=target, **kwargs\n )\n else:\n ds_features = self.features\n df_ds.features = ds_features\n return df_ds\n # return self.__class__(df_ds, sampling_freq=target, features=ds_features)", "def add_time_features(self, year=False, month=False, week=True, tod=True, dow=True):\n\n var_to_expand = []\n\n if self.preprocessed_data.empty:\n data = self.original_data\n else:\n data = self.preprocessed_data\n\n if year:\n data[\"year\"] = data.index.year\n var_to_expand.append(\"year\")\n if month:\n data[\"month\"] = data.index.month\n var_to_expand.append(\"month\")\n if week:\n data[\"week\"] = data.index.week\n var_to_expand.append(\"week\")\n if 
tod:\n data[\"tod\"] = data.index.hour\n var_to_expand.append(\"tod\")\n if dow:\n data[\"dow\"] = data.index.weekday\n var_to_expand.append(\"dow\")\n\n # One-hot encode the time features\n for var in var_to_expand:\n \n add_var = pd.get_dummies(data[var], prefix=var, drop_first=True)\n \n # Add all the columns to the model data\n data = data.join(add_var)\n\n # Drop the original column that was expanded\n data.drop(columns=[var], inplace=True)\n\n self.preprocessed_data = data", "def cut_train(self, hits, *args):\n n_days = self.predict_window + self.train_window\n # How much free space we have to choose starting day\n free_space = self.inp.data_days - n_days - self.back_offset - self.start_offset\n if self.verbose:\n lower_train_start = self.inp.data_start + pd.Timedelta(self.start_offset, 'D')\n lower_test_end = lower_train_start + pd.Timedelta(n_days, 'D')\n lower_test_start = lower_test_end - pd.Timedelta(self.predict_window, 'D')\n upper_train_start = self.inp.data_start + pd.Timedelta(free_space - 1, 'D')\n upper_test_end = upper_train_start + pd.Timedelta(n_days, 'D')\n upper_test_start = upper_test_end - pd.Timedelta(self.predict_window, 'D')\n print(f\"Free space for training: {free_space} days.\")\n print(f\" Lower train {lower_train_start}, prediction {lower_test_start}..{lower_test_end}\")\n print(f\" Upper train {upper_train_start}, prediction {upper_test_start}..{upper_test_end}\")\n # Random starting point\n offset = tf.random_uniform((), self.start_offset,self.start_offset + free_space+1, dtype=tf.int32, seed=self.rand_seed)\n end = offset + n_days\n # Cut all the things\n return self.cut(hits, offset, end) + args", "def getTimePointFeatures(self):\r\n\r\n def quarterToFeature():\r\n quarter = np.asarray([[0] * 4])\r\n if self.month in [12, 1, 2]:\r\n quarter[:, 0] = 1\r\n elif self.month in [3, 4, 5]:\r\n quarter[:, 1] = 1\r\n elif self.month in [6, 7, 8]:\r\n quarter[:, 2] = 1\r\n else:\r\n quarter[:, 3] = 1\r\n return quarter\r\n\r\n # Mon=0 tue=1 wed=2 thu=3 sun=6\r\n def dayToFeature(day):\r\n feature = np.asarray([[0] * 3])\r\n if day == 0 or day == 4:\r\n # Day is Mon or Fri\r\n feature[:, 0] = 1\r\n elif 0 < day < 4:\r\n # Day is Tue, Wed, Thu\r\n feature[:, 1] = 1\r\n else:\r\n # Weekend\r\n feature[:, 2] = 1\r\n return feature\r\n\r\n # Can split time of day as night and 4 halves\r\n def timeToFeature(time):\r\n feature = np.asarray([[0] * 17])\r\n if time >= 22 or time <= 5:\r\n feature[:, 0] = 1\r\n else:\r\n feature[:, time - 5] = 1\r\n return feature\r\n\r\n return np.concatenate((timeToFeature(self.hour).flatten(),\r\n dayToFeature(self.weekDay).flatten(),\r\n quarterToFeature().flatten()))", "def create_features_from_transaction_timestamp(data):\n utils.save_log('{0} :: {1}'.format(\n create_features_from_transaction_timestamp.__module__,\n create_features_from_transaction_timestamp.__name__))\n\n data = data.withColumn('TransactionHour',\n hour(data[config.feature_column_timestamp]))\n data = data.withColumn('TransactionDayOfWeek',\n dayofweek(data[config.feature_column_timestamp]))\n data = data.withColumn('TransactionDayOfYear',\n dayofyear(data[config.feature_column_timestamp]))\n data = data.withColumn('TransactionWeekOfYear',\n weekofyear(data[config.feature_column_timestamp]))\n\n data = data.withColumn('WeekAction',\n when(col('TransactionWeekOfYear').\n between(50, 52), 1).\n otherwise(0))\n\n update_list_features(\"numerical\", ['TransactionHour',\n 'TransactionDayOfWeek',\n 'TransactionDayOfYear',\n 'TransactionWeekOfYear',\n 'WeekAction'])\n\n 
return data", "def choose_group(df, time_step=None, base=0, interval=None, gage=None, m=None, h=None, wet=False): \n if time_step is not None:\n resample_kwargs = get_resample_kwargs(df)\n if wet:\n resample_kwargs.update({'how':'sum'})\n df = df.resample(time_step, base=base, **resample_kwargs)\n \n date_time = get_index(df, 'date_time')[1]\n a, RG = get_index(df, 'RG')\n \n # Choose along gage axis\n if gage is None:\n df = df.mean(axis=a)\n else:\n try:\n df = df.loc[:,gage]\n except: \n df = df.loc[:,:,gage]\n try:\n a, RG = get_index(df, index='RG')\n except:\n pass\n \n # Group along time axis\n if interval is 'seasonal':\n if h is not None:\n gb = df.groupby(date_time.hour)\n if type(h) is list or type(h) is tuple:\n df = pd.concat([gb.get_group(n) for n in h])\n else:\n df = gb.get_group(h)\n date_time = get_index(df, 'date_time')[1]\n gb = df.groupby(date_time.month)\n if m is not None:\n try:\n gb = [(m, gb.get_group(m))]\n except:\n gb = [(month, gb.get_group(month)) for month in m]\n \n elif interval is 'diurnal': \n if m is not None:\n gb = df.groupby(date_time.month)\n if type(m) is list or type(m) is tuple:\n df = pd.concat([gb.get_group(n) for n in m])\n else:\n df = gb.get_group(m)\n date_time = get_index(df, 'date_time')[1]\n gb = df.groupby(date_time.hour)\n if h is not None:\n try:\n gb = [(h, gb.get_group(h))]\n except:\n gb = [(hour, gb.get_group(hour)) for hour in h]\n \n else:\n gb = [('all',df)]\n\n return gb", "def filter_gdf(\n gdf, rel_orbit_numbers=None, footprint_overlaps=None,\n start_date=None, end_date=None):\n if len(gdf) and start_date is not None:\n mask = gdf['sensing_end'] >= start_date\n gdf = gdf[mask]\n if len(gdf) and end_date is not None:\n mask = gdf['sensing_start'] <= end_date\n gdf = gdf[mask]\n if len(gdf) and rel_orbit_numbers is not None:\n mask = gdf.apply((lambda d: d['relative_orbit_number'] in rel_orbit_numbers), axis=1)\n gdf = gdf[mask]\n if len(gdf) and footprint_overlaps is not None:\n mask = gdf.intersects(footprint_overlaps)\n gdf = gdf[mask]\n return gdf", "def select_annotation_by_ts(csv_data, lbound=None, rbound=None, by=None):\n if by==None:\n if not lbound:\n lbound = csv_data[st_col].iloc[0] # iloc is faster than head() or tail()\n if not rbound:\n rbound = csv_data[et_col].iloc[-1]\n # start_flags = np.array(csv_data[et_col].apply(lambda x: x>lbound)) ## Note it's too slow\n flags = (csv_data[et_col] > lbound) & (csv_data[st_col] < rbound)\n # end_flags = np.array(csv_data[st_col].apply(lambda x:x<rbound)) ## Note it's too slow\n subset_annotation_data = csv_data[flags]\n # subset_annotation_data = subset_annotation_data.reset_index(drop=True) ## Don't reset index\n subset_annotation_data[st_col].iloc[0] = max(lbound,subset_annotation_data[st_col].iloc[0])\n subset_annotation_data[et_col].iloc[-1] = min(rbound,subset_annotation_data[et_col].iloc[-1])\n else:\n groupby_annotation = csv_data.groupby(by)\n subset_group_datas = []\n for group_name, group_data in groupby_annotation:\n if lbound == None:\n lbound = group_data[st_col].iloc[0]\n if rbound == None:\n rbound = group_data[et_col].iloc[-1]\n # start_flags = np.array(group_data[et_col].apply(lambda x: x>lbound)) ## Note it's too slow\n start_flags = group_data[et_col] > lbound\n # end_flags = np.array(group_data[st_col].apply(lambda x:x<rbound)) ## Note it's too slow\n end_flags = group_data[st_col] < rbound\n subset_group_data = group_data[np.logical_and(start_flags,end_flags)]\n subset_group_data[st_col].iloc[0] = max(lbound,subset_group_data[st_col].iloc[0])\n 
subset_group_data[et_col].iloc[-1] = min(rbound,subset_group_data[et_col].iloc[-1])\n # subset_group_data = subset_group_data.reset_index(drop=True) ## Don't reset index\n subset_group_datas.append(subset_group_data)\n subset_annotation_data = annotation_data_consolidator(subset_group_datas)\n return subset_annotation_data", "def stop_frequency_percent(connection, line_number, days_to_consider, date_range):\n sql = '''\n SELECT\n DATEPART(month, apc_date_time) as month_of_year,\n DATEPART ( day , apc_date_time ) as day_of_month,\n datepart(dy, [apc_date_time]) as 'day_of_year',\n apc_date_time,\n current_route_id,\n K.direction_code_id,\n dc.[direction_description],\n bs_id,\n ext_trip_id\n FROM\n [ACS_13].[dbo].[apc_correlated] K\n LEFT JOIN\n [ACS_13].[dbo].[direction_codes] dc\n on k.direction_code_id = dc.[direction_code_id]\n WHERE\n (apc_date_time between %s) and\n current_route_id = %d and\n bs_id != 0\n ORDER BY\n direction_code_id, bs_id, apc_date_time\n ''' % (date_range, line_number)\n\n trips_sampled = pd.read_sql(sql,connection).rename(columns={'bs_id':'STOP_ID','direction_description':'DIRECTION_NAME'})\n\n #Only consider certain days of the month\n trips_sampled = trips_sampled.loc[trips_sampled['day_of_month'].isin(days_to_consider),]\n \n #Add a time grouping\n trips_sampled['TIME_PERIOD'] = trips_sampled['apc_date_time'].apply(TIME_PERIOD)\n \n stops_visited_counts = trips_sampled.groupby([\n 'current_route_id','DIRECTION_NAME','TIME_PERIOD','STOP_ID'\n ])['ext_trip_id'].count().reset_index()\n stops_visited_counts.rename(columns={'ext_trip_id':'number_of_times_stopped'},inplace=True)\n \n trips_sampled_unique = trips_sampled.groupby([\n 'current_route_id','DIRECTION_NAME','day_of_year','TIME_PERIOD'\n ])['ext_trip_id'].nunique().reset_index()\n trips_sampled_count = trips_sampled_unique.groupby([\n 'current_route_id','DIRECTION_NAME','TIME_PERIOD'\n ])['ext_trip_id'].sum().reset_index()\n # stops_visited_counts\n trips_sampled_count.rename(columns={'ext_trip_id':'total_trips_sampled'},inplace=True)\n\n return stops_visited_counts, trips_sampled_count", "def _extract_features(self, row):\n ncep_data = self.ncep_data\n ncep_sfc_data = self.ncep_sfc_data\n date = row['date']\n features = dict(row)\n #reduce the dimensions of ncep_data(xarray dataset) by fixing coordinates(lon,lat)\n #and then convert it to dataframe\n ncep_data = ncep_data[date.year] \\\n .sel(lon=row['longitude'], lat=row['latitude'], method='nearest') \\\n .to_dask_dataframe() \\\n .compute() \\\n .set_index(['level','time'])\n #reduce the dimensions of ncep_sfc_data(xarray dataset) by fixing coordinates(lon,lat)\n #and then convert it to dataframe\n ncep_sfc_data = ncep_sfc_data[date.year] \\\n .sel(lon=row['longitude'], lat=row['latitude'], method='nearest') \\\n .to_dask_dataframe() \\\n .compute() \\\n .set_index(['time'])\n\n for level in self.levels:\n #features at different pressure level\n point = ncep_data.loc[level]\n p1w = point.rolling(7).mean() # 1 Week mean\n p2w = point.rolling(14).mean() # 2 Week mean\n p3w = point.rolling(21).mean() # 3 Week mean\n # \n v0w = point.loc[date]\n v1w = p1w.loc[date]\n v2w = p2w.loc[date]\n v3w = p3w.loc[date]\n #\n for data_var in self.ncep_data_vars:\n features[\"{0}_0w_lvl_{1}\".format(data_var,level)] = v0w[data_var]\n features[\"{0}_1w_lvl_{1}\".format(data_var,level)] = v1w[data_var]\n features[\"{0}_2w_lvl_{1}\".format(data_var,level)] = v2w[data_var]\n features[\"{0}_3w_lvl_{1}\".format(data_var,level)] = v3w[data_var]\n #features at surface level\n 
point = ncep_sfc_data\n p1w = point.rolling(7).mean() # 1 Week mean\n p2w = point.rolling(14).mean() # 2 Week mean\n p3w = point.rolling(21).mean() # 3 Week mean\n # \n v0w = point.loc[date]\n v1w = p1w.loc[date]\n v2w = p2w.loc[date]\n v3w = p3w.loc[date]\n #\n for data_var in self.ncep_sfc_data_vars:\n features[\"{0}_0w\".format(data_var)] = v0w[data_var]\n features[\"{0}_1w\".format(data_var)] = v1w[data_var]\n features[\"{0}_2w\".format(data_var)] = v2w[data_var]\n features[\"{0}_3w\".format(data_var)] = v3w[data_var] \n\n return features", "def feature_selection_gbt(df, threshold, cols_to_filter, label_col = 'label', pcg = 1.0):\n print(\"[Info] feature selection by Gradient Boosting may take a long time\")\n\n df = df.select(cols_to_filter + [label_col]).sample(withReplacement=False, fraction=pcg)\n\n df = only_numeric_columns(df, label_col=label_col)\n\n df.cache()\n\n print \"[Info] Number of rows in the DF: \" + str(df.count())\n\n input_cols = list(set(df.columns) - set([label_col]))\n\n assembler = VectorAssembler(inputCols=input_cols, outputCol='features')\n\n numTrees, maxDepth, minInstancesPerNode, maxBins, subsamplingRate, maxIter = param_selection(df)\n\n gb_model = GBTClassifier(maxDepth=maxDepth, minInstancesPerNode=minInstancesPerNode, maxBins=maxBins,\n subsamplingRate=subsamplingRate, maxIter=maxIter, stepSize=0.1,\n minInfoGain=0.0, lossType='logistic', labelCol = label_col)\\\n\n pipeline = Pipeline(stages=[assembler, gb_model])\n\n pipeline_model = pipeline.fit(df)\n\n from churn_nrt.src.projects_utils.models.modeler import getOrderedRelevantFeats\n\n feat_imp_nrt = getOrderedRelevantFeats(pipeline_model, input_cols, \"f\")\n\n n = threshold if (threshold >= 1) else round(threshold * len(feat_imp_nrt))\n\n num_cols = [f[0] for f in feat_imp_nrt][0:n]\n\n return num_cols", "def add_features(self, other_features, on=\"time_exchange\"):\n self.data = self.data.join(other_features, on=on).dropna()", "def get_feature_vector(user_id: str, session: str) -> DataFrame:\n\n #Find the time windows during which the reader is doing the desired task\n activity_data = read_file(user_id, session, 'Activity.csv')\n task_number = mode(activity_data['TaskID'])\n task_name = task_names[(task_number - 1) % len(task_names)]\n tap_windows = get_tap_events(user_id, session)\n data = get_user_session_data(user_id, session)\n add_magnitude_columns(data)\n add_columns_for_taps(data, tap_windows)\n mark_tap_start_and_end(data, delta_in_ms = 200)\n\n column_names = get_feature_names()\n\n #A feature vector for each tap, to be filled in subsequently:\n featureVectors = pd.DataFrame(columns = column_names)\n\n for tap_file in tap_file_names:\n tap_feature = tap_file_to_feature_name[tap_file]\n print(tap_feature)\n window_start_indices = data[data[tap_feature] == 4].index\n window_end_indices = data[data[tap_feature] == 5].index\n if len(window_start_indices) == 0:\n continue\n \n for i in range(len(window_start_indices)):\n start, end = window_start_indices[i], window_end_indices[i]\n window_of_interest = data[start : end + 1]\n features = feature_list(user_id, session, tap_feature, task_name, window_of_interest)\n if features != None:\n featureVectors.loc[featureVectors.shape[0]] = features\n \n return featureVectors", "def generate_features(self):\n\n # For each STFT timebin, divide data into three bins and get mean power\n data_array = np.array([])\n bl_array = np.array([])\n\n for trial in range(self.data_stft_norm.shape[-1]): # Each trial\n for tbin in range(self.data_stft_norm.shape[-2]): # 
Each timebin\n for ch in range(self.data_stft_norm.shape[0]):\n data_array = np.append(data_array,[\n np.mean(self.data_stft_norm[ch, :2, tbin, trial]),\n np.mean(self.data_stft_norm[ch, 3:8, tbin, trial]),\n np.mean(self.data_stft_norm[ch, 9:27, tbin, trial])])\n\n data_array = np.reshape(data_array, (-1, 18))\n\n for trial in range(self.bl_stft_norm.shape[-1]): # Each trial\n for tbin in range(self.bl_stft_norm.shape[-2]): # Each timebin\n for ch in range(self.bl_stft_norm.shape[0]):\n bl_array = np.append(bl_array, [\n np.mean(self.bl_stft_norm[ch, :2, tbin, trial]),\n np.mean(self.bl_stft_norm[ch, 3:8, tbin, trial]),\n np.mean(self.bl_stft_norm[ch, 9:27, tbin, trial])])\n bl_array = np.reshape(bl_array, (-1, 18))\n\n X = np.append(data_array, bl_array, axis=0)\n y = np.append(np.ones(data_array.shape[0]), np.zeros(bl_array.shape[0]))\n\n return X, y", "def limit_df(df, fs, start=None, stop=None):\n\n center_e, side_e = get_extrema(df)\n\n # Limit dataframe to xlim\n start = 0 if start is None else start\n\n df = df[df['sample_next_' + side_e].values >= start*fs]\n\n if stop is not None:\n df = df[df['sample_last_' + side_e].values < stop*fs]\n\n # Shift sample indices to start at 0\n df['sample_last_' + side_e] = df['sample_last_' + side_e] - int(fs * start)\n df['sample_next_' + side_e] = df['sample_next_' + side_e] - int(fs * start)\n df['sample_' + center_e] = df['sample_' + center_e] - int(fs * start)\n df['sample_zerox_rise'] = df['sample_zerox_rise'] - int(fs * start)\n df['sample_zerox_decay'] = df['sample_zerox_decay'] - int(fs * start)\n\n return df", "def feature_filter(df,feature, high = True):\r\n assert feature in [\"speechiness\",\r\n \"acousticness\",\r\n \"instrumentalness\",\r\n \"liveness\"], \"feature must be one of the following: speechiness,acousticness,instrumentalness,liveness\"\r\n #more features may be added\r\n x = 0.9 if high == True else 0.1\r\n df = df[df[feature] > x] if high == True else df[df[feature] < x]\r\n return df", "def datetime_features(\n s: pd.Series, result: Optional[pd.DataFrame] = None\n) -> pd.DataFrame:\n result = date_features(s, result)\n return time_features(s, result)", "def _apply_filters(self, df):\n df = df[(df['Date'] >= self.start_date) &\n (df['Date'] <= self.end_date)]\n return df", "def make_features(self, x_hits, y_hits, dow, lagged_hits, pf_age, pf_si, pf_network, pf_gender, page_ix, pf_price_cat,\n page_popularity, quarter_autocorr):\n # Split day of week to train and test\n x_dow, y_dow = tf.split(dow, [self.train_window, self.predict_window], axis=0)\n\n # Normalize hits\n mean = tf.reduce_mean(x_hits)\n std = tf.sqrt(tf.reduce_mean(tf.squared_difference(x_hits, mean)))\n norm_x_hits = (x_hits - mean) / std\n norm_y_hits = (y_hits - mean) / std\n norm_lagged_hits = (lagged_hits - mean) / std\n\n # Split lagged hits to train and test\n x_lagged, y_lagged = tf.split(norm_lagged_hits, [self.train_window, self.predict_window], axis=0)\n\n # Combine all page features into single tensor\n stacked_features = tf.stack([page_popularity, quarter_autocorr])\n flat_ucdoc_features = tf.concat([pf_age, pf_si, pf_network, pf_gender, pf_price_cat, stacked_features], axis=0) #pf_region\n ucdoc_features = tf.expand_dims(flat_ucdoc_features, 0)\n\n # Train features\n x_features = tf.concat([\n # [n_days] -> [n_days, 1]\n tf.expand_dims(norm_x_hits, -1),\n x_dow,\n x_lagged,\n # Stretch ucdoc_features to all training days\n # [1, features] -> [n_days, features]\n tf.tile(ucdoc_features, [self.train_window, 1])\n ], axis=1)\n\n # Test 
features\n y_features = tf.concat([\n # [n_days] -> [n_days, 1]\n y_dow,\n y_lagged,\n # Stretch ucdoc_features to all testing days\n # [1, features] -> [n_days, features]\n tf.tile(ucdoc_features, [self.predict_window, 1])\n ], axis=1)\n\n return x_hits, x_features, norm_x_hits, x_lagged, y_hits, y_features, norm_y_hits, mean, std, flat_ucdoc_features, page_ix", "def get_fact_time_filtered(self, category, selected_option, \\\r\n start_tmstmp, \\\r\n end_tmstmp):\r\n try:\r\n conn = self.create_connection()\r\n query = \"\"\"WITH sub_category_lookup AS (\r\n\t\t\t\t\t\t\t\tSELECT id \r\n\t\t\t\t\t\t\t\tFROM categories \r\n\t\t\t\t\t\t\t\tWHERE category = '%s' \r\n\t\t\t\t\t\t\t\tAND sub_category = '%s')\t\r\n\t\t\t\t\t SELECT date_time,\r\n\t\t\t\t\t\t\t data \r\n\t\t\t\t FROM fact\r\n\t\t\t\t\t WHERE category_id = (select id FROM sub_category_lookup) \r\n\t\t\t\t\t AND (date_time>= '%s' AND date_time<'%s'\t)\t\t\t\t \r\n\t\t\t\t\t ORDER BY date_time ;\"\"\"%(category, selected_option, \\\r\n start_tmstmp, end_tmstmp)\r\n \r\n data_frame = pd.read_sql(query, conn)\r\n print(query)\r\n conn.close()\r\n except (psycopg2.Error, ValueError):\r\n print(\"Error occured at get_fact_time_filtered, check connection or query\")\r\n return data_frame", "def apply_bandpass_filter_timeseries(self, folder_name, indices, start_stop_freq, stop_stop_freq):\n (x_index, y_index) = indices\n photo_list = self.get_photo_list(folder_name)\n\n ts = self.get_pixel_timeseries(folder_name, (x_index, y_index))\n self.plot_fft_pixel_timeseries(folder_name, ts, str(x_index) + '_' + str(y_index) + 'pre_butterworth')\n n = len(ts)\n frequency = self.get_sampling_frequency(folder_name)\n d = 1.0 / frequency # 'sample spacing'\n fig, ax = plt.subplots()\n sample_freqs = np.fft.rfftfreq(n, d)\n fourier = np.fft.rfft(ts)\n print(sample_freqs)\n nyquist = frequency / 2.0\n\n start_stop_band = start_stop_freq / nyquist\n stop_stop_band = stop_stop_freq / nyquist\n\n print(start_stop_band)\n print(stop_stop_band)\n\n sos = sgnl.butter(2, Wn=[start_stop_band, stop_stop_band], btype='bandstop', output='sos')\n filtered = sgnl.sosfilt(sos, ts)\n self.plot_fft_pixel_timeseries(folder_name, filtered, str(x_index) + '_' + str(y_index) + 'post_butterworth')\n fig, ax = plt.subplots()\n indices = self.get_indices_from_filenames(folder_name)\n index_dates = dates.date2num(indices)\n ax.plot_date(index_dates, ts, xdate=True, linestyle='solid', marker='None',\n label=str(x_index) + ' , ' + str(y_index))\n ax.plot_date(index_dates, filtered, xdate=True, linestyle='solid', marker='None',\n label=str(x_index) + ' , ' + str(y_index) + ' filtered')\n\n ax.legend()\n ax.grid(b=True, which='major', color='#666666', linestyle='-')\n\n # Show the minor grid lines with very faint and almost transparent grey lines\n ax.minorticks_on()\n ax.grid(b=True, which='minor', color='#999999', linestyle='-', alpha=0.2)\n fig.set_figwidth(40)\n fig.savefig(self.parent_folder + 'analysis/timeseries_filtered_' + str(x_index) + '_' + str(y_index) + '.png')\n fig.savefig(self.parent_folder + 'analysis/timeseries_filtered_' + str(x_index) + '_' + str(y_index) + '.svg')\n fig.clf()", "def make_multi_lagger(lags, groupby_kwargs=None):\n laggers = [SingleLagger(l, groupby_kwargs=groupby_kwargs) for l in lags]\n feature_union = FeatureUnion([\n (repr(lagger), lagger) for lagger in laggers\n ])\n return feature_union", "def combine_features(df, lag_fea, lags, window_size, used_columns):\n lagged_fea = lagged_features(df[lag_fea], lags)\n moving_avg = 
moving_averages(df[lag_fea], 2, window_size)\n fea_all = pd.concat([df[used_columns], lagged_fea, moving_avg], axis=1)\n return fea_all", "def _bucket_builder_ddf(self, ddf):\n\n bins_left = np.vectorize(lambda x: x.left)\n min_val = np.floor(ddf[self.temp_var].min())\n max_val = np.ceil(ddf[self.temp_var].max())\n offset_scale = max_val + 2\n\n buckets = np.arange(min_val,\n max_val + offset_scale,\n self.temp_interval_size)\n\n bin_array = pd.cut(ddf[self.temp_var], \n bins=buckets, \n precision=0,\n include_lowest=True)\n\n df_ = pd.DataFrame({\n 'time': ddf.time,\n 'lat': ddf.lat,\n 'lon': ddf.lon,\n self.temp_var: ddf[self.temp_var],\n 'area_grid': ddf.area_grid,\n 'temp_bucket': bins_left(bin_array)\n })\n\n return df_", "def _filter_universe_from_data_for_prediction(self, data, current_timestamp, universe):\n current_date = current_timestamp.date()\n assets = []\n for idx, row in universe.iterrows():\n if row.start_date <= current_date <= row.end_date:\n assets = row.assets\n break\n\n filtered = {}\n for feature, df in data.items():\n filtered[feature] = df.drop(df.columns.difference(assets), axis=1)\n\n return filtered", "def features_past_generation(features_creation_function,\n days,\n feature_names_prefix,\n data,\n indices):\n matches_outcomes=[]\n for i,match_indice in enumerate(indices):\n match=data.iloc[match_indice,:]\n past_matches=data[(data.Date<match.Date)&(data.Date>=match.Date-datetime.timedelta(days=days))]\n match_features_outcome_1=features_creation_function(1,match,past_matches)\n match_features_outcome_2=features_creation_function(2,match,past_matches)\n matches_outcomes.append(match_features_outcome_1)\n matches_outcomes.append(match_features_outcome_2)\n if i%100==0:\n print(str(i)+\"/\"+str(len(indices))+\" matches treated. 
\"+ features_creation_function.__name__ + str(days))\n train=pd.DataFrame(matches_outcomes)\n train.columns=[feature_names_prefix + \"_\" + str(days) +\"_\" +str(i) for i in range(len(train.columns))]\n \n \n \n return train", "def get_features(self, ti=None, tf=None, n_jobs=1, drop_features=[], compute_only_features=[]):\n # initialise training interval\n self.drop_features = drop_features\n self.compute_only_features = compute_only_features\n self.n_jobs = n_jobs\n ti = self.ti_model if ti is None else datetimeify(ti)\n tf = self.tf_model if tf is None else datetimeify(tf)\n return self._load_data(ti, tf)", "def groups_of_train_test_set(df, config, fbprophet=None):\n \n tvar = config.variables['tvar']\n xvar = config.variables['xvar'] + config.variables['xvar_derived']\n \n begin_date = config.timestamps['begin_date']\n end_date = config.timestamps['end_date']\n deltat = config.timestamps['deltat']\n\n begin_date = datetime.datetime.strptime(begin_date, \"%Y-%m-%d %H:%M:%S\")\n end_date = datetime.datetime.strptime(end_date, \"%Y-%m-%d %H:%M:%S\")\n\n \n # Check if ustar threshold is provided for year of interest\n if config.data['ustar']==True:\n if not begin_date.year in config.data['ustar_map'].keys():\n raise ValueError('{} is missing from config/data/ustar_map'.format(begin_date.year))\n if not end_date.year in config.data['ustar_map'].keys():\n raise ValueError('{} is missing from config/data/ustar_map'.format(end_date.year))\n\n\n if (end_date - begin_date).days < deltat:\n raise ValueError(\"Time difference in days between begin and end date\" + \n \"must be greater than deltat.\")\n\n\n df = df.loc[df[tvar] <= end_date]\n\n number_of_train_test_sets = int((end_date - begin_date).total_seconds()/\\\n datetime.timedelta(deltat).total_seconds())\n\n begin_test_timestamp = begin_date\n \n\n test_df = []\n train_df = [] \n for i in range(number_of_train_test_sets):\n if i == number_of_train_test_sets-1:\n end_test_timestamp = end_date\n else:\n end_test_timestamp = None\n\n\n i_test_set, i_train_set, end_test_timestamp =\\\n _train_test_split(df.copy(), config,\n begin_test_timestamp,\n deltat, end_test_timestamp,\n fbprophet)\n begin_test_timestamp = end_test_timestamp\n \n # Interpolating where x-var is nan.\n i_test_set[xvar] = i_test_set[xvar].interpolate()\n i_train_set[xvar] = i_train_set[xvar].interpolate()\n \n\n i_test_set['Set_rank'] = i\n i_train_set['Set_rank'] = i\n \n if i == 0:\n test_df = i_test_set\n train_df = i_train_set\n else:\n test_df = pd.concat((test_df, i_test_set))\n train_df = pd.concat((train_df, i_train_set))\n\n\n return test_df, train_df", "def get_cdf_data(self):\n df = self.df_events.copy()\n df['event_time'] = df['event_time'].apply(self.parse_time_stamp) # convert strings to datetime objects\n # only get the rows with event_type_reason == \"user_pick_up\" and event_time between 6 am and 10 pm\n # also make sure dates are between the start and end period\n df = df[(df['event_type_reason'] == \"user_pick_up\") & (df['event_time'] >= iso8601.parse_date(self.start)) & (df['event_time'] <= iso8601.parse_date(self.end))]\n df['date'] = df['event_time'].apply(self.get_date).astype(str) # get date part of datetime object\n df['minute'] = df['event_time'].apply(self.get_minutes).astype(float)\n # consider only trips that began with operating hours\n df = df[(df['minute'] >= (6*60)) & (df['minute'] < (22*60))]\n return df[['date', 'minute']].reset_index(drop=True)", "def cleanup_outliers(d , feature , cutoff , max_outliers, preservevar, task):\n\n\t# 
Calculate SSD for all sample groups\n\tf = (d['Ignore'].eq(False)) & (d['Task'].str.lower() == task.lower())\n\td1 = d[f].groupby(['Sample Name' , 'Target Name']).agg({'CT': ['std']})\n\n\n\t# print(tabulate(d1, headers='keys', tablefmt='psql'))\n\tf = (d1['CT']['std'] >= cutoff)\n\td2 = d1[f]\n\t# print(tabulate(d2, headers='keys', tablefmt='psql'))\n\n\n\tif not d2.empty:\n\t\t# Mark all outliers\n\t\tfor i , row in enumerate(d2.itertuples(name=None) , 1):\n\t\t\tf = (d['Ignore'].eq(False)) & (d['Task'].str.lower() == task.lower()) \\\n\t\t\t\t& (d['Sample Name'] == row[0][0]) & (d['Target Name'] == row[0][1])\n\t\t\tdx_idx = d[f].index\n\t\t\tgroup_size = len(dx_idx)\n\t\t\tmin_size = round(group_size * (1 - max_outliers))\n\t\t\tsize = group_size\n\t\t\tif min_size < 2:\n\t\t\t\tmin_size = 2\n\t\t\twhile True:\n\t\t\t\tf = (d['Ignore'].eq(False)) & (d['Task'].str.lower() == task.lower()) \\\n\t\t\t\t\t& (d['Sample Name'] == row[0][0]) & (d['Target Name'] == row[0][1])\n\t\t\t\tdx = d[f].copy()\n\t\t\t\tdxg1 = d[f].groupby(['Sample Name' , 'Target Name']).agg({'CT': [np.size , 'std' , 'mean']})\n\t\t\t\tdxg2 = d[f].groupby(['Sample Name', 'Target Name']).agg({feature: [np.size, 'std', 'mean']})\n\t\t\t\t# print(tabulate(dxg1, headers='keys', tablefmt='psql'))\n\n\t\t\t\tif dxg1['CT']['std'].iloc[0] < cutoff:\n\t\t\t\t\t# CT std is under the threshold\n\t\t\t\t\tbreak\n\t\t\t\t# Will ignore one or all measurements\n\t\t\t\tsize -= 1\n\t\t\t\tif size < min_size:\n\t\t\t\t\t# Ignore the entire group of measurements\n\t\t\t\t\t# for j in dx_idx:\n\t\t\t\t\t# d['Ignore'].loc[j] = True\n\t\t\t\t\tbreak\n\t\t\t\t# Will remove the measurement which is furthest from the mean\n\t\t\t\tdx['Distance'] = (dx[feature] - dxg2[feature]['mean'].iloc[0]) ** 2\n\t\t\t\tj = dx.sort_values(by='Distance', ascending=False).index[0]\n\t\t\t\td['Outliers'].loc[j] = True\n\t\t\t\t# check if the outlier should be kept if mean has high variation\n\t\t\t\tif preservevar == 'True':\n\t\t\t\t\tif abs((dxg2[feature]['mean'].iloc[0]-dx[feature].median())/dx[feature].median()) < 0.1:\n\t\t\t\t\t\t# print('preserve: '+ str(abs((dxg2[feature]['mean'].iloc[0]-dx[feature].median())/dx[feature].median())))\n\t\t\t\t\t\td['Outliers'].loc[j] = False\n\n\treturn d[(d['Ignore'].eq(False))]", "def _create_feature_group(\n data: pd.DataFrame,\n layer_name: str,\n lat_column: str,\n long_column: str,\n icon_column: Optional[str],\n icon_map: IconMapper,\n popup_cols: List[str],\n tooltip_cols: List[str],\n def_layer_color: str,\n use_marker_cluster: bool = True,\n) -> folium.FeatureGroup:\n feature_group = folium.FeatureGroup(name=layer_name)\n if use_marker_cluster:\n container = MarkerCluster(name=layer_name)\n container.add_to(feature_group)\n else:\n container = feature_group\n data.apply(\n lambda row: folium.Marker(\n location=(row[lat_column], row[long_column]),\n tooltip=_create_marker_text(row, tooltip_cols),\n popup=_create_marker_text(row, popup_cols),\n icon=_create_mapped_icon(row, icon_column, icon_map, def_layer_color),\n ).add_to(feature_group),\n axis=1,\n )\n return feature_group", "def __init__(self, data_frame, mins_set):\n # super(FeaturePrevDelays, self).__init__()\n self.df = data_frame.copy()\n self.mins_set = mins_set", "def construct_training_data_query(self, operation='training'):\n # FUTURE: make dollar return target/features dynamic\n if self.feature_minutes_list == None or self.trade_window_list == None:\n raise Exception(\"To construct training data query, the optional feature_minutes_list and 
trade_window_list attributes must be set!\")\n \n feature_col_list = []\n target_col_list = []\n base_ctes_list = []\n feature_cte_list = []\n final_col_list = []\n interaction_features_list = []\n join_conditions_list = []\n\n # Limit rows returned when pulling scoring features\n limit_where_clause = ''\n limit_clause = ''\n if operation == 'scoring':\n limit_minutes = max(self.feature_minutes_list) + 10\n limit_clause = f'LIMIT {limit_minutes}'\n # trying to move away from the where clause - limits are faster\n limit_trade_minute = (time.time() / 60) - limit_minutes - (5*60) \n limit_where_clause = f'AND trade_minute > {limit_trade_minute}'\n elif self.training_period is not None:\n limit_minutes = self.training_period + max(self.feature_minutes_list)\n limit_clause = f'LIMIT {limit_minutes}'\n print(f\"Training data query being limited to the first {limit_minutes} minutes. Training period plus {max(self.feature_minutes_list)} (max feature interval)\")\n # trying to move away from the where clause - limits are faster\n limit_trade_minute = (time.time() / 60) - self.training_period - (5*60)\n limit_where_clause = f'AND trade_minute > {limit_trade_minute}'\n\n\n for pair_type, coin_pair in self.coin_pair_dict.items():\n \"\"\"\n pair_type: 'alt', 'target'\n \"\"\"\n base_features_list = []\n base_ctes_list.append(f\"\"\"\n {pair_type}_{coin_pair}_end_orderbook AS (\n SELECT trade_minute - 1 AS lag_trade_minute, * \n FROM binance.orderbook\n WHERE coin_pair = '{coin_pair}'\n ORDER BY trade_minute DESC \n {limit_clause}\n ),\n {pair_type}_{coin_pair}_beg_orderbook AS (\n SELECT * \n FROM binance.orderbook\n WHERE coin_pair = '{coin_pair}'\n ORDER BY trade_minute DESC \n {limit_clause}\n ),\n {pair_type}_{coin_pair}_candlesticks AS (\n SELECT *\n FROM binance.candledicks c\n WHERE coin_pair = '{coin_pair}'\n ORDER BY trade_minute DESC \n {limit_clause}\n )\"\"\")\n # Base target variable features\n if pair_type == 'target':\n base_features_list.append(f\"\"\"\n c.close_datetime AS {coin_pair}_trade_close_datetime\n , extract(isodow from c.close_datetime) as trade_day_of_week\n , date_part('hour', c.close_datetime) as trade_hour\n , c.close_datetime::date - current_date as days_old\n \"\"\")\n final_col_list.append(f\"\"\"\n {coin_pair}_trade_close_datetime\n , trade_day_of_week\n , trade_hour\n , days_old\n \"\"\")\n feature_col_list.extend(['trade_day_of_week', 'trade_hour', 'days_old'])\n # Base features\n base_features_list.append(f\"\"\"\n c.trade_minute AS {coin_pair}_trade_minute\n , quote_asset_volume as {coin_pair}_quote_asset_volume\n , taker_sell_volume_percentage * 100 AS {coin_pair}_taker_sell_volume_perc_of_total\n , trade_count as {coin_pair}_trade_count\n , o_end.bids_cum_50000_weighted_avg - o_beg.bids_cum_50000_weighted_avg AS {coin_pair}_crnt_interval_bids_50000_price_diff\n , o_end.bids_cum_50000_weighted_avg - o_end.asks_cum_50000_weighted_avg AS {coin_pair}_crnt_interval_bids_v_asks_50000_price_diff \n , o_end.bids_cum_50000_weighted_std - o_beg.bids_cum_50000_weighted_std AS {coin_pair}_crnt_interval_bids_50000_std_diff\n , o_end.bids_cum_50000_weighted_std - o_end.asks_cum_50000_weighted_std AS {coin_pair}_crnt_interval_bids_v_asks_50000_std_diff\n , o_end.bids_cum_50000_weighted_std / (o_end.bids_cum_50000_weighted_std + o_end.asks_cum_50000_weighted_std) AS {coin_pair}_crnt_bids_50000_std_perc_of_total\n , o_end.bids_cum_200000_weighted_std / (o_end.bids_cum_200000_weighted_std + o_end.asks_cum_200000_weighted_std) AS {coin_pair}_crnt_bids_200000_std_perc_of_total\n , 
(o_end.bids_cum_200000_weighted_std / (o_end.bids_cum_200000_weighted_std + o_end.asks_cum_200000_weighted_std) \n + LEAD(o_end.bids_cum_200000_weighted_std, 1) OVER (ORDER BY c.trade_minute DESC) / (LEAD(o_end.bids_cum_200000_weighted_std, 1) OVER (ORDER BY c.trade_minute DESC) + LEAD(o_end.asks_cum_200000_weighted_std, 1) OVER (ORDER BY c.trade_minute DESC)) \n + LEAD(o_end.bids_cum_200000_weighted_std, 2) OVER (ORDER BY c.trade_minute DESC) / (LEAD(o_end.bids_cum_200000_weighted_std, 2) OVER (ORDER BY c.trade_minute DESC) + LEAD(o_end.asks_cum_200000_weighted_std, 2) OVER (ORDER BY c.trade_minute DESC))\n + LEAD(o_end.bids_cum_200000_weighted_std, 3) OVER (ORDER BY c.trade_minute DESC) / (LEAD(o_end.bids_cum_200000_weighted_std, 3) OVER (ORDER BY c.trade_minute DESC) + LEAD(o_end.asks_cum_200000_weighted_std, 3) OVER (ORDER BY c.trade_minute DESC))\n + LEAD(o_end.bids_cum_200000_weighted_std, 4) OVER (ORDER BY c.trade_minute DESC) / (LEAD(o_end.bids_cum_200000_weighted_std, 4) OVER (ORDER BY c.trade_minute DESC) + LEAD(o_end.asks_cum_200000_weighted_std, 4) OVER (ORDER BY c.trade_minute DESC))\n ) / 5 AS {coin_pair}_bids_200000_std_perc_of_total_avg\n \"\"\")\n final_col_list.append(f\"\"\"\n {coin_pair}_trade_minute\n , {coin_pair}_quote_asset_volume\n , {coin_pair}_taker_sell_volume_perc_of_total\n , {coin_pair}_trade_count\n , {coin_pair}_crnt_interval_bids_50000_price_diff\n , {coin_pair}_crnt_interval_bids_v_asks_50000_price_diff\n , {coin_pair}_crnt_interval_bids_50000_std_diff\n , {coin_pair}_crnt_interval_bids_v_asks_50000_std_diff\n , {coin_pair}_crnt_bids_50000_std_perc_of_total\n , {coin_pair}_crnt_bids_200000_std_perc_of_total\n , {coin_pair}_bids_200000_std_perc_of_total_avg\n \"\"\")\n feature_col_list.extend([\n f'{coin_pair}_quote_asset_volume'\n , f'{coin_pair}_taker_sell_volume_perc_of_total'\n , f'{coin_pair}_trade_count'\n , f'{coin_pair}_crnt_interval_bids_50000_price_diff'\n , f'{coin_pair}_crnt_interval_bids_v_asks_50000_price_diff'\n , f'{coin_pair}_crnt_interval_bids_50000_std_diff'\n , f'{coin_pair}_crnt_interval_bids_v_asks_50000_std_diff'\n , f'{coin_pair}_crnt_bids_50000_std_perc_of_total'\n , f'{coin_pair}_crnt_bids_200000_std_perc_of_total'\n , f'{coin_pair}_bids_200000_std_perc_of_total_avg'\n ])\n \n # Lag features for every interval configured at runtime\n for interval in self.feature_minutes_list:\n interval_list = []\n base_features_list.append(f\"\"\"\n ((quote_asset_volume - LEAD(quote_asset_volume, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(quote_asset_volume, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_quote_asset_volume_perc_chg\n , ((taker_sell_volume_percentage - LEAD(taker_sell_volume_percentage, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(taker_sell_volume_percentage, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_taker_sell_volume_perc_of_total_chg\n , ((trade_count::float - LEAD(trade_count::float, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(trade_count::float, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_trade_count_perc_chg\n , ((o_end.bids_cum_50000_weighted_avg - LEAD(o_end.bids_cum_50000_weighted_avg, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(o_end.bids_cum_50000_weighted_avg, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_bids_50000_perc_chg\n , ((o_end.bids_cum_50000_weighted_std - 
LEAD(o_end.bids_cum_50000_weighted_std, {interval}) OVER (ORDER BY c.trade_minute DESC)) \n / LEAD(o_end.bids_cum_50000_weighted_std, {interval}) OVER (ORDER BY c.trade_minute DESC)) * 100 AS prev_{interval}_{coin_pair}_bids_50000_std_chg\n \"\"\")\n final_col_list.append(f\"\"\"\n prev_{interval}_{coin_pair}_quote_asset_volume_perc_chg\n , prev_{interval}_{coin_pair}_taker_sell_volume_perc_of_total_chg\n , prev_{interval}_{coin_pair}_trade_count_perc_chg\n , prev_{interval}_{coin_pair}_bids_50000_perc_chg\n , prev_{interval}_{coin_pair}_bids_50000_std_chg\n \"\"\") \n feature_col_list.extend([\n f'prev_{interval}_{coin_pair}_quote_asset_volume_perc_chg'\n ,f'prev_{interval}_{coin_pair}_taker_sell_volume_perc_of_total_chg'\n ,f'prev_{interval}_{coin_pair}_trade_count_perc_chg'\n ,f'prev_{interval}_{coin_pair}_bids_50000_perc_chg'\n ,f'prev_{interval}_{coin_pair}_bids_50000_std_chg'\n ])\n \n if pair_type == 'target':\n for target in self.trade_window_list:\n base_features_list.append(f\"\"\"((LAG({self.target_coin}_bids_cum_5000_weighted_avg, {target}) OVER (ORDER BY {self.target_coin}_trade_minute DESC) - {self.target_coin}_asks_cum_5000_weighted_avg) / {self.target_coin}_asks_cum_5000_weighted_avg * 100) AS futr_{target}_askbid_cum_5000_weighted_avg_perc_chg\"\"\")\n # experiment with predicting return starting at minute 1 instead of minute 0 to account for our scoring->trade delay.\n #base_features_list.append(f\"\"\"((LAG({self.target_coin}_bids_cum_5000_weighted_avg, {target}) OVER (ORDER BY {self.target_coin}_trade_minute DESC) - LAG({self.target_coin}_asks_cum_5000_weighted_avg, 1) OVER (ORDER BY {self.target_coin}_trade_minute DESC)) / LAG({self.target_coin}_asks_cum_5000_weighted_avg, 1) OVER (ORDER BY {self.target_coin}_trade_minute DESC) * 100) AS futr_{target}_askbid_cum_5000_weighted_avg_perc_chg\"\"\")\n final_col_list.append(f'futr_{target}_askbid_cum_5000_weighted_avg_perc_chg') \n target_col_list.append(f'futr_{target}_askbid_cum_5000_weighted_avg_perc_chg')\n\n # Coin level CTE \n feature_cte_list.append(f\"\"\"\n {pair_type}_{coin_pair}_features AS (\n SELECT {','.join(base_features_list)}\n FROM {pair_type}_{coin_pair}_candlesticks c \n INNER JOIN {pair_type}_{coin_pair}_beg_orderbook o_beg ON o_beg.coin_pair = c.coin_pair AND o_beg.trade_minute = c.trade_minute \n INNER JOIN {pair_type}_{coin_pair}_end_orderbook o_end ON o_end.coin_pair = c.coin_pair AND o_end.lag_trade_minute = c.trade_minute\n )\"\"\")\n\n # Interaction features for alt coins (base usdt)\n interaction_features = ''\n if pair_type == 'alt':\n interaction_features_list.append(f\"\"\"AVG(({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) OVER (PARTITION BY {self.target_coin}_coin_partition ORDER BY {self.target_coin}_trade_minute ASC ROWS 5 PRECEDING) \n - (({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) AS avg_5_{coin_pair}_bid_ask_average_price_interaction\"\"\")\n interaction_features_list.append(f\"\"\"AVG(({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) OVER (PARTITION BY {self.target_coin}_coin_partition ORDER BY {self.target_coin}_trade_minute ASC ROWS 10 PRECEDING) \n - (({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) AS avg_10_{coin_pair}_bid_ask_average_price_interaction\"\"\")\n 
interaction_features_list.append(f\"\"\"AVG(({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) OVER (PARTITION BY {self.target_coin}_coin_partition ORDER BY {self.target_coin}_trade_minute ASC ROWS 20 PRECEDING) \n - (({self.target_coin}_bid_ask_average_price-{coin_pair}_bid_ask_average_price)/{self.target_coin}_bid_ask_average_price) AS avg_20_{coin_pair}_bid_ask_average_price_interaction\"\"\")\n feature_col_list.extend([f'avg_5_{coin_pair}_bid_ask_average_price_interaction',f'avg_10_{coin_pair}_bid_ask_average_price_interaction',f'avg_20_{coin_pair}_bid_ask_average_price_interaction'])\n interaction_features = ','.join(interaction_features_list)\n interaction_features = ',' + interaction_features\n\n # Join conditions\n if pair_type == 'target':\n join_conditions_list.append(f\"\"\"{pair_type}_{coin_pair}_features\"\"\") \n else:\n join_conditions_list.append(f\"\"\"{pair_type}_{coin_pair}_features ON target_{self.target_coin}_features.{self.target_coin}_trade_minute = {pair_type}_{coin_pair}_features.{coin_pair}_trade_minute\"\"\")\n\n base_ctes = ','.join(base_ctes_list)\n feature_ctes = ','.join(feature_cte_list)\n feature_ctes = ',' + feature_ctes\n final_cols = ','.join(final_col_list)\n join_conditions = ' LEFT JOIN '.join(join_conditions_list)\n\n query_template = f\"\"\"WITH {base_ctes}\n {feature_ctes}\n SELECT {final_cols}\n {interaction_features}\n FROM {join_conditions}\n ORDER BY {self.target_coin}_trade_minute {'DESC' if operation == 'scoring' else 'ASC'}\n {'LIMIT 1' if operation == 'scoring' else ''}\"\"\" # LIMIT SCORING DATA - NOT ALL DATA IS RELEVANT TO CURRENT\n\n return query_template, feature_col_list, target_col_list", "def filter_subsets(p_df):\n months = p_df[\"Month_start\"].unique()\n months.sort()\n for month in months:\n print(\n \"====================================\\n\"+\" \"\n \"===========\"+str(month)+\"===========\\n\"+\n \"====================================\"\n )\n df_subset = p_df[p_df[\"Month_start\"] == month]\n test_subset_classification(df_subset)", "def modify_datetime_train(df):\n\n df['pickup_hour'] = pd.to_datetime(df['pickup_datetime']).dt.hour\n\n df['dropoff_hour'] = pd.to_datetime(df['dropoff_datetime']).dt.hour\n\n df['pickup_minute'] = pd.to_datetime(df['pickup_datetime']).dt.minute\n\n df['dropoff_minute'] = pd.to_datetime(df['dropoff_datetime']).dt.minute\n\n df['pickup_hour_sin'], df['pickup_hour_cos'] = convert_time_sin_cos(df, 'pickup_hour')\n\n df['dropoff_hour_sin'], df['dropoff_hour_cos'] = convert_time_sin_cos(df, 'dropoff_hour')\n\n #split datetime between dates and time\n #using normalize even though it gives us 0:00 time, but the resulting column is a datetime object,\n #which allows us to further process for day of week\n df['pickup_date'] = pd.to_datetime(df['pickup_datetime']).dt.date\n\n df['dropoff_date'] = pd.to_datetime(df['dropoff_datetime']).dt.date\n\n #create day of the week for both pickup date and dropoff dates\n df['pickup_day'] = pd.to_datetime(df['pickup_datetime']).dt.weekday\n\n df['dropoff_day'] = pd.to_datetime(df['dropoff_datetime']).dt.weekday\n\n #get week of year to capture effects of holidays\n df['pickup_weekofyear'] = pd.to_datetime(df['pickup_datetime']).dt.weekofyear\n\n df[\"month\"] = pd.to_datetime(df['pickup_datetime']).dt.month\n\n df[\"year\"] = pd.to_datetime(df['pickup_datetime']).dt.year\n #one hot encode day of the week for both pickup and dropoff\n df = pd.get_dummies(df, columns=['pickup_day', 'dropoff_day'])\n\n 
return df", "def aggregateFunctions(fnPointFeatures, start_date, end_date, out_dir):\n\n downloadStreamflowFromGeoJson(fnPointFeatures=fnPointFeatures, target_dir=out_dir,\n startDT=start_date, endDT=end_date)\n\n dat = format_streamflows(out_dir)\n fname = out_dir + '/pd_streamflow.csv'\n\n dat.to_csv(fname)", "def create_interpolated_turnstile_data(\n start_date: datetime,\n end_date: datetime = None,\n group_by: List[str] = ['UNIT', 'SCP'],\n frequency: str = '1H') -> pd.DataFrame:\n\n if not set(group_by).issubset(['STATION', 'LINENAME', 'UNIT', 'SCP']):\n raise Exception(\"Unsupported group by keys: \" + str(group_by))\n\n\n raw = download_turnstile_data(start_date, end_date)\n raw['date'] = pd.to_datetime(raw.DATE)\n raw = raw[(raw.date <= (end_date + timedelta(1))) & (raw.date >= (start_date - timedelta(1)))]\n raw.drop('date',axis=1,inplace=True)\n\n interpolated = _interpolate(_process_raw_data(raw, group_by), group_by, frequency)\n end_date = end_date or interpolated.index.max()\n return interpolated[interpolated.index.to_series().between(\n start_date, end_date)] .drop(columns=[\"entry_diffs\", \"exit_diffs\"])", "def executeFeatures(dfIn, train = True):\n\n if train == True:\n dfOut = dfIn['TARGET'] #update this with numerical columns that don't need cleaning\n dfOut = standardizedIncome(dfIn, dfOut)\n dfOut = engineerDays(dfIn, dfOut)\n dfOut = createEncoders(dfIn, dfOut)\n dfOut = simplifyEducation(dfIn, dfOut)\n dfOut = simplifyFamily(dfIn, dfOut)\n dfOut = simplifyIncome(dfIn, dfOut)\n dfOut = addExtSources(dfIn, dfOut)\n dfOut = cleanNames(dfOut)\n dfOut = createPolyFeatures(dfOut)\n else:\n dfOut = dfIn['SK_ID_CURR'] ## tags from test set\n dfOut = standardizedIncome(dfIn, dfOut)\n dfOut = engineerDays(dfIn, dfOut)\n dfOut = createEncoders(dfIn, dfOut)\n dfOut = simplifyEducation(dfIn, dfOut)\n dfOut = simplifyFamily(dfIn, dfOut)\n dfOut = simplifyIncome(dfIn, dfOut)\n dfOut = addExtSources(dfIn, dfOut)\n dfOut = dfOut.drop('CODE_GENDER', axis = 1) ## Need to fix this\n #print(dfOut.columns)\n dfOut = cleanNamesTest(dfOut)\n dfOut = createPolyFeatures(dfOut)\n\n return dfOut", "def compute_features(ctx, input_file, output_file):\n kwargs = {ctx.args[i][2:]: ctx.args[i+1].strip('\"') for i in range(0, len(ctx.args), 2)}\n output_file = os.path.abspath(output_file)\n click.echo(\"Init feature set computation\")\n executor = FeatureSetPreparer.build(verbose=True, violate=True, independent=True, session_file=None, location_mapping_file = None, orientation_fix_file=None, ws=12800, ss=12800, threshold=0.2, subwins=4, skip_post=True, **kwargs)\n click.echo(\"Compute feautures\")\n result = executor(input_file)\n if not os.path.exists(os.path.dirname(output_file)):\n click.echo(\"Create output folder if not exists\")\n os.makedirs(os.path.dirname(output_file))\n click.echo(\"Save feature set to: \" + output_file)\n result.to_csv(output_file, index=False, float_format='%.6f')\n click.echo(\"Saved\")" ]
[ "0.66726613", "0.5643213", "0.55374706", "0.5524301", "0.5516429", "0.54855764", "0.5438203", "0.538857", "0.53348887", "0.53311694", "0.5320984", "0.5259109", "0.5249251", "0.52357846", "0.52223134", "0.5205125", "0.5173679", "0.5165818", "0.51619375", "0.5140185", "0.5136399", "0.51243925", "0.510697", "0.5103279", "0.5085388", "0.5066197", "0.5062592", "0.50466233", "0.49861452", "0.49712297", "0.49537554", "0.49176174", "0.48992068", "0.48927152", "0.48877126", "0.4878882", "0.48748538", "0.48747015", "0.4853755", "0.4829283", "0.4805713", "0.48015398", "0.47989878", "0.47980437", "0.4790598", "0.4788905", "0.47754177", "0.47616023", "0.47226158", "0.47072095", "0.47058222", "0.47024196", "0.46976742", "0.4697462", "0.46953312", "0.46853426", "0.46811745", "0.46777153", "0.46764898", "0.46731544", "0.46726102", "0.46721822", "0.46592945", "0.4656766", "0.46498355", "0.46365157", "0.46328023", "0.4621413", "0.46066424", "0.45995584", "0.45986068", "0.45899662", "0.4589465", "0.45805278", "0.45803654", "0.4576788", "0.45571736", "0.45558423", "0.45547095", "0.45516413", "0.45377937", "0.45315096", "0.45256898", "0.45216033", "0.4500372", "0.4498745", "0.44843137", "0.44841513", "0.44754294", "0.4470646", "0.44702256", "0.44589707", "0.44588235", "0.44525653", "0.44521284", "0.44440207", "0.44377047", "0.44343722", "0.4428681", "0.4428239" ]
0.75834435
0
Rounds datetime dt up (ceiling) to the next num_minutes interval, then returns the unix timestamp.
def rounded_unix_timestamp(dt, num_minutes=15): nsecs = dt.minute * 60 + dt.second + dt.microsecond * 1e-6 delta = math.ceil(nsecs / (60 * num_minutes)) * (60 * num_minutes) - nsecs return int((dt + timedelta(seconds=delta)).timestamp())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _time_ms(self, dt):\n if dt.tzinfo is None:\n dt = dt.replace(tzinfo=pytz.utc)\n return int((dt - self._EPOCH).total_seconds() * 1000)", "def _time_ms(dt):\n epoch = datetime.datetime.utcfromtimestamp(0)\n diff = dt - epoch\n return diff.total_seconds() * 1000", "def _get_milleseconds(self):\n return int(round(time.time() * 1000))", "def minutes_in(sec):\r\n return int((sec - (hours_in(sec)*3600))//60)", "def calculate_seconds_in_minutes(minutes):\n return int(minutes * 60)", "def to_minutes(delta):\n return int(math.ceil(delta.total_seconds() / 60))", "def get_closest_minute(t):\n ts = dt.datetime.utcfromtimestamp(t/1000)\n s = ts.second\n if s < 30:\n return dt.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute)\n else:\n return dt.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute) + dt.timedelta(minutes=1)", "def round_time(dt=None, roundTo=60): # IGNORE:W0621\n\n if dt is None:\n dt = datetime.now()\n\n dt = np.asarray(dt, dtype='datetime64[s]').reshape(-1)\n\n for li in range(len(dt)):\n date = dt[li].astype(object)\n seconds = (date - date.min).seconds\n\n # // is a floor division, not a comment on following line:\n rounding = (seconds + roundTo / 2) // roundTo * roundTo\n\n dt[li] = date + timedelta(0, rounding - seconds, -date.microsecond)\n\n return len(dt) == 1 and dt[0].astype(object) or dt", "def timestamp_floor(ts: int, how: str = 'day', unit: str = 'ms'):\n dt = datetime.fromtimestamp(ts / 1000 if unit == 'ms' else ts,\n tz = timezone.utc)\n if how == 'second':\n new_dt = datetime(year = dt.year, month = dt.month, day = dt.day,\n hour = dt.hour, minute = dt.minute, second = dt.second,\n tzinfo = timezone.utc)\n elif how == 'minute':\n new_dt = datetime(year = dt.year, month = dt.month, day = dt.day,\n hour = dt.hour, minute = dt.minute, tzinfo = timezone.utc)\n elif how == 'hour':\n new_dt = datetime(year = dt.year, month = dt.month, day = dt.day,\n hour = dt.hour, tzinfo = timezone.utc)\n else:\n new_dt = datetime(year = dt.year, month = dt.month, tzinfo = timezone.utc)\n\n timestamp = dt.replace(tzinfo = timezone.utc).timestamp()\n return int(timestamp * 1000 if unit == 'ms' else timestamp)", "def calculate_minutes(time):\n return int(time / 60)", "def timeToMinutes(timestamp):\n if len(timestamp) == 5: \n return int(timestamp[0])*600 + int(timestamp[1])*60 + int(timestamp[3])*10 + int(timestamp[4])\n return None", "def get_minutes(self, datetime):\n return datetime.hour*60.0+datetime.minute+datetime.second/60", "def floor_time(self, ts):\n return datetime.datetime.fromtimestamp(\n int(ts.timestamp()) // self.interval * self.interval\n )", "def multMinuteAlign(ts, min):\n\tintv = secInMinute * min\n\treturn int((ts / intv)) * intv", "def dt_epoch_msecs(value):\n return long(calendar.timegm(value.timetuple())) * 1000", "def unix_time_nanos(dt):\n return timedelta_to_micros(dt - epoch)", "def _floor_to_dt(value: np.datetime64) -> np.datetime64:\n integral = int(value.astype(\"<M8[h]\").astype(\"int64\") /\n 3) # type: ignore\n return np.datetime64(integral * 3, \"h\")", "def _to_minutes(seconds):\n return '%d:%d' % divmod(seconds, 60)", "def get_minutes(video: Video) -> int:\n mins = re.findall(r'PT(\\d+)M', video.duration)\n if mins:\n return int(mins[0])\n return 1000", "def minutes_in_day_to_time(minutes):\n return seconds_in_day_to_time(minutes*60)", "def minutes(self):\n return int((self.end - self.start).total_seconds()) / 60", "def _round(self, x):\n return x - x % self.minutes_per_step", "def get_time_to_end_stream(minutes):\n time_now = 
datetime.datetime.now()\n now_plus_10 = time_now + datetime.timedelta(minutes=minutes)\n return now_plus_10.strftime('%H:%M')", "def minutes_to_seconds(minutes) -> int:\n return int(minutes) * 60", "def total_minutes(td):\n return total_seconds(td) / 60", "def sct_numericdate(cls, d):\n return (d-cls.SCT_EPOCH).total_seconds() / 60", "def seconds_to_minutes(seconds: int, round: Optional[bool] = True) -> Union[int, float]:\n return int(seconds / 60) if round else seconds / 60", "def clock_helper(total_seconds):\n seconds_in_minute = total_seconds % 60", "def unixTimeMs(dateAndTime):\n dateAndTime = dateAndTime + datetime.timedelta(hours=HOUR_ADJUSTMENT)\n return int((dateAndTime - EPOCH).total_seconds() * 1000.0)", "def datetime2UnixTime(dt):\n\n # UTC unix timestamp\n unix_timestamp = (dt - datetime(1970, 1, 1)).total_seconds()\n\n return unix_timestamp", "def round_to_ten_minutes(python_time):\n python_time += timedelta(minutes=5)\n python_time -= timedelta(\n minutes=python_time.minute % 10,\n seconds=python_time.second,\n microseconds=python_time.microsecond,\n )\n\n return python_time", "def np_dt_epoch_msec(value):\n return value.astype(long) / 1000", "def _unit_sec(self):\n return self.time_base / 60.0", "def minutes_to_seconds(minutes):\n return minutes * 60", "def _get_time_interval_in_minutes(self):\n return self.visa.get_request_interval_in_minutes()", "def write_timestamp_millis_long(self, dt: datetime) -> None:\n self.write_int(int(datetime_to_micros(dt) / 1000))", "def write_time_millis_int(self, dt: time) -> None:\n self.write_int(int(time_object_to_micros(dt) / 1000))", "def _unit_ms(self):\n return (self.time_base / 1000.0) / 60.0", "def unit_ms(self):\n return (self.time_base / 1000.0) / 60.0", "def datetime_to_timestamp(dt):\n return calendar.timegm(dt.timetuple()) * 1000", "def unit_sec(self):\n return self.time_base / 60.0", "def minutes(self):\n return int(int(self) / 60)", "def get_interval(self):\n return self.interval * 1000", "def datetime2epoch(dt):\n return int(mktime(dt.timetuple())*1000)", "def convert_time(time_passed):\n\n minutes = time_passed.seconds // 60\n\n return minutes", "def roundSeconds(seconds):\n\n minutes = seconds / 60\n remainder = seconds % 60\n\n if(remainder >= 30):\n minutes += 1\n\n return minutes * 60", "def calculate_time_ms(self, jiffies):\n\n return int((jiffies * 1000.0) / self._jiffies_per_sec)", "def roundTime(dt=None, roundTo=60):\n if dt == None : dt = datetime.datetime.now()\n seconds = (dt - dt.min).seconds\n # // is a floor division, not a comment on following line:\n rounding = (seconds+roundTo/2) // roundTo * roundTo\n dt + datetime.timedelta(0,rounding-seconds,-dt.microsecond)\n dt.replace(second=0, microsecond=0)\n return dt", "def round_to_nearest_60(x):\r\n return int(60 * round(float(x) / 60))", "def part1() -> int:\n longest_sleeper = max(sleep_times, key=lambda g: len(sleep_times[g]))\n sleepiest_minute = max(\n sleep_times[longest_sleeper], key=sleep_times[longest_sleeper].count)\n\n return longest_sleeper * sleepiest_minute", "def unix_time_millisecond(date):\r\n return unix_time(date, float=True) * 1e3", "def calculate_fetch_size(minutes: int):\n return round(minutes / CONF.interval) if minutes >= CONF.interval else 1", "def minutes(input=None):\n return get(input).minutes", "def last_5_mins(conn,from_time):\n durTot = 0\n time = '{}'.format(from_time)\n query = ''' SELECT sum(duration) FROM events WHERE event_type = 'Cycle End' AND unix_time > ?'''\n c = conn.cursor()\n c.execute(query,(time,))\n (data, ) = 
c.fetchone()\n try:\n \t durTot = round(data,2)\n except:\n\tpass\n return durTot", "def roundTime(dt=None, roundTo=60):\n if dt == None : dt = datetime.datetime.now()\n seconds = (dt - dt.min).seconds\n # // is a floor division, not a comment on following line:\n rounding = (seconds+roundTo/2) // roundTo * roundTo\n return dt + datetime.timedelta(0,rounding-seconds,-dt.microsecond)", "def MINUTE(time):\n return _make_datetime(time).minute", "def datetime_to_jsec(dt):\n delta = dt - JSEC_START\n total = delta.days * 3600 * 24\n total += delta.seconds\n total += delta.microseconds * 1e-6\n return total", "def _round_to_next_five_minutes(now):\n matching_seconds = [0]\n matching_minutes = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55]\n matching_hours = dt_util.parse_time_expression(\"*\", 0, 23)\n return dt_util.find_next_time_expression_time(\n now, matching_seconds, matching_minutes, matching_hours\n )", "def datetime_round(dt, period, start=None):\n result = datetime_mod(dt, period, start)\n if abs(dt - result) >= period // 2:\n result += period\n return result", "def _get_time(self, sec, nsec):\n return sec + nsec / (10**9)", "def __timedelta_millis(td):\n return int(round(td.total_seconds(), 3) * 1000)", "def date_minute(date):\n return date.minute", "def round_time(dt=None, round_to=60):\n if dt == None : dt = datetime.now()\n seconds = (dt.replace(tzinfo=None) - dt.min).seconds\n rounding = (seconds+round_to/2) // round_to * round_to\n return dt + timedelta(0,rounding-seconds,-dt.microsecond)", "def get_time_round(date):\r\n return int(date / self.timeframe) * self.timeframe", "def roundTime(dt=None, dateDelta=datetime.timedelta(minutes=1)):\n roundTo = dateDelta.total_seconds()\n\n if dt == None : dt = datetime.datetime.now()\n seconds = (dt - dt.min).seconds\n # // is a floor division, not a comment on following line:\n rounding = (seconds+roundTo/2) // roundTo * roundTo\n\n return dt + datetime.timedelta(0,rounding-seconds,-dt.microsecond)", "def break_points(inte, minutes):\n inte = np.asarray(inte)\n minutes = np.asarray(minutes)\n n = len(inte)\n breaks = []\n last_observed_min = 0\n\n # we consider sessions of 30 productive minutes, so we cannot split a session lesser than 60 minutes\n if n > 60:\n c = 0\n for i in range(0,(n)):\n if minutes[i] != last_observed_min:\n last_observed_min = minutes[i]\n c += 1\n \n if (inte[i] >= BREAK_POINT and c > 30) and len(inte[i:]) > 30:\n breaks.append(i)\n c = 0\n \n return breaks", "def round_minutes(minutes):\n i = math.floor(minutes / 15)\n under, over = i * 15, (i + 1) * 15\n d1, d2 = abs(minutes - under), abs(minutes - over)\n # Return the increment closest to the original value.\n return over if d2 <= d1 else under", "def ceil(self, freq: Union[str, DateOffset], *args: Any, **kwargs: Any) -> \"DatetimeIndex\":\n disallow_nanoseconds(freq)\n\n return DatetimeIndex(self.to_series().dt.ceil(freq, *args, **kwargs))", "def minutes_to_seconds( minutes: str ) -> int:\r\n return int(minutes)*60", "def round_time(dt, roundTo=60):\n seconds = (dt.replace(tzinfo=None) - dt.min).seconds\n rounding = (seconds + roundTo/2) // roundTo * roundTo\n return dt + timedelta(0, rounding - seconds, -dt.microsecond)", "def time_to_int(self):\n minutes = self.hour * 60 + self.minute\n secconds = self.minute * 60 + self.second\n return secconds", "def interval_to_milliseconds(interval):\r\n ms = None\r\n seconds_per_unit = {\r\n \"m\": 60,\r\n \"h\": 60 * 60,\r\n \"d\": 24 * 60 * 60,\r\n \"w\": 7 * 24 * 60 * 60\r\n }\r\n\r\n unit = interval[-1]\r\n if unit 
in seconds_per_unit:\r\n try:\r\n ms = int(interval[:-1]) * seconds_per_unit[unit] * 1000\r\n except ValueError:\r\n pass\r\n return ms", "def roundTime(dt=None, roundTo=60):\n\n if dt == None : dt = datetime.datetime.now()\n seconds = (dt.replace(tzinfo=None) - dt.min).seconds\n rounding = (seconds+roundTo/2) // roundTo * roundTo\n return dt + timedelta(0,rounding-seconds,-dt.microsecond)", "def next(self):\n\n crontab = self._crontab\n return math.ceil(crontab.next(default_utc=False))", "def retry_interval_in_minutes(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"retry_interval_in_minutes\")", "def retry_interval_in_minutes(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"retry_interval_in_minutes\")", "def freq_minutes(self):\n return 5", "def test_interval_to_seconds_with_minutes(self):\n self.assert_interval_to_seconds(\n 0, \"0min\", \"0mins\", \"0minute\", \"0minutes\")\n self.assert_interval_to_seconds(\n 60, \"1min\", \"1mins\", \"1minute\", \"1minutes\")\n self.assert_interval_to_seconds(\n 3600, \"60min\", \"60mins\", \"60minute\", \"60minutes\")\n self.assert_interval_to_seconds(\n 1234567 * 60, \"1234567min\", \"1234567mins\", \"1234567minute\",\n \"1234567minutes\")\n self.assert_interval_to_seconds(\n 720, \"012min\", \"012mins\", \"012minute\", \"012minutes\")", "def timestamp_to_unix(timestamp):\n return timestamp / 1e6", "def poll_interval_in_milliseconds(self):\n\n return self._poll_interval_in_milliseconds", "def get_unixtime(humantime, dateformat):\n temp = datetime.datetime.strptime(humantime, dateformat)\n # add microseconds which are dropped by timetuple\n return int(calendar.timegm(temp.timetuple()))*1000000.0+temp.microsecond", "def task11_time_converter(num):\n if num < 0:\n raise ValueError\n hour = num // 60\n minute = num % 60\n return f'{hour}:{minute}'", "def interval_to_milliseconds(interval):\n ms = None\n seconds_per_unit = {\n \"m\": 60,\n \"h\": 60 * 60,\n \"d\": 24 * 60 * 60,\n \"w\": 7 * 24 * 60 * 60\n }\n\n unit = interval[-1]\n if unit in seconds_per_unit:\n try:\n ms = int(interval[:-1]) * seconds_per_unit[unit] * 1000\n except ValueError:\n pass\n return ms", "def interval_to_milliseconds(interval):\n ms = None\n seconds_per_unit = {\n \"m\": 60,\n \"h\": 60 * 60,\n \"d\": 24 * 60 * 60,\n \"w\": 7 * 24 * 60 * 60\n }\n\n unit = interval[-1]\n if unit in seconds_per_unit:\n try:\n ms = int(interval[:-1]) * seconds_per_unit[unit] * 1000\n except ValueError:\n pass\n return ms", "def convert_time(t):\n minutes = int(t/60)\n seconds = int(t-60*minutes)\n return minutes, seconds", "def second_to_minute(time):\n if time % 60 != 0:\n time = time + 60\n return time // 60", "def _nsec_to_usec_round(nsec):\n return (nsec + 500) // 10 ** 3", "def datetime_to_ticks(when: str) -> int:\n dt = dateparser.parse(when, settings={\"RETURN_AS_TIMEZONE_AWARE\": True})\n if not dt:\n raise FailedActivity(\"failed parsing moment: {}\".format(when))\n\n span = dt - datetime(1, 1, 1, tzinfo=timezone.utc)\n return int(span.total_seconds() * 10**7)", "def get_timebase(self,dt):\r\n\r\n if dt < 1E-9:\r\n dt = 1E-9\r\n\r\n if dt > 4E-9:\r\n n = int(dt*125E6 + 2)\r\n else:\r\n dt *= 1E9\r\n n = round(log(dt,2))\r\n return n", "def minutesSinceLastUpdate(self):\n if self.seenTimes == []:\n return 0\n latestTime = max(self.seenTimes)\n return int(self.timeCode())-int(latestTime)", "def getMinute(self):\n return _libsbml.Date_getMinute(self)", "def __len__(self):\n if self.first_timestamp is None or self.last_timestamp is None:\n return 0\n return int(\n 
(self.last_timestamp - self.first_timestamp).total_seconds()\n ) // self.interval + 1", "def write_timestamp_micros_long(self, dt: datetime) -> None:\n self.write_int(datetime_to_micros(dt))", "def smart_interval(count):\n if count >= 50:\n return 1000\n else:\n sq = lambda n: n * n\n return int(1000 * (1 - (sq(50.0 - count) / sq(50))))", "def round_time(\n dt: datetime,\n delta: str | timedelta | relativedelta,\n start_date: datetime = timezone.make_aware(datetime.min),\n):\n if isinstance(delta, str):\n # It's cron based, so it's easy\n time_zone = start_date.tzinfo\n start_date = timezone.make_naive(start_date, time_zone)\n cron = croniter(delta, start_date)\n prev = cron.get_prev(datetime)\n if prev == start_date:\n return timezone.make_aware(start_date, time_zone)\n else:\n return timezone.make_aware(prev, time_zone)\n\n # Ignore the microseconds of dt\n dt -= timedelta(microseconds=dt.microsecond)\n\n # We are looking for a datetime in the form start_date + i * delta\n # which is as close as possible to dt. Since delta could be a relative\n # delta we don't know its exact length in seconds so we cannot rely on\n # division to find i. Instead we employ a binary search algorithm, first\n # finding an upper and lower limit and then dissecting the interval until\n # we have found the closest match.\n\n # We first search an upper limit for i for which start_date + upper * delta\n # exceeds dt.\n upper = 1\n while start_date + upper * delta < dt:\n # To speed up finding an upper limit we grow this exponentially by a\n # factor of 2\n upper *= 2\n\n # Since upper is the first value for which start_date + upper * delta\n # exceeds dt, upper // 2 is below dt and therefore forms a lower limited\n # for the i we are looking for\n lower = upper // 2\n\n # We now continue to intersect the interval between\n # start_date + lower * delta and start_date + upper * delta\n # until we find the closest value\n while True:\n # Invariant: start + lower * delta < dt <= start + upper * delta\n # If start_date + (lower + 1)*delta exceeds dt, then either lower or\n # lower+1 has to be the solution we are searching for\n if start_date + (lower + 1) * delta >= dt:\n # Check if start_date + (lower + 1)*delta or\n # start_date + lower*delta is closer to dt and return the solution\n if (start_date + (lower + 1) * delta) - dt <= dt - (start_date + lower * delta):\n return start_date + (lower + 1) * delta\n else:\n return start_date + lower * delta\n\n # We intersect the interval and either replace the lower or upper\n # limit with the candidate\n candidate = lower + (upper - lower) // 2\n if start_date + candidate * delta >= dt:\n upper = candidate\n else:\n lower = candidate\n\n # in the special case when start_date > dt the search for upper will\n # immediately stop for upper == 1 which results in lower = upper // 2 = 0\n # and this function returns start_date.", "def get_time_in_round() -> int:\n # FIXME - returning negative value for projectiles\n return store.round_time", "def to_unix_milli(self):\n try:\n dt_obj = duparser.parse(timestamp)\n self.out_unix_milli = str(int((dt_obj - self.epoch_1970).total_seconds()*1000))\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_unix_milli = False\n return self.out_unix_milli", "def get_epoch_time(utc_datetime=None):\n if not utc_datetime:\n utc_datetime = datetime.datetime.utcnow()\n return math.ceil((utc_datetime - EPOCH_START).total_seconds())", "def unixtime(self,current_datetime):\n unixtime = 
time.mktime(current_datetime.timetuple())\n return unixtime", "def get_epoch_time_milliseconds(utc_datetime=None):\n epoch_seconds = get_epoch_time(utc_datetime)\n return epoch_seconds * MILLISECONDS_IN_SECOND" ]
[ "0.57981074", "0.5748822", "0.5663159", "0.56519955", "0.5603236", "0.5469628", "0.5442284", "0.54418385", "0.5433084", "0.5365743", "0.5359583", "0.53497386", "0.53274846", "0.5236749", "0.5208083", "0.520233", "0.51816577", "0.5176535", "0.51646566", "0.51259786", "0.51006633", "0.5091771", "0.5088361", "0.50492877", "0.5043618", "0.50284106", "0.5025343", "0.5017732", "0.5005033", "0.5000358", "0.4998208", "0.499563", "0.49867225", "0.49847862", "0.4978384", "0.4969594", "0.49641544", "0.4957345", "0.49448097", "0.4943555", "0.49382073", "0.49344543", "0.49325195", "0.49265555", "0.49224895", "0.49066702", "0.49048698", "0.49022594", "0.48883387", "0.4882985", "0.48825517", "0.48697072", "0.48681203", "0.48673335", "0.48665842", "0.48457417", "0.48384044", "0.48186195", "0.4811926", "0.4785313", "0.47804", "0.47785535", "0.47736374", "0.4768219", "0.47677806", "0.47670275", "0.47634798", "0.4757029", "0.4743235", "0.4737017", "0.47286662", "0.4717757", "0.47158092", "0.47118548", "0.4709373", "0.4709373", "0.47004557", "0.469529", "0.46885535", "0.4680429", "0.46761623", "0.4663318", "0.4651254", "0.4651254", "0.46396366", "0.46264315", "0.46250296", "0.46238738", "0.46234468", "0.46166185", "0.4608608", "0.46021804", "0.45977646", "0.45880222", "0.4570725", "0.45686394", "0.45682433", "0.4567214", "0.45657817", "0.4553903" ]
0.7375313
0
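
For reference, a minimal self-contained sketch of the rounding logic from the rounded_unix_timestamp record above; the imports and the demo datetime are added here purely for illustration and are not part of the record:

    import math
    from datetime import datetime, timedelta, timezone

    def rounded_unix_timestamp(dt, num_minutes=15):
        # Seconds elapsed since the top of the hour, including fractional seconds.
        nsecs = dt.minute * 60 + dt.second + dt.microsecond * 1e-6
        # Distance to the next num_minutes boundary (0 if dt is already on a boundary).
        delta = math.ceil(nsecs / (60 * num_minutes)) * (60 * num_minutes) - nsecs
        return int((dt + timedelta(seconds=delta)).timestamp())

    # 10:07:30 UTC rounds up to 10:15:00 with the default 15-minute interval.
    dt = datetime(2021, 1, 1, 10, 7, 30, tzinfo=timezone.utc)
    print(rounded_unix_timestamp(dt))       # unix timestamp of 2021-01-01 10:15:00 UTC
    print(rounded_unix_timestamp(dt, 60))   # unix timestamp of 2021-01-01 11:00:00 UTC
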
Align trigger on display (> 0 is right of center, < 0 is left of center)
def hpos(self, offset_sec = None): if offset_sec is not None: if not isinstance(offset_sec, (int, float)): raise TypeError('delay_sec must be numeric value') self.command(f'SET Horizontal Offset of Trigger {-offset_sec} sec {"LEFT" if offset_sec < 0 else "RIGHT"} of center') else: return self.query_float('ENTER Current Horizontal Offset of Trigger (seconds)')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def center(self):\n if self.pos != 0.0:\n self.pos = 0.0", "def center_ava(self):\n\t\tself.rect.midbottom = self.screen_rect.midbottom\n\t\tself.x = float(self.rect.x)", "def center(self):\n cp = self.dat.flowsheet.getCenter()\n self.centerOn(cp[0], cp[1])", "def centerAxis():\n dislin.center()", "def GetCenter(self):\n ...", "def GetCenter(self):\n ...", "def GetCenter(self):\n ...", "def GetCenter(self):\n ...", "def center(self):\r\n self.centerx = self.screen_rect.centerx \r\n self.centery = self.screen_rect.centery", "def update(self): \n super().update()\n if self.center_x < constants.left_limit:\n self.center_x = self.screen_width + constants.offscreen_space\n if self.center_x > self.screen_width + constants.offscreen_space:\n self.center_x = constants.left_limit\n if self.center_y > self.screen_height + constants.offscreen_space:\n self.center_y = constants.bottom_limit\n if self.center_y < constants.bottom_limit:\n self.center_y = self.screen_height + constants.offscreen_space", "def align(self):\n ...", "def center(self):\n return (self.upper_right + self.lower_left) * 0.5", "def horiz_center(self):\n return self._horiz_center", "def horiz_center(self):\n return self._horiz_center", "def update_center(self): \r\n \r\n self.grfx[0].center = self.center\r\n\r\n self.update_bbox()", "def center(self):\r\n qr = self.frameGeometry()\r\n cp = QtWidgets.QDesktopWidget().availableGeometry().center()\r\n qr.moveCenter(cp)\r\n self.move(qr.topLeft())", "def centre(self):\n\n qr = self.frameGeometry()\n cp = QtWidgets.QDesktopWidget().availableGeometry().center()\n qr.moveCenter(cp)\n self.move(qr.topLeft())", "def wrap(self):\n if self.center.x > SCREEN_WIDTH:\n self.center.x = 0\n if self.center.y > SCREEN_HEIGHT:\n self.center.y = 0\n if self.center.x < 0:\n self.center.x = SCREEN_WIDTH\n if self.center.y < 0:\n self.center.y = SCREEN_HEIGHT", "def middlemakevisible(self, pos):\n pass", "def go_left(self):\n self.rect.centerx -= 9", "def onAlign(self, value):\n # Ensure that we can work\n plt = Plot.getPlot()\n if not plt:\n self.updateUI()\n return\n # Get again all the subwidgets (to avoid PySide Pitfalls)\n mw = self.getMainWindow()\n form = mw.findChild(QtGui.QWidget, \"TaskPanel\")\n form.all = self.widget(QtGui.QCheckBox, \"allAxes\")\n form.xAlign = self.widget(QtGui.QComboBox, \"xAlign\")\n form.yAlign = self.widget(QtGui.QComboBox, \"yAlign\")\n\n axesList = [plt.axes]\n if form.all.isChecked():\n axesList = plt.axesList\n # Set new alignement\n for axes in axesList:\n if form.xAlign.currentIndex() == 0:\n axes.xaxis.tick_bottom()\n axes.spines['bottom'].set_color((0.0, 0.0, 0.0))\n axes.spines['top'].set_color('none')\n axes.xaxis.set_ticks_position('bottom')\n axes.xaxis.set_label_position('bottom')\n else:\n axes.xaxis.tick_top()\n axes.spines['top'].set_color((0.0, 0.0, 0.0))\n axes.spines['bottom'].set_color('none')\n axes.xaxis.set_ticks_position('top')\n axes.xaxis.set_label_position('top')\n if form.yAlign.currentIndex() == 0:\n axes.yaxis.tick_left()\n axes.spines['left'].set_color((0.0, 0.0, 0.0))\n axes.spines['right'].set_color('none')\n axes.yaxis.set_ticks_position('left')\n axes.yaxis.set_label_position('left')\n else:\n axes.yaxis.tick_right()\n axes.spines['right'].set_color((0.0, 0.0, 0.0))\n axes.spines['left'].set_color('none')\n axes.yaxis.set_ticks_position('right')\n axes.yaxis.set_label_position('right')\n plt.update()", "def center(self):\n return self._lower + 0.5 * (self._upper - self._lower)", "def center(self):\n qr = self.frameGeometry()\n cp = 
QtWidgets.QDesktopWidget().availableGeometry().center()\n qr.moveCenter(cp)", "def center(self):\n return self['center']", "def fl_is_center_lalign(align):\n _fl_is_center_lalign = library.cfuncproto(\n library.load_so_libforms(), \"fl_is_center_lalign\", \\\n cty.c_int, [cty.c_int],\n \"\"\"int fl_is_center_lalign(int align) \"\"\")\n library.check_if_flinitialized()\n i_align = library.convert_to_intc(align)\n library.keep_elem_refs(align, i_align)\n retval = _fl_is_center_lalign(i_align)\n return retval", "def go_right(self):\n self.rect.centerx += 9", "def resetAlignmentCenter(self):\n cent = self.TiltSeries_._TiltAlignmentParas.cent\n imdimX = self.TiltSeries_._imdimX\n imdimY = self.TiltSeries_._imdimY\n print(imdimX, imdimY)\n if cent[0] != imdimX//2+1 or cent[1] != imdimY//2+1:\n #rint \"Centers do not match: cent=\"+str(cent)+\", imdim=\"+str(imdim)\n self.TiltSeries_._TiltAlignmentParas.cent = [imdimX//2+1, imdimY//2+1]", "def to_center(self):\n self.ids.edit_area.to_center()", "def text_alignment(x, y):\n if x == 0:\n ha = \"center\"\n elif x > 0:\n ha = \"left\"\n else:\n ha = \"right\"\n if y == 0:\n va = \"center\"\n elif y > 0:\n va = \"bottom\"\n else:\n va = \"top\"\n\n return ha, va", "def center(self):\n qr = self.frameGeometry()\n central_p = QDesktopWidget().availableGeometry().center()\n qr.moveCenter(central_p)\n self.move(qr.topLeft())", "def center_on_spawn(self):\n self.center_on(*self.world.metadata['playerStart'])", "def center(self):\n qr = self.frameGeometry()\n cp = QDesktopWidget().availableGeometry().center()\n qr.moveCenter(cp)\n self.move(qr.topLeft())", "def center(self):\n qr = self.frameGeometry()\n cp = QDesktopWidget().availableGeometry().center()\n qr.moveCenter(cp)\n self.move(qr.topLeft())", "def center_mario(self):\n self.rect.midbottom = self.screen_rect.midbottom\n self.x, self.y = float(self.rect.x), float(self.rect.y)", "def test_align(self):\n al = align(self.amp1, self.amp2).m\n\n # Both objects are already centered, so should be close to origin (allowing for some inaccuracy)\n self.assertAlmostEqual(al.vert.mean(axis=0)[0], 0, delta=TestAlign.DELTA)\n self.assertAlmostEqual(al.vert.mean(axis=0)[1], 0, delta=TestAlign.DELTA)\n self.assertAlmostEqual(al.vert.mean(axis=0)[2], 0, delta=TestAlign.DELTA)", "def centerx(self):\n return self.left + self.width / 2", "def center_ship(self):\r\n self.center = self.screen_rect.centerx", "def center_ship(self):\r\n self.center = self.screen_rect.centerx", "def flip(self):\n self.align = self._left if self.align == self._right else self._right\n self.group.layout_all()", "def positioning(self):\n pass", "def _center(pos, shift):\n x = np.concatenate((pos[0], pos[0] + shift[0]))\n y = np.concatenate((pos[1], pos[1] + shift[1]))\n return (x.max() + x.min()) / 2, (y.max() + y.min()) / 2", "def center(self,c, ADDR):\r\n #FIGURE OUT HOW TO DO THIS\r\n #Actually pretty sure this is impossible to do from software\r\n returnValue('Success!')", "def get_center_scr(self):\r\n return self.rect.center", "def display_right_to_left(self):\n return self.container['display_right_to_left']", "def center_by_widget(self, center_by_window: QDialog):\n if self.center_by_window:\n pg: QPoint = center_by_window.frameGeometry().topLeft()\n size_diff = center_by_window.rect().center() - self.rect().center()\n pg.setX(pg.x() + int((size_diff.x())))\n pg.setY(pg.y() + int((size_diff.y())))\n self.move(pg)", "def _setCenter(self, value, index):\n item = self.item()\n if item is not None:\n if value == 'Origin':\n value = 0.\n elif 
value not in self._ROTATION_CENTER_OPTIONS:\n value = float(value)\n else:\n value = value.lower()\n\n center = list(item.getRotationCenter())\n center[index] = value\n item.setRotationCenter(*center)", "def center(self):\n return self._center", "def center(self):\n return self.pos + self.axis / 2.0", "def center(self):\n \n geometry = self.frameGeometry()\n center_p = QDesktopWidget().availableGeometry().center()\n geometry.moveCenter(center_p)\n self.move(geometry.topLeft())", "def center_on(self, x, y):\n\n # Mark that we can start actually drawing now\n self.given_center = True\n\n # Center the view\n (ctr_x, ctr_y) = self.ingame_to_scene(x, y)\n self.parent.centerOn(ctr_x, ctr_y)\n\n # Draw what needs drawing\n self.draw_visible_area()", "def centre(self):\n self.top.update_idletasks()\n # The horizontal position is calculated as (screenwidth - window_width)/2\n hpos = int((self.top.winfo_screenwidth() - self.top.winfo_width())/2)\n # And vertical position the same, but with the height dimensions\n vpos = int((self.top.winfo_screenheight() - self.top.winfo_height())/2)\n # And the move call repositions the window\n self.top.geometry('+{x}+{y}'.format(x=hpos, y=vpos))", "def Center(self):\r\n \r\n self.dock_direction = AUI_DOCK_CENTER\r\n return self", "def rotation_pivot_to_center(self):\n pass", "def center(self, center):\n\n self._center = center", "def is_center(self):\n if self.pupils_located:\n return self.is_right() is not True and self.is_left() is not True", "def center_horizontal(self, rect):\n self.rect.centerx = rect.centerx", "def updatePos(self):\n self.setPos(self.centerX-self.boundingRect().width()/2.0,\n self.centerY-self.boundingRect().height()/2.0)", "def center_ship(self):\n self.center = self.screen_rect.centerx", "def center_ship(self):\n self.center = self.screen_rect.centerx", "def center_ship(self):\n self.center = self.screen_rect.centerx", "def center(self):\n self.root.update_idletasks()\n w = self.root.winfo_screenwidth()\n h = self.root.winfo_screenheight()\n size = tuple(int(_) for _ in self.root.geometry().split('+')[0].split('x'))\n x = w/2 - size[0]/2\n y = h/2 - size[1]/2\n self.root.geometry(\"240x80+%d+%d\" % (x, y))", "def to_center():\n center_msg = b'\\x02\\x35\\x35\\x03'\n #qpt.write(center_msg)\n #feedback = qpt.readline()\n move_to_position(0,0)\n #return feedback", "def rotation_center(self, *args, **kwargs) -> Any:\n pass", "def set_center(self,structure):\n for i,b in enumerate(self.bfs):\n b.set_center( structure[ self.LIST1[i] ] ) \n return", "def _center(self, forces):\n\t\t\n\t\tzipped = zip(self.grid.corners(), forces)\n\t\treturn self._weightedAverage(zipped)", "def Center(self):\r\n\r\n self.dock_direction = AUI_DOCK_CENTER\r\n return self", "def center(self):\n return self.centralizer(self)", "def center(self, obj):\n return self.phy2abs.center(obj)", "def __show_computed_alignment(self):\n success = False\n try:\n pcd = o3d.io.read_point_cloud(\n self.source_cloud\n )\n pcd.paint_uniform_color([0, 1, 0])\n pcd.transform(self.__compose_transformation())\n pcd.estimate_normals()\n self.computed_alignment_point_cloud_view.load_cloud(pcd)\n success = True\n except (FileNotFoundError, RuntimeError):\n QtWidgets.QMessageBox.warning(self, \"Error\",\n f\"Source point cloud is no longer available\"\n )\n self.source_cloud = \"\"\n self.__update_clickability()\n if success:\n try:\n pcd = o3d.io.read_point_cloud(\n self.target_cloud\n )\n pcd.paint_uniform_color([0, 0, 1])\n pcd.estimate_normals()\n 
self.computed_alignment_point_cloud_view.load_cloud(pcd)\n try:\n self.computed_alignment_point_cloud_view.show_window()\n except RuntimeError:\n pass\n except(FileNotFoundError, RuntimeError):\n QtWidgets.QMessageBox.warning(self, \"Error\",\n f\"Target point cloud is no longer available\"\n )\n self.source_cloud = \"\"\n self.__update_clickability()\n self.__save_context()", "def center(self):\n # get the compute screen's size\n screen = QDesktopWidget().screenGeometry()\n # get the app windows' size\n size = self.geometry()\n self.move(int((screen.width() - size.width()) / 2), int((screen.height() - size.height()) / 2))", "def center(self):\r\n frameGm = self.frameGeometry()\r\n screen = QtGui.QApplication.desktop().screenNumber(QtGui.QApplication.desktop().cursor().pos())\r\n centerPoint = QtGui.QApplication.desktop().screenGeometry(screen).center()\r\n frameGm.moveCenter(centerPoint)\r\n self.move(frameGm.topLeft())", "def center_on_screen(self):\n window_frame = self.frameGeometry()\n screen_center = QtGui.QDesktopWidget().availableGeometry().center()\n window_frame.moveCenter(screen_center)\n self.move(window_frame.topLeft())", "def action_to_coords(self, x, y):\n self.scene.center_on(x, y)", "def return_to_center(): #ignore this for now, use move_to_position_(0,0)\n current_pos = '\\xAA\\xBB\\xCC\\xDD'\n #run command until back to center (0,0)\n while True: #change the byte locations\n current_pos = to_center()\n print(current_pos)\n time.sleep(0.2) #check timing\n if((current_pos[1] == 0) and (current_pos[1] == 0)):\n break\n print('At center')", "def _align_toplevel_grid(self):\n\n # align origin with nearest multple of 128\n self.mins[0] -= self.mins[0] % 128\n self.mins[1] -= self.mins[1] % 128\n\n width = self.maxs[0] - self.mins[0]\n height = self.maxs[1] - self.mins[1]\n greatest_dim = max(width, height)\n nearest_pow_two = int(2 ** np.ceil(np.log2(greatest_dim)))\n width_adjustment = (nearest_pow_two - width)\n height_adjustment = (nearest_pow_two - height)\n\n self.maxs[0] += width_adjustment\n self.maxs[1] += height_adjustment", "def center_me(self, container):\n x = (container.winfo_screenwidth() - container.winfo_reqwidth()) / 2\n y = (container.winfo_screenheight() - container.winfo_reqheight()) / 2\n container.geometry(\"+%d+%d\" % (x, y))", "def update(self):\n super().update()\n if self.center_y > TOP_LIMIT:\n self.center_y = BOTTOM_LIMIT\n if self.center_y < BOTTOM_LIMIT:\n self.center_y = TOP_LIMIT\n\n if self.center_x < 250:\n self.change_x = (0.2) * OBJECTS_SPEED\n elif self.center_x > SCREEN_WIDTH - 250:\n self.change_x = (-0.2) * OBJECTS_SPEED", "def horizontal_alignment(self):\n self.update()\n return self._horizontal_alignment", "def CenterPane(self):\r\n \r\n self.state = 0\r\n return self.Center().PaneBorder().Resizable()", "def display_right_to_left(self, display_right_to_left):\n\n self.container['display_right_to_left'] = display_right_to_left", "def action_to_spawn(self):\n self.scene.center_on_spawn()", "def center_abs(self, source):\n if self.relative:\n return source.center + self.center\n else:\n return self.center", "def rot_center(self):\n loc = self.rect.center\n self.image = pygame.transform.rotate(self.current_sprite_alpha, self.rot)\n self.rect = self.image.get_rect()\n self.rect.center = loc", "def go_left(self):\n self.rect.centerx -= self.__dx", "def center_on_mouse(w):\n root=w.get_toplevel().get_root_window()\n (screen, x, y, mod) = root.get_display().get_pointer()\n r = screen.get_monitor_geometry(screen.get_monitor_at_point(x, y))\n\n 
# Let's try to center the window on the mouse as much as possible.\n width, height = w.get_size()\n\n posx = max(r.x, x - width / 2)\n if posx + width > r.x + r.width:\n posx = r.x + r.width - width\n\n posy = max(r.y, y - height / 2)\n if posy + height > r.y + r.height:\n posy = r.y + r.height - height\n\n w.move(posx, posy)", "def setCenter(self, p):\n self.__center = p", "def center_horizontal_paddle(self):\n self.top_center = self.screen_rect.centerx - (self.screen_rect.centerx/2)\n self.bot_center = self.screen_rect.centerx - (self.screen_rect.centerx/2)", "def CenterZombie(self):\n # Requirement ID: 8.0.1\n\n self.center = self.screen_rect.centerx", "def adjust_visual(self):\n\n if (self.direction is bs.Direction.LEFT):\n self.rect.x -= 0.5 * CELL_SIZE", "def vert_center(self):\n return self._vert_center", "def vert_center(self):\n return self._vert_center", "def rec_default(self):\n self.average_triggers.setText('(-50,1)')", "def align(): # open EH and fast shutter\n\t#marAuxiliary.closeMarShield()\n\td2in()\n\td3in()\n\tsh('o')", "def move_center(obj):\n desktop = QApplication.desktop()\n dw = desktop.width()\n dh = desktop.height()\n size = obj.size()\n mw = size.width()\n mh = size.height()\n obj.move(dw/2-mw/2, dh/2-mh/2)", "def center(self):\n return (self.centerx, self.centery)", "def __show_initial_alignment(self):\n success = False\n\n pcd = o3d.io.read_point_cloud(\n self.source_cloud\n )\n if np.asarray(pcd.points).shape[0] != 0:\n pcd.paint_uniform_color([0, 1, 0])\n pcd.estimate_normals()\n self.initial_alignment_point_cloud_view.load_cloud(pcd)\n success = True\n else:\n QtWidgets.QMessageBox.warning(self, \"Error\",\n f\"Source point cloud is no longer available\"\n )\n self.source_cloud = \"\"\n self.__update_clickability()\n if success:\n pcd = o3d.io.read_point_cloud(\n self.target_cloud\n )\n if np.asarray(pcd.points).shape[0] != 0:\n pcd.paint_uniform_color([0, 0, 1])\n pcd.estimate_normals()\n self.initial_alignment_point_cloud_view.load_cloud(pcd)\n try:\n self.initial_alignment_point_cloud_view.show_window()\n except RuntimeError:\n pass\n else:\n QtWidgets.QMessageBox.warning(self, \"Error\",\n f\"Target point cloud is no longer available\"\n )\n self.source_cloud = \"\"\n self.__update_clickability()\n self.__save_context()", "def set_center(self, center):\n self._center = center\n self._reset_slot_bounds()", "def test_align_points(self):\n mv = [\n [0, 0, 5],\n [5, 0, 5],\n [0, 5, 5]\n ]\n sv = [\n [0, 0, 0],\n [5, 0, 0],\n [0, 5, 0]\n ]\n al = align(self.amp1, self.amp2, mv=mv, sv=sv, method='contPoints').m\n zMax = self.amp1.vert[:, 2].max() - 5\n # Both objects are already centered, so should be close to origin (allowing for some inaccuracy)\n self.assertAlmostEqual(al.vert[:, 2].max(), zMax, delta=TestAlign.DELTA)", "def position_center(self, x, y):\n self.x = x\n self.y = y\n self.pos[0] = x - self.pos[2]/2\n self.pos[1] = y - self.pos[3]/2", "def onInvoke():\n if dock.isVisible():\n dock.toggleViewAction().trigger()\n else:\n dock.setFloating(True)\n pos = QtGui.QCursor.pos()\n dock.move(pos.x() - dock.size().width() / 2,\n pos.y() - dock.size().height() / 2)\n dock.setVisible(True)", "def centerOnMark(self, mark):\n\n # get the center of the mark\n point = mark.mapToScene(mark.pos())\n\n # and center the view on it\n self.centerOnPoint(point)" ]
[ "0.61338747", "0.60391134", "0.5984671", "0.5806344", "0.57887024", "0.57887024", "0.57887024", "0.57887024", "0.5788586", "0.5779757", "0.5755072", "0.5707836", "0.57064337", "0.57064337", "0.5662579", "0.5656464", "0.5644351", "0.5622311", "0.5614773", "0.5609005", "0.5605141", "0.554421", "0.55386585", "0.55359644", "0.55300593", "0.552739", "0.5511874", "0.54986745", "0.54798347", "0.547107", "0.5463447", "0.54531735", "0.54531735", "0.54367286", "0.5433351", "0.5428642", "0.54208666", "0.54208666", "0.5415673", "0.5409166", "0.53926235", "0.5370048", "0.53546154", "0.5337096", "0.53295726", "0.5325127", "0.53237236", "0.53177637", "0.53014594", "0.5298978", "0.52902645", "0.5287881", "0.528755", "0.5287449", "0.52819455", "0.5277439", "0.5272276", "0.5271514", "0.5271514", "0.5271514", "0.5266271", "0.526586", "0.52616364", "0.524917", "0.52491164", "0.5247623", "0.52319145", "0.52198267", "0.5217865", "0.52171457", "0.52160734", "0.5205464", "0.5169838", "0.5158228", "0.5157731", "0.51558435", "0.5141868", "0.5137075", "0.51356477", "0.51345617", "0.5104644", "0.51037365", "0.510229", "0.5096816", "0.50861466", "0.50858516", "0.5082567", "0.5081468", "0.5076135", "0.5075803", "0.5075803", "0.50654656", "0.5046328", "0.50385785", "0.5036771", "0.5031124", "0.5029436", "0.5028403", "0.5020938", "0.5019653", "0.50120443" ]
0.0
-1
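
As a side note on the hpos record above: the direction keyword in the command string flips with the sign of the offset. Below is a small stand-alone sketch of just that formatting step (the helper name and the example offset are assumptions for illustration; the record's real method also sends the command to the instrument and can query the current offset back):

    def format_hpos_command(offset_sec):
        # Mirrors the f-string in the hpos() record: the value is negated and the
        # direction keyword flips with the sign of offset_sec.
        direction = "LEFT" if offset_sec < 0 else "RIGHT"
        return f'SET Horizontal Offset of Trigger {-offset_sec} sec {direction} of center'

    print(format_hpos_command(-0.002))
    # -> SET Horizontal Offset of Trigger 0.002 sec LEFT of center
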
Return current sample rate in Sa/s
def sample_rate(self): return self.query_float('ENTER Current Sample Rate (Sa/s)')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sample_rate(self):\r\n return self.config.sample_rate", "def sample_rate(self):\n return self._sample_rate", "def sample_rate(self):\n return self._sample_rate", "def sample_rate(self):\n return self._sample_rate", "def sample_rate(self, sr=None):\n return self._sample_rate", "def samp_rate(self):\n return self._samp_rate", "def sampling_rate(self):\n return self.track.sampling_rate", "def get_samp_rate(self):\n return _uhd_swig.usrp_sink_get_samp_rate(self)", "def get_sample_rate(self):\n return 1", "def get_samp_rate(self):\n return _uhd_swig.usrp_source_get_samp_rate(self)", "def get_samplerate(self):\n\t\treturn _PM_UPDATE_RATE / self.output_decimation", "def sampling_rate(self):\n with audioread.audio_open(self.path) as f:\n return f.samplerate", "def get_samp_rate(self):\n return _uhd_swig.usrp_sink_sptr_get_samp_rate(self)", "def sample_interval(self):\n\n if self.sample_rate != 0:\n return 1.0 / self.sample_rate\n return 0.0", "def get_samp_rate(self):\n return _uhd_swig.usrp_source_sptr_get_samp_rate(self)", "def samplerate(self):\n return self.sound.samplerate", "def sample_rate(self):\n if self.has_data():\n try:\n return round(\n 1.0\n / np.float64(\n (\n np.median(\n np.diff(self.dataset.coords[\"time\"].to_index())\n / np.timedelta64(1, \"s\")\n )\n )\n ),\n 0,\n )\n except AttributeError:\n self.logger.warning(\n \"Something weird happend with xarray time indexing\"\n )\n\n raise ValueError(\n \"Something weird happend with xarray time indexing\"\n )\n return self.run_metadata.sample_rate", "def rate(self):\n if self._rate:\n return self._rate\n else:\n return self._wave.getframerate()", "def get_channel_sampling_rate(self)->float:\n return self.__sampling_rate", "def input_data_sample_rate(self):\n return self._input_data_sample_rate", "def rate(self):\n return self.__rate", "def rate(self):\n return self._rate", "def get_current_rate(self):\n pass", "def update_rate_hz(self) -> float:\n return self._update_rate_hz", "def get_scan_rate(self):\n raise NotImplementedError", "def read(self):\n beats, interval_ms = self.read_raw()\n if 0 < interval_ms < 2500:\n rate = 60000.0 / interval_ms\n else:\n raise RuntimeError(\"Value out of range or device not connected.\")\n return rate", "def update_rate(self):\n self._rate = (\n (self._received - self._samples[0]) / float(self.sample_size)\n )\n self._samples.append(self._received)", "def data_rate(self):\n return self._data_rate", "def get_samp_rates(self):\n return _uhd_swig.usrp_sink_get_samp_rates(self)", "def bandwidth(self):\n return self.stop_hz - self.start_hz", "def duration(self) -> float:\n return float(len(self.__samples))/float(self.__rate)", "def app_insights_sampling_rate(self) -> Optional[pulumi.Input[float]]:\n return pulumi.get(self, \"app_insights_sampling_rate\")", "def get_samp_rates(self):\n return _uhd_swig.usrp_source_get_samp_rates(self)", "def getRate(self, context):\n try:\n return VTypeHelper.toDouble(context.getDevice(\"rate\").read())\n except:\n return 60.0", "def rate(self) -> float:\n return self.success_cnt / self.total_cnt if self.total_cnt > 0 else 1.0", "def rate(self):\n return self.brate / FAC", "def getDataRate(self):\n \n return self.DataRate", "def num_samples(self):\n with audioread.audio_open(self.path) as f:\n return int(f.duration * f.samplerate)", "def sample_rate(self):\n\n properties_file = open(self.scenario_path + \"/conf/sandag_abm.properties\", \"r\")\n rate = None\n\n for line in properties_file:\n # strip all white space from the line\n line = line.replace(\" \", 
\"\")\n\n # find line containing \"sample_rates=\"\n m = re.compile(\"sample_rates=\").match(line)\n if m:\n # take the portion of the line after the matching string\n # and split by the comma character\n line = line[m.end():].split(\",\")\n\n # if the split line contains a single element return that element\n # otherwise return the final element\n if len(line) == 0:\n rate = float(line[0])\n else:\n rate = float(line[-1])\n break\n\n properties_file.close()\n\n return rate", "def var(self):\n\n return self.rate", "def samples_per_frame(self):\n return self._samples_per_frame", "def compute_rate(self):\n bg_rate = self.counts.data / self.livetime.data\n\n bg_rate /= self.counts.bin_volume\n\n bg_rate = bg_rate.to('MeV-1 sr-1 s-1')\n\n self.bg_rate.data = bg_rate\n self.bg_rate.data_err = (np.sqrt(self.counts.data) / (self.counts.bin_volume * self.livetime.data)).to(\n 'MeV-1 sr-1 s-1')", "def get_frequency(self, detune=0) -> float:\n return np.power(2, (self._cents + detune)/1200) * 440", "def input_data_sample_rate(self, value):\n self._input_data_sample_rate = value", "def _do_get_rate(self):\n rate = {\n 1: \"1 : Helium Probe in FAST rate\",\n 0: \"0 : Helium Probe in SLOW rate\"\n }\n result = self._execute('X')\n return rate.get(int(format(int(result[5:7]), '08b')[6]), \"Unknown\")", "def frame_rate(self):\n return self._frame_rate", "def frequency(self):\n return self.reference_clock_speed / 4096 / self.prescale_reg", "def relative_rate(self) -> \"double\":\n return _beamforming_swig.randomsampler_sptr_relative_rate(self)", "def get_tickrate(self):\n raise NotImplementedError", "def get_measured_current(self):\n status = self.get_status_response()\n current = status[16] + (status[17] * 0x100) + (status[18] * 0x10000) + (status[19] * 0x1000000)\n current = float(current)\n current /= (1000.0 * 1000.0)\n return current\n #end get_measured_current", "def get_samp_rates(self):\n return _uhd_swig.usrp_sink_sptr_get_samp_rates(self)", "def get_samp_rates(self):\n return _uhd_swig.usrp_source_sptr_get_samp_rates(self)", "def sample_period(self) -> int:\n return self.__sample_period", "def us(self):\n return 1000 * 1000 * self.read()", "def get_sound_speed(self):\n return calculate_speed_of_sound(self.T, self.H, self.p)", "def wave_samples(self):\n return self._quantized_subsamples", "def throughput(self):\n return self.cwnd_from_file * self.mss / self.rtt", "def compute_rate(self):\n bg_rate = self.counts_cube.data / self.livetime_cube.data\n\n bg_rate /= self.counts_cube.bin_volume\n # bg_rate.set_zero_level()\n\n # import IPython; IPython.embed()\n bg_rate = bg_rate.to('1 / (MeV sr s)')\n\n self.background_cube.data = bg_rate", "def sampling_frequency(self) -> int:\n return self._sample_freq", "def samp_frac(self):\n return self._samp_frac", "def samplingTime(self):\n return self._AWG.samplingTime_ns()", "def bitrate(self) -> float:\n msb = self._read_u8(_REG_BITRATE_MSB)\n lsb = self._read_u8(_REG_BITRATE_LSB)\n return _FXOSC / ((msb << 8) | lsb)", "def expectation(self):\n\n return self.rate", "def _compute_rate(self, sampling_interval):\n if sampling_interval is None:\n return 1\n\n step = SpynnakerDataView.get_simulation_time_step_ms()\n rate = int(sampling_interval / step)\n if sampling_interval != rate * step:\n raise ConfigurationException(\n f\"sampling_interval {sampling_interval} is not an an \"\n f\"integer multiple of the simulation timestep {step}\")\n if rate > self._MAX_RATE:\n raise ConfigurationException(\n f\"sampling_interval {sampling_interval} higher than \"\n f\"max 
allowed which is {step * self._MAX_RATE}\")\n return rate", "def frequency(self):\n return float(self.get_frequency())", "def frame_rate():\n def r(x):\n return 6E7/x\n\n def w(x):\n return int(6E7/x)\n return r, w", "def get_maximum_input_channel_rate(self):\r\n sample_rate = cfloat64()\r\n NIDAQ_dll.DAQmxGetDevAIMaxSingleChanRate(self.dev_id.encode('ascii'),\r\n ctypes.byref(sample_rate))\r\n return sample_rate.value", "def get_sampwidth(self):\n return self._sampwidth", "def __get_speed(self):\n if self.speed_method == 'average_gap':\n total_gap = 0\n for i in range(1, len(self.__spike_buffer)):\n total_gap += self.__spike_buffer[i] - self.__spike_buffer[i-1]\n\n average_gap = total_gap / len(self.__spike_buffer)\n\n\n if self.__spike_buffer[-1] > timeit.default_timer() - self.cooldown:\n speed = self.tick_length/average_gap\n else:\n speed = 0.00\n\n return speed", "def get_framerate(self):\n return self._framerate", "def frequency(self):\n return self.reference_clock_speed / 4096 / self.prescale_reg", "def _set_rate(self):\r\n interval = self.data.iloc[2, 0] - self.data.iloc[1, 0]\r\n self.rate = int(1 / interval)", "def get_duration_sox_s(audio_file_path: str) -> float:\n global FS_HZ\n assert FS_HZ is not None\n duration_n = get_duration_sox_n(audio_file_path)\n return duration_n / FS_HZ", "def calculate_throughput(self,sender,noise_from_other_devices):\r\n B = 0\r\n sig_pow = 0\r\n for freq_range in sender.currently_used_frequencies:\r\n B += (freq_range[1] - freq_range[0]) * 1000 #kHz\r\n sig_pow += self.calculate_signal_power(sender, freq_range)\\\r\n /(len(sender.currently_used_frequencies))\r\n throughput = B * np.log2(1 + sig_pow / (noise_from_other_devices + settings.noise_factor))\r\n return throughput", "def audio_bitrate(self):\n # type: () -> int\n return self._audio_bitrate", "def force_samplerate(self):\n return self.input_samplerate", "def brate(self):\n try:\n return self.pos / self.runtime\n except ZeroDivisionError:\n return 0", "def relative_rate(self):\n return _TestA_swig.my_qpsk_demod_cb_sptr_relative_rate(self)", "def get_clock_rate(self):\n return self.o.read_register(self.dev_id, CLOCK_RATE)", "def get_sampled_timesteps(self) -> int:\n return self.sampled_timesteps", "def getFrameRate(self):\n if not self.proxy:\n self.proxy = self.session.service(\"ALVideoRecorder\")\n return self.proxy.getFrameRate()", "def _send_sampled_event(self):\n if not self.enabled:\n return False\n send_sample = False\n self.count += 1\n if self.actual_rate < self.statsd_sample_rate:\n self.monitored += 1\n send_sample = True\n self.actual_rate = float(self.monitored) / float(self.count)\n if self.count >= maxint or self.monitored >= maxint:\n self.count = 0\n self.monitored = 0\n return send_sample", "def calculateDataRate(self):\n pass", "def total_samples(self):\n\n totals = self.recording_data()['totals']\n return totals[0] + totals[1]", "def MinKbpsRate(self):\n\t\treturn self._get_attribute('minKbpsRate')", "def get_rate(self, model: str) -> Quantity:\n if model not in self._count_rate:\n raise ModelNotAssociatedError(\"There are no XSPEC fits associated with this Spectrum\")\n else:\n rate = Quantity(self._count_rate[model], 'ct/s')\n\n return rate", "def sampling_frequency(self) -> int:\n return int(1 / self.x_scale)", "def _avg_sample(self):\n samples = [0] * self.num_samples\n for i in range(self.num_samples):\n samples[i] = self.sensor.measure_distance()\n time.sleep(self.sample_delay)\n if self.drop_extremes:\n samples.sort()\n samples = samples[1:-1]\n return 
sum(samples) / len(samples)", "def success_rate(self):\n success_rate_text = self.emulator.get_screen_text(ui_element=self.ui['ENHANCE_POTENTIAL_RATE'])\n success_rate = success_rate_text.replace(\"%\", \"\").replace(\" \", \"\")\n return float(success_rate)", "def getAvgOfSamples(self, ch = \"CH1\", samples = 100):\n\t\tself.isReady()\n\t\tcounter = 1\n\t\twhile True:\n\t\t\ttry:\t\t\n\t\t\t\twaveform = self.osc.get_waveform(source = ch, start = 1, stop = samples)\n\t\t\t\tbreak\n\t\t\texcept:\n\t\t\t\tprint(\"Retry: \" + str(counter))\n\t\t\t\tcounter += 1\n\t\ty_array = []\n\t\tfor x,y in waveform:\n\t\t\ty_array.append(y)\n\t\tvoltage = sum(y_array)/len(y_array)\n\t\treturn voltage", "def get_sampling_interval(sampling_rate):\n return sampling_rate * SpynnakerDataView.get_simulation_time_step_ms()", "def set_samp_rate(self, *args, **kwargs):\n return _uhd_swig.usrp_sink_set_samp_rate(self, *args, **kwargs)", "def update_playback_gain(self, val):\n self.playbackGain = 10**(5.0*(val - self.speedDial.maximum()/2)/self.speedDial.maximum())", "def getRate(self) -> int:\n if (self._total_stake.get() + self._daily_reward.get()) == 0:\n rate = DENOMINATOR\n else:\n rate = (self._total_stake.get() + self._daily_reward.get()) * DENOMINATOR // self.sICX_score.totalSupply()\n return rate", "def ventilation_rate_per_second(self):\n return self.volume * self.outdoor_air_ventilation * 1000 / 3600", "def get_sample_rate(self, audio_bits=24, channels=2):\n if (channels < 1 or channels > 2):\n raise I2SError (\"Channels can only be 1 or 2 at this time\")\n\n\n clock_rate = self.get_clock_rate()\n divisor = self.get_clock_divisor()\n sample_rate = clock_rate / ((divisor * audio_bits * channels) + 1)\n return sample_rate", "def sample_count(self):\n if self._sample_count:\n return self._sample_count\n else:\n return self._wave.getnframes()", "def quality(self):\n return self.plays * self.number", "def ramp_rate(self) -> IMockPin:\n return self[\"ramp_rate\"]", "def LoadRateValue(self):\n\t\treturn self._get_attribute('loadRateValue')" ]
[ "0.81212515", "0.8000023", "0.8000023", "0.7969299", "0.7864392", "0.7847092", "0.782177", "0.7783444", "0.7727639", "0.76789045", "0.763086", "0.74798954", "0.7479725", "0.7386037", "0.7357198", "0.73316765", "0.7272404", "0.72663385", "0.7222648", "0.70849174", "0.6931208", "0.68862706", "0.68820935", "0.68451726", "0.68282443", "0.6816689", "0.67112553", "0.66999406", "0.66733843", "0.6672461", "0.6660907", "0.6657606", "0.66493607", "0.66290224", "0.6581465", "0.65778494", "0.6558713", "0.6538218", "0.65325415", "0.65284455", "0.6506782", "0.649961", "0.64928865", "0.6488855", "0.64500165", "0.64281386", "0.6425293", "0.64171016", "0.6408757", "0.63971424", "0.6395845", "0.63547236", "0.6344777", "0.6330723", "0.6300904", "0.6285202", "0.6281653", "0.6253322", "0.62499714", "0.6223876", "0.6216551", "0.62006193", "0.6200399", "0.61760885", "0.6163521", "0.61053824", "0.6094869", "0.6093994", "0.6093149", "0.60929835", "0.6087362", "0.6081345", "0.60780764", "0.60762733", "0.607132", "0.6071026", "0.6069398", "0.60670847", "0.60534483", "0.6052064", "0.6043079", "0.6031788", "0.60316056", "0.60167795", "0.60139704", "0.6013802", "0.6007744", "0.6006238", "0.6002932", "0.599738", "0.59843457", "0.59820426", "0.59800684", "0.59762704", "0.5966008", "0.5964534", "0.5954172", "0.5953107", "0.5950308", "0.59419894" ]
0.86877906
0
Capture scope screen and save as filename. File type determines format as supported by derived class
def screen_capture(self, filename):
    self.command(f'CAPTURE SCREEN as {filename}')
    return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_raw(self, filename, typ):\n self.lib.SaveAsRaw(ct.c_char_p(str.encode(filename)),\n ct.c_int(self.savetypes[typ]))", "def out_filename(self, filetype, format='old', dir=Location.OUT_DIR):\n filename = self.filename(filetype=filetype, format=format)\n #return Path(dir) / filename\n return filename", "def out_filename(self, filetype, format='old', dir=Location.OUT_DIR):\n filename = self.filename(filetype=filetype, format=format)\n # return Path(dir) / filename\n return filename", "def save(self):\n \n fileName=self.characterName+\"_\"+self.race+\"_\"+self.classType+\"_lvl_\"+str(self.level)\n new_file = open(str(fileName)+\".txt\",\"w\")\n new_file.write(\"~~~~~~~~~~~ \"+self.characterName+\" the \"+self.race+\" \"+self.classType+\" ~~~~~~~~~~~\\n\\n\")\n new_file.write(\"Level: \"+str(self.level)+\" HP: \"+str(self.hp)+\" XP: \"+str(self.xp)+\" Hit Dice: \"+str(self.level)+str(self.hit_dice[self.classType])+\"\\n\")\n new_file.write(str(self.abilityScores()))\n new_file.write(\"\\n\\n~~~~~~~~~ Skills ~~~~~~~~~\\n\")\n for i in self.skills:\n new_file.write(\"\\n\"+i+\" \"+\"(\"+skills[i.lower()].upper()+\")\")\n new_file.write(\"\\n\\n~~~~~~~~~ Traits ~~~~~~~~~\\n\")\n for i in self.traits:\n new_file.write(\"\\n ~~\"+i+\"~~\\n \"+str(self.traits[i])+\"\\n\")\n new_file.write(\"\\n\\n~~~~~~~~~ Specialty: \"+self.specialty+\" ~~~~~~~~\\n\")\n new_file.write(\"\\n \"+self.specialtyStory+\"\\n\")\n new_file.write(\"\\n ~~~~ Feats ~~~~\\n\")\n for i in range(1,self.level+1):\n if i == 1 or i%3 == 0:\n new_file.write(\"\\n Level \"+str(i)+\": \"+self.feats[i]['name']+' '\\\n \"(\"+self.feats[i]['type']+\")\\n\"\\\n ' \"'+self.feats[i]['description']+'\"\\n\\n')\n if 'prereq' in self.feats[i]:\n new_file.write(\" Prerequisite: \"+self.feats[i]['prereq']+\"\\n\")\n if 'benefit' in self.feats[i]:\n new_file.write(\" Benefit: \"+self.feats[i]['benefit']+\"\\n\")\n if 'effect' in self.feats[i]:\n new_file(\" Effect: \"+self.feats[i]['effect']+\"\\n\")\n \n new_file.write(\"\\n\\n~~~~~~~~~ Background: \"+self.background+\" ~~~~~~~~\\n\")\n if self.backgroundProfession == '':\n pass\n else:\n new_file.write(\"Profession: \"+self.backgroundProfession)\n new_file.write(\"\\n \"+self.backgroundStory)\n \n new_file.close()\n print \"File \"+str(fileName)+\".txt saved.\"", "def save(self, filename):\n pass", "def save():\n file_name = filedialog.asksaveasfilename(\n filetypes=[\n (\"Scalable Vector Graphics\", \"*.svg\"),\n (\"Postscript\", \"*.ps\"),\n (\"Portable Network Graphics\", \"*.png\")\n ],\n initialdir=os.getcwd())\n if file_name: # save option not cancelled by user\n extension = re.search(r\"\\.[\\w]+$\", file_name)[0]\n if extension == '.png':\n self.parent_class.save_png(file_name)\n elif extension == \".ps\":\n self.parent_class.save_postscript(file_name)\n elif extension == \".svg\":\n self.parent_class.save_canvas_svg(file_name)\n else:\n raise TypeError(\"Unknown Filetype\")", "def _prettyfilename(self):\n return f'{self.title} ({self.subtype})'", "def __repr__(self):\r\n class_name = type(self).__name__\r\n return f'{class_name}({self.file_path})'", "def _file_format_adapter(self):\n raise NotImplementedError", "def save(self,filename,format=None,double_precision=False):\n\n\t\tif format is None:\n\t\t\t\n\t\t\textension = filename.split(\".\")[-1]\n\t\t\tif extension in [\"fit\",\"fits\"]:\n\t\t\t\tformat=\"fits\"\n\t\t\telif extension in [\"npy\",\"npz\"]:\n\t\t\t\tformat=\"npz\"\n\t\t\telse:\n\t\t\t\traise IOError(\"File format not recognized from extension '{0}', please 
specify it manually\".format(extension))\n\n\t\t\n\t\tif format==\"fits\":\n\t\t\tsaveFITS(self,filename,double_precision)\n\t\telif format==\"npz\":\n\t\t\tsaveNPZ(self,filename)\n\t\telse:\n\t\t\traise ValueError(\"Format {0} not implemented yet!!\".format(format))", "def save(self, filename):\n pass", "def save_file(self, param=None):\n param = param if isinstance(param, str) else param.keysym # which is the letter with the CTRL-\n if param == 'r':\n saved_files = self.controller.save_current_buffers(log_type='raw', log_format = 'both')\n elif param == 'f':\n saved_files = self.controller.save_current_buffers(log_type='filtered', log_format = 'both')\n elif param == 'e':\n saved_files = self.controller.save_current_buffers(log_type='raw', log_format = 'supersid_extended')\n elif param == 's':\n filename = self.AskSaveasFilename()\n if filename:\n saved_files = self.controller.save_current_buffers(filename, log_type='filtered', log_format = 'supersid')\n MessageBox.showinfo(\"SuperSID files saved\", \"\\n\".join(saved_files))", "def save_specs(self, filename):\n pass", "def save_specs(self, filename):\n pass", "def saveFile(self):\n fName = str(self.ui.lineEditPath.text()) + '/' + \\\n str(self.ui.lineEditFileName.text())\n try:\n self.caller.raw.save(fName)\n self.caller.setRaw(fName, self.parent)\n except IOError as e:\n self.error = True\n sys.stderr.write(\"Could not save!\\n\")\n sys.stderr.write(str(e))\n finally:\n self.e.set()", "def save(self, filename):\n raise NotImplementedError", "def __str__(self):\r\n class_name = type(self).__name__\r\n return f'{class_name}({self.fname})'", "def out_filename(self, filetype, dir, format='old'):\n filename = self.filename(filetype=filetype, format=format)\n return Path(dir) / filename", "def saveAsHandler(self, subsystem_controller):\n\n subystem_name = subsystem_controller.mySubsystem.subsystemName\n file_path = self.saveFileExplorer(caption=f'Enter file path for {subystem_name} subsystem')\n\n if file_path is not None:\n\n subsystem_controller.setFilePath(file_path)\n subsystem_controller.buildCommandFile(subsystem_controller.filePath)\n\n else:\n\n print('file not saved')", "def __repr__(self):\n cls_name = self.__class__.__name__\n src_file = self._file_repr\n return '{0}({1})'.format(cls_name, src_file)", "def GetCaptureFileFormats(self): # real signature unknown; restored from __doc__\n pass", "def save(self, fname):\n pass", "def get_file_format(self):\n # if self.save_image_or_figure == IF_MOVIE:\n # return self.movie_format.value\n return self.file_format.value", "def export_screenshot(self):\n\n if self.vis_type is None or len(self.vis_type) < 1:\n vis_type_suffix = ''\n else:\n vis_type_suffix = self.vis_type\n\n print(\"exporting screenshot for {}\".format(self.current_unit_id))\n ss_out_file = self.screenshot_dir / \"{}_{}_{}.{}\".format(\n self.current_unit_id, vis_type_suffix,\n cfg.screenshot_suffix, cfg.screenshot_format_ext)\n self.fig.savefig(ss_out_file, bbox_inches='tight', dpi=cfg.dpi_export_fig)", "def _savefilename(self):\n logger.debug(\"Popping SaveFilename browser\")\n return filedialog.asksaveasfilename(**self._kwargs)", "def to_fname(self, type):\n if type in [\"pdf\", \"p\"]:\n return _g.currentPath / \"pdf\" / f\"{self.doi.replace('/','#')}.pdf\"\n elif type in [\"si\", \"s\"]:\n return _g.currentPath / \"si\" / f\"{self.doi.replace('/','#')}.pdf\"\n elif type in [\"web\", \"w\"]:\n return f\"https://doi.org/{self.doi}\"\n else:\n raise ValueError(\"Invalid type '{type}' given\")", "def capture(self):\n 
current_time = time.strftime('%Y%m%d-%H%M%S')\n filepath = f'files/{current_time}.png'\n self.ids.camera.export_to_png(filepath)\n self.manager.current = 'image_screen' # switch to the next screen\n self.manager.current_screen.ids.img.source = filepath # inherit img to the next screen\n return filepath", "def saveSnapshot(self, filename): \n\t\tpass", "def file_type(self):\n ftype = str(self.FileType)\n choices = None\n if self.FileType == 'Driver':\n choices = {\n 0x0: 'Unknown',\n 0x1: 'Printer',\n 0x2: 'Keyboard',\n 0x3: 'Language',\n 0x4: 'Display',\n 0x5: 'Mouse',\n 0x6: 'Network',\n 0x7: 'System',\n 0x8: 'Installable',\n 0x9: 'Sound',\n 0xA: 'Comms',\n 0xB: 'Input Method',\n 0xC: 'Versioned Printer',\n }\n elif self.FileType == 'Font':\n choices = {\n 0x1: 'Raster',\n 0x2: 'Vector',\n 0x3: 'Truetype',\n }\n if choices != None:\n subtype = obj.Object('Enumeration', 0x28, vm = self.obj_vm, parent = self, choices = choices)\n ftype += \" (\" + str(subtype) + \")\"\n\n return ftype", "def save(self, filename, format=None, verbose=True):\n from . import Formats\n Formats.save(self, filename, format=format, verbose=verbose)", "def _getfilename(self):\n pass", "def save_as_file(self, event=None):\n\n file = fd.asksaveasfile(title=\"Save as\", defaultextension=\".txt\",\n filetypes=[(\"Text(default)\", \"*.txt\"), (\"Python\", \"*.py\"), (\"Java\", \"*.java\"),\n (\"All files\", \"*.*\")])\n if file == None:\n return\n else:\n # self.file_list.append(file.name)\n file.write(self.get_current().get('1.0', 'end-1c'))\n file.close()\n self.add_tab(file=file.name, open_file=1)\n from syntax_highlight import Highlighting\n Highlighting().highlight2()", "def _toFile(self):\n pass", "def OnSaveAsFileRibbon(self, event):\n# def onSaveAsFile(self, event):\n wildcard = \"Text source (*.txt)|*.txt|\" \\\n \"All files (*.*)|*.*\"\n \n self.currentDirectory = os.getcwd()\n \n dlg = wx.FileDialog(self, message=\"Сохранение документа\", defaultDir=self.currentDirectory, \n defaultFile=\"\", wildcard=wildcard, style=wx.FD_SAVE)\n if dlg.ShowModal() == wx.ID_OK:\n path = dlg.GetPath()\n report = open(path, \"w\") \n\n report.close()\n\n dlg.Destroy()", "def save_as_fits(self, filename):", "def selection_file_type(self):\n self.selection_directory()\n self.ui_FileList.clearSelection()\n if not self.show_save_action:\n self.ui_SelectedName.setText(None)\n if self.show_save_action:\n text = self.ui_SelectedName.text()\n new_text = text.split(\".\")[0]\n self.ui_SelectedName.setText(new_text)", "def save(self):\n if PYTHON3:\n fileobj = open(self.filename, 'w', encoding=self.ENCODING, errors=\"replace\")\n else:\n fileobj = open(self.filename, 'w')\n self.save_to_fileobj(fileobj)\n fileobj.close()", "def format(self):\n self.clear_whitespace()\n self.to_ascii()\n return self._filename", "def save_gen_GUI(genotype):\n\n\tfilename = simpledialog.askstring(\"Get filepath.\", \"Where do you want the file to be saved?\")\n\t# make sure user did not cancel request for filepath\n\tif(filename != None):\n\t\tgenotype.save(filename)", "def file_type(self):\n try:\n return self.get_driver().ShortName\n except AttributeError:\n return", "def save(self,fname=None):\r\n if fname == None:\r\n fname = \"school_%s.save\" % self.name_full\r\n fname = fname.replace(\"'\", \"\")\r\n fname = fname.replace(\". 
\", \"_\")\r\n fname = fname.replace(\" \", \"_\")\r\n fname = fname.replace(\"'\", \"\")\r\n thisdir = os.getcwd()\r\n tempdir = \"TEMP\"\r\n\r\n if os.path.isdir(os.path.join(thisdir, \"TEMP\")):\r\n tempdir = os.path.join(thisdir, \"TEMP\")\r\n #elif os.path.isdir(os.path.join(thisdir, \"..\", \"TEMP\")):\r\n # tempdir = os.path.join(thisdir, \"..\", \"TEMP\")\r\n\r\n if os.path.isdir(tempdir):\r\n os.chdir(tempdir)\r\n\r\n\r\n #if we try pickling as is we get following error message:\r\n #raise TypeError, \"can't pickle %s objects\" % base.__name__\r\n #TypeError: can't pickle Font objects\r\n\r\n #set sign_font to NONE to get around this\r\n #we have both self.sign_font_name and self.sign_font_size set,\r\n # so we should be able to reconstuct the FOnt object fron those on re-loading \r\n\r\n self.sign_font = None\r\n #school.sign_font_name = signFont[1]\r\n #school.sign_font_size = BIGFONT_SIZE\r\n\r\n #we can't pickle a pygame Surface\r\n #and pygame_sdl2 doesn's have the\r\n #pygame.image.tostring() method.\r\n #\r\n #We'll just have to save an image file, and\r\n #use the filename as the badge contents in\r\n #our save file\r\n #self.badge = pygame.image.tostring(self.badge)\r\n badge_fname = \"school_%s_badge.png\" % self.name_full\r\n badge_fname = string.replace(badge_fname, \" \", \"_\")\r\n badge_fname = string.replace(badge_fname, \"'\", \"\")\r\n self.badge_fname = badge_fname\r\n if GRAPHICSMODE == \"PyGame\":\r\n if type(self.badge) == StringType:\r\n pass # must have already saved it?\r\n else:\r\n print \"self.badge:\", self.badge\r\n print \"badge_fname:\", badge_fname\r\n pygame.image.save(self.badge,badge_fname)\r\n elif GRAPHICSMODE == \"PIL\":\r\n #not really needed, since we can pickle PIL objects,\r\n #but doing it to stay consistent with Pygame\r\n self.badge.save(badge_fname, \"PNG\")\r\n self.badge = badge_fname\r\n\r\n outfile = open(fname, \"wb\")\r\n #pickle.dump(self, outfile)\r\n try:\r\n pickle.dump(self, outfile)\r\n except:\r\n print \"!!! CANNOT FUCKING PICKLE !!!\"\r\n for q in self.__dict__.keys():\r\n print \"\\tremoving '%s'\"% q\r\n self.__dict__[q] = None\r\n try:\r\n pickle.dump(self, outfile)\r\n print \"\\t !!! IT WORKED !!!\"\r\n print \"'%s' WAS THE PROBLEM!\" % q\r\n print\r\n break\r\n except:\r\n print \"\\tNOPE.. 
STILL FAILED...\"\r\n \r\n pickle.dump(self, outfile)\r\n \r\n outfile.close()\r\n\r\n if self.VERBOSE > 0:\r\n print \"wrote file '%s' OK\" % fname\r\n\r\n os.chdir(thisdir)\r\n return fname", "def save_frame(self, save_path_filename):\n raise NotImplementedError", "def on_save_as(self, event):\n data = self._get_data_selection(event)\n # path = None\n default_name = data.name\n if default_name.count('.') > 0:\n default_name = default_name.split('.')[0]\n default_name += \"_out\"\n if self.parent is not None:\n if issubclass(data.__class__, Data1D):\n self.parent.save_data1d(data, default_name)\n elif issubclass(data.__class__, Data2D):\n self.parent.save_data2d(data, default_name)\n else:\n print(\"unable to save this type of data\")", "def save_screenshot(self, file_name, width=3840, height=2160, first=True, last=True):\n if first and self.assigned_opengl_context is not None:\n self.assigned_opengl_context.makeCurrent()\n gr3.export(file_name, width, height)\n if last and self.assigned_opengl_context is not None:\n self.assigned_opengl_context.doneCurrent()", "def saveAs(self):\n print('running saveAs')\n fileName = filedialog.asksaveasfilename(\n defaultextension='.gmcr',\n filetypes=((\"GMCR+ Save Files\", \"*.gmcr\"), (\"All files\", \"*.*\")),\n parent=self.root\n )\n if fileName:\n self.file = fileName\n self.root.wm_title('GMCR+ v{} | {}'.format(__version__, self.file))\n self.saveConflict()", "def AskSaveasFilename(self, title='Save File', filetypes=None, initialfile=''):\n if filetypes==None:\n filetypes = [\n ('CSV File','*.csv'),\n ('Any File','*.*')]\n fileName = FileDialog.asksaveasfilename(parent=self.tk_root, filetypes=filetypes, initialfile=initialfile ,title=title)\n return fileName", "def save_as(\n cls, figure_or_data, filename, format=None, width=None, height=None, scale=None\n ):\n # todo: format shadows built-in name\n (base, ext) = os.path.splitext(filename)\n if not ext and not format:\n filename += \".png\"\n elif ext and not format:\n format = ext[1:]\n elif not ext and format:\n filename += \".\" + format\n\n img = cls.get(figure_or_data, format, width, height, scale)\n\n f = open(filename, \"wb\")\n f.write(img)\n f.close()", "def save_file(self, file_format, employee_list):\r\n try:\r\n if \".csv\" in file_format:\r\n self.f.save_csv(file_format, employee_list)\r\n elif \".xlsx\" in file_format:\r\n self.f.save_excel(file_format, employee_list)\r\n elif \".txt\" in file_format:\r\n self.f.save_txt_file(file_format, employee_list)\r\n else:\r\n raise NameError(\"can not save that file type\")\r\n except NameError as e:\r\n print(e)\r\n # except exception part needed\r", "def save_visualization_to_file(self, file_name, length = 90):\n session = self.capture_last(length)\n visualizer.animate(session , 0, length, name = file_name, min_x = -1, max_x = 1, min_y = -1, max_y = 1, show = False )", "def export_model(self, save_path: str, save_format: Optional[str] = None) -> None:", "def action_inject(self, save_file_location):\n # split_filename = os.path.splitext(ntpath.basename(self.in_file))\n # base_filename = split_filename[0]\n # extension = split_filename[1]\n self.save_file = save_file_location\n if not self.save_file:\n return\n\n print(\"Saving file to %s\" % ntpath.basename(self.save_file))\n\n # Launch injection on a separate thread after disabling buttons.\n self.action_inject_delay()", "def saveFormatFileTo(self, cur, encoding, format):\n if cur is None: cur__o = None\n else: cur__o = cur._o\n ret = libxml2mod.xmlSaveFormatFileTo(self._o, cur__o, 
encoding, format)\n return ret", "def save_model(self, filename):\r\n pass", "def save(self, x):\n self.print_handler_f(x, x.get_full_path())", "def callback_Save():\n saving_msg = 'Save Bioprocess As:'\\\n '\\n(will save in processes/ by default)'\n fileName = sg.popup_get_text(saving_msg, 'File Saver')\n\n if fileName:\n # read filename and add default path\n fileName = fileName.strip(' ')\n\n # if user does not input a fileName\n elif fileName is None:\n fileName = 'cancel'\n elif fileName == '':\n fileName = 'exit'\n\n return fileName", "def OnSaveAs(self, event):\r\n dirname = ''\r\n d = wx.FileDialog(self, \"Save File\", dirname, \"\", \"*.panda\", wx.SAVE)\r\n if d.ShowModal() == wx.ID_OK:\r\n self.filename = os.path.join(d.GetDirectory(), d.GetFilename())\r\n self.core.Save(self.filename)\r\n# TODO check for and strip off .panda?\r\n d.Destroy()\r\n\r\n self.RefreshTitleBar()", "def on_save(self, *args):\n file = self.get_filename()\n f = open(file.path, \"w\")\n content = self.SwitchesChooser.get_cmd_line()\n content = re.sub(\" +\", \"\\n\", content)\n f.write(content)\n f.close()\n if self.open_file_after_exit_check.get_active():\n GPS.EditorBuffer.get(file)\n self.response(Gtk.ResponseType.APPLY)", "def save_screen(screen):\n if not video_mode: # Don't record video\n return False\n # Make global variables writeable\n global current_frame\n global path_checked\n frames_directory = os.path.dirname(\n os.path.dirname(\n os.path.realpath(__file__))) + \"\\\\frames\\\\\"\n if not path_checked:\n check_folder(frames_directory)\n pygame.image.save(\n screen,\n frames_directory + \"ants-frame{}.jpeg\".format(\n str(current_frame).zfill(4)))\n current_frame += 1 # Move count to next frame", "def build_mimetype(self) -> None:\n logger.info(__('writing mimetype file...'))\n copy_asset_file(path.join(self.template_dir, 'mimetype'), self.outdir)", "def save(self, format='npz'):\n _path = os.getenv('STARTERLITE') + '/output/grf/%s.%s' % (self.fn, format)\n _wf_dict = {'grf': self.survey_maps, 'coords': self.survey_map_coords}\n np.savez(_path, **_wf_dict)", "def save(self, fname=None):\n if not fname:\n fname = self.getname() \n assert(fname != None), 'You must specify a filename to save to'\n if not fname.endswith('.png'):\n fname += '.png'\n try:\n urlretrieve(str(self), fname) \n except IOError, e:\n raise IOError, 'Problem saving chart to file: %s'%e \n return fname", "def export(self, file: TextIO) -> None:\n file.write(f'\"{self.name}\"\\n\\t{{\\n')\n file.write(f'\\tchannel {self.channel}\\n')\n file.write(f'\\tsoundlevel {join_float(self.level)}\\n')\n\n if self.volume != (1, 1):\n file.write(f'\\tvolume {join_float(self.volume)}\\n')\n if self.pitch != (100, 100):\n file.write(f'\\tpitch {join_float(self.pitch)}\\n')\n\n if len(self.sounds) != 1:\n file.write('\\trndwave\\n\\t\\t{\\n')\n for wav in self.sounds:\n file.write(f'\\t\\twave \"{wav}\"\\n')\n file.write('\\t\\t}\\n')\n else:\n file.write(f'\\twave \"{self.sounds[0]}\"\\n')\n\n if self.force_v2 or self.stack_start or self.stack_stop or self.stack_update:\n file.write(\n '\\t' 'soundentry_version 2\\n'\n '\\t' 'operator_stacks\\n'\n '\\t\\t' '{\\n'\n )\n if self.stack_start:\n file.write(\n '\\t\\t' 'start_stack\\n'\n '\\t\\t\\t' '{\\n'\n )\n for prop in self.stack_start:\n for line in prop.export():\n file.write('\\t\\t\\t' + line)\n file.write('\\t\\t\\t}\\n')\n if self.stack_update:\n file.write(\n '\\t\\t' 'update_stack\\n'\n '\\t\\t\\t' '{\\n'\n )\n for prop in self.stack_update:\n for line in prop.export():\n 
file.write('\\t\\t\\t' + line)\n file.write('\\t\\t\\t}\\n')\n if self.stack_stop:\n file.write(\n '\\t\\t' 'stop_stack\\n'\n '\\t\\t\\t' '{\\n'\n )\n for prop in self.stack_stop:\n for line in prop.export():\n file.write('\\t\\t\\t' + line)\n file.write('\\t\\t\\t}\\n')\n file.write('\\t\\t}\\n')\n file.write('\\t}\\n')", "def write_saver_defs(self):\n assert self.savers_constructed\n full_saver_def = self.full_saver.as_saver_def()\n full_file = self.params.save_dir+self.params.model_name+\"_v\"+self.params.version+\".def\"\n with open(full_file, \"wb\") as f:\n f.write(full_saver_def.SerializeToString())\n self.logger.log_info(\"Full saver def saved in file %s\"%full_file)", "def save_scopeTraces(fileName, scope, channel, noPulses):\n scope._get_preamble(channel)\n results = utils.PickleFile(fileName, 1)\n results.add_meta_data(\"timeform_1\", scope.get_timeform(channel))\n\n #ct = scope.acquire_time_check()\n #if ct == False:\n # print 'No triggers for this data point. Will skip and set data to 0.'\n # results.save()\n # results.close()\n # return False\n\n t_start, loopStart = time.time(),time.time()\n for i in range(noPulses):\n try:\n ct = scope.acquire_time_check(timeout=.4)\n results.add_data(scope.get_waveform(channel), 1)\n except Exception, e:\n print \"Scope died, acquisition lost.\"\n print e\n if i % 100 == 0 and i > 0:\n print \"%d traces collected - This loop took : %1.1f s\" % (i, time.time()-loopStart)\n loopStart = time.time()\n print \"%d traces collected TOTAL - took : %1.1f s\" % (i, (time.time()-t_start))\n results.save()\n results.close()\n return True", "def save_phantom(self, file_or_fname):\n pass", "def OnSave(self, e):\n\t\tconvert_to = None\n\t\tif e.Id == 201:\n\t\t\tconvert_to = \"photoabsorption\"\n\t\telif e.Id == 202:\n\t\t\tconvert_to = \"refractive_index\"\n\t\tlogger.info(\"Save\")\n\t\tfd = wx.FileDialog(self, style=wx.FD_SAVE|wx.FD_OVERWRITE_PROMPT)\n\t\tif fd.ShowModal()==wx.ID_OK:\n\t\t\tmetadata = {\"Density\": float(self.DensityText.GetValue()), \"Molecular Formula\":self.StoichiometryText.GetValue(),\"Formula Mass\":data.calculate_FormulaMass(self.Stoichiometry)}\n\t\t\tdata.export_data(fd.GetPath(), numpy.transpose(numpy.vstack((self.Full_E,self.KK_Real_Spectrum,data.coeffs_to_ASF(self.Full_E,self.Imaginary_Spectrum)))), header_info=metadata, convert_to=convert_to)", "def save(self, filename):\n \n raise NotImplementedError(\"not implemented!\")", "def _file_name(self, dtype_out_time, extension='nc'):\n out_lbl = utils.io.data_out_label(self.intvl_out, dtype_out_time,\n dtype_vert=self.dtype_out_vert)\n in_lbl = utils.io.data_in_label(self.intvl_in, self.dtype_in_time,\n self.dtype_in_vert)\n ens_lbl = utils.io.ens_label(self.ens_mem)\n yr_lbl = utils.io.yr_label((self.start_date.year, self.end_date.year))\n return '.'.join(\n [self.name, out_lbl, in_lbl, self.model.name,\n self.run.name, ens_lbl, yr_lbl, extension]\n ).replace('..', '.')", "def _save(self):\n\n out_dict = {}\n out_dict[\"version\"] = pyfx.__version__\n out_dict[\"name\"] = self._name\n out_dict[\"src\"] = self._src\n\n # Write out the background file as an image\n bg_file = os.path.join(self._name,\"master_bg_image.png\")\n pyfx.util.to_file(self._bg_frame,bg_file)\n out_dict[\"bg_frame\"] = bg_file\n\n f = open(os.path.join(self._name,\"pyfx.json\"),\"w\")\n json.dump(out_dict,f)\n f.close()", "def write(self, filename): # real signature unknown; restored from __doc__\n pass", "def queue_screenshot(self, filename):\n fname_split = filename.split('.')\n ext = '.' 
+ fname_split[-1]\n stem = '.'.join(fname_split[:-1])\n self._save_fname = stem + str(self._save_fname_num).zfill(4) + ext\n self._save_fname_num = self._save_fname_num + 1\n self._save_flag = True", "def capture(self):\n current_time=time.strftime('%Y%m%d-%H%M%S')\n self.filepath=f\"files/{current_time}.png\"\n self.ids.camera.export_to_png(self.filepath)\n self.manager.current='image_screen'\n self.manager.current_screen.ids.img.source=self.filepath", "def save_grdecl(self , pyfile):\n cfile = CFILE( pyfile )\n self._fprintf_grdecl( cfile )", "def printfile(self, event=None):\n filename = self.en.get()\n bbox = self.canvas.bbox(ALL)\n width=bbox.right*self.unit\n height=bbox.bottom*self.unit\n self.canvas.config(width=width, height=height)\n self.canvas.dump(filename)\n self.canvas.config(width=self.ca_width, height=self.ca_height)\n self.la.config(text='Wrote file ' + filename)", "def handle_as_file(view: View, point: int, string: str):\n # \"screenshot.png\"\n\n name = osp.basename(string)\n file, folder = get_file(view, string, name)\n\n # if file doesn't exist, return\n if not osp.isfile(file):\n return\n\n # does the file need conversion ?\n need_conversion = file.endswith(FORMAT_TO_CONVERT)\n\n # if the file needs conversion, convert it and read data from the resulting png\n if need_conversion:\n # keep the image's file and name for later use\n conv_file = file\n conv_name = name\n\n # create a temporary file\n tmp_file = osp.join(TEMP_DIR, \"tmp_png.png\")\n name = osp.splitext(name)[0] + \".png\"\n\n # use the magick command of Imagemagick to convert the image to png\n magick(file, tmp_file)\n\n file = tmp_file\n\n with open(file, \"rb\") as f:\n encoded = str(base64.b64encode(f.read()), \"utf-8\")\n\n real_width, real_height, size = get_image_size(file)\n width, height = get_dimensions(view, file)\n size = str(size // 1024) + \"KB\" if size >= 1024 else str(size) + 'B'\n\n def on_navigate(href):\n\n if href == \"save\":\n if need_conversion:\n save(conv_file, conv_name, \"file\")\n else:\n save(file, name, \"file\", folder)\n elif href == \"save_as\":\n convert(conv_file if need_conversion else file, \"file\")\n else:\n sublime.active_window().open_file(file)\n\n view.show_popup(\n TEMPLATE % (width, height, \"png\", encoded, real_width,\n real_height, size),\n sublime.HIDE_ON_MOUSE_MOVE_AWAY,\n point,\n *view.viewport_extent(),\n on_navigate=on_navigate)", "def saveFormatFileTo(self, buf, encoding, format):\n if buf is None: buf__o = None\n else: buf__o = buf._o\n ret = libxml2mod.xmlSaveFormatFileTo(buf__o, self._o, encoding, format)\n return ret", "def save_params():\n file_name = filedialog.asksaveasfilename(\n filetypes=[\n (\"JSON\", \"*.json\")\n ],\n initialdir=os.getcwd())\n if file_name: # save option not cancelled by user\n self.parent_class.classes[\"fractal\"].curve.store_curve_tofile(\n file_name)", "def OnSave(self, event):\r\n if self.filename == None:\r\n self.OnSaveAs(event)\r\n else:\r\n self.core.Save(self.filename)", "def save_as(self, filename):\n raise NotImplementedError(\n \"Saving ring buffers to other formats is not yet implemented.\")\n\n if filename[-3:] == 'zip':\n pass # TODO\n elif filename[-2:] == 'h5':\n pass # TODO\n elif filename[-4:] == 'fits':\n pass # TODO\n elif filename[-3:] == 'npz':\n self.save_as_numpy(filename)", "def capture(self):\n filename = self.get_new_photo_filename()\n open(self.camid + '/' + filename, 'wb').write(self.fake_shot)\n return filename", "def hap_filename(self, filetype):\n if filetype == 'events':\n return 
self.folder('events') / 'run_{:07d}_{}_eventlist.fits'.format(self.obs_id, self.hap_config)\n # return self.folder('events') / 'events_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'aeff':\n return self.folder('irfs') / 'aeff_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'edisp':\n return self.folder('irfs') / 'edisp_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'psf_3gauss':\n return self.folder('irfs') / 'psf_3gauss_{:06d}.fits.gz'.format(self.obs_id)\n else:\n raise ValueError('Invalid {} {}'.format(filetype))", "def hap_filename(self, filetype):\n if filetype == 'events':\n return self.folder('events') / 'run_{:07d}_{}_eventlist.fits'.format(self.obs_id, self.hap_config)\n # return self.folder('events') / 'events_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'aeff':\n return self.folder('irfs') / 'aeff_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'edisp':\n return self.folder('irfs') / 'edisp_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'psf_3gauss':\n return self.folder('irfs') / 'psf_3gauss_{:06d}.fits.gz'.format(self.obs_id)\n else:\n raise ValueError('Invalid {} {}'.format(filetype))", "def hap_filename(self, filetype):\n if filetype == 'events':\n return self.folder('events') / 'run_{:07d}_{}_eventlist.fits'.format(self.obs_id, self.hap_config)\n # return self.folder('events') / 'events_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'aeff':\n return self.folder('irfs') / 'aeff_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'edisp':\n return self.folder('irfs') / 'edisp_{:06d}.fits.gz'.format(self.obs_id)\n elif filetype == 'psf_3gauss':\n return self.folder('irfs') / 'psf_3gauss_{:06d}.fits.gz'.format(self.obs_id)\n else:\n raise ValueError('Invalid {} {}'.format(filetype))", "def saveScenarioHandler(self):\n\n file_path = self.saveFileExplorer(caption=\"Enter File Path for Scenario\")\n\n # save all active command files\n active_subsystems = self.scenarioController.getActiveSubsystems()\n for subsystem_controller in active_subsystems:\n\n self.saveHandler(subsystem_controller)\n\n self.scenarioController.writeScenarioFile(file_path)", "def save(self):\n if not \"w\" in self.mode:\n self.logger.warning(\"save() called on a read-only opened track. Ignored!\")\n return\n\n if not self.accessor.supports_write:\n self.logger.warning(\"save() called on a track with only read-access supporting accessors. 
Ignored!\")\n return\n \n self.logger.debug(\"save(): writing '%s'\" % self.path)\n\n def to_str(obj):\n # convert simple data-types to their string representation\n # but classes and more complex types to their names.\n return getattr(obj,\"__name__\",str(obj))\n\n kwarg_str = \"\\n\".join([\"%s=%s\" % (k,to_str(self.kwargs[k])) for k in sorted(self.kwargs.keys()) if k != \"mode\"])\n file(os.path.join(self.path,\"track.rc\"),\"w+\").write(trackrc % dict(accessor=self.accessor.__name__,kwargs=kwarg_str))\n self.flush_all()", "def but_save_net(self):\n if isinstance(self.nn_obj, dict):\n if platform == \"linux\" or platform == \"linux2\":\n path=tk.filedialog.asksaveasfilename(filetypes = [('LM NN file','.csv')])\n elif platform == \"win32\":\n path=tk.filedialog.asksaveasfilename(filetypes = [('LM NN file','.csv')], defaultextension=\"*.*\")\n else:\n path=tk.filedialog.asksaveasfilename(filetypes = [('LM NN file','.csv')])\n elif isinstance(self.nn_obj, Net_tr):\n if platform == \"linux\" or platform == \"linux2\":\n path=tk.filedialog.asksaveasfilename(filetypes = [(\"Torch NN file\",\".pt\")])\n elif platform == \"win32\":\n path=tk.filedialog.asksaveasfilename(filetypes = [(\"Torch NN file\",\".pt\")], defaultextension=\"*.*\")\n else:\n path=tk.filedialog.asksaveasfilename(filetypes = [(\"Torch NN file\",\".pt\")])\n else:\n tk.messagebox.showerror(\"Error\", \"Crete NN\")\n return\n save_nn(self.nn_obj, path)", "def _prettyfilename(self):\n return self.title", "def save_fileDialog(self):\n\n if self.check_data():\n options = QFileDialog.Options()\n options |= QFileDialog.DontUseNativeDialog\n fileName, _ = QFileDialog.getSaveFileName(self, \"Сохранить как\", os.path.expanduser(\"~\"), \"Все файлы (*);;XML Файлы (*.xml);;JSON Файлы (*.json)\", options=options)\n if fileName:\n file_format = fileName.split('.')[1]\n if file_format =='xml':\n self.create_gen_xml(fileName)\n elif file_format =='json':\n self.create_gen_json(fileName)\n self.msg2Statusbar.emit('Сохранено в файл: {0}'.format(fileName))", "def __call__(self, format, filename):\n # turn the filename into something suitable for use in #define's\n prettyname = filename.replace(\".\", \"_\").upper()\n prettyname = prettyname.replace(\"/\", \"__\")\n prettyname = prettyname.replace(\":\", \"__\")\n prettyname = prettyname.replace(\"-\", \"__\")\n\n # try and open the file\n with open(filename, \"w\") as output:\n self.writeFuncsLut[format]( output, prettyname )", "def file_type(self):\n return self.__file_type", "def gettype(self):\n return self.filetype", "def __str__(self):\n # TODO: Curently this just stores/returns the file path.\n return unicode(self.path).encode('utf-8')", "def save(self):\n return self.save_as(self.filename)", "def onSave(self):\n #productive #onButton\n profprint()\n \n self.fileDialog = qt.QFileDialog(self.parent)\n self.fileDialog.setDirectory(slicer.modules.needlefinder.path.replace(\"NeedleFinder.py\",\"Config\"))\n self.fileDialog.options = self.fileDialog.DontUseNativeDialog\n self.fileDialog.acceptMode = self.fileDialog.AcceptSave\n self.fileDialog.defaultSuffix = \"cfg\"\n self.fileDialog.setNameFilter(\"Configuration file (*.cfg)\")\n self.fileDialog.connect(\"fileSelected(QString)\", self.saveFileSelected)\n self.fileDialog.show()", "def savecopy(self, filename, lineendings=\"default\", encoding=\"latin-1\"):\n super(IDF, self).save(filename, lineendings, encoding)\n return Path(filename)", "def Save_Current_Profile(self):\r\n #name = tkFileDialog.asksaveasfilename()\r\n #if( name == \"\" 
):\r\n # return\r\n #self.system.Save_Current_Profile(name)\r\n self.system.Save_Current_Profile()", "def _standardized_filename(self, election, bits=None, **kwargs):\n reporting_level = kwargs.get('reporting_level')\n jurisdiction = kwargs.get('jurisdiction')\n office = kwargs.get('office')\n office_district = kwargs.get('office_district')\n extension = kwargs.get('extension')\n if extension is None:\n extension = self._filename_extension(election)\n\n if bits is None:\n bits = []\n\n bits.extend([\n election['start_date'].replace('-', ''),\n self.state,\n ])\n\n if election['special']:\n bits.append('special')\n\n bits.append(election['race_type'].replace('-', '_'))\n\n if jurisdiction:\n bits.append(slugify(jurisdiction))\n\n if office:\n bits.append(slugify(office))\n\n if office_district:\n bits.append(slugify(office_district))\n\n if reporting_level:\n bits.append(reporting_level)\n\n return \"__\".join(bits) + extension", "def save(self):\n super(YacoFile, self).save(self._filename)", "def OnSave(self, e):\n if (not self.mainparent.file_loaded):\n msg = \"An input file must be loaded/built before it can be written\"\n ShowMessage(msg, kind='warn')\n return\n self.mainparent.statusbar.SetStatusText(\"Select a File ...\", 0)\n\n dirname = os.getcwd()\n dlg = wx.FileDialog(self, \"Save File\", dirname, \"\", \"*\", wx.FD_SAVE|wx.FD_OVERWRITE_PROMPT)\n\n if (dlg.ShowModal() != wx.ID_OK):\n dlg.Destroy()\n self.mainparent.reset_statusbar()\n return\n\n full_path = str(dlg.GetPath()) # get selected filename and convert to standard string\n\n # set overwrite to True since the above FileDialog already asked\n self.mainparent.input_file.write(output=full_path, indent=defaults.indent, overwrite=True)\n self.mainparent.statusbar.SetStatusText(\"Written to: {}\".format(full_path), 0)\n\n self.mainparent.input_file.filename = full_path\n self.mainparent.statusbar.SetStatusText(\"File: {}\".format(full_path), 2)", "def _type(self) -> str:\n ..." ]
[ "0.59803706", "0.58541864", "0.58138627", "0.5693963", "0.5692039", "0.5593365", "0.55539596", "0.55515766", "0.5475402", "0.5469633", "0.5382346", "0.5378022", "0.53728104", "0.53728104", "0.53492075", "0.533417", "0.5317118", "0.53051895", "0.53029484", "0.52836293", "0.52821314", "0.5281406", "0.52813005", "0.52654684", "0.52626824", "0.525867", "0.523456", "0.521934", "0.5213806", "0.52088714", "0.5207303", "0.5205092", "0.5197778", "0.5179903", "0.5177852", "0.51639134", "0.515617", "0.51375926", "0.5127905", "0.51205784", "0.5110067", "0.5102944", "0.50908756", "0.5072191", "0.5068276", "0.5049807", "0.50433594", "0.50417274", "0.5041491", "0.504129", "0.50405484", "0.5040377", "0.5040011", "0.50381804", "0.5036342", "0.5033258", "0.5032183", "0.50133926", "0.500839", "0.5004751", "0.4998129", "0.49837285", "0.4978573", "0.49743366", "0.4972458", "0.49708256", "0.49694052", "0.49686047", "0.49660113", "0.49556604", "0.49491742", "0.49458024", "0.49415657", "0.4939572", "0.4935623", "0.49339247", "0.49308294", "0.49296084", "0.49285", "0.49254376", "0.49254254", "0.49254254", "0.49254254", "0.49243736", "0.49199098", "0.49167493", "0.49073783", "0.48995167", "0.4895601", "0.48951918", "0.4894059", "0.48909602", "0.4889133", "0.48884523", "0.4888195", "0.48812085", "0.48811778", "0.48791778", "0.4878424", "0.487407" ]
0.5047978
46
assert expected_content has been written to stdout
def assertStdoutContains(self, expected_content):
    if type(expected_content) is not types.ListType:
        expected_content = [ expected_content ]
    stdout_message = sys.stdout.getvalue()
    for the_text in expected_content:
        self.assertIn(the_text, stdout_message, ('Stdout "%s" does not contain text "%s"' % (stdout_message, the_text)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_cot_output(self, expected):\n sys.stdout = StringIO.StringIO()\n output = None\n try:\n self.instance.run()\n except (TypeError, ValueError, SyntaxError, LookupError):\n self.fail(traceback.format_exc())\n finally:\n output = sys.stdout.getvalue()\n sys.stdout = sys.__stdout__\n self.maxDiff = None\n self.assertMultiLineEqual(expected.strip(), output.strip())", "def check_stdout(self, expected: str):\n assert self._std_out is not None, f\"You first need to `execute` the program before checking stdout!\"\n self._test.assertEqual(self._std_out.strip(), expected.strip())", "def assert_output(self, parser_args, expected_output):\n c = count_nginx_log_frequency(\n parser_args.file,\n parser_args.segment,\n NGINX_ACCESS_LOG_REGEX\n )\n saved_stdout = sys.stdout\n try:\n out = StringIO()\n sys.stdout = out\n print_report(\n c,\n parser_args.segment,\n parser_args.limit,\n parser_args.file\n )\n output = out.getvalue().strip()\n assert output == expected_output\n finally:\n sys.stdout = saved_stdout", "def out_test(self, func, arg, expect):\n std_out = StringIO()\n sys.stdout = std_out\n func(arg)\n output = std_out.getvalue()\n self.assertEqual(output, expect + '\\n')\n return output", "def test_capture_stdout():\n\n sys.stdout.write('Print to stdout')\n\n assert False", "def test_debug_output(self):\n assert output(self.msg) is not None", "def test_stdout(self):\n stdout = StringIO()\n self.patch(sys, 'stdout', stdout)\n\n # Suppress warnings so that if there are any old-style plugins that\n # lore queries for don't confuse the assertion below. See #3070.\n self.patch(warnings, 'warn', lambda *a, **kw: None)\n self.test_buildTeX()\n self.assertEqual(stdout.getvalue(), '')", "def assertOutput(cls, expected, actual):\n if expected != actual:\n raise Exception(\"'\" + expected + \"' != '\" + actual + \"'\")", "def test_output_interception(self):\n expected_output = 'testing, 1, 2, 3 ..'\n actual_output = capture(['echo', expected_output])\n assert actual_output.strip() == expected_output.strip()", "def assertContent(self, response, expected_response):\n self.assertEqual(list(response.streaming_content)[0], expected_response)", "def testStdoutAndStderr(self):\n with self.OutputCapturer():\n print('foo')\n print('bar', file=sys.stderr)\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)", "def expect_output(self, file, parse_json=False):\n contents = self._data_file(file)\n patcher = mock.patch('sys.stdout', new_callable=StringIO)\n output = patcher.start()\n yield\n patcher.stop()\n if parse_json:\n self.assertEqual(json.loads(output.getvalue()),\n json.loads(contents))\n else:\n self.assertEqual(output.getvalue().split('\\n'), contents.split('\\n'))", "def test_03_out(self, mock_stdout):\n msg = udocker.Msg(udocker.Msg.MSG)\n msg.out(\"111\", \"222\", \"333\", 444, ('555'))\n self.assertEqual(\"111 222 333 444 555\\n\", mock_stdout.getvalue())\n sys.stdout = STDOUT\n sys.stderr = STDERR", "def do_test_expected(self):\n self.maxDiff = None\n\n # We currently don't throw any exceptions in Writer, so this\n # this is always false\n if 'error' in test_src:\n self.assertRaises(test_src['error'], yamlish.dumps,\n test_src['in'], options)\n else:\n logging.debug(\"out:\\n%s\", textwrap.dedent(test_src['out']))\n want = yaml.load(textwrap.dedent(test_src['out']))\n logging.debug(\"want:\\n%s\", want)\n with tempfile.NamedTemporaryFile() as test_file:\n tested_function(test_src['in'], test_file)\n test_file.seek(0)\n got_str = 
test_file.read()\n logging.debug(\"got_str = %s\", got_str)\n got = yaml.load(got_str)\n self.assertEqual(got, want, \"Result matches\")", "def assert_console_output(self, *output, **kwargs):\r\n self.assertEqual(sorted(output), sorted(self.execute_console_task(**kwargs)))", "def test_printerr(self, fake_stderr):\n msg = 'hello world!'\n utils.printerr(msg)\n\n the_args, _ = fake_stderr.write.call_args\n written_msg = the_args[0]\n expected = '{}\\n'.format(msg)\n\n self.assertEqual(written_msg, expected)", "def assert_console_output(self, *output, **kwargs):\n self.assertEqual(sorted(output), sorted(self.execute_console_task(**kwargs)))", "def test_stdout_pattern(f, result):\n if not os.path.exists(f):\n return\n\n expected = open(f, encoding=\"utf-8\").read()\n\n # curl debug logs are too dependent on the context, so we filter\n # them and not take them into account for testing differences.\n expected = remove_curl_debug_lines(expected)\n expected_lines = expected.split(\"\\n\")\n expected_pattern_lines = [parse_pattern(line) for line in expected_lines]\n\n actual = decode_string(result.stdout)\n actual = remove_curl_debug_lines(actual)\n actual_lines = re.split(r\"\\r?\\n\", actual)\n\n if len(actual_lines) != len(expected_pattern_lines):\n print(\">>> error in stdout / mismatch in number of lines\")\n print(\n f\"actual: {len(actual_lines)} lines\\nexpected: {len(expected_pattern_lines)} lines\"\n )\n print(f\"actual <{actual}>\")\n print(\"# Actual lines\")\n for i, line in enumerate(actual_lines):\n print(\"%2d: %s\" % (i, line))\n print(\"# Expected lines\")\n for i, line in enumerate(expected_lines):\n print(\"%2d: %s\" % (i, line))\n print(\"# Expected Pattern lines\")\n for i, line in enumerate(expected_pattern_lines):\n print(\"%2d: %s\" % (i, line))\n\n sys.exit(1)\n for i in range(len(expected_pattern_lines)):\n if not re.match(expected_pattern_lines[i], actual_lines[i]):\n print(f\">>> error in stdout in line {i+1}\")\n print(f\"actual: <{actual_lines[i]}>\")\n print(\n f\"expected: <{expected_lines[i]}> (translated to regex <{expected_pattern_lines[i]}>)\"\n )\n sys.exit(1)", "def compare_output(self, input, output, expected):\n if type(input) == UnicodeType:\n input = input.encode('raw_unicode_escape')\n if type(output) == UnicodeType:\n output = output.encode('raw_unicode_escape')\n if type(expected) == UnicodeType:\n expected = expected.encode('raw_unicode_escape')\n # Remove \"generated on\" lines.\n output = self.remove_lines(output, ('generated on --',))\n expected = self.remove_lines(expected, ('generated on --',))\n try:\n self.assertEquals('\\n' + output, '\\n' + expected)\n except AssertionError:\n print >>sys.stderr, '\\n%s\\ninput:' % (self,)\n print >>sys.stderr, input\n print >>sys.stderr, '-: expected\\n+: output'\n print >>sys.stderr, ''.join(self.compare(expected.splitlines(1),\n output.splitlines(1)))\n raise", "def compare_output(self, input, output, expected):\n if type(input) == UnicodeType:\n input = input.encode('raw_unicode_escape')\n if type(output) == UnicodeType:\n output = output.encode('raw_unicode_escape')\n if type(expected) == UnicodeType:\n expected = expected.encode('raw_unicode_escape')\n try:\n self.assertEquals('\\n' + output, '\\n' + expected)\n except AssertionError:\n print >>sys.stderr, '\\n%s\\ninput:' % (self,)\n print >>sys.stderr, input\n print >>sys.stderr, '-: expected\\n+: output'\n print >>sys.stderr, ''.join(self.compare(expected.splitlines(1),\n output.splitlines(1)))\n raise", "def test_capture_both():\n\n 
sys.stdout.write('Print to stdout')\n sys.stderr.write('Print to stderr')\n\n assert False", "def assert_bytes_sent(self, bytes):\n sent = b''.join(args[0] for args, _ in self.client.writer.write.call_args_list)\n assert sent == bytes\n self.client.writer.write.reset_mock()", "def testStdoutReadDuringCapture(self):\n with self.OutputCapturer():\n print('foo')\n self.AssertOutputContainsLine('foo')\n print('bar')\n self.AssertOutputContainsLine('bar')\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar')", "def assert_response_correct(self, response, expected_status, expected_content):\n assert response.status_code == expected_status\n\n if expected_content:\n assert str(response.content) == expected_content", "def test_runSuccess(self):\n builder = BookBuilder()\n self.assertEquals(\n builder.run([\n sys.executable, '-c',\n 'import sys; '\n 'sys.stdout.write(\"hi\\\\n\"); '\n 'sys.stdout.flush(); '\n 'sys.stderr.write(\"bye\\\\n\"); '\n 'sys.stderr.flush()']),\n \"hi\\nbye\\n\")", "def assert_response_correct(self, response, expected_status, expected_content):\n assert response.status_code == expected_status\n\n if expected_content:\n assert response.content.decode('utf-8') == expected_content", "def assert3(*extra_args, stdin):\n sys.argv[1:] = []\n sys.argv.append('markdown')\n _stdout = io.StringIO()\n pf.stdio(*extra_args, input_stream=io.StringIO(stdin), output_stream=_stdout)\n _stdout = pf.convert_text(_stdout.getvalue(), 'json', 'markdown')\n assert _stdout == out1", "def assertStdoutDoesNotContain(self, unexpected_content):\n if type(unexpected_content) is not types.ListType:\n unexpected_content = [ unexpected_content ]\n stdout_message = sys.stdout.getvalue()\n for the_text in unexpected_content:\n self.assertNotIn(the_text, stdout_message,('Stdout \"%s\" contains text \"%s\"' % (stdout_message, the_text)))", "def test_print(capsys):\n text = \"hello\"\n err = \"world\"\n print(text)\n sys.stderr.write(\"world\")\n captured = capsys.readouterr()\n assert text in captured.out\n assert err in captured.err", "def test_text(self):\n result = self._do_output(o.TextOutput(o.Color.Never), self._demo_msgs)\n self.assertEqual(result,\n \"mock: mock.cmake(1): error: short text\\n\"\n \"mock: mock.cmake(2): warning: short text\\n\"\n \"mock: mock.cmake(3): notice: short text\\n\"\n \"mock: error: short text\\n\"\n \"mock: mock.cmake: error: short text\\n\"\n )", "def test_printerr_newline(self, mocked_stderr):\n generic.printerr('some error')\n args, _ = mocked_stderr.write.call_args\n message = args[0]\n self.assertTrue(message.endswith('\\n'))", "def test_capture_output(capsys):\n print(\"hello world\")\n out, err = capsys.readouterr()\n assert out == \"hello world\\n\"\n assert err == \"\"", "def _do_test(self, content, expected):\n self.assertEqual(list(NewickTokenizer(StringIO(content))), expected)\n self.assertEqual(list(NewickTokenizer(newick=content)), expected)\n fp = path_map.next_unique_scratch_filepath('tok_test')\n try:\n write_to_filepath(content, fp)\n self.assertEqual(list(NewickTokenizer(filepath=fp)), expected)\n finally:\n try:\n os.unlink(fp)\n except: # pragma: no cover\n pass", "def test_getContent(self):\n self.assertEquals(\n self.successResultOf(self.testObject.getContent()), 'somecontent')", "def assertText(self,content,expected_text,description=\"\"): \n self.assertTrue(expected_text in content,\n \"expected to find '{0}' but found '{1}' instead.\\\n Attemted action: {2}\".format(expected_text, \n content,\n description))", "def 
assertValue(self, indata, expected_output, message=None):\n outstream = StringIO()\n giganticGrep(indata, outstream)\n value = outstream.getvalue()\n self.assertEqual(value, expected_output, message)", "def assert_console_output_ordered(self, *output, **kwargs):\n self.assertEqual(list(output), self.execute_console_task(**kwargs))", "def test_capture_stdout_works_with_print(self):\n with debug_env:\n with captured_stdout() as stdout:\n print(\"wibble\")\n\n self.assertIn(\"wibble\", stdout.getvalue())", "def setUp(self):\n self.actualstdout = sys.stdout\n sys.stdout = StringIO.StringIO()", "def test_stdout_to_pipe(self):\n original_stdout = sys.stdout\n with self.stdout_to_pipe() as output:\n self.assertNotEqual(original_stdout, sys.stdout)\n print \"Hello world!\"\n self.assertEqual(output.readline(), \"Hello world!\\n\")\n # Line without CR should be readable after closing\n sys.stdout.write(\"Goodbye\")\n self.assertEqual(original_stdout, sys.stdout)\n # Now that writing side is closed, we should be able to read\n # up to EOF.\n self.assertEqual(output.readline(), \"Goodbye\")", "def _test_text(self, url, content, buffering):\n # read(-1), readable(), seekable()\n with wfdb.io._url.openurl(url, \"r\", buffering=buffering) as tf:\n self.assertTrue(tf.readable())\n self.assertTrue(tf.seekable())\n self.assertEqual(tf.read(), content)\n self.assertEqual(tf.read(), \"\")\n\n # read(10)\n with wfdb.io._url.openurl(url, \"r\", buffering=buffering) as tf:\n result = \"\"\n while True:\n chunk = tf.read(10)\n result += chunk\n if len(chunk) < 10:\n break\n self.assertEqual(result, content)\n\n # readline(), seek(), tell()\n with wfdb.io._url.openurl(url, \"r\", buffering=buffering) as tf:\n result = \"\"\n while True:\n rpos = tf.tell()\n tf.seek(0)\n tf.seek(rpos)\n chunk = tf.readline()\n result += chunk\n if len(chunk) == 0:\n break\n self.assertEqual(result, content)", "def evaluate(self, expected_output, actual_output, command=''):\n\n expected_lines = strip_text(expected_output)\n actual_lines = strip_text(actual_output)\n\n if expected_lines != actual_lines:\n print(f'\\nTest \\'{command}\\' failed.\\nDiff:')\n diff = difflib.Differ().compare(expected_lines, actual_lines)\n print('\\n'.join(diff))\n\n self.fail('Test failed.')", "def assertOutput(self, expected=None, timeout=5, message=None):\n assertTimeout = int (globalVar.assertTimeout)\n if (timeout != 0) :\n assertTimeout = timeout\n p = self.spawnProc\n \n #If any expected output is specified, append it to the List \n if not expected:\n expected = self.prompt \n expList.append(expected) \n \n if not message :\n message = \"Expected output %s not received\" %expected\n \n # Wait for the output \n result = p.expect(expList, assertTimeout)\n # If expected is true and the output is not expected, Call the _postCheck function\n if (result != expList.index(expected)):\n self._postCheck(result, message)\n expList.remove(expected)", "def assertOutput(self, toExec, argList, expectedStdout=None, \n\t\t\texpectedStderr=\"\", expectedRetcode=0, input=None,\n\t\t\tstdoutStrings=None):\n\t\tfor name in [\"output.stderr\", \"output.stdout\"]:\n\t\t\ttry:\n\t\t\t\tos.unlink(name)\n\t\t\texcept os.error:\n\t\t\t\tpass\n\n\t\tif isinstance(toExec, basestring):\n\t\t\tp = subprocess.Popen([toExec]+argList, executable=toExec, \n\t\t\t\tstdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)\n\t\telse:\n\t\t\tp = ForkingSubprocess([\"test harness\"]+argList, executable=toExec, \n\t\t\t\tstdout=subprocess.PIPE, stdin=subprocess.PIPE, 
stderr=subprocess.PIPE)\n\t\tout, err = p.communicate(input=input)\n\t\tretcode = p.wait()\n\n\t\ttry:\n\t\t\tself.assertEqual(expectedRetcode, retcode)\n\n\t\t\tif isinstance(expectedStderr, basestring):\n\t\t\t\tself.assertEqual(err, expectedStderr)\n\t\t\telse:\n\t\t\t\tself.failUnless(expectedStderr(err))\n\t\texcept AssertionError:\n\t\t\twith open(\"output.stdout\", \"w\") as f:\n\t\t\t\tf.write(out)\n\t\t\twith open(\"output.stderr\", \"w\") as f:\n\t\t\t\tf.write(err)\n\t\t\traise\n\n\t\ttry:\n\t\t\tif isinstance(expectedStdout, basestring):\n\t\t\t\tself.assertEqual(out, expectedStdout)\n\t\t\telif expectedStdout is not None:\n\t\t\t\tself.failUnless(expectedStdout(out))\n\t\t\tif stdoutStrings:\n\t\t\t\tfor s in stdoutStrings:\n\t\t\t\t\tself.failIf(s not in out, \"%s missing\"%s)\n\t\texcept AssertionError:\n\t\t\twith open(\"output.stdout\", \"w\") as f:\n\t\t\t\tf.write(out)\n\t\t\traise", "def test_write_key__to_stdout(isatty_cleanup):\n\n download.sys.stdout.isatty = mock.Mock(return_value=False)\n key = mock.Mock()\n download.write_key(key)\n key.get_contents_to_file.assert_called_once_with(download.sys.stdout)", "def check_test(self, test):\n (stdout, stderr) = (out.decode('ascii').strip()\n for out in test.process.communicate())\n self.assertEqual(stderr, \"\")\n self.assertEqual(stdout, EXPCT_RESULTS[test.number],\n \"Test {} failed\".format(test.number))\n print(\"Test {} passed\".format(test.number))", "def do_test_expected(self):\n self.maxDiff = None\n\n got = \"\"\n if 'error' in test_src:\n self.assertRaises(test_src['error'], tested_function,\n test_src['in'], options)\n else:\n want = test_src['out']\n got = tested_function(test_src['in'], options)\n logging.debug('got = type %s', type(got))\n logging.debug(\"test_src['out'] = %s\",\n unicode(test_src['out']))\n self.assertEqual(got, want, \"\"\"Result matches\n expected = %s\n\n observed = %s\n \"\"\" % (want, got))", "def test_printerr_happy_path(self, mocked_stderr):\n generic.printerr('some error')\n self.assertEqual(mocked_stderr.write.call_count, 1)\n self.assertEqual(mocked_stderr.flush.call_count, 1)", "def test_print_mimic_no_newlines(self):\n d = self.module.create_mimic_dict(\"imdev.txt\")\n buffer = StringIO()\n with redirect_stdout(buffer):\n self.module.print_mimic_random(d, 200)\n output = buffer.getvalue()\n self.assertNotIn(\n '\\n', output,\n \"There should not be any newline (\\\\n) characters in output\"\n )", "def test_str3(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r3 = Rectangle(1, 2, 3, 4, 5)\n print(r3)\n sys.stdout = sys.__stdout__\n str_r3 = \"[Rectangle] (5) 3/4 - 1/2\\n\"\n self.assertEqual(capturedOutput.getvalue(), str_r3)", "def compare(self, output, expected, ignore_imports=True):\n if ignore_imports:\n output = self.strip_future_imports(output)\n expected = self.strip_future_imports(expected)\n if isinstance(output, bytes) and not isinstance(expected, bytes):\n output = output.decode('utf-8')\n if isinstance(expected, bytes) and not isinstance(output, bytes):\n expected = expected.decode('utf-8')\n self.assertEqual(order_future_lines(output.rstrip()),\n expected.rstrip())", "def test_something(self):\n self.assertEqual(\n b\"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n<Foo>bar</Foo>\"\"\",\n self.successResultOf(to_xml(tags.Foo(\"bar\"))),\n )", "def test_command(self):\n out = io.StringIO()\n management.call_command('import_data', stdout=out)\n self.assertIn(\"Successfully imported\", out.getvalue())", "def test_str1(self):\n capturedOutput = 
io.StringIO()\n sys.stdout = capturedOutput\n r1 = Rectangle(4, 6, 2, 1, 12)\n print(r1)\n sys.stdout = sys.__stdout__\n str_r1 = \"[Rectangle] (12) 2/1 - 4/6\\n\"\n self.assertEqual(capturedOutput.getvalue(), str_r1)", "def test_normal_goes_normal(self):\n eq_(self.msg, output(self.msg,\"OUTPUT\"))", "def assert_valid_html_screenshot_content(content):\n assert content.startswith('<html xmlns=\"http://www.w3.org/1999/xhtml\">')\n assert '<div id=\"content\">' in content\n assert \"<strong>text</strong>\" in content\n assert content.endswith(\"</html>\")", "def testWriteLine(self):\n file_writer = writers.FileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteLine('Line of text')\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n expected_output_data = b'Line of text\\r\\n'\n self.assertEqual(output_data, expected_output_data)", "def test_write_to_console(self, _step: PropertyMock):\n trials = [2, True, None, 'This is a test', b'hello']\n\n for message in trials:\n _step_mock = MagicMock()\n write_source = MagicMock()\n _step_mock.report.stdout_interceptor.write_source = write_source\n _step.return_value = _step_mock\n step = exposed.ExposedStep()\n step.write_to_console(message)\n\n args, kwargs = write_source.call_args\n self.assertEqual('{}'.format(message), args[0])", "def test_subprocess_calls(self):\n self.assertTrue(uut.get_stdout(['echo', 'this']) == 'this\\n')\n self.assertTrue(\n uut.get_outstreams(['python', 'test/str_in_stdout_stderr.py']) ==\n [\"In stdout.\\n\", \"In stderr.\\n\", 17])", "def test_update_enforcement_mode_command_failure_human_readable(\n enforcement_mode_failure_hr, enforcement_mode_failure_expected\n):\n resp = prepare_update_enforcement_mode_output(enforcement_mode_failure_expected)\n\n assert resp == enforcement_mode_failure_hr", "def test_updated_display3(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r3 = Rectangle(3, 2, 0, 1)\n r3.display()\n sys.stdout = sys.__stdout__\n desired = '\\n###\\n###\\n'\n self.assertEqual(capturedOutput.getvalue(), desired)", "def test_updated_display1(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r1 = Rectangle(2, 3, 2, 2)\n r1.display()\n sys.stdout = sys.__stdout__\n desired = '\\n\\n ##\\n ##\\n ##\\n'\n self.assertEqual(capturedOutput.getvalue(), desired)", "def test_content():\n # PREPARE\n expected_f = open(\n 'tests/pages/expected/stepanenkoartem-github-io.html',\n 'rb',\n )\n expected_dom = BeautifulSoup(\n expected_f.read(),\n 'html.parser',\n )\n\n actual_f = open(\n os.path.join(TEMP_DIR, path.for_page(URL)),\n )\n actual_dom = BeautifulSoup(actual_f, 'html.parser')\n\n # CHECK\n assert actual_dom.decode() == expected_dom.decode()", "def test_print1(self):\n writer = StringIO()\n collatz_print(writer, 100, 200, 125)\n self.assertEqual(writer.getvalue(), \"100 200 125\\n\")", "def assertLines(self, indata, expected_output, message=None):\n outstream = StringIO()\n giganticGrep(indata, outstream)\n value = outstream.getvalue()\n actual_lines = value.split('\\n')\n expected_lines = expected_output + ['']\n\n present = set(expected_lines) & set(actual_lines)\n missing = set(expected_lines) - set(actual_lines)\n extra = set(actual_lines) - set(expected_lines)\n\n if missing or extra:\n self.fail('Expected these lines:\\n%s\\n\\nPresent:\\n%s\\n\\nExtra:\\n%s\\n\\nMissing:\\n%s' % (\n '\\n'.join(expected_output),\n '\\n'.join(list(present)),\n '\\n'.join(list(extra)),\n '\\n'.join(list(missing))))", "def 
test_str(self):\n r1 = Rectangle(2, 5, 2, 4)\n res = \"[Rectangle] (1) 2/4 - 2/5\\n\"\n with patch('sys.stdout', new=StringIO()) as str_out:\n print(r1)\n self.assertEqual(str_out.getvalue(), res)", "def test_update_enforcement_mode_command_success_human_readable(\n enforcement_mode_success, enforcement_mode_success_hr\n):\n resp = prepare_update_enforcement_mode_output(enforcement_mode_success)\n\n assert resp == enforcement_mode_success_hr", "def test_print_result(capsys):\n assert \"\"\"Total 5 hands solved\nTotal 4 hands solved with hint\nTotal 4 hands failed to solve\"\"\" in hl.test_help_print_result(capsys)", "def test_showVersion(self):\n origout = sys.stdout\n try:\n out = io.StringIO()\n sys.stdout = out\n ArmiCLI.showVersion()\n finally:\n sys.stdout = origout\n\n self.assertIn(\"armi\", out.getvalue())\n self.assertIn(meta.__version__, out.getvalue())", "def test_order1(self):\n\n process = subprocess.Popen(\n ['python', './echo.py', '-tul', 'hello!'],\n stdout=subprocess.PIPE)\n stdout, _ = process.communicate()\n expected = 'Hello!'\n\n self.assertEquals(stdout.strip('\\n'), expected)", "def test_is_information_written_through_stderr_methods(self):\n\n io = BufferedSystemIO()\n io._stdout = lambda *args, **kwargs: None\n\n try:\n raise IndexError('Invalid index 5')\n except Exception as exc:\n output_formatted_exception(exc, ':my-test-task', io)\n\n self.assertIn('IndexError', io.get_value())\n self.assertIn('Invalid index 5', io.get_value())\n self.assertIn('Retry with \"-rl debug\" switch before failed task to see stacktrace', io.get_value())", "def testWriteLines(self):\n file_writer = writers.FileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteLines([\n 'First line of text',\n 'Second line of text'])\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n expected_output_data = (\n b'First line of text\\r\\nSecond line of text\\r\\n')\n self.assertEqual(output_data, expected_output_data)", "def test_traffic_analysis_human_readable(\n traffic_analysis_success, traffic_analysis_success_hr\n):\n resp = prepare_traffic_analysis_output(traffic_analysis_success)\n assert resp == traffic_analysis_success_hr", "def test_workload_get_command_human_readable(\n workload_get_success, workload_get_success_hr\n):\n hr_output = prepare_workload_get_output(workload_get_success)\n assert hr_output == workload_get_success_hr", "def test_updated_display2(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r2 = Rectangle(3, 2, 1, 0)\n r2.display()\n sys.stdout = sys.__stdout__\n desired = ' ###\\n ###\\n'\n self.assertEqual(capturedOutput.getvalue(), desired)", "def test_command(self):\n\n expected = \"PyFunceble has been written by Fun Ilrys.\"\n actual = Command(\"echo '%s'\" % expected).execute()\n\n self.assertEqual(expected + \"\\n\", actual)", "def check_file_output(self, actual: str, expected: str):\n assert self._program_executed, f\"You first need to `execute` the program before checking its outputs!\"\n assert actual in self._write_files, f\"Unknown output file {actual}. 
Did you forget to provide it to the program by calling input_write_filename?\"\n full_expected = _root_dir / expected\n assert full_expected.is_file(), f\"Reference file {full_expected} does not exist!\"\n # check to make sure the output file exists\n full_actual = _root_dir / actual\n self._test.assertTrue(full_actual.is_file(), f\"It seems like the program never created the output file {full_actual}\")\n # open and compare the files\n with open(full_actual, 'rb') as a:\n actual_bin = a.read()\n with open(full_expected, 'rb') as e:\n expected_bin = e.read()\n self._test.assertEqual(actual_bin, expected_bin, f\"Bytes of {actual} and {expected} did not match!\")", "def test_printerr_flush(self, fake_stderr):\n utils.printerr('hello world!')\n\n self.assertTrue(fake_stderr.flush.called)", "def _do_test(self, content, expected):\n nt = NewickTokenizer(stream=StringIO(content))\n e = [deepcopy(i) for i in NewickEventFactory(tokenizer=nt)]\n self.assertEqual(e, expected)\n new_e = []\n\n def append_to_new_e(event):\n new_e.append(deepcopy(event))\n\n NewickEventFactory(newick=content, event_handler=append_to_new_e)\n self.assertEqual(new_e, expected)", "def test_script(self):\n f1 = self.write_file(\"foobar\")\n f2 = self.write_file(\"foobarbaz\")\n out = io.BytesIO()\n ghdiff.main([f1, f2], stdout=out)\n output = out.getvalue()\n self.assertTrue(b\"-foobar\" in output)\n self.assertTrue(b'+foobar<span class=\"highlight\">baz</span>' in output)", "def receive_reply(self, msg, content):\n expected_content = self.expected.pop().strip()\n actual_content = content.__repr__().strip()\n print('expected: %s' % expected_content)\n print('actual: %s' % actual_content)\n if expected_content == actual_content:\n self.passed_tests += 1\n print('PASS')\n else:\n print('FAIL')\n print('---')\n\n if not self.sent.is_empty():\n sm = self.sent.pop()\n self.send(self.get_perf(self.msg_counter, sm))\n self.msg_counter += 1\n if self.expected.is_empty():\n print('%d PASSED / %d FAILED' % \\\n (self.passed_tests, self.total_tests-self.passed_tests))\n sys.exit(0)", "def test_basic():\n line = \"test\"\n assert wrap_line(line) == \"test\"", "def test_display_method2(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r2 = Rectangle(2, 2)\n r2.display()\n sys.stdout = sys.__stdout__\n desired = '##\\n##\\n'\n self.assertEqual(capturedOutput.getvalue(), desired)", "def test_EOF(self):\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"EOF\")\n out = f.getvalue()\n self.assertTrue(len(out) == 1)\n self.assertEqual(\"\\n\", out)\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"EOF fake\")\n msj = f.getvalue().strip()\n self.assertFalse(len(msj) == 1)\n self.assertEqual(\"\", msj)", "def test_content(test):\n # from bs4 import BeautifulSoup\n # assert 'GitHub' in BeautifulSoup(response.content).title.string", "def assert_response_correct(self, response, expected_status, expected_content):\n assert response.status_code == expected_status\n parsed_content = json.loads(response.content.decode('utf-8'))\n assert parsed_content == expected_content", "def test_order2(self):\n\n process = subprocess.Popen(\n ['python', './echo.py', '-ul', 'hello!'],\n stdout=subprocess.PIPE)\n stdout, _ = process.communicate()\n expected = 'hello!'\n\n self.assertEquals(stdout.strip('\\n'), expected)", "def test_print(self):\n writer = StringIO()\n collatz_print(writer, 1, 10, 20)\n self.assertEqual(writer.getvalue(), \"1 10 20\\n\")", "def assert_verbose(actual, expected):\n 
assert expected == actual, f\"Expected value: {expected}. But actual value is {actual}\"", "def test_unknown(self):\n msg = \"*** Unknown syntax: asd\\n\"\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"asd\")\n st = f.getvalue()\n self.assertEqual(msg, st)", "def test_display_method1(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r1 = Rectangle(4, 6)\n r1.display()\n sys.stdout = sys.__stdout__\n desired = '####\\n####\\n####\\n####\\n####\\n####\\n'\n self.assertEqual(capturedOutput.getvalue(), desired)", "def test_enforcement_boundary_create_command_human_readable(\n enforcement_boundary_success, enforcement_boundary_success_hr\n):\n hr_output = prepare_enforcement_boundary_create_output(enforcement_boundary_success)\n\n assert hr_output == enforcement_boundary_success_hr", "def test_execution(self):\n\n # This process will echo the input and output file name to stdout.\n the_process_unit = ProcessUnit([self.a_pattern_ds], '/another/%file%/%pattern%.txt',\n 'echo')\n\n ds_result = the_process_unit.execute(simulate=True)\n\n outfiles = [file_thing for file_thing in ds_result.files]\n self.assertEqual(len(outfiles), 1)\n\n expected_string = self.script_header + \"mkdir -p /another/file_1\\necho test_file1 /another/file_1/pattern_1.txt\\n\"\n self.assertEqual(expected_string, the_process_unit.scheduler.job.to_str())", "def test_updated_display4(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r4 = Rectangle(3, 2, 0, 0)\n r4.display()\n sys.stdout = sys.__stdout__\n desired = '###\\n###\\n'\n self.assertEqual(capturedOutput.getvalue(), desired)", "def _verify_export_failure(self, expectedText):\r\n resp = self.client.get(self.url, HTTP_ACCEPT='application/x-tgz')\r\n self.assertEquals(resp.status_code, 200)\r\n self.assertIsNone(resp.get('Content-Disposition'))\r\n self.assertContains(resp, 'Unable to create xml for module')\r\n self.assertContains(resp, expectedText)", "def testCapturingStdoutAndStderrToFile(self):\n stdout_path = os.path.join(self.tempdir, 'stdout')\n stderr_path = os.path.join(self.tempdir, 'stderr')\n with self.OutputCapturer(stdout_path=stdout_path, stderr_path=stderr_path):\n print('foo')\n print('bar', file=sys.stderr)\n\n # Check that output can be read by OutputCapturer.\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)\n # Verify that output is actually written to the correct files.\n self.assertEqual('foo\\n', osutils.ReadFile(stdout_path))\n self.assertEqual('bar\\n', osutils.ReadFile(stderr_path))", "def test_log_success(self, mock_info):\n\n with utils.log_activity(\"for test\"):\n pass\n\n mock_info.assert_any_call(\"[jaxline] %s starting...\", \"for test\")\n mock_info.assert_any_call(\"[jaxline] %s finished.\", \"for test\")", "def test_str(self):\n self.assertEqual(str(self.content), \"Test Content\")", "def test_command(self):\n output, _error = self.executor.command(['echo', 'hello']).batch()\n self.assertEqual(output, 'hello\\n')", "def test_str2(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r2 = Rectangle(5, 5, 1, 0, 7)\n print(r2)\n sys.stdout = sys.__stdout__\n str_r2 = \"[Rectangle] (7) 1/0 - 5/5\\n\"\n self.assertEqual(capturedOutput.getvalue(), str_r2)" ]
[ "0.76000136", "0.7482878", "0.7107269", "0.70161605", "0.68864036", "0.68497556", "0.6828578", "0.6819774", "0.6798522", "0.6753548", "0.66351914", "0.6541468", "0.64065236", "0.6402621", "0.6388881", "0.6368317", "0.6329488", "0.63178706", "0.6316625", "0.63080114", "0.6302914", "0.6302835", "0.6290722", "0.6259788", "0.6257151", "0.6219773", "0.61775583", "0.61435056", "0.61404467", "0.61391836", "0.6136056", "0.6112861", "0.6108135", "0.6101101", "0.60994697", "0.60978556", "0.60544765", "0.6042849", "0.6028452", "0.60171527", "0.5993402", "0.59907734", "0.5985331", "0.5960853", "0.5954972", "0.5944299", "0.5941745", "0.5937712", "0.59366196", "0.5935108", "0.5925489", "0.59114146", "0.5908341", "0.59073013", "0.59071934", "0.5895333", "0.5892209", "0.58887714", "0.58715713", "0.5861705", "0.58573174", "0.58571017", "0.58544505", "0.58524114", "0.58461183", "0.5827116", "0.58139765", "0.57925487", "0.57871366", "0.5782456", "0.5781518", "0.5777719", "0.5777069", "0.5777051", "0.5771505", "0.5762126", "0.5761903", "0.57605636", "0.57593244", "0.575609", "0.57538843", "0.57527745", "0.57516795", "0.57511526", "0.57508725", "0.5750382", "0.5747011", "0.57411677", "0.5740667", "0.5734691", "0.57322145", "0.57321465", "0.57290584", "0.5726629", "0.5725824", "0.5721195", "0.5708633", "0.5703247", "0.570139", "0.5698112" ]
0.72075444
2
assert unexpected_content has not been written to stdout
def assertStdoutDoesNotContain(self, unexpected_content): if type(unexpected_content) is not types.ListType: unexpected_content = [ unexpected_content ] stdout_message = sys.stdout.getvalue() for the_text in unexpected_content: self.assertNotIn(the_text, stdout_message,('Stdout "%s" contains text "%s"' % (stdout_message, the_text)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_cot_output(self, expected):\n sys.stdout = StringIO.StringIO()\n output = None\n try:\n self.instance.run()\n except (TypeError, ValueError, SyntaxError, LookupError):\n self.fail(traceback.format_exc())\n finally:\n output = sys.stdout.getvalue()\n sys.stdout = sys.__stdout__\n self.maxDiff = None\n self.assertMultiLineEqual(expected.strip(), output.strip())", "def check_stdout(self, expected: str):\n assert self._std_out is not None, f\"You first need to `execute` the program before checking stdout!\"\n self._test.assertEqual(self._std_out.strip(), expected.strip())", "def test_debug_output(self):\n assert output(self.msg) is not None", "def test_stdout(self):\n stdout = StringIO()\n self.patch(sys, 'stdout', stdout)\n\n # Suppress warnings so that if there are any old-style plugins that\n # lore queries for don't confuse the assertion below. See #3070.\n self.patch(warnings, 'warn', lambda *a, **kw: None)\n self.test_buildTeX()\n self.assertEqual(stdout.getvalue(), '')", "def assertOutput(cls, expected, actual):\n if expected != actual:\n raise Exception(\"'\" + expected + \"' != '\" + actual + \"'\")", "def assert_output(self, parser_args, expected_output):\n c = count_nginx_log_frequency(\n parser_args.file,\n parser_args.segment,\n NGINX_ACCESS_LOG_REGEX\n )\n saved_stdout = sys.stdout\n try:\n out = StringIO()\n sys.stdout = out\n print_report(\n c,\n parser_args.segment,\n parser_args.limit,\n parser_args.file\n )\n output = out.getvalue().strip()\n assert output == expected_output\n finally:\n sys.stdout = saved_stdout", "def assertStdoutContains(self, expected_content):\n if type(expected_content) is not types.ListType:\n expected_content = [ expected_content ]\n stdout_message = sys.stdout.getvalue()\n for the_text in expected_content:\n self.assertIn(the_text, stdout_message,('Stdout \"%s\" does not contain text \"%s\"' % (stdout_message, the_text)))", "def test_ignore_capture():\n\n sys.stdout.write('Print to stdout')\n sys.stderr.write('Print to stderr')\n\n assert True", "def test_is_information_written_through_stderr_methods(self):\n\n io = BufferedSystemIO()\n io._stdout = lambda *args, **kwargs: None\n\n try:\n raise IndexError('Invalid index 5')\n except Exception as exc:\n output_formatted_exception(exc, ':my-test-task', io)\n\n self.assertIn('IndexError', io.get_value())\n self.assertIn('Invalid index 5', io.get_value())\n self.assertIn('Retry with \"-rl debug\" switch before failed task to see stacktrace', io.get_value())", "def test_output_interception(self):\n expected_output = 'testing, 1, 2, 3 ..'\n actual_output = capture(['echo', expected_output])\n assert actual_output.strip() == expected_output.strip()", "def test_capture_stdout():\n\n sys.stdout.write('Print to stdout')\n\n assert False", "def test_no_eof(self):", "def test_unknown(self):\n msg = \"*** Unknown syntax: asd\\n\"\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"asd\")\n st = f.getvalue()\n self.assertEqual(msg, st)", "def assertContent(self, response, expected_response):\n self.assertEqual(list(response.streaming_content)[0], expected_response)", "def test_stdout_pattern(f, result):\n if not os.path.exists(f):\n return\n\n expected = open(f, encoding=\"utf-8\").read()\n\n # curl debug logs are too dependent on the context, so we filter\n # them and not take them into account for testing differences.\n expected = remove_curl_debug_lines(expected)\n expected_lines = expected.split(\"\\n\")\n expected_pattern_lines = [parse_pattern(line) for line in 
expected_lines]\n\n actual = decode_string(result.stdout)\n actual = remove_curl_debug_lines(actual)\n actual_lines = re.split(r\"\\r?\\n\", actual)\n\n if len(actual_lines) != len(expected_pattern_lines):\n print(\">>> error in stdout / mismatch in number of lines\")\n print(\n f\"actual: {len(actual_lines)} lines\\nexpected: {len(expected_pattern_lines)} lines\"\n )\n print(f\"actual <{actual}>\")\n print(\"# Actual lines\")\n for i, line in enumerate(actual_lines):\n print(\"%2d: %s\" % (i, line))\n print(\"# Expected lines\")\n for i, line in enumerate(expected_lines):\n print(\"%2d: %s\" % (i, line))\n print(\"# Expected Pattern lines\")\n for i, line in enumerate(expected_pattern_lines):\n print(\"%2d: %s\" % (i, line))\n\n sys.exit(1)\n for i in range(len(expected_pattern_lines)):\n if not re.match(expected_pattern_lines[i], actual_lines[i]):\n print(f\">>> error in stdout in line {i+1}\")\n print(f\"actual: <{actual_lines[i]}>\")\n print(\n f\"expected: <{expected_lines[i]}> (translated to regex <{expected_pattern_lines[i]}>)\"\n )\n sys.exit(1)", "def testStdoutAndStderr(self):\n with self.OutputCapturer():\n print('foo')\n print('bar', file=sys.stderr)\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)", "def test_EOF(self):\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"EOF\")\n out = f.getvalue()\n self.assertTrue(len(out) == 1)\n self.assertEqual(\"\\n\", out)\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"EOF fake\")\n msj = f.getvalue().strip()\n self.assertFalse(len(msj) == 1)\n self.assertEqual(\"\", msj)", "def _verify_export_failure(self, expectedText):\r\n resp = self.client.get(self.url, HTTP_ACCEPT='application/x-tgz')\r\n self.assertEquals(resp.status_code, 200)\r\n self.assertIsNone(resp.get('Content-Disposition'))\r\n self.assertContains(resp, 'Unable to create xml for module')\r\n self.assertContains(resp, expectedText)", "def compare_output(self, input, output, expected):\n if type(input) == UnicodeType:\n input = input.encode('raw_unicode_escape')\n if type(output) == UnicodeType:\n output = output.encode('raw_unicode_escape')\n if type(expected) == UnicodeType:\n expected = expected.encode('raw_unicode_escape')\n # Remove \"generated on\" lines.\n output = self.remove_lines(output, ('generated on --',))\n expected = self.remove_lines(expected, ('generated on --',))\n try:\n self.assertEquals('\\n' + output, '\\n' + expected)\n except AssertionError:\n print >>sys.stderr, '\\n%s\\ninput:' % (self,)\n print >>sys.stderr, input\n print >>sys.stderr, '-: expected\\n+: output'\n print >>sys.stderr, ''.join(self.compare(expected.splitlines(1),\n output.splitlines(1)))\n raise", "def test_print_mimic_no_newlines(self):\n d = self.module.create_mimic_dict(\"imdev.txt\")\n buffer = StringIO()\n with redirect_stdout(buffer):\n self.module.print_mimic_random(d, 200)\n output = buffer.getvalue()\n self.assertNotIn(\n '\\n', output,\n \"There should not be any newline (\\\\n) characters in output\"\n )", "def test_do_not_need_alternate(self):\n os.unlink(os.path.join(self.reports_dir,\n 'TEST-testutil.manual_test.LintTest-fail.xml'\n ))\n os.unlink(os.path.join(self.reports_dir,\n 'TEST-testutil.manual_test.LintTest-success.xml'\n ))\n actual = self._analyze_make_output()\n self.assertEqual(1, actual)\n self.assertIn('E999 lint error from txt-file.', self.errors[0])", "def nostdout():\n f = io.StringIO()\n with redirect_stdout(f):\n try:\n yield\n except 
Exception as err:\n raise err", "def test_handle_print_rich_exception(self):\n\n with io.StringIO() as buf:\n # Capture stdout logs (rich logs to stdout)\n with contextlib.redirect_stdout(buf):\n _print_rich_exception(Exception(\"boom!\"))\n # Capture the stdout output\n captured_output = buf.getvalue()\n\n assert \"Exception:\" in captured_output\n assert \"boom!\" in captured_output", "def test_normal_goes_normal(self):\n eq_(self.msg, output(self.msg,\"OUTPUT\"))", "def expect_output(self, file, parse_json=False):\n contents = self._data_file(file)\n patcher = mock.patch('sys.stdout', new_callable=StringIO)\n output = patcher.start()\n yield\n patcher.stop()\n if parse_json:\n self.assertEqual(json.loads(output.getvalue()),\n json.loads(contents))\n else:\n self.assertEqual(output.getvalue().split('\\n'), contents.split('\\n'))", "def test_capture_both():\n\n sys.stdout.write('Print to stdout')\n sys.stderr.write('Print to stderr')\n\n assert False", "def test_is_not_google_file(self):\r\n bad_file = StringIO.StringIO()\r\n bad_file.write('failing tests please')", "def test_add_unexpected_success(self):\n self.protocol.addUnexpectedSuccess(self.test)\n self.assertEqual(\n self.io.getvalue(), compat._b(\"uxsuccess: %s\\n\" % self.test.id()))", "def test_output_invalid(self):\n assert (\n self.route.output_invalid(hug_core.output_format.json).route[\"output_invalid\"]\n == hug_core.output_format.json\n )", "def out_test(self, func, arg, expect):\n std_out = StringIO()\n sys.stdout = std_out\n func(arg)\n output = std_out.getvalue()\n self.assertEqual(output, expect + '\\n')\n return output", "def test_printerr(self, fake_stderr):\n msg = 'hello world!'\n utils.printerr(msg)\n\n the_args, _ = fake_stderr.write.call_args\n written_msg = the_args[0]\n expected = '{}\\n'.format(msg)\n\n self.assertEqual(written_msg, expected)", "def testStdoutReadDuringCapture(self):\n with self.OutputCapturer():\n print('foo')\n self.AssertOutputContainsLine('foo')\n print('bar')\n self.AssertOutputContainsLine('bar')\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar')", "def test_printerr_happy_path(self, mocked_stderr):\n generic.printerr('some error')\n self.assertEqual(mocked_stderr.write.call_count, 1)\n self.assertEqual(mocked_stderr.flush.call_count, 1)", "def test_printerr_newline(self, mocked_stderr):\n generic.printerr('some error')\n args, _ = mocked_stderr.write.call_args\n message = args[0]\n self.assertTrue(message.endswith('\\n'))", "def test_print(capsys):\n text = \"hello\"\n err = \"world\"\n print(text)\n sys.stderr.write(\"world\")\n captured = capsys.readouterr()\n assert text in captured.out\n assert err in captured.err", "def test_errors_on_output(self):\n mb = self.maria_backup\n\n # normal run\n errors = b\"\"\"\n 220309 11:19:09 Finished backing up non-InnoDB tables and files\n 220309 11:19:09 Executing FLUSH NO_WRITE_TO_BINLOG ENGINE LOGS...\n xtrabackup: The latest check point (for incremental): '92134324'\n xtrabackup: Stopping log copying thread..\n 220309 11:19:10 >> log scanned up to (900123121)\n 220309 11:19:10 Executing UNLOCK TABLES\n 220309 11:19:10 All tables unlocked\n 220309 11:19:10 Backup created in directory '/a/dir'\n 220309 11:19:10 [00] Writing backup-my.cnf\n 220309 11:19:10 [00] ...done\n 220309 11:19:10 [00] Writing xtrabackup_info\n 220309 11:19:10 [00] ...done\n xtrabackup: Transaction log of lsn (89423125) to (900123121) was copied.\n 220309 11:19:10 completed OK!\n \"\"\"\n self.assertFalse(mb.errors_on_output(b'', errors))\n\n # 
failed run\n errors = b\"\"\"\n xtrabackup: error: log block numbers mismatch:\n xtrabackup: error: expected log block no. 293842034, but got no. 13324598 from the log file.\n xtrabackup: error: it looks like InnoDB log has wrapped around before xtrabackup\n could process all records due to either log copying being too slow, or log files being too small.\n xtrabackup: Error: xtrabackup_copy_logfile() failed\n \"\"\"\n self.assertTrue(mb.errors_on_output(b'', errors))", "def test_capture_stderr():\n\n sys.stderr.write('Print to stderr')\n\n assert False", "def test_main_error_output(runner: CliRunner) -> None:\n output_random = os.path.join(\n tempfile.gettempdir(),\n \"\".join(secrets.choice(string.hexdigits) for i in range(7)),\n )\n\n with open(output_random, \"w\") as file:\n file.write(\"This is a test\")\n\n result = runner.invoke(\n __main__.main, f\"-c tests/clippings-es.txt -o {output_random}\"\n )\n assert result.exit_code != 0", "def test_raw_empty(self):\n self.assertRaisesHeaderError([''])", "def test_text(self):\n result = self._do_output(o.TextOutput(o.Color.Never), self._demo_msgs)\n self.assertEqual(result,\n \"mock: mock.cmake(1): error: short text\\n\"\n \"mock: mock.cmake(2): warning: short text\\n\"\n \"mock: mock.cmake(3): notice: short text\\n\"\n \"mock: error: short text\\n\"\n \"mock: mock.cmake: error: short text\\n\"\n )", "def test_validate_and_write_emit(req):\n handle = StringIO()\n req.get('http://fake/', text=u'This is a sequence file, honest.')\n r = requests.get('http://fake/')\n output = StringIO()\n config = core.Config()\n config.emit = output.write\n core._validate_and_write(r, handle, 'FAKE', config)\n\n assert output.getvalue() == u'.\\n'\n assert handle.getvalue() == u'This is a sequence file, honest.'", "def _do_test(self, content, expected):\n self.assertEqual(list(NewickTokenizer(StringIO(content))), expected)\n self.assertEqual(list(NewickTokenizer(newick=content)), expected)\n fp = path_map.next_unique_scratch_filepath('tok_test')\n try:\n write_to_filepath(content, fp)\n self.assertEqual(list(NewickTokenizer(filepath=fp)), expected)\n finally:\n try:\n os.unlink(fp)\n except: # pragma: no cover\n pass", "def test_invalid_format(self):\n input_file = self.copy_and_mark_for_cleanup(\"Medline/pubmed_result1.txt\")\n\n cline = XXmotifCommandline(outdir=self.out_dir, seqfile=input_file)\n\n try:\n stdout, stderr = cline()\n except ApplicationError as err:\n self.assertEqual(err.returncode, 255)\n else:\n self.fail(f\"Should have failed, returned:\\n{stdout}\\n{stderr}\")", "def assert_response_correct(self, response, expected_status, expected_content):\n assert response.status_code == expected_status\n\n if expected_content:\n assert response.content.decode('utf-8') == expected_content", "def test_capture_output(capsys):\n print(\"hello world\")\n out, err = capsys.readouterr()\n assert out == \"hello world\\n\"\n assert err == \"\"", "def test_add_unexpected_success_details(self):\n self.protocol.addUnexpectedSuccess(\n self.test, details=self.sample_details)\n self.assertEqual(\n self.io.getvalue(), compat._b(\n \"uxsuccess: %s [ multipart\\n\"\n \"Content-Type: text/plain\\n\"\n \"something\\n\"\n \"F\\r\\nserialised\\nform0\\r\\n]\\n\" % self.test.id()))", "def test_no_change_for_expected(self):\n self.write_contents(\n 'external/wpt/fail.html.ini', \"\"\"\\\n [fail.html]\n expected: [FAIL, CRASH]\n \"\"\")\n self.update(\n {\n 'results': [{\n 'test': '/fail.html',\n 'status': 'CRASH',\n 'expected': 'FAIL',\n 'known_intermittent': ['CRASH'],\n 
}],\n },\n disable_intermittent='flaky')\n self.assert_contents(\n 'external/wpt/fail.html.ini', \"\"\"\\\n [fail.html]\n expected: [FAIL, CRASH]\n \"\"\")", "def test_no_output_format(self):\n pandoc_default_files = [\n os.path.join(TEST_DEFAULT_FILES_PATH, \"no_output_format.yaml\")\n ]\n\n settings = get_settings(PANDOC_DEFAULT_FILES=pandoc_default_files)\n\n pandoc_reader = PandocReader(settings)\n source_path = os.path.join(TEST_CONTENT_PATH, \"valid_content.md\")\n\n with self.assertRaises(ValueError) as context_manager:\n pandoc_reader.read(source_path)\n\n message = str(context_manager.exception)\n self.assertEqual(\n \"Output format type must be either html or html5.\", message\n )", "def nostdout():\n\n save_stdout = sys.stdout\n sys.stdout = cStringIO.StringIO()\n yield\n sys.stdout = save_stdout", "def assert_response_correct(self, response, expected_status, expected_content):\n assert response.status_code == expected_status\n\n if expected_content:\n assert str(response.content) == expected_content", "def test_outputs_not_created(self):\n one_process_workflow = \"\"\"file://B <- file://A\n echo A does not produce B\n \"\"\"\n process = run_first_process(one_process_workflow)\n assert process.success is False, process.error_message\n assert process.error_message.find(\"these resources should have been created\") >= 0, process.error_message\n assert process.error_message.find(\"* file://B\") >= 0, process.error_message", "def assert3(*extra_args, stdin):\n sys.argv[1:] = []\n sys.argv.append('markdown')\n _stdout = io.StringIO()\n pf.stdio(*extra_args, input_stream=io.StringIO(stdin), output_stream=_stdout)\n _stdout = pf.convert_text(_stdout.getvalue(), 'json', 'markdown')\n assert _stdout == out1", "def test_to_stderr_no_data(self, ext):\n eprint = MagicMock()\n with patch(\"smdba.basegate.eprint\", eprint):\n out = smdba.basegate.BaseGate.to_stderr(\"\")\n\n assert type(out) == bool\n assert not out\n assert not eprint.called\n assert not ext.called", "def test_invalid_writer_output_format(self):\n pandoc_default_files = [\n os.path.join(\n TEST_DEFAULT_FILES_PATH, \"invalid_writer_output_format.yaml\"\n )\n ]\n\n settings = get_settings(PANDOC_DEFAULT_FILES=pandoc_default_files)\n\n pandoc_reader = PandocReader(settings)\n source_path = os.path.join(TEST_CONTENT_PATH, \"valid_content.md\")\n\n with self.assertRaises(ValueError) as context_manager:\n pandoc_reader.read(source_path)\n\n message = str(context_manager.exception)\n self.assertEqual(\n \"Output format type must be either html or html5.\", message\n )", "def compare_output(self, input, output, expected):\n if type(input) == UnicodeType:\n input = input.encode('raw_unicode_escape')\n if type(output) == UnicodeType:\n output = output.encode('raw_unicode_escape')\n if type(expected) == UnicodeType:\n expected = expected.encode('raw_unicode_escape')\n try:\n self.assertEquals('\\n' + output, '\\n' + expected)\n except AssertionError:\n print >>sys.stderr, '\\n%s\\ninput:' % (self,)\n print >>sys.stderr, input\n print >>sys.stderr, '-: expected\\n+: output'\n print >>sys.stderr, ''.join(self.compare(expected.splitlines(1),\n output.splitlines(1)))\n raise", "def assert_bytes_sent(self, bytes):\n sent = b''.join(args[0] for args, _ in self.client.writer.write.call_args_list)\n assert sent == bytes\n self.client.writer.write.reset_mock()", "def test_printerr_flush(self, fake_stderr):\n utils.printerr('hello world!')\n\n self.assertTrue(fake_stderr.flush.called)", "def check_output_contains(context, text, err_msg):\n res = 
re.search(text, context.output.decode('utf-8'))\n if res is None:\n print(context.output.decode('utf-8'))\n raise Exception(err_msg)", "def test_lj_no_url(self):\n text = self.transfer('msg3.txt')\n assert self.mark not in text", "def test_very_verbose_output_not_truncated(self, monkeypatch):\n hooks = setup_hooks(very_verbose=True)\n line_length = 20\n monkeypatch.setattr(\n \"repobee_junit4._output._truncate_lines\",\n partial(_output._truncate_lines, max_len=line_length),\n )\n\n result = hooks.act_on_cloned_repo(FAIL_REPO)\n\n lines = result.msg.split(os.linesep)\n assert len(lines) > 1\n # the first line can be somewhat longer due to staus message\n # and color codes\n assert any([len(line) > line_length for line in lines[1:]])", "def assertLines(self, indata, expected_output, message=None):\n outstream = StringIO()\n giganticGrep(indata, outstream)\n value = outstream.getvalue()\n actual_lines = value.split('\\n')\n expected_lines = expected_output + ['']\n\n present = set(expected_lines) & set(actual_lines)\n missing = set(expected_lines) - set(actual_lines)\n extra = set(actual_lines) - set(expected_lines)\n\n if missing or extra:\n self.fail('Expected these lines:\\n%s\\n\\nPresent:\\n%s\\n\\nExtra:\\n%s\\n\\nMissing:\\n%s' % (\n '\\n'.join(expected_output),\n '\\n'.join(list(present)),\n '\\n'.join(list(extra)),\n '\\n'.join(list(missing))))", "def test_bad_stdout():\n stdout = \"/x/test_bad_stdout.stdout\"\n stderr = \"test_bad_stdout.stderr\"\n fu = echo_to_streams(\"Hello world\", stderr=stderr, stdout=stdout)\n\n try:\n fu.result()\n except Exception as e:\n assert isinstance(\n e, perror.BadStdStreamFile), \"Expected BadStdStreamFile, got :{0}\".format(type(e))\n\n return", "def test_write_key__to_stdout(isatty_cleanup):\n\n download.sys.stdout.isatty = mock.Mock(return_value=False)\n key = mock.Mock()\n download.write_key(key)\n key.get_contents_to_file.assert_called_once_with(download.sys.stdout)", "def not_existing_error_test(self):\n client = TestClient()\n error = client.run(\"upload some_nonsense\", ignore_error=True)\n self.assertTrue(error)\n self.assertIn(\"ERROR: No packages found matching pattern 'some_nonsense'\",\n client.user_io.out)", "def test_incorrect_input():\n content = 'hi'\n filename = {}\n\n with pytest.raises(TypeError):\n write_file(content, filename)\n\n content = {}\n filename = 'hi'\n\n with pytest.raises(TypeError):\n write_file(content, filename)", "def test_invalid_to_output_format(self):\n pandoc_default_files = [\n os.path.join(\n TEST_DEFAULT_FILES_PATH, \"invalid_to_output_format.yaml\"\n )\n ]\n\n settings = get_settings(PANDOC_DEFAULT_FILES=pandoc_default_files)\n\n pandoc_reader = PandocReader(settings)\n source_path = os.path.join(TEST_CONTENT_PATH, \"valid_content.md\")\n\n with self.assertRaises(ValueError) as context_manager:\n pandoc_reader.read(source_path)\n\n message = str(context_manager.exception)\n self.assertEqual(\n \"Output format type must be either html or html5.\", message\n )", "def test_unkown_command(self):\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"something\")\n self.assertEqual(f.getvalue().strip(),\n \"*** Unknown syntax: something\")", "def verify_output(self, output):\n return output == self.output", "def test_03_out(self, mock_stdout):\n msg = udocker.Msg(udocker.Msg.MSG)\n msg.out(\"111\", \"222\", \"333\", 444, ('555'))\n self.assertEqual(\"111 222 333 444 555\\n\", mock_stdout.getvalue())\n sys.stdout = STDOUT\n sys.stderr = STDERR", "def test_bad_content():\n def 
bad_content(request):\n for n in xrange(10):\n # what json we pass doesn't matter. It's not verifying the\n # strcuture, only checking that it's parsable\n yield \"[1,2,3]\"\n yield \"[1,2, I need no stinking close brace\"\n yield \"[1,2,3]\"\n\n def do_test(klass, *args):\n with test_server(handler=bad_content, methods=(\"post\", \"get\"),\n port=\"random\") as server:\n stream = klass(\"foo\", \"bar\", *args, url=server.baseurl)\n for tweet in stream:\n pass\n\n assert_raises(ConnectionError, do_test, TweetStream)\n assert_raises(ConnectionError, do_test, FollowStream, [1, 2, 3])\n assert_raises(ConnectionError, do_test, TrackStream, [\"opera\"])", "def test_stdout_to_pipe(self):\n original_stdout = sys.stdout\n with self.stdout_to_pipe() as output:\n self.assertNotEqual(original_stdout, sys.stdout)\n print \"Hello world!\"\n self.assertEqual(output.readline(), \"Hello world!\\n\")\n # Line without CR should be readable after closing\n sys.stdout.write(\"Goodbye\")\n self.assertEqual(original_stdout, sys.stdout)\n # Now that writing side is closed, we should be able to read\n # up to EOF.\n self.assertEqual(output.readline(), \"Goodbye\")", "def test_insert_content(self):\n self.request.path = \"/non_ascii_request/\"\n response = self.panel.process_request(self.request)\n # ensure the panel does not have content yet.\n self.assertNotIn(\"nôt åscíì\", self.panel.content)\n self.panel.generate_stats(self.request, response)\n # ensure the panel renders correctly.\n content = self.panel.content\n self.assertIn(\"nôt åscíì\", content)\n self.assertValidHTML(content)", "def test_fatal_error_would_be_thrown_in_case_of_a_formatting_failure(self):\n\n def mock_fatal(*args, **kwargs):\n raise Exception('Fatal!')\n\n io = BufferedSystemIO()\n io.print_line = mock_fatal\n\n exit_code = 0\n\n try:\n raise IndexError('Invalid index 5')\n except Exception as exc:\n stdout_bckp = sys.stdout # needed to silence the output in tests (we do not want this stack trace in tests)\n\n try:\n with open('/dev/null', 'w') as sys.stdout:\n output_formatted_exception(exc, ':my-test-task', io)\n\n except SystemExit as sys_exit:\n exit_code = sys_exit.code\n finally:\n sys.stdout = stdout_bckp # restore stdout\n\n self.assertEqual(1, exit_code)", "def test_make_output_fail():\n with pytest.raises(ValueError):\n make_output_format('dummy_format', LOG_DIR)", "def compare(self, output, expected, ignore_imports=True):\n if ignore_imports:\n output = self.strip_future_imports(output)\n expected = self.strip_future_imports(expected)\n if isinstance(output, bytes) and not isinstance(expected, bytes):\n output = output.decode('utf-8')\n if isinstance(expected, bytes) and not isinstance(output, bytes):\n expected = expected.decode('utf-8')\n self.assertEqual(order_future_lines(output.rstrip()),\n expected.rstrip())", "def pass_result(content):\n for line in content:\n if \"[Failed]\" in line:\n return False\n return True", "def _test_text(self, url, content, buffering):\n # read(-1), readable(), seekable()\n with wfdb.io._url.openurl(url, \"r\", buffering=buffering) as tf:\n self.assertTrue(tf.readable())\n self.assertTrue(tf.seekable())\n self.assertEqual(tf.read(), content)\n self.assertEqual(tf.read(), \"\")\n\n # read(10)\n with wfdb.io._url.openurl(url, \"r\", buffering=buffering) as tf:\n result = \"\"\n while True:\n chunk = tf.read(10)\n result += chunk\n if len(chunk) < 10:\n break\n self.assertEqual(result, content)\n\n # readline(), seek(), tell()\n with wfdb.io._url.openurl(url, \"r\", buffering=buffering) as tf:\n 
result = \"\"\n while True:\n rpos = tf.tell()\n tf.seek(0)\n tf.seek(rpos)\n chunk = tf.readline()\n result += chunk\n if len(chunk) == 0:\n break\n self.assertEqual(result, content)", "def test_content():\n # PREPARE\n expected_f = open(\n 'tests/pages/expected/stepanenkoartem-github-io.html',\n 'rb',\n )\n expected_dom = BeautifulSoup(\n expected_f.read(),\n 'html.parser',\n )\n\n actual_f = open(\n os.path.join(TEMP_DIR, path.for_page(URL)),\n )\n actual_dom = BeautifulSoup(actual_f, 'html.parser')\n\n # CHECK\n assert actual_dom.decode() == expected_dom.decode()", "def test_trailing_data(self):", "def testIgnoredError(self):\n cmds = \"\"\"-chown 0 missingFile\npwd\nexit\n\"\"\"\n def _cbCheckResult(res):\n self.assertIn(self.testDir.asBytesMode().path, res)\n\n d = self._getBatchOutput(cmds)\n d.addCallback(_cbCheckResult)\n return d", "def setUp(self):\n self.actualstdout = sys.stdout\n sys.stdout = StringIO.StringIO()", "def test_none(self):\n output, _err = self.executor.prepare('do-stuff', 'special', verbose=None).batch()\n self.assertEqual(output, 'doing stuff slightly more verbosely')", "def test_writer_wrong():\n GCMT(write=2)\n assert not Writer.on", "def test_no_level_goes_normal(self):\n eq_(self.msg, output(self.msg))", "def test_111(self):\n user_input = [\"1\",\"1\",\"1\"]\n with patch(\"builtins.input\", side_effect=user_input) as input_call:\n with patch(\"sys.stdout\", new=StringIO()) as output:\n import attempt\n self.assertEqual(output.getvalue().strip(),\"You cannot take this course, sorry!\")", "def test_void_msg(self):\n assert output(None) is None\n assert output() is None", "def assertOutput(self, expected=None, timeout=5, message=None):\n assertTimeout = int (globalVar.assertTimeout)\n if (timeout != 0) :\n assertTimeout = timeout\n p = self.spawnProc\n \n #If any expected output is specified, append it to the List \n if not expected:\n expected = self.prompt \n expList.append(expected) \n \n if not message :\n message = \"Expected output %s not received\" %expected\n \n # Wait for the output \n result = p.expect(expList, assertTimeout)\n # If expected is true and the output is not expected, Call the _postCheck function\n if (result != expList.index(expected)):\n self._postCheck(result, message)\n expList.remove(expected)", "def test_garbage_stream(self):\r\n valid: bytes = b\"!AIVDM,1,1,,B,B43JRq00LhTWc5VejDI>wwWUoP06,0*29\"\r\n mock_file = MockFile([b\"Foo\", b\"Bar\", b\"1337\", valid])\r\n for msg in BinaryIOStream(mock_file):\r\n self.assertEqual(msg.raw, valid)", "def test_000(self):\n user_input = [\"0\",\"0\",\"0\"]\n with patch(\"builtins.input\", side_effect=user_input) as input_call:\n with patch(\"sys.stdout\", new=StringIO()) as output:\n import attempt\n self.assertEqual(output.getvalue().strip(),\"You cannot take this course, sorry!\")", "def assertValue(self, indata, expected_output, message=None):\n outstream = StringIO()\n giganticGrep(indata, outstream)\n value = outstream.getvalue()\n self.assertEqual(value, expected_output, message)", "def test_EOF(self):\n _help = 'EOF method to exit cmd program\\n'\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"help EOF\")\n self.assertEqual(f.getvalue(), _help)", "def do_test_expected(self):\n self.maxDiff = None\n\n # We currently don't throw any exceptions in Writer, so this\n # this is always false\n if 'error' in test_src:\n self.assertRaises(test_src['error'], yamlish.dumps,\n test_src['in'], options)\n else:\n logging.debug(\"out:\\n%s\", 
textwrap.dedent(test_src['out']))\n want = yaml.load(textwrap.dedent(test_src['out']))\n logging.debug(\"want:\\n%s\", want)\n with tempfile.NamedTemporaryFile() as test_file:\n tested_function(test_src['in'], test_file)\n test_file.seek(0)\n got_str = test_file.read()\n logging.debug(\"got_str = %s\", got_str)\n got = yaml.load(got_str)\n self.assertEqual(got, want, \"Result matches\")", "def test_print_title_negativie(capsys, title, result):\n GC.print_title(title)\n out, err = capsys.readouterr()\n print(err)\n assert out != result", "def test_100(self):\n user_input = [\"1\",\"0\",\"0\"]\n with patch(\"builtins.input\", side_effect=user_input) as input_call:\n with patch(\"sys.stdout\", new=StringIO()) as output:\n import attempt\n self.assertEqual(output.getvalue().strip(),\"You cannot take this course, sorry!\")", "def pytest_internalerror(self, excrepr, excinfo):\n for line in str(excrepr).split(\"\\n\"):\n sys.stderr.write(\"INTERNALERROR> {}\\n\".format(line))\n sys.stderr.flush()\n tb = _postmortem_traceback(excinfo)\n post_mortem(tb, excinfo)", "def test_101(self):\n user_input = [\"1\",\"0\",\"1\"]\n with patch(\"builtins.input\", side_effect=user_input) as input_call:\n with patch(\"sys.stdout\", new=StringIO()) as output:\n import attempt\n self.assertEqual(output.getvalue().strip(),\"You cannot take this course, sorry!\")", "def test_incorrectContentMD5(self):\n req = FakeRequest()\n req.received_headers['content-md5'] = '72VMQKtPF0f8aZkV1PcJAg=='\n req.content = StringIO('wrongdata')\n self.assertRaises(ValueError, self.creator.handlePUT, req)", "def fail_and_annotate_streams(self, result, outcome, program_name, cause,\n stdout=None, stderr=None, annotate={}):\n annotations = dict(annotate)\n unexp=[]\n if stdout:\n unexp.append(\"stdout\")\n annotations[\"%s_STDOUT\" % program_name]= stdout\n if stderr: # gbak prints nothing to stderr if everything went ok\n unexp.append(\"stderr\")\n annotations[\"%s_STDERR\" % program_name]= stderr\n if unexp: # if we got something in stdout, stderr or both\n cause += \"\\nUnexpected \" + \" and \".join(unexp) + \\\n \" stream%s \" % \"s\"[len(unexp)==1:] + \\\n \"received from %s.\" % program_name\n result.set_outcome(outcome,cause,annotations)", "def test_script(self):\n f1 = self.write_file(\"foobar\")\n f2 = self.write_file(\"foobarbaz\")\n out = io.BytesIO()\n ghdiff.main([f1, f2], stdout=out)\n output = out.getvalue()\n self.assertTrue(b\"-foobar\" in output)\n self.assertTrue(b'+foobar<span class=\"highlight\">baz</span>' in output)", "def test_invalid_output(self):\n b1 = Block()\n self.configure_block(b1, {})\n b1.notify_signals([Signal()], \"invalid_output\")\n self.assert_num_signals_notified(1, b1, \"invalid_output\")" ]
[ "0.6872373", "0.65048116", "0.6423676", "0.6372315", "0.63059235", "0.6282092", "0.62726283", "0.62563837", "0.61430126", "0.6134692", "0.61007786", "0.60741216", "0.6065867", "0.60580695", "0.6008732", "0.597988", "0.58616245", "0.58479875", "0.5832719", "0.5825116", "0.5822783", "0.58165854", "0.5811914", "0.5793671", "0.578525", "0.57807565", "0.5743345", "0.5714698", "0.57134557", "0.5711795", "0.57091737", "0.5697278", "0.56929815", "0.5677058", "0.5653061", "0.5644985", "0.5619267", "0.5602036", "0.56009233", "0.5593906", "0.5587499", "0.5585704", "0.55761975", "0.55747366", "0.55495626", "0.55468357", "0.5528637", "0.5519304", "0.5512591", "0.5511419", "0.5508516", "0.5496328", "0.5494866", "0.5494023", "0.5491984", "0.5487609", "0.54843664", "0.5474163", "0.5471064", "0.54688555", "0.54610443", "0.5444518", "0.54428715", "0.54338443", "0.5431179", "0.5430928", "0.54301035", "0.5424503", "0.5412553", "0.5409882", "0.53993326", "0.5396707", "0.5387257", "0.5381783", "0.53817326", "0.53662056", "0.53566486", "0.5355549", "0.5348871", "0.5343328", "0.5342806", "0.5341471", "0.5340224", "0.53345203", "0.5330752", "0.53252083", "0.5317784", "0.53132707", "0.5310457", "0.5307307", "0.52998245", "0.5298585", "0.52894026", "0.528695", "0.5285732", "0.5282649", "0.5280448", "0.5274468", "0.52730256", "0.5271833" ]
0.69911766
0
Call this method to extract the widget to pack into your GUI when you are building the viewer into things.
def get_widget(self): return self.imgwin
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def assemble_widget(self) -> widgets.Widget:\n graph_selection = self._create_layer_selection(layer_type=\"graphs\")\n map_selection = self._create_layer_selection(layer_type=\"maps\")\n view_buttons = self.create_visibility_buttons()\n\n widget = widgets.VBox(\n [\n widget_utils.create_html_header(\"Graph Selection\"),\n graph_selection,\n widget_utils.HRULE,\n widget_utils.create_html_header(\"Map Selection\"),\n map_selection,\n widget_utils.HRULE,\n widget_utils.create_html_header(\"View Selection\"),\n view_buttons,\n ]\n )\n\n return widget", "def create_widgets(self):", "def _setup_ui(self):\n\n self.window = ui.Widget()\n self.window.dimensions = ui.normalize_dimension((\n 0, 0,\n self.normalized_screen_resolution[0],\n self.normalized_screen_resolution[1]\n ))\n self.window.background_color = ImageColor.getcolor('#000000', 'RGB')\n\n interface_frame = ui.Widget(parent=self.window)\n interface_frame.dimensions = ui.normalize_dimension((\n self.preview_renderer.window[2],\n 0,\n self.normalized_screen_resolution[0] - self.preview_renderer.window[2],\n self.normalized_screen_resolution[1]\n ))\n interface_frame.background_color = ImageColor.getcolor('#ffffff', 'RGB')\n\n number = ui.LabelWidget(\"\",\n name=NAME_GET_STARTED,\n parent=interface_frame,\n align=\"center\",\n font_color=(0, 0, 0, 255))\n number.dimensions = (\n 5, 5,\n interface_frame.width - 10,\n interface_frame.height - 10\n )", "def _build_gui(self):\n box = qt.QHBoxLayout(self)\n box.addWidget(self._but)\n\n lab = self._lab\n Pol = qt.QSizePolicy\n lab.setSizePolicy(Pol.Expanding, Pol.Preferred)\n lab.setFrameStyle(qt.QLabel.Panel)\n box.addWidget(lab)", "def _build_gui(self):\n box = qt.QHBoxLayout(self)\n box.addWidget(self._but)\n\n lab = self._lab\n Pol = qt.QSizePolicy\n lab.setSizePolicy(Pol.Expanding, Pol.Preferred)\n lab.setFrameStyle(qt.QLabel.Panel)\n box.addWidget(lab)", "def create_widget(self):\n pass", "def init_layout(self):\n\t\tself.pack_start(self.edit, expand=True)\n\t\tself.pack_start(self.button, expand=False)\n\t\tself.show_all()", "def create_widgets( self ):", "def get_init_ui(self, container):\n w = self.get_frame(container)\n self.cols_configure(w)\n w.grid(row=0, column=0, sticky=tk.N+tk.W+tk.S+tk.E)\n\n return w", "def getWidget(self):", "def _setupUi(self, widget):\n \n widget._setup_vertical_layout()\n widget._setup_horizontal_layout()\n widget._setup_vertical_layout()\n for field in self._fields:\n if field=='channel_idx':\n widget._exit_layout()\n widget._setup_vertical_layout()\n choices = None\n if hasattr(self, field + 's'):\n choices = self.__getattribute__(field + 's')\n widget._setup_gui_element(field, choices)\n widget._exit_layout()\n widget._exit_layout()\n self._setup_fetch_buttons(widget)", "def visualise(self):\n self.w = VisualizeSetupBox(self.master, self._df)\n self.master.wait_window(self.w.top)", "def inicialUI(self):\r\n\r\n self.setGeometry(500, 500, 500, 500)\r\n self.setWindownTitle(\"Pesquisa\")\r\n self.displayWidgets()\r\n\r\n self.show()", "def _build_gui(self):\n vlayout = qt.QVBoxLayout()\n \n box = qt.QHBoxLayout()\n box.addWidget(self._but)\n\n lab = self._lab\n Pol = qt.QSizePolicy\n lab.setSizePolicy(Pol.Expanding, Pol.Preferred)\n lab.setFrameStyle(qt.QLabel.Panel)\n box.addWidget(lab)\n title = u\"Select a mesh from the Salomé object browser\"\n vlayout.addWidget(qt.QLabel(title))\n vlayout.addLayout(box)\n return vlayout", "def init_widget(self):", "def buildUI(self):\n\n if cmds.window(\"pyART_AddToCanvasWIN\", exists=True):\n 
cmds.deleteUI(\"pyART_AddToCanvasWIN\", wnd=True)\n\n # create the main window\n self.mainWin = QtWidgets.QMainWindow(self.pickerUI)\n\n # create the main widget\n self.mainWidget = QtWidgets.QWidget()\n self.mainWin.setCentralWidget(self.mainWidget)\n\n # create the mainLayout\n self.layout = QtWidgets.QVBoxLayout(self.mainWidget)\n\n # load stylesheet\n styleSheetFile = utils.returnNicePath(self.toolsPath, \"Core/Scripts/Interfaces/StyleSheets/animPicker.qss\")\n f = open(styleSheetFile, \"r\")\n self.style = f.read()\n f.close()\n\n self.mainWin.setStyleSheet(self.style)\n\n self.mainWin.setMinimumSize(QtCore.QSize(250, 400))\n self.mainWin.setMaximumSize(QtCore.QSize(250, 400))\n self.mainWin.resize(250, 400)\n\n # set qt object name\n self.mainWin.setObjectName(\"pyART_AddToCanvasWIN\")\n self.mainWin.setWindowTitle(\"Add Module To Canvas\")\n\n # label, listWidget, button\n label = QtWidgets.QLabel(\"Available Modules:\")\n label.setProperty(\"boldFont\", True)\n self.layout.addWidget(label)\n\n self.moduleList = QtWidgets.QListWidget()\n self.moduleList.setMaximumSize(230, 300)\n self.moduleList.setMinimumSize(230, 300)\n self.layout.addWidget(self.moduleList)\n\n # add modules to listWidget\n self.addModulesToList()\n\n # create add button\n button = QtWidgets.QPushButton(\"Add Selected To Canvas\")\n self.layout.addWidget(button)\n button.setObjectName(\"blueButton\")\n button.clicked.connect(self.addSelectedToCanvas)\n\n # show ui\n self.mainWin.show()", "def build(self):\n with self.set_master(sticky=\"nsew\", row_weights=[1], column_weights=[0, 1], auto_columns=0):\n self.build_category_canvas()\n with self.set_master(sticky=\"nsew\", row_weights=[0, 1, 0], column_weights=[1, 1]):\n self.build_previous_range_button(row=0, column=0)\n self.build_hidden_fields_checkbutton(row=0, column=1)\n with self.set_master(sticky=\"nsew\", row=1, column=0, row_weights=[1], column_weights=[1]):\n self.build_entry_frame()\n with self.set_master(sticky=\"nsew\", row=1, column=1, row_weights=[1], column_weights=[1]):\n self.build_field_frame()\n self.build_next_range_button(row=2, column=0)", "def _init_widgets(self):\n # Container frame\n self.container = Frame(self)\n # Workspace block\n self.main_container = Frame(self.container)\n\n self.text = Label(self.main_container)\n self.text.config(text=\"PyEventLogViewer is a timeline-based tool used to simplify the way\\n\"\n \"a user can view and explore Windows EVTX files. 
To begin using this\\n\"\n \"software you must do the following:\\n\\n\"\n \"\\t1) File → New → 'Create a new project'\\n\"\n \"\\t2) Tools → Import Log File → 'Open a specified EVTX file'\\n\"\n \"\\t3) Explore the presented timeline.\\n\"\n \"\\t4) Double-click a specific record to view the XML data for that record.\\n\"\n \"\\t5) File → Export → 'Generate a CSV or HTML file for timeline presentation.'\\n\\n\"\n \"At this point, only System and Security EVTX files are parsable with this software.\")\n\n self.show_var = BooleanVar()\n self.show_check = Checkbutton(self.main_container, text=\"Don't Show on Startup\", variable=self.show_var)\n\n # Action block\n self.button_ok = Button(self.main_container, text='Ok', underline=0, command=self.callback_close)\n self.bind('<Return>', self.callback_close)\n self.bind('<Escape>', self.callback_close)\n\n # Focus on window - required for binds to work.\n self.focus_set()", "def widget(self) -> tk.Frame:\r\n return self.main_frame", "def createWidgets(self):\r\n top = self.winfo_toplevel()\r\n top.rowconfigure(0, weight=1)\r\n top.columnconfigure(0, weight=1)\r\n self.rowconfigure(0, weight=1)\r\n self.columnconfigure(0, weight=1) \r\n\r\n self.button_quit = tk.Button(self, text='Quit', command=self.quit)\r\n self.button_quit.grid(row=0, column=0, sticky=tk.N+tk.S+tk.E+tk.W)", "def prepare_UI(self):", "def setup(self):\n self.ui.setup_window()", "def __init__(self, viewer: geoviewer.GeoGraphViewer) -> None:\n super().__init__(viewer=viewer)\n\n # Resetting all prior visibility control\n self.viewer.hide_all_layers()\n\n widget = self.assemble_widget()\n self.children = [widget]", "def _place_widgets(self):\n padding = 15\n\n # Workspace block\n self.main_container.columnconfigure(0, weight=4)\n self.main_container.grid(row=0, column=0, rowspan=4, columnspan=5, sticky='EW')\n\n # Information block\n self.text.grid(row=1, column=0, columnspan=5, padx=padding, pady=padding, sticky='NESW')\n self.button_ok.grid(row=4, column=0, padx=padding, pady=padding, sticky='NESW')\n self.show_check.grid(row=4, column=1, padx=padding, pady=padding, sticky='NESW')\n\n # Specify which portion to auto-expand\n self.container.columnconfigure(0, weight=4)\n self.container.pack(side=LEFT, fill=BOTH)", "def __init_ui(self):\n self.__maximize_button.setFixedSize(31, 31)\n self.__maximize_button.setIcon(QIcon(SystemInfo.RESOURCES + 'images/buttons/maximize.svg'))\n\n self.__diagram_group.setStyleSheet(\"QGroupBox { border: 1px solid gray; background: white; }\")\n self.__diagram_layout.addWidget(self.__diagram_group)\n\n self.__button_layout = QHBoxLayout()\n self.__button_layout.addWidget(self.__start_button)\n self.__button_layout.addStretch()\n self.__button_layout.addWidget(self.__maximize_button)\n\n main_layout = QVBoxLayout()\n main_layout.addLayout(self.__button_layout, 1)\n main_layout.addLayout(self.__diagram_layout, 1)\n main_layout.addStretch(0)\n\n self.setLayout(main_layout)", "def build_ui_widget(self):\n if self._tool_actions:\n multi_button_layout = QtWidgets.QHBoxLayout()\n multi_button_layout.setContentsMargins(0, 0, 0, 0)\n for name, func in self._tool_actions.items():\n btn = ui_utils.ContentResizeButton(name)\n btn.setSizePolicy(QtWidgets.QSizePolicy.Ignored, QtWidgets.QSizePolicy.Ignored)\n\n btn.clicked.connect(partial(self._run, func))\n multi_button_layout.addWidget(btn)\n\n multi_button_widget = QtWidgets.QWidget()\n multi_button_widget.setLayout(multi_button_layout)\n main_widget = multi_button_widget\n else:\n btn = 
ui_utils.ContentResizeButton(\"{}\".format(self.TOOL_NAME))\n btn.setSizePolicy(QtWidgets.QSizePolicy.Ignored, QtWidgets.QSizePolicy.Ignored)\n btn.clicked.connect(self._run)\n\n # set Icon on button\n if self.ICON:\n # if it's a string, assume it's a path to an icon image\n if isinstance(self.ICON, str):\n self.ICON = QtGui.QIcon(self.ICON)\n btn.setIcon(self.ICON)\n\n main_widget = btn\n\n return main_widget", "def createWidgets(self):\n raise NotImplementedError", "def create_widgets(self):\r\n self.create_containers()\r\n self.setup_containers()\r\n self.create_panel_widgets()\r\n self.setup_scrollbar()", "def create_widgets(self):\n #create description label\n Label(self,\n text = \"Patient Info:\"\n ).grid(row = 0, column = 0, sticky = W)", "def add_vtk_window_widget(self):\n base_brain_file = os.path.basename(self.app.BRAIN_FILE)\n base_mask_file = os.path.basename(self.app.MASK_FILE)\n object_title = \"Brain: {0} (min: {1:.2f}, max: {2:.2f}) Mask: {3}\".format(base_brain_file,\n self.brain.scalar_range[0],\n self.brain.scalar_range[1],\n base_mask_file)\n object_group_box = QtWidgets.QGroupBox(object_title)\n object_layout = QtWidgets.QVBoxLayout()\n object_layout.addWidget(self.vtk_widget)\n object_group_box.setLayout(object_layout)\n self.grid.addWidget(object_group_box, 0, 2, 5, 5)\n # must manually set column width for vtk_widget to maintain height:width ratio\n self.grid.setColumnMinimumWidth(2, 700)", "def initUI(self) -> None:\n ratio = 70\n width_to_set = (ratio * self.get_current_window_info()[0]) / 100.0\n height_to_set = (ratio * self.get_current_window_info()[1]) / 100.0\n self.setGeometry(200, 100, width_to_set, height_to_set)\n self.createTable()\n # Add box layout, add table to box layout and add box layout to widget\n self.layout = QVBoxLayout()\n self.layout.addWidget(self.tableWidget)\n self.setLayout(self.layout)\n self.setWindowTitle('View files')\n self.show()", "def __init__(self, viewer: geoviewer.GeoGraphViewer) -> None:\n super().__init__(viewer=viewer)\n\n # Creating individual (sub-)widgets\n visibility_widget = RadioVisibilityWidget(viewer=self.viewer)\n metrics_widget = MetricsWidget(viewer=self.viewer)\n settings_widget = SettingsWidget(viewer=self.viewer)\n\n viewer_height = int(viewer.layout.height.replace(\"px\", \"\"))\n metrics_widget.layout.height = \"{}px\".format(viewer_height * 0.3)\n\n if self.viewer.small_screen:\n view_tab = [visibility_widget]\n else:\n view_tab = [visibility_widget, widget_utils.HRULE, metrics_widget]\n\n # Create combined widget, each key corresponds to a tab\n combined_widget_dict = dict()\n combined_widget_dict[\"View\"] = widgets.VBox(view_tab)\n if self.viewer.small_screen:\n combined_widget_dict[\"Metrics\"] = metrics_widget\n combined_widget_dict[\"Settings\"] = settings_widget\n combined_widget_dict[\"Log\"] = self.log_handler.out\n\n combined_widget = widgets.Tab()\n combined_widget.children = list(combined_widget_dict.values())\n for i, title in enumerate(combined_widget_dict):\n combined_widget.set_title(i, title)\n\n self.children = [combined_widget]", "def widgets(self):\r\n self.setWindowTitle(\"PyCrypt\")\r\n self.setMinimumSize(QSize(500, 500))\r\n self.setMaximumSize(QSize(500, 500))\r\n# Adding the sub def for widgets etc\r\n self.add_menus_and_status()\r\n self.add_buttons()", "def create_widget(self):\n self.widget = wxDockPane(self.parent_widget())", "def widgetSetup(self):\n self.master.resizable(0, 0)\n self.master.iconbitmap('logo.ico')\n self.master.title(\"Ejercicio POO\")\n\n 
self.master.bind(\"<Return>\", lambda e: self.create())\n self.master.bind(\"<Delete>\", lambda e: self.delete())", "def initUI(self):\n # Setting the main layout as Vertical.\n self.mainLayout = QHBoxLayout()\n\n # Create title.\n self.title = QLabel(self.__name + \" : \")\n\n # Add description as tooltip.\n self.title.setToolTip(self.__description)\n\n # Add title to main layout.\n self.mainLayout.addWidget(self.title)\n\n # Create ComboBox.\n self.dropDown = QComboBox()\n\n # Add datas to drop down.\n self.dropDown.addItems(self.__datas)\n\n # Set default index to dropdown.\n self.dropDown.setCurrentIndex(self.__currentValue)\n\n # Connect dropdown with update method.\n self.dropDown.currentIndexChanged.connect(self.changeCurrentValue)\n\n # Add ComboBox to main layout.\n self.mainLayout.addWidget(self.dropDown)\n\n # Add the main layout to the window.\n self.setLayout(self.mainLayout)", "def create_widgets( self ):\n\n self.selectionView = SelectionView()\n self.selectionView.setModel( self.proxyPhotosModel )\n self.selectionView.activated.connect( self.selectionActivation )\n self.selectionView.selectionModel().selectionChanged.connect( self.selectionChange )\n self.selectionView.setColumnHidden( self.ID_COLUMN, True ) # hide the ID\n\n self.selectionBox = QComboBox()\n\n self.selectionBox.addItem( \"all\", \"all\" )\n for state in self.db.get_processing_states():\n self.selectionBox.addItem( state, state )\n\n self.selectionBox.activated.connect( self.selectionTypeActivation )\n\n self.selectionBoxLabel = QLabel( \"&Processing Type:\" )\n self.selectionBoxLabel.setBuddy( self.selectionBox )\n\n self.previewArea = grafwidgets.PhotoPreviewArea()\n\n # informational labels for the photo record.\n self.infoStateLabel = QLabel()\n self.infoSummaryLabel = QLabel()\n self.infoLocationLabel = QLabel()\n self.infoTakenLabel = QLabel()\n self.infoTagsLabel = QLabel()\n\n # dock widget which will hold the selection layout once created\n # in create_layout, for now it gets an empty widget.\n self.selection_dock = QDockWidget()\n self.selection_dock.setFeatures( QDockWidget.DockWidgetMovable )\n self.selection_dock.setWidget( QWidget() )", "def set_ui(self):\r\n\r\n self.canvas = tk.Canvas(self)\r\n self.canvas.pack()\r\n\r\n self.entry = ttk.Entry(self.canvas, justify=\"center\", font=(\"Calibri\", 12))\r\n\r\n self.grid = Grid(self.canvas)", "def create_widget(self):\n self.widget = UILabel()", "def __init_UI(self):\r\n\r\n ## Setting up the vertical bar\r\n # self.bar = self.verticalScrollBar()\r\n\r\n # Create the inner widget of the scroll area\r\n self.inner_widget = QWidget(self)\r\n self.setWidget(self.inner_widget)\r\n\r\n # Create a vertical layout inside the previous widget\r\n self.__layout = QVBoxLayout(self)\r\n self.inner_widget.setLayout(self.__layout)\r\n\r\n # More settings\r\n self.setWidgetResizable(True)", "def _initUI(self):\n\n vlayout = QtWidgets.QVBoxLayout()\n\n # Description\n #----------------------------------------------------------------\n hlayout = QtWidgets.QHBoxLayout()\n\n label = QtWidgets.QLabel()\n label.setText('Locatie:')\n label.setFixedWidth(100)\n hlayout.addWidget(label)\n\n label = QtWidgets.QLabel()\n label.setText(self.name)\n hlayout.addWidget(label)\n hlayout.setSpacing(10)\n\n vlayout.addLayout(hlayout)\n\n # Exportnaam\n #----------------------------------------------------------------\n self.exportname = ParameterInputLine(label='Exportnaam:', labelwidth=100)\n self.exportname.LineEdit.setMinimumWidth(200)\n 
vlayout.addLayout(self.exportname.layout)\n\n # Exportdatabase\n #----------------------------------------------------------------\n self.exportpath = ExtendedLineEdit(label='SQLite-database:', labelwidth=100, browsebutton=True)\n self.exportpath.BrowseButton.clicked.connect(self._get_path_database)\n vlayout.addLayout(self.exportpath.layout)\n\n # Line\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n\n vlayout.addWidget(line)\n\n # Buttons\n #----------------------------------------------------------------\n hbox = QtWidgets.QHBoxLayout()\n hbox.addItem(QtWidgets.QSpacerItem(0, 0, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum))\n # Add ok/close\n self.closebutton = QtWidgets.QPushButton('Sluiten')\n self.closebutton.clicked.connect(self.close)\n hbox.addWidget(self.closebutton)\n # Add ok/close\n self.savebutton = QtWidgets.QPushButton('Opslaan')\n self.savebutton.clicked.connect(self._save)\n hbox.addWidget(self.savebutton)\n\n vlayout.addLayout(hbox)\n\n # Add layout to widget\n self.setLayout(vlayout)", "def add_widgets(self):\n tkinter.Label(self.top_frame, text=\"File Path:\").grid(row=1, column=0)\n self.data_path_entry = tkinter.Entry(self.top_frame)\n self.data_path_entry.grid(row=1, column=1)\n self.data_path_entry.insert(10, self.data_path)\n # Create the Browse button\n tkinter.Button(self.top_frame,\n text=\"Browse...\",\n command=self.get_file).grid(row=1, column=2)\n # Create the Ok button\n tkinter.Button(self.top_frame,\n text=\"OK\",\n command=self.save_configurations).grid(row=2, column=0, sticky=tkinter.W, pady=3)\n # Create the Cancel button\n tkinter.Button(self.top_frame,\n text=\"Cancel\",\n command=self.exit).grid(row=2, column=1, sticky=tkinter.E, pady=3)", "def __init__(self):\n # Root window\n self.root = tk.Tk()\n self.root.title(\"Crossword\")\n # Padding frame\n self.frame = tk.Frame(self.root)\n self.frame.pack(fill=\"both\", padx=PAD, pady=PAD)\n # Initialize widget groups\n self.header = HeaderView(self)\n self.puzzle = PuzzleView(self)\n self.clues = CluesView(self)\n # Show widgets\n self.header.show()\n self.puzzle.show()\n self.clues.show()", "def process_widgets(self):\r\n\r\n self.runmode_menu.add_radiobutton(label=\"Graphical User Interface\", value=0, variable=self.gui_menu_var,\r\n command=self.disable_debugging_mode)\r\n self.runmode_menu.add_radiobutton(label=\"Command Line Interface\", value=1, variable=self.gui_menu_var,\r\n command=lambda gui=self: load_cli(self))\r\n self.runmode_menu.add_radiobutton(label=\"Debugging Mode (GUI + CLI)\", value=2, variable=self.gui_menu_var,\r\n command=self.enable_debugging_mode)\r\n\r\n # Placing all the submenus\r\n self.filemenu.add_cascade(label=\"Run Mode\", menu=self.runmode_menu)\r\n self.menubar.add_cascade(label=\"File\", menu=self.filemenu)\r\n\r\n self.config(menu=self.menubar) # Indicating that the \"menubar\" variable is the filemenu of the application\r\n\r\n self.folder_frame.pack()\r\n\r\n # self.folder_locator.pack(side=LEFT, padx=10, pady=10)\r\n\r\n self.media_folder_label.pack(side=LEFT, padx=10, pady=10)\r\n\r\n self.folder_button.pack(side=LEFT)\r\n\r\n self.path_frame_parent.pack(side=LEFT)\r\n\r\n self.search_frame.pack()\r\n\r\n self.search_frame.pack()\r\n self.search_entry.grid(row=0, column=0, padx=10, pady=20)\r\n self.search_button.grid(row=0, column=1, padx=5)\r\n # self.advanced_search_button.grid(row=0, column=2, padx=5)\r\n\r\n self.media_frame.pack()\r\n\r\n self.button_frame.pack()", 
"def _get_from_builder(self):\n # Load the ui from a glade file.\n self.builder = Gtk.Builder()\n try:\n self.builder.add_from_file(os.path.join(self.app.BASE_DIR,\n 'ui',\n 'selectsynaptics.glade')\n )\n except Exception as ex:\n print(str(ex))\n print('\\n{}:\\n{}\\n{}'.format(_('Error loading from Glade file'),\n os.path.join(self.app.BASE_DIR,\n 'ui',\n 'selectsynaptics.glade'), repr(ex))\n )\n sys.exit(ERROR_INVALID_GLADE_FILE)\n\n # Get gui objects.\n self.boxForFooter = self.builder.get_object('boxForFooter')\n self.boxMain = self.builder.get_object('boxMain')\n self.buttonCancel = self.builder.get_object('buttonCancel')\n self.buttonOK = self.builder.get_object('buttonOK')\n self.comboboxtextDevices = self.builder.get_object('comboboxtextDevices')\n self.label1 = self.builder.get_object('label1')\n self.labelSelected = self.builder.get_object('labelSelected')\n self.labelWarning = self.builder.get_object('labelWarning')\n\n # Connect signals existing in the Glade file.\n self.builder.connect_signals(self)\n\n # Reparent our main container from glader file,\n # this way we have all Gtk.Window functionality using \"self\".\n thechild = self.builder.get_object('windowMain').get_child()\n thechild.get_parent().remove(thechild)\n self.add(thechild)\n\n # Connect generated signals:\n # top window signals and/or other generated signals.\n # top window signals were connected, by builder's \"connect_signals\" function,\n # to builder's main window\n self.connect('delete-event', self.on_windowMain_delete_event)\n self.connect('destroy', self.on_windowMain_destroy)\n self.connect('size-allocate', self.on_windowMain_size_allocate)\n self.connect('window-state-event', self.on_windowMain_window_state_event)\n\n\n # :builder top window properties.\n self.can_focus = 'False'\n\n # Load window icon from app, if any.\n self.set_icon(self.app.icon)", "def get_widget(self):\n\t\treturn None", "def initGui(self):\r\n\r\n # Create help action \r\n self.helpAction = QAction( QIcon(\":/plugins/layercombinations/about.png\"), u\"Help\", self.iface.mainWindow())\r\n # connect the action \r\n self.helpAction.triggered.connect( self.showHelp )\r\n # Add menu item\r\n self.iface.addPluginToMenu(u\"&Layer Combinations\", self.helpAction)\r\n\r\n # Create the action that allows to change the widget type\r\n self.changeWidgetAction = QAction(\"Change widget type\", self.iface.mainWindow())\r\n self.changeWidgetAction.triggered.connect( self.changeWidget )\r\n self.iface.addPluginToMenu(u\"&Layer Combinations\", self.changeWidgetAction)\r\n\r\n # Create the action that will toggle the plugin panel\r\n self.action = QAction(QIcon(\":/plugins/layercombinations/icon.png\"), \"Show/hide the Layer Combinations widgets\", self.iface.mainWindow())\r\n self.action.triggered.connect( self.widget.toggle )\r\n # Add toolbar button and menu item\r\n self.iface.addToolBarIcon(self.action)\r\n self.iface.addPluginToMenu(u\"&Layer Combinations\", self.action)\r\n\r\n\r\n # Add the widget to the mainWindow\r\n self.widget.addToiFace(self.iface)", "def init_ui(self):\n self.panel_sizer = wx.BoxSizer(wx.VERTICAL)\n self.figure_bmp = wx.StaticBitmap(self, wx.ID_ANY,\n bitmap=self.controller.empty_bitmap(self.bitmap_width,\n self.bitmap_height),\n pos=wx.DefaultPosition, size=wx.DefaultSize)\n self.panel_sizer.Add(self.figure_bmp, ui_defaults.ctrl_pct, wx.CENTER,\n ui_defaults.widget_margin)\n self.SetSizerAndFit(self.panel_sizer)", "def _setupUi(self):\n self.setupUi(self)\n self.twTree.setStyleSheet(\"background-color: rgb(200, 
200, 200)\")", "def setup_ui(self):\n self.vertical_layout = QtWidgets.QVBoxLayout(self)\n\n from anima.ui.lib import QtCore, QtGui\n # the widget should consist of a QGraphic\n self.thumbnail_graphics_view = QtWidgets.QGraphicsView(self)\n\n # set size policy\n size_policy = QtWidgets.QSizePolicy(\n QtWidgets.QSizePolicy.Fixed,\n QtWidgets.QSizePolicy.Fixed\n )\n size_policy.setHorizontalStretch(0)\n size_policy.setVerticalStretch(0)\n size_policy.setHeightForWidth(\n self.thumbnail_graphics_view.sizePolicy().hasHeightForWidth())\n self.thumbnail_graphics_view.setSizePolicy(size_policy)\n\n # set size\n default_size = QtCore.QSize(\n self.default_thumbnail_size,\n self.default_thumbnail_size\n )\n\n self.thumbnail_graphics_view.setMinimumSize(default_size)\n self.thumbnail_graphics_view.setMaximumSize(default_size)\n\n self.thumbnail_graphics_view.setAutoFillBackground(False)\n self.thumbnail_graphics_view.setVerticalScrollBarPolicy(\n QtCore.Qt.ScrollBarAlwaysOff\n )\n self.thumbnail_graphics_view.setHorizontalScrollBarPolicy(\n QtCore.Qt.ScrollBarAlwaysOff\n )\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n self.thumbnail_graphics_view.setBackgroundBrush(brush)\n self.thumbnail_graphics_view.setInteractive(False)\n self.thumbnail_graphics_view.setRenderHints(\n QtGui.QPainter.Antialiasing |\n QtGui.QPainter.HighQualityAntialiasing |\n QtGui.QPainter.SmoothPixmapTransform |\n QtGui.QPainter.TextAntialiasing\n )\n self.vertical_layout.addWidget(self.thumbnail_graphics_view)", "def __init_widgets(self):\n self.add_events(Gdk.EventMask.ENTER_NOTIFY_MASK)\n\n headerbar = self.get_header_bar()\n scanner_image = Gtk.Image.new_from_icon_name(\"qrscanner-symbolic\", Gtk.IconSize.BUTTON)\n self.scan_button.add(scanner_image)\n\n self.add_button.get_style_context().add_class(\"suggested-action\")\n self.add_button.set_sensitive(False)\n self.add_button.set_can_default(True)\n\n headerbar.pack_end(self.add_button)\n headerbar.pack_end(self.scan_button)\n\n self.account_config.connect(\"changed\", self._on_account_config_changed)\n self.add_button.connect(\"clicked\", self._on_add)\n self.scan_button.connect(\"clicked\", self.account_config.scan_qr)\n self.get_content_area().pack_start(self.account_config, True, True, 0)", "def create_widgets(self):\n root.title(\"Universal Converter\")\n root.resizable(False, False)\n root.config(bg=\"white\")\n head_frame = Frame(root, bd=3, bg=\"Navy\", relief=GROOVE)\n Label(head_frame, text=\"UNIVERSAL CONVERTER\", font=(\"Trebuchet MS\", 24, \"bold\"), fg=\"White\", bg=\"Navy\").pack(pady=50)\n head_frame.grid(row=0, column=0, columnspan=4, ipadx=20, sticky=\"ew\")\n Label(root, text=\" Choose the Converter \", font=(\"Trebuchet MS\", 16, \"bold\"), fg=\"Navy\", bg=\"White\").grid(row=2, column=0, columnspan=4, ipadx=20, ipady=20)\n button_frame = Frame(root, bd=5, bg=\"Navy\", relief=FLAT)\n self.measurements_list = [\"Angle\", \"Area\", \"Bit Byte\", \"Density\", \"Electric Current\", \"Energy\", \"Force\", \"Fuel Consumption\", \"Length\", \"Mass\", \"Power\", \"Pressure\", \"Speed\", \"Temperature\", \"Time\", \"Volume\"]\n self.measurements_dict = {\"Angle\": self.angle, \"Area\": self.area, \"Bit Byte\": self.bitbyte, \"Density\": self.density, \"Electric Current\": self.electriccurrent, \"Energy\": self.energy, \"Force\": self.force, \"Fuel Consumption\": self.fuelconsumption, \"Length\": self.length, \"Mass\": self.mass, \"Power\": self.power, \"Pressure\": self.pressure, \"Speed\": self.speed, \"Temperature\": 
self.temperature, \"Time\": self.time, \"Volume\": self.volume}\n for i in range(16):\n self.button = Button(button_frame, text=self.measurements_list[i], font=(\"Trebuchet MS\", 12), width=13, fg=\"Navy\", bg=\"White\", relief=FLAT, overrelief=SOLID, bd=5, activebackground=\"Navy\", activeforeground=\"White\", command=self.measurements_dict[self.measurements_list[i]])\n self.button.grid(row=i/4+4, column=i%4, ipady=15, ipadx=15, padx=2, pady=2)\n button_frame.grid(row=3, column=0, columnspan=4, sticky=\"we\", padx=5, pady=5)\n root.protocol(\"WM_DELETE_WINDOW\", self.callback) #When user will quit, program will show you the messagebox", "def init_layout(self):\n\t\t# layout splitter. directoryList + buttons | logWidget\n\t\tself.pack_start(self.splitter)\n\t\tframe = gtk.Frame()\n\t\tframe.set_shadow_type(gtk.SHADOW_IN)\n\t\tbox1 = gtk.VBox()\n\t\tframe.add(box1)\n\t\tself.splitter.pack1(frame, resize=True, shrink=False)\n\t\tbox2 = gtk.HBox()\n\t\tbox1.pack_start(self.directoryListScroll)\n\t\tbox1.pack_start(box2, expand=False)\n\t\tbox2.pack_start(self.buttonNew)\n\t\tbox2.pack_start(self.buttonEdit)\n\t\tbox2.pack_start(self.buttonUp)\n\t\tbox2.pack_start(self.buttonDown)\n\t\tbox2.pack_start(self.buttonRemove)\n\t\tself.logView.set_shadow_type(gtk.SHADOW_IN)\n\t\tself.splitter.pack2(self.logView, resize=True, shrink=False)\n\n\t\tself.pack_start(self.buttonImport, expand=False)\n\n\t\t# layout first column of settings widgets\n\t\tbox2 = gtk.HBox()\n\t\tself.pack_start(box2, expand=False)\n\t\tbox3 = gtk.VBox()\n\t\tbox2.pack_start(box3)\n\t\tbox3.pack_start(self.checkAutoStartImport)\n\t\tbox3.pack_start(self.checkAppendMessages)\n\n\t\tbox2.pack_start(gtk.VSeparator())\n\n\t\t# layout secund column of settings widgets\n\t\t#TODO: layout is a bit messy here with labels + spinboxes\n\t\t# have not found a way to properly align labes / boxes yet\n\t\tbox3 = gtk.VBox()\n\t\tbox2.pack_start(box3)\n\t\tbox4 = gtk.HBox()\n\t\tbox3.pack_start(box4)\n\t\tbox4.pack_start(self.labelImportTimeout, expand=False)\n\t\tbox4.pack_start(self.spinImportTimeout)\n\t\tbox4 = gtk.HBox()\n\t\tbox3.pack_start(box4)\n\t\tbox4.pack_start(self.labelMaxLogLines, expand=False)\n\t\tbox4.pack_start(self.spinMaxLogLines)\n\n\t\tself.show_all()", "def createUI(self):\n\n q.getQItem(windowID, QtWidgets.QWidget)\n cmds.setParent(q.fullPath)\n\n # ################################################\n # Active Render Layer\n\n # cmds.separator(height=12, style='none')\n addFrameLayout(\n '%s_frameLayoutLayers' % windowID,\n 'Visible Render Layer', collapsable=False,\n labelVisible=False,\n marginHeight=0\n )\n\n addRowLayout(\n '%s_rowLayoutActiveRenderLayer' % windowID,\n 4,\n columnAlign4=('left', 'left', 'right', 'right'),\n columnAttach4=('left', 'both', 'right', 'right'),\n columnWidth4=(\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.775,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075\n )\n )\n\n\n addButton('%s_addNewLayer' % windowID, 'New', rsAddNewLayer,\n image='RS_create_layer', size=(21, 21))\n addOptionMenu('%s_selectActiveLayer' % windowID,\n 'Active Layer ', (), rsSelectActiveLayer)\n addButton('rsOpenRenderSetupWindow', 'Render Setup',\n rsOpenRenderSetupWindow, image='render_setup.png',\n size=(21, 21))\n addButton('rsOpenUnifiedRenderGlobals', 'Render Globals',\n rsOpenUnifiedRenderGlobals, image='render_setup.png',\n size=(21, 21))\n\n # ################################################\n # Work Render Layers\n\n 
cmds.setParent(q.fullPath)\n addFrameLayout('%s_frameLayoutLayersB' % windowID,\n 'Work Render Layer', collapsable=False,\n labelVisible=False, marginHeight=0)\n addRowLayout('%s_rowLayoutVisibleRenderLayer' % windowID, 3,\n columnAlign3=('left', 'left', 'right'),\n columnAttach3=('left', 'both', 'right'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.075, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.85,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075))\n\n cmds.separator()\n addOptionMenu('%s_selectVisibleLayer' % windowID,\n 'Visible Layer ', (), rsSelectVisibleLayer)\n cmds.separator()\n\n cmds.setParent(q.fullPath)\n cmds.separator(height=12, style='none')\n\n # ################################################\n # Collections\n\n addFrameLayout('%s_frameLayout02' % windowID, 'Collections',\n labelVisible=False, marginHeight=0)\n\n addRowLayout(\n '%s_rowLayout02' % windowID,\n 6,\n columnAlign6=('left', 'left', 'left', 'left', 'left', 'left'),\n columnAttach6=('both', 'both', 'right', 'right', 'right', 'right'),\n columnWidth6=(\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.18,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.18,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.415,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n )\n )\n\n addButton('rsAddCollection', 'Add', rsAddCollection)\n addButton('rsRemoveCollection', 'Remove', rsRemoveCollection)\n addButton('rsSelectShapes', 'Select Shapes', rsSelectShapes,\n image='selectObject.png', size=(21, 21))\n addButton('rsRenameShader', 'Rename Shader', rsRenameShader,\n size=(21, 21), image='QR_rename.png')\n addButton('rsDuplicateShader', 'Duplicate Shader',\n duplicateShader, size=(21, 21), image='newPreset.png')\n addButton('rsRefreshUI', 'Refresh', rsRefreshUI, size=(21, 21),\n image='QR_refresh.png')\n\n # ###########################\n # Filter List\n\n cmds.setParent('%s_frameLayout02' % windowID)\n addRowLayout('%s_rowLayout03' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'both'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.6, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.42))\n\n addTextField('%s_filterShaderList' % windowID, 'Search',\n rsFilterShaderList_off, rsFilterShaderList_off,\n window.updateUI)\n addOptionMenu('rsShaderGroups', '|', (), rsShaderGroups)\n\n # ###########################\n # The shaders scroll list\n\n cmds.setParent('%s_frameLayout02' % windowID)\n addRowLayout('%s_rowLayout04' % windowID, 1, columnAlign1='both', columnAttach1='both', columnWidth1=WINDOW_WIDTH\n + 12)\n addTextScrollList('%s_ShaderScrollList' % windowID, (),\n rsShaderScrollList_doubleClick,\n rsShaderScrollList_onSelect,\n rsShaderScrollList_deleteKey)\n\n # Add popup menu:\n\n cmds.popupMenu('rsShaderScrollListPopupMenu',\n parent='%s_ShaderScrollList' % windowID,\n allowOptionBoxes=False, markingMenu=True,\n postMenuCommand=postMenuCommand)\n cmds.menuItem('%s_popupMenuItem02' % windowID,\n label='Duplicate Shader', command=duplicateShader)\n cmds.menuItem(divider=True)\n cmds.menuItem('%s_popupMenuItem04' % windowID,\n label='Graph Shader')\n cmds.menuItem(divider=True)\n cmds.menuItem('%s_popupMenuItem03' % windowID,\n label='Select Shader')\n cmds.menuItem(divider=True)\n cmds.menuItem('%s_popupMenuItem05' % windowID,\n label='Select Assigned Shapes')\n cmds.menuItem('%s_popupMenuItem06' % windowID,\n label='Select Assigned Transforms')\n\n # ##################################################\n # Arnold Property Overrides\n\n 
cmds.setParent('%s_frameLayout02' % windowID)\n cmds.columnLayout(\n '%s_columnLayout20' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n\n cmds.separator(parent='%s_columnLayout20' % windowID, height=4,\n style='none')\n\n addRowLayout('%s_rowLayout05' % windowID, 2,\n columnAlign2=('left', 'both'),\n columnAttach2=('left', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.75, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.25))\n addText('%s_textArnoldPropertyOverridesLabel' % windowID,\n 'Apply Arnold Property Overrides', 'plainLabelFont')\n addCheckBox('rsArnoldPropertyOverridesCheckBox', '',\n rsArnoldPropertyOverridesCheckBox,\n rsArnoldPropertyOverridesCheckBox)\n cmds.separator(parent='%s_columnLayout20' % windowID, height=4,\n style='none')\n\n # Column Layout to toggle\n\n cmds.setParent('%s_columnLayout20' % windowID)\n cmds.columnLayout(\n '%s_columnLayout02' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n\n addCheckboxes('%s_columnLayout02' % windowID)\n cmds.columnLayout('%s_columnLayout02' % windowID, edit=True,\n visible=False)\n\n # #################################################\n # Shader Override\n\n cmds.setParent('%s_frameLayout02' % windowID)\n cmds.columnLayout(\n '%s_columnLayout21' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n cmds.separator(parent='%s_columnLayout21' % windowID, height=4,\n style='none')\n addRowLayout('%s_rowLayout06' % windowID, 2,\n columnAlign2=('left', 'right'),\n columnAttach2=('left', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.75, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.25))\n addText('%s_shaderOverrideLabel' % windowID, 'Shader Override',\n 'plainLabelFont')\n addCheckBox('%s_shaderOverrideCheckbox' % windowID, '',\n rsShaderOverrideCheckbox, rsShaderOverrideCheckbox)\n cmds.separator(parent='%s_columnLayout21' % windowID, height=4,\n style='none')\n\n cmds.setParent('%s_columnLayout21' % windowID)\n cmds.columnLayout(\n '%s_columnLayout03' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('both', 4),\n adjustableColumn=True,\n rowSpacing=0,\n )\n cmds.setParent('%s_columnLayout03' % windowID)\n addOptionMenu('%s_optionMenu02' % windowID, 'Select: ', (),\n rsShaderOverridesMenu)\n\n global selectedShaderOverride\n\n # default selection\n\n selectedShaderOverride = SHADER_OVERRIDE_OPTIONS[0]['ui']\n cmds.columnLayout('%s_columnLayout03' % windowID, edit=True,\n visible=False)\n\n # #################################################\n\n cmds.setParent(q.fullPath)\n cmds.separator(height=10, style='none')\n\n # #################################################\n # Extras\n\n addFrameLayout('%s_frameLayout50' % windowID, 'Extras',\n collapsable=True, marginHeight=0,\n labelVisible=False)\n\n # #################################################\n # Add & Assign Shader Groups\n\n addFrameLayout(\n '%s_frameLayout05' % windowID,\n 'Add & Assign Shader Groups',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=False,\n labelVisible=True,\n )\n\n # Add the renamer window\n\n self.gwCustomRenamer = CustomRenamer()\n self.gwCustomRenamer.createUI()\n\n # #################################################\n # AutoConnect\n\n cmds.setParent('%s_frameLayout50' % 
windowID)\n\n addFrameLayout(\n '%s_frameLayout03' % windowID,\n 'Adobe Connector',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=True,\n labelVisible=True,\n )\n addRowLayout('%s_rowLayout07', 3, columnAlign3=('left', 'left',\n 'left'), columnAttach3=('both', 'both', 'both'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.4, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3))\n addButton('updateConnections', '> Update Connections <',\n updateConnections)\n addButton('uvSnapshot', 'UV Snapshot', uvSnapshot)\n addButton('editTexture', 'Edit Texture', editTexture)\n\n # After Effects\n\n cmds.setParent('%s_frameLayout03' % windowID)\n addRowLayout('%s_rowLayout11' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'both'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.4, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.6))\n addText('%s_text90' % windowID, 'Send to After Effects:')\n addButton('makeCompButton', 'Send to After Effects', rsMakeComp)\n\n # #################################################\n # Render Setup /\n # Output settings\n\n cmds.setParent('%s_frameLayout50' % windowID)\n addFrameLayout(\n '%s_frameLayout04' % windowID,\n 'Output Settings',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=True,\n labelVisible=True,\n )\n addRowLayout('%s_rowLayout08' % windowID, 1,\n columnAlign1='center', columnAttach1='both',\n columnWidth1=WINDOW_WIDTH - FRAME_MARGIN * 2)\n addButton('%s_revealOutputDirectory' % windowID,\n 'Output path not set yet', rsRevealOutputDirectory)\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout09' % windowID, 3,\n columnAlign3=('left', 'right', 'right'),\n columnAttach3=('left', 'right', 'right'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.8, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.14,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.06))\n\n addOptionMenu('%s_optionMenu05' % windowID, '', (),\n rsSelectOutputTemplate)\n addOptionMenu('%s_outputVersionMenu' % windowID, '', (),\n rsSelectOutputVersion)\n cmds.menuItem(label='v001')\n\n cmds.setParent('%s_rowLayout09' % windowID)\n addButton('%s_incrementOutputVersionButton' % windowID, '+1',\n rsIncrementOutputVersion, size=(21, 21))\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout10' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.7, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3))\n addOptionMenu('%s_optionMenu03' % windowID, 'Format:', (),\n rsOutputTemplatesMenu)\n addOptionMenu('%s_optionMenu06' % windowID, '', (),\n rsSetFPSMenu)\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout12' % windowID, 4,\n columnAlign4=('right', 'left', 'right', 'left'),\n columnAttach4=('both', 'both', 'both', 'both'),\n columnWidth4=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.50, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.15,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.20,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.15))\n\n addText('%s_setInFrameLabel' % windowID, 'In Frame ')\n addTextField('%s_setInFrame' % windowID, '', setInFrame,\n setInFrame, setInFrame)\n\n addText('%s_setOutFrameLabel' % windowID, 'Out Frame ')\n addTextField('%s_setOutFrame' % windowID, '', setOutFrame,\n setOutFrame, setOutFrame)", "def initCentralUic(self):\n self.initFileTableWidget()\n self.initViewerStack()\n self.splitter.setSizes([150, 850])", "def init_ui(self):\n # Create GUI elements, 
set them in dict structure\n labelwidth = 150\n\n # Add parameter line edit for Factor Tm to Tp\n self.input_elements['factor Tm Tp'] = widgets.ParameterInputLine(\n label='Factor Tm naar Tp:',\n labelwidth=labelwidth,\n unitlabel='(NVT: Tp aanwezig)' if 'Tp' in self.hydraulic_loads.columns else '',\n validator=QtGui.QDoubleValidator(0.01, 99.99, 20),\n )\n\n if 'Tp' in self.hydraulic_loads.columns or self.parent_tab.step != 'I1':\n self.input_elements['factor Tm Tp'].set_enabled(False)\n\n # Add line edit with browsebutton for Master template\n self.input_elements['mastertemplate'] = widgets.ExtendedLineEdit(\n label='Master template bestand:',\n labelwidth=labelwidth,\n browsebutton=QtWidgets.QPushButton('...', clicked=self.select_master_template)\n )\n\n # Add line edit with browsebutton for depth file\n self.input_elements['depthfile'] = widgets.ExtendedLineEdit(\n label='Bathymetry bestand:',\n labelwidth=labelwidth,\n browsebutton=QtWidgets.QPushButton('...', clicked=self.select_bathymetry_file)\n )\n\n # Add line edit with browsebutton for swan result folder\n self.input_elements['swanfolder'] = widgets.ExtendedLineEdit(\n label='SWAN uitvoer folder:',\n labelwidth=labelwidth,\n browsebutton=QtWidgets.QPushButton('...', clicked=self.select_swan_folder)\n )\n\n\n self.setLayout(QtWidgets.QVBoxLayout())\n self.layout().setSpacing(10)\n\n for _, item in self.input_elements.items():\n self.layout().addWidget(item)\n\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.layout().addWidget(line)\n\n # OK and Cancel buttons\n self.generateButton = QtWidgets.QPushButton('Genereer invoer')\n self.generateButton.setDefault(True)\n self.generateButton.clicked.connect(self.generate)\n\n self.cancelButton = QtWidgets.QPushButton('Annuleren')\n self.cancelButton.setAutoDefault(False)\n self.cancelButton.clicked.connect(self.cancel)\n\n button_box = QtWidgets.QDialogButtonBox(QtCore.Qt.Horizontal, self)\n button_box.addButton(self.generateButton, QtWidgets.QDialogButtonBox.ActionRole)\n button_box.addButton(self.cancelButton, QtWidgets.QDialogButtonBox.RejectRole)\n button_box.accepted.connect(QtWidgets.QDialog.accept)\n\n self.layout().addWidget(button_box)", "def create(self, parent):\n self.widget = QFrame(parent)", "def create_view(self):\n title_label = Label(self, text='Upload, Preview, Describe and Visualize',\n fg='blue', font=('Arial', 16))\n title_label.pack(fill=BOTH, expand=True)\n select_file_button = Button(self, background='White', text='Select Data File [.csv, .xlsx, .xls, .json, .txt]',\n command=self.start_upload)\n select_file_button.pack(padx=5, pady=10)", "def widget(self) -> ttk.Treeview:\r\n return self.wrapper", "def init_layout(self):\n\t\tbox1 = gtk.VBox()\n\t\tbox1.pack_start(self.labelName)\n\t\tbox1.pack_start(self.labelDirectory)\n\t\tif self.labelDefaults is not None:\n\t\t\tbox1.pack_start(self.labelDefaults)\n\n\t\tbox2 = gtk.HBox()\n\t\tbox2.pack_start(self.directorySelector.edit, expand=True)\n\t\tbox2.pack_start(self.directorySelector.button, expand=False)\n\n\t\tbox3 = gtk.VBox()\n\t\tbox3.pack_start(self.editName)\n\t\tbox3.pack_start(box2)\n\t\tif self.comboDefaults is not None:\n\t\t\tbox3.pack_start(self.comboDefaults)\n\n\t\tbox4 = gtk.HBox()\n\t\tbox4.pack_start(box1, expand=False)\n\t\tbox4.pack_start(box3)\n\n\t\tself.vbox.pack_start(box4, expand=False)\n\t\tself.vbox.pack_start(gtk.VBox())\n\n\t\tself.show_all()", "def widgets(self):\n raise NotImplementedError(\"This method is 
not ready to be used yet\")", "def initUI(self):\n self.logger.debug('Setting up the Measurement GUI')\n self.setWindowTitle(self.title)\n\n self.show()\n\n self.make_combobox_scanner()\n self.make_combobox_movements()\n self.make_combobox_configurate()\n self.make_combobox_basic()", "def create_widgets(self):\n self.pack(fill=tk.BOTH, expand=True)\n self.top_frame = tk.Frame(self)\n self.top_frame.pack(fill=tk.X, expand=False)\n\n # Create obstacle button\n self.create_obstacle_button = tk.Button(\n self.top_frame,\n text=self.OBSTACLE_CREATION_INACTIVE_LABEL,\n command=self._toggle_creation_mode_cb\n )\n self.create_obstacle_button.pack(side=tk.LEFT)\n\n # Load button\n self.load_button = tk.Button(\n self.top_frame,\n text=self.LOAD_BUTTON_LABEL,\n command=self._load_button_cb\n )\n self.load_button.pack(side=tk.LEFT)\n\n # Export button\n export_button = tk.Button(\n self.top_frame,\n text=self.EXPORT_BUTTON_LABEL,\n command=self._export_button_cb\n )\n export_button.pack(side=tk.RIGHT)\n\n # Main canvas\n self.canvas = tk.Canvas(self, background='white')\n self.canvas.config(width=self.CANVAS_WIDTH, height=self.CANVAS_HEIGHT)\n self.canvas.bind('<ButtonRelease-1>', self._draw_line)\n self.canvas.pack(fill=tk.BOTH, expand=True)\n self.canvas.focus_set()", "def create(self, parent):\n self.widget = wxBitmapWidget(parent)", "def init_widget(self):\n self._build_config()\n self._raw_toolbar.initToolbar(self.config)", "def construct(self):\n top = Toplevel()\n top.withdraw()\n top.protocol(\"WM_DELETE_WINDOW\", self.view_xml_pane)\n top.columnconfigure(0, weight=1)\n top.rowconfigure(0, weight=1)\n top.title(\"XML Preview\")\n self._pane = top\n\n xml_area = Text(top, borderwidth=2, relief=\"sunken\")\n xml_area.config(font=(\"consolas\", 12), undo=True, wrap='word', state=DISABLED)\n xml_area.grid(row=0, column=0, sticky=\"nsew\", padx=2, pady=2)\n\n scrollbar = Scrollbar(top, command=xml_area.yview)\n scrollbar.grid(row=0, column=1, sticky='nsew')\n xml_area['yscrollcommand'] = scrollbar.set\n\n self._text_area = xml_area", "def iniciaUI(self):\n\n self.setGeometry(100,100, 300, 200)\n self.setWindowTitle(\"Formulario\")\n self.displayWidgets()\n\n self.show()", "def do_layout(self):\n self.define_panel_structure()\n self.layout_selection()\n self.layout_data_list()\n self.layout_batch()\n self.layout_button()", "def _init_ui(self):\r\n\t\t\r\n\t\tself.input_frame = Input(self)\r\n\t\tself.input_frame.pack()\r\n\t\t\r\n\t\tbutton_ok = Button(self, text = \"Ping\", command = self._go)\r\n\t\tbutton_ok.pack()\r\n\t\t\r\n\t\tself.result_frame = Result(self)\r\n\t\tself.result_frame.pack()", "def createWidget(self, parent):\n raise NotImplementedError()", "def build_ui(self):\n self.ui = UI_procstep.Ui_Form()#.Ui_USGSContactInfoWidgetMain()\n self.ui.setupUi(self)\n self.setup_dragdrop(self)\n\n self.proc_step = RepeatingElement(which='tab',\n tab_label='Step', add_text='Additional Step',\n widget=ProcessStep, remove_text='Remove Step', italic_text='Processing Steps Taken')\n\n #self.proc_step = RepeatingElement(params=params, which='tab', tab_label='Source',)\n self.proc_step.add_another()\n self.ui.widget_procstep.layout().addWidget(self.proc_step)", "def createWidget(self):\n figure = Figure(figsize=(4,2), dpi=100)\n \"\"\"Figure size is measured in inches.\"\"\"\n graph = figure.add_subplot(111)\n \"\"\"The default subplot, which creates one row, one column, with index one.\"\"\"\n graph.plot(self.wave_table[0], self.wave_table[1])\n\n canvas = FigureCanvasTkAgg(figure, self.master)\n 
canvas.draw()\n canvas.get_tk_widget().pack(fill=tk.BOTH, expand=True)", "def initUI(self):\n \n self.setWindowTitle(\"Intecol Flir camera\")\n self.setGeometry(300, 100, 1012, 622)", "def _init_ui(self):\n hlayout = QtWidgets.QHBoxLayout()\n\n hlayout.addWidget(QtWidgets.QLabel('Kies een normtraject:'))\n\n self.section_combobox = QtWidgets.QComboBox()\n self.section_combobox.setFixedWidth(60)\n self._update_combobox()\n\n hlayout.addWidget(self.section_combobox)\n\n self.remove_button = QtWidgets.QPushButton('Verwijderen', clicked=self._del_flooddefence)\n hlayout.addWidget(self.remove_button)\n\n vlayout = QtWidgets.QVBoxLayout()\n vlayout.addLayout(hlayout)\n\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n\n vlayout.addWidget(line)\n\n self.close_button = QtWidgets.QPushButton('Sluiten', clicked=self.close)\n vlayout.addWidget(self.close_button, 0, QtCore.Qt.AlignRight)\n\n self.setLayout(vlayout)\n\n self.setWindowTitle(\"HB Havens: normtrajecten\")\n self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)", "def buildUI(self):\n outside = QtWidgets.QVBoxLayout(self)\n columns = QtWidgets.QHBoxLayout(self)\n layout = QtWidgets.QVBoxLayout(self)\n self.details_layout = QtWidgets.QVBoxLayout(self) #column with edit panel\n self.restriction = QtWidgets.QWidget() #restricts size of details_layout\n self.restriction.setLayout(self.details_layout)\n self.restriction.setFixedWidth(200)\n columns.addLayout(layout)\n columns.addWidget(self.restriction)\n outside.addLayout(columns)\n\n #tab widget\n self.tabwidget = tabsWindow(self) #QtWidgets.QTabWidget(tabsClosable = True, movable = True)\n layout.addWidget(self.tabwidget)\n #add base tab\n self.tabwidget.newTab(name =\"Untitled\", image = \"\")\n\n #add second column with details\n self.updateDetails(\"edit\")\n\n #edit button\n layout_btns = QtWidgets.QHBoxLayout()\n editBtn = QtWidgets.QPushButton(\"Stop Editing\")\n editBtn.clicked.connect(lambda: self.editChange(editBtn))\n layout_btns.addWidget(editBtn)\n\n #save button\n saveBtn = QtWidgets.QPushButton(\"Save\")\n saveBtn.clicked.connect(self.save)\n layout_btns.addWidget(saveBtn)\n\n #load button\n loadBtn = QtWidgets.QPushButton(\"Load\")\n loadBtn.clicked.connect(self.load)\n layout_btns.addWidget(loadBtn)\n\n #close button\n closeBtn = QtWidgets.QPushButton('Close')\n closeBtn.clicked.connect(self.closeEvent)\n layout_btns.addWidget(closeBtn)\n\n outside.addLayout(layout_btns) #add buttons to layout", "def initGUI(self):\n\n\t\t# Set main frame's location \n\t\tself.grid(row=0, column=0, sticky=\"nsew\")\n\n\t\t# Set path entry frame and its location\n\t\tself.entryFrame = Frame(self, relief = RAISED, borderwidth = 1)\n\t\tself.entryFrame.pack(fill = BOTH, expand = False)\n\t\t# Make label\n\t\tif self.message:\n\t\t\tmessageLabel = Label(self.entryFrame, text = self.message, font=(\"Bradley\", 10))\n\t\t\tmessageLabel.pack(anchor=W, padx=0, pady=0)\n\n\t\t# Set path entry and its location\n\t\tself.filePathEntry = Entry(self.entryFrame, bd = 4, width = 50)\n\t\tself.filePathEntry.pack(side = LEFT, padx=2, pady=1)", "def initUI(self):\n # Set the main layout component.\n self.mainLayout = QVBoxLayout()\n\n if(self.__itemListSize > 0):\n self.mainLayout.setSpacing(0)\n self.mainLayout.setContentsMargins(0, 0, 0, 0)\n\n # Build the loop for QHBoxLayout creation.\n for y in range(self.__ySize):\n # Creating the horizontal layout for X.\n horizontalLayout = QHBoxLayout()\n 
horizontalLayout.setSpacing(0)\n horizontalLayout.setSizeConstraint(QLayout.SetMaximumSize)\n horizontalLayout.setContentsMargins(0, 0, 0, 0)\n\n for x in range(self.__xSize):\n itemCount = x + y * self.__xSize\n\n if(itemCount < self.__itemListSize):\n # Create the widget.\n item = self.__itemList[itemCount]\n \n else:\n # Add empty string if no item available to keep grid.\n item = QLabel(\"\")\n \n item.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)\n item.setMinimumWidth(self.__parentGeometry.width()/self.__xSize - 10)\n horizontalLayout.addWidget(item)\n \n self.mainLayout.addLayout(horizontalLayout)\n \n else:\n self.textDisplay = QLabel(self.__emptyLabel)\n self.mainLayout.addWidget(self.textDisplay)\n\n # Set main layout to the window.\n self.setLayout(self.mainLayout)", "def _init_widgets(self):\n comps = self.ui.component_list\n comps.addItems(sorted(self._labels.keys()))\n data = self.ui.data_list\n data.addItems(sorted(self._data.keys()))", "def setup_gui(self):\n # if there are more than 1 visualizer we need to assure that there\n # will not be tag conflicts\n BaseRealTimeVisualizer.setup_gui_lock.acquire()\n # look for valid tag\n dpg.create_context()\n\n self.id = 0\n while dpg.does_item_exist(f'main_window_{self.id}'):\n self.id += 1\n\n with dpg.texture_registry(show=False):\n dpg.add_dynamic_texture(\n width=self.width,\n height=self.height,\n default_value=np.zeros((self.width, self.height, 3)),\n tag=f'input_image_texture_{self.id}',\n )\n\n with dpg.window(\n tag=f'main_window_{self.id}',\n no_title_bar=True,\n autosize=True\n ):\n dpg.add_image(\n texture_tag=f'input_image_texture_{self.id}',\n tag=f'image_render_{self.id}',\n pos=(_PADDING, _PADDING)\n )\n\n dpg.set_global_font_scale(_FONT_SCALE)\n\n if self.id == 0:\n dpg.set_primary_window(f'main_window_{self.id}', True)\n dpg.create_viewport(\n title=self.title,\n width=self.width + _PADDING*2,\n height=self.height + _PADDING*2,\n resizable=True\n )\n dpg.setup_dearpygui()\n dpg.show_viewport()\n elif self.id == 1:\n dpg.set_primary_window('main_window_0', False)\n\n BaseRealTimeVisualizer.setup_gui_lock.release()", "def widget(self):\n return self.containedWidget", "def layout(self):\n\t\t\n\t\tself.mainSizer = wx.BoxSizer(wx.VERTICAL)\n\t\tbtnSizer = wx.BoxSizer(wx.HORIZONTAL)\n\t\t\n\t\timg = wx.Image(self.photoMaxSize,self.photoMaxSize)\n\t\tself.imageCtrl = wx.StaticBitmap(self, wx.ID_ANY, \n\t\t\t\t\t\t\t\t\t\t wx.Bitmap(img))\n\t\tself.mainSizer.Add(self.imageCtrl, 0, wx.ALL|wx.CENTER, 5)\n\t\tself.imageLabel = wx.StaticText(self, label=\"\")\n\t\tself.mainSizer.Add(self.imageLabel, 0, wx.ALL|wx.CENTER, 5)\n\t\t\n\t\tbtnData = [(\"Previous\", btnSizer, self.onPrevious),\n\t\t\t\t (\"Slide Show\", btnSizer, self.onSlideShow),\n\t\t\t\t (\"Next\", btnSizer, self.onNext)]\n\t\tfor data in btnData:\n\t\t\tlabel, sizer, handler = data\n\t\t\tself.btnBuilder(label, sizer, handler)\n\t\t\t\n\t\tself.mainSizer.Add(btnSizer, 0, wx.CENTER)\n\t\tself.SetSizer(self.mainSizer)", "def _init_ui(self):\n self.setWindowTitle(\"HB Havens: resultaten\")\n self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)\n\n self.setLayout(QtWidgets.QVBoxLayout())\n\n # Create figure\n self.figure = Figure(figsize=(4,4))\n self.ax = self.figure.add_subplot()\n\n self.ax.grid()\n self.ax.spines['right'].set_visible(False)\n self.ax.spines['top'].set_visible(False)\n self.ax.tick_params(axis='y', color='0.75')\n self.ax.tick_params(axis='x', color='0.75')\n self.ax.set_aspect(1)\n\n # Add canvas\n 
self.canvas = FigureCanvasQTAgg(self.figure)\n\n # this is the Navigation widget\n # it takes the Canvas widget and a parent\n self.layout().addWidget(self.canvas)\n\n # Add location selection\n hbox = QtWidgets.QHBoxLayout()\n label = QtWidgets.QLabel('Locatie:')\n label.setFixedWidth(80)\n hbox.addWidget(label)\n self.location_combobox = QtWidgets.QComboBox()\n self.location_combobox.addItems(self.result_locations)\n self.location_combobox.setCurrentIndex(self.locid)\n self.location_combobox.currentIndexChanged.connect(self._set_location)\n hbox.addWidget(self.location_combobox)\n self.layout().addLayout(hbox)\n\n # Add parameter selection\n hbox = QtWidgets.QHBoxLayout()\n label = QtWidgets.QLabel('Parameter:')\n label.setFixedWidth(80)\n hbox.addWidget(label)\n self.parameter_combobox = QtWidgets.QComboBox()\n self.input_parameters = self.modelunctab.mainmodel.hydraulic_loads.result_columns[:]\n self.parameter_combobox.addItems(self.input_parameters)\n self.parameter_combobox.currentIndexChanged.connect(self._set_parameter)\n self.parameter_combobox.setCurrentIndex(0)\n self._set_parameter()\n self.figure.tight_layout()\n hbox.addWidget(self.parameter_combobox)\n self.layout().addLayout(hbox)\n\n # Line\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n\n self.layout().addWidget(line)\n\n # Add ok/close\n self.closebutton = QtWidgets.QPushButton('Sluiten')\n self.closebutton.clicked.connect(self.close)\n self.layout().addWidget(self.closebutton, 0, QtCore.Qt.AlignRight)\n\n self.layout().setSizeConstraint(QtWidgets.QLayout.SetFixedSize)", "def _layout_widgets(self):\n self.main_layout = QtGui.QVBoxLayout()\n self.main_layout.addWidget(self.window_message)\n\n self.hbox_layout_buttons = QtGui.QHBoxLayout()\n self.main_layout.addLayout(self.hbox_layout_buttons)\n\n self.hbox_layout_buttons.addStretch()\n self.hbox_layout_buttons.addWidget(self.turn_visibility_on)\n self.hbox_layout_buttons.addWidget(self.turn_visibility_off)\n\n self.setLayout(self.main_layout)", "def create_widgets( self ):\n\n # photo preview.\n self.photoPreview = grafwidgets.MultiRubberBandedLabel( self.preview_pixmap )\n self.previewArea = grafwidgets.PhotoPreviewArea( self.photoPreview )\n\n # draw the art record regions.\n # XXX: factor this into a separate routine\n for art in self.art_records:\n if art[\"region\"] is not None:\n self.photoPreview.add_band( art[\"id\"], art[\"region\"] )\n\n # processing type.\n self.selectionBox = QComboBox()\n\n self.selectionBox.addItem( \"all\", \"all\" )\n for state in self.db.get_processing_states():\n self.selectionBox.addItem( state, state )\n\n self.selectionBox.activated.connect( self.selectionTypeActivation )\n\n self.selectionBoxLabel = QLabel( \"&Processing Type:\" )\n self.selectionBoxLabel.setBuddy( self.selectionBox )\n\n # art record selection.\n #\n # XXX: describe these.\n self.selectionView = SelectionView()\n self.selectionView.setModel( self.proxyArtModel )\n self.selectionView.activated.connect( self.selectionActivation )\n self.selectionView.selectionModel().selectionChanged.connect( self.recordSelectionChange )\n\n # record addition and removal buttons.\n self.newRecordButton = QPushButton( \"&New Record\" )\n self.newRecordButton.clicked.connect( self.create_new_record )\n\n self.deleteRecordButton = QPushButton( \"&Delete Record\" )\n self.deleteRecordButton.clicked.connect( self.delete_record )\n\n # we shouldn't be able to push the delete button until we have a\n # record 
selected.\n self.deleteRecordButton.setEnabled( False )\n\n # art record summary labels.\n self.artTypeLabel = QLabel()\n self.artSizeLabel = QLabel()\n self.artQualityLabel = QLabel()\n self.artDateLabel = QLabel()\n self.artArtistsLabel = QLabel()\n self.artAssociatesLabel = QLabel()\n self.artVandalsLabel = QLabel()\n self.artTagsLabel = QLabel()\n\n # photo record processing state.\n self.photoProcessingStateComboBox = QComboBox()\n for state in self.db.get_processing_states():\n self.photoProcessingStateComboBox.addItem( state, state )\n self.photoProcessingStateComboLabel = QLabel( \"Stat&e:\" )\n self.photoProcessingStateComboLabel.setBuddy( self.photoProcessingStateComboBox )\n\n # photo record tags.\n #\n # NOTE: our accelerator is chosen to match the ArtRecordEditor's\n # accelerator.\n #\n self.photoTagsLineEdit = QLineEdit( \"\" )\n self.photoTagsLabel = QLabel( \"Ta&gs:\" )\n self.photoTagsLabel.setBuddy( self.photoTagsLineEdit )\n\n # XXX: need to add\n #\n # * description", "def create_layout( self ):\n\n # XXX: debugging layout\n self.setStyleSheet( \"border: 1px solid black\" )\n\n selection_layout = QVBoxLayout()\n selection_layout.setContentsMargins( 0, 0, 0, 0 )\n selection_layout.setSpacing( 0 )\n selection_layout.addWidget( self.selectionView )\n\n selection_type_layout = QHBoxLayout()\n selection_type_layout.setContentsMargins( 0, 0, 0, 0 )\n selection_type_layout.setSpacing( 0 )\n selection_type_layout.addWidget( self.selectionBoxLabel )\n selection_type_layout.addWidget( self.selectionBox )\n selection_type_layout.setStretchFactor( self.selectionBox, 1 )\n\n selection_layout.addLayout( selection_type_layout )\n selection_layout.setStretchFactor( self.selectionView, 1 )\n\n info_layout = QVBoxLayout()\n info_layout.setContentsMargins( 0, 0, 0, 0 )\n info_layout.setSpacing( 0 )\n\n stats_layout = QGridLayout()\n stats_layout.setContentsMargins( 0, 0, 0, 0 )\n stats_layout.setVerticalSpacing( 1 )\n stats_layout.setHorizontalSpacing( 10 )\n\n stats_layout.addWidget( QLabel( \"State:\" ),\n 0, 0 )\n stats_layout.addWidget( self.infoStateLabel,\n 0, 1 )\n\n stats_layout.addWidget( QLabel( \"Art Records:\" ),\n 1, 0 )\n stats_layout.addWidget( self.infoSummaryLabel,\n 1, 1 )\n\n stats_layout.addWidget( QLabel( \"Location:\" ),\n 2, 0 )\n stats_layout.addWidget( self.infoLocationLabel,\n 2, 1 )\n\n stats_layout.addWidget( QLabel( \"Taken:\" ),\n 3, 0 )\n stats_layout.addWidget( self.infoTakenLabel,\n 3, 1 )\n\n stats_layout.addWidget( QLabel( \"Tags:\" ),\n 4, 0 )\n stats_layout.addWidget( self.infoTagsLabel,\n 4, 1 )\n\n stats_layout.setColumnStretch( 1, 1 )\n\n info_layout.addWidget( self.previewArea )\n info_layout.addLayout( stats_layout )\n info_layout.setStretchFactor( self.previewArea, 1 )\n\n self.centralWidget = QWidget()\n self.centralWidget.setLayout( info_layout )\n\n self.selection_dock.widget().setLayout( selection_layout )\n\n self.addDockWidget( Qt.LeftDockWidgetArea, self.selection_dock )\n\n self.setCentralWidget( self.centralWidget )", "def _initialize(self):\n self._frame = ttk.Frame(master=self._root)\n self._ingredients_frame = ttk.Frame(master=self._frame)\n\n self._create_header()\n self._show_ingredient_list()\n self._create_footer()\n\n self._ingredients_frame.grid(row=1, column=1, columnspan=2)\n self._frame.grid_columnconfigure(1, weight=1, minsize=250)", "def initComposerGui(self, qgsComposerZoom):\r\n\r\n dockWidgetForComposer = LcComposerPalette(self.manager, qgsComposerZoom)\r\n\r\n self.compDockWidgets.append(dockWidgetForComposer)\r\n\r\n 
qgsComposerZoom.composerWindow().addDockWidget(Qt.RightDockWidgetArea, dockWidgetForComposer )", "def _init_ui(self):\n\n hlayout = QtWidgets.QHBoxLayout()\n\n label = QtWidgets.QLabel('Kies een normtraject:')\n\n hlayout.addWidget(label)\n\n self.section_combobox = QtWidgets.QComboBox()\n self.section_combobox.setFixedWidth(60)\n self.section_ids = sorted([''] + io.geometry.import_section_ids(self.datadir))\n self.section_combobox.addItems(self.section_ids)\n\n hlayout.addWidget(self.section_combobox)\n\n self.add_button = QtWidgets.QPushButton('Toevoegen', clicked=self._add_flooddefence)\n\n hlayout.addWidget(self.add_button)\n\n vlayout = QtWidgets.QVBoxLayout()\n vlayout.addLayout(hlayout)\n\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n\n vlayout.addWidget(line)\n\n self.close_button = QtWidgets.QPushButton('Sluiten', clicked=self.close)\n vlayout.addWidget(self.close_button, 0, QtCore.Qt.AlignRight)\n\n self.setLayout(vlayout)\n\n self.setWindowTitle(\"HB Havens: normtrajecten\")\n self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)", "def init_ui(self):\n self.master.title(\"Backbone\")\n self.master.geometry(\"300x150\")\n\n self.pack(fill=BOTH, expand=1)\n\n self.btn_upload_file = Button(self, text=\"Upload file\", command=self.upload_file)\n self.btn_upload_file.place(x=90, y=10)\n\n self.btn_create_training_file = Button(self, text=\"Create & upload training file\",\n command=self.create_training_file)\n self.btn_create_training_file.place(x=30, y=40)\n\n self.btn_run_algorithm = Button(self, text=\"Run algorithm\", command=self.run_algorithm)\n self.btn_run_algorithm.place(x=80, y=70)\n\n self.btn_view_results = Button(self, text=\"View Results\", command=self.view_results)\n self.btn_view_results.place(x=85, y=100)", "def populateUI(self, widget, exportItems, editMode=None):\n layout = QtGui.QVBoxLayout(widget)\n layout.setContentsMargins(0, 0, 0, 0)\n header = ProcessorHeader()\n defaultForm = QtGui.QWidget()\n layout.addWidget(header)\n layout.addWidget(defaultForm)\n \n super(WiretapShotProcessor, self).populateUI(defaultForm, exportItems,\n editMode)\n \n header.exportTemplate = self._exportTemplate\n header.exportStructureViewer = self._exportStructureViewer", "def layout(self):\n\n # Initialise all plots and widgets\n widgets = self.widgets(width=200)\n\n plot_width = 500\n sizing_mode = 'stretch_height'\n self.init_grid_plot()\n self.init_line_plot(width=plot_width, mode=sizing_mode)\n self.init_distribution_plot(width=plot_width, mode=sizing_mode)\n self.init_school_composition_plot(width=plot_width, mode=sizing_mode)\n self.init_neighbourhood_composition_plot(width=plot_width,\n mode=sizing_mode)\n self.init_distance_plot(width=plot_width, mode=sizing_mode)\n\n # Row with widgets\n if self.params['case'].lower() == 'lattice':\n width = 420\n split = int(len(widgets) / 2.) 
+ 1\n widget_row = row(\n [column(widgets[:split]),\n column(widgets[split:])],\n width=width)\n else:\n width = 210\n widget_row = column(widgets, width=width)\n\n desc = Div(text=open(join(dirname(__file__),\n \"description.html\")).read(),\n margin=0)\n # Column with all the controls and description\n first_col = column(widget_row, width=width, sizing_mode='fixed')\n\n # Column with the grid/map\n second_col = column([\n desc,\n row(self.buttons(), sizing_mode='stretch_width'),\n row(self.grid, sizing_mode='stretch_width')\n ],\n sizing_mode='stretch_width')\n\n # Column with the plots\n third_col = column([\n self.plot, self.distribution_plot, self.distance_plot,\n self.school_composition_plot, self.neighbourhood_composition_plot\n ])\n\n vis_layout = gridplot([[first_col, second_col, third_col]],\n toolbar_location=None)\n\n self.doc.add_root(vis_layout)\n self.doc.title = \"COMPASS\"", "def _ui_layout(self):\n layout = QtWidgets.QVBoxLayout()\n #layout.setContentsMargins(0,0,0,0)\n\n # layout child widgets\n layout.addWidget(self._label_description)\n layout.addWidget(self._table)\n layout.addWidget(self._checkbox_remember)\n layout.addWidget(self._checkbox_ignore_missing)\n\n # scale widget dimensions based on DPI\n height = get_dpi_scale() * 250\n width = get_dpi_scale() * 400\n self.setMinimumHeight(height)\n self.setMinimumWidth(width)\n\n # apply the widget layout\n self.setLayout(layout)", "def init_ui(self):\n # Create GUI elements, set them in dict structure\n labelwidth = 150\n\n # Add parameter line edit for Factor Tm to Tp\n\n # Add line edit with browsebutton for swan result folder\n self.input_elements['hares folder'] = widgets.ExtendedLineEdit(\n label='HARES uitvoerbestanden folder:',\n labelwidth=labelwidth,\n browsebutton=QtWidgets.QPushButton('...', clicked=self.select_hares_folder)\n )\n\n\n self.setLayout(QtWidgets.QVBoxLayout())\n self.layout().setSpacing(10)\n\n for _, item in self.input_elements.items():\n self.layout().addWidget(item)\n\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.layout().addWidget(line)\n\n # OK and Cancel buttons\n self.generateButton = QtWidgets.QPushButton('Start lezen uitvoerbestanden')\n self.generateButton.setDefault(True)\n self.generateButton.clicked.connect(self.generate)\n\n self.cancelButton = QtWidgets.QPushButton('Annuleren')\n self.cancelButton.setAutoDefault(False)\n self.cancelButton.clicked.connect(self.cancel)\n\n button_box = QtWidgets.QDialogButtonBox(QtCore.Qt.Horizontal, self)\n button_box.addButton(self.generateButton, QtWidgets.QDialogButtonBox.ActionRole)\n button_box.addButton(self.cancelButton, QtWidgets.QDialogButtonBox.RejectRole)\n button_box.accepted.connect(QtWidgets.QDialog.accept)\n\n self.layout().addWidget(button_box)", "def __init__(self):\n super().__init__()\n self.geometry('{}x{}'.format(425, 185))\n self.title('PAD Tracker')\n self.frame = Frame(self)\n self.populateFields()\n self.frame.pack()", "def init_info_pane(self):\n self.single_acq = QtWidgets.QPushButton(\"Single Acquisition\")\n self.start_acq = QtWidgets.QPushButton(\"Start Acquisition\")\n self.stop_acq = QtWidgets.QPushButton(\"Stop Acquisition\")\n\n self.exposure = QtWidgets.QDoubleSpinBox()\n self.exposure.setSuffix(\" ms\")\n self.get_exposure_params()\n\n self.maj_radius = QtWidgets.QLabel()\n self.min_radius = QtWidgets.QLabel()\n self.avg_radius = QtWidgets.QLabel()\n self.ellipticity = QtWidgets.QLabel()\n self.x_radius = QtWidgets.QLabel()\n 
self.y_radius = QtWidgets.QLabel()\n self.x_centroid = QtWidgets.QLabel()\n self.y_centroid = QtWidgets.QLabel()\n\n # Mark current beam position\n self.mark = QtWidgets.QPushButton(\"Mark\")\n self.unmark = QtWidgets.QPushButton(\"Unmark\")\n\n # Mark location\n self.mark_x = QtWidgets.QLabel()\n self.mark_y = QtWidgets.QLabel()\n\n # Beam distance from marked location\n self.x_delta = QtWidgets.QLabel()\n self.y_delta = QtWidgets.QLabel()\n\n # Keep a list of mark sub-widgets so we can hide/show them\n # Obviously we don't want to hide the mark buttons themselves\n self.mark_widgets.extend([\n self.mark_x, self.mark_y,\n # self.x_delta, self.y_delta,\n ])\n\n self.fps = QtWidgets.QLabel()\n self.message = QtWidgets.QLabel()\n self.status = QtWidgets.QLabel(\"Stopped\")", "def createWidgets(self):\n ########### Main Window Properties #####\n # self.iconbitmap(default=\"\") # add ico file here, replaces default red icon\n self.root.geometry(\"540x500\")\n self.root.title(\"NOAA Satellite Download App\")\n self.root.attributes(\"-alpha\", 1.0)\n #self.root.protocol(\"DELETE_my_root\", master.destroy())\n\n #### Images in Base64 ########\n self.program_title_gif = PhotoImage(format='gif', data=\"R0lGODlh9AF9AMQCAKurq8LCwu3t7bu7u7Kysi0tLfz8/Nra2uXl5enp6YeHh97e3vr6+uLi4mVlZfb29kZGRs3NzcjIyFNTU3Z2dpqamvf39/Dw8NXV1dHR0ZKSkqKiohUVFfPz8wAAAP///yH/C1hNUCBEYXRhWE1QPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS4zLWMwMTEgNjYuMTQ1NjYxLCAyMDEyLzAyLzA2LTE0OjU2OjI3ICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlUmVmIyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M2IChXaW5kb3dzKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDoxNTVDMkM2OTMxM0UxMUUyQURFNEVGMEUyRDEzN0MzOCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDoxNTVDMkM2QTMxM0UxMUUyQURFNEVGMEUyRDEzN0MzOCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjE1NUMyQzY3MzEzRTExRTJBREU0RUYwRTJEMTM3QzM4IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjE1NUMyQzY4MzEzRTExRTJBREU0RUYwRTJEMTM3QzM4Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+Af/+/fz7+vn49/b19PPy8fDv7u3s6+rp6Ofm5eTj4uHg397d3Nva2djX1tXU09LR0M/OzczLysnIx8bFxMPCwcC/vr28u7q5uLe2tbSzsrGwr66trKuqqainpqWko6KhoJ+enZybmpmYl5aVlJOSkZCPjo2Mi4qJiIeGhYSDgoGAf359fHt6eXh3dnV0c3JxcG9ubWxramloZ2ZlZGNiYWBfXl1cW1pZWFdWVVRTUlFQT05NTEtKSUhHRkVEQ0JBQD8+PTw7Ojk4NzY1NDMyMTAvLi0sKyopKCcmJSQjIiEgHx4dHBsaGRgXFhUUExIREA8ODQwLCgkIBwYFBAMCAQAAIfkEAQAAAgAsAAAAAPQBfQAABf+gII5kaZ5oqq5s675wLM90bd94ru987//AoHBILBqPyKRyyWw6n9CodEqtWq/YrHbL7Xq/4LB4TC6bz+i0es1uu1+BDQVSKHju+DqEUhkk3oCBgmYJAwSHiIgZRoaJijkNGhAceJWWlxwQCgdLB46OAVeNn4cLg6eoLwSXlRwYRax3FH81krG3t5oISBC4HLtUGLgeDrSpx8gjq7gQC8ZAtw7OMg0KlMPYmBSvRBHYCqZTDsOuwMnnpwDfB889CdER4S8a19n2lhwK5kDjwwUZ8p4cqHdr2z50CNtswMZhQMAeDW5NCMDORQY79zJeKhDg4I4EBG9VwOBxiYZsBST/PEzIEs1CbBAiNACyQOIAki3oadyJT8FMHyezQVAZBSO2ChlKtlwKJsFLbAZ/HJBIICmLCTyzWppQkYdRbARwOhlwb+hKpmi7OL0HgJOPqbEgEIigVASCrwwLQHDAV4ECvg7ohPRHdwfZexTiPcF6L2zdtJCrrLVXQObbXHPrNsB7KxMFBRo0VBi9ofSG0RU0UJgwOFbKxzAY2+NAsR0SuIiJRt6dZbI9B2J34L4kt/CJzSgdgK6wAQAAAgOiS49OwPlpCpxZvcaBoTUuBZaZUIjlnXZX3uin+LZH4HyO4ZaKK5XdWbmG5tADSIiQAYP//xlEIEEAhgAgh3eVQBAc/w3jaZedBxBQxARIsfTDCniwpaehEeuhpJhwmBlXQoPMgNbcAAFEgMEBCzSAQAIwxohAAwscgEEEBBrYyzAOHJ
ChCt7xdYtjS6jDCgfYxSXhhkxO+NQ9wP3oAnwJZmbCMrhMoEAFAKCYwQEN2IZCAg0ckEGOFt6igXswGImJX95NoFsS9FWiZZqWENnknkd0aM8GbNZAJR7ylbAAgnc4cB8BKfpIAwIH4EiAAuQEcFYLdeIxQWg7YhIABmIKIUFBokUzJ5+oBuEnSqfaMOgdhZKA5yWKbkCABBj8ZEMDGEgw6TATWBXDqx5suUFQrCSmqxEkWlLAfRt0ik9tqVb7w/+qdzyYqLA3EBurCIdJdN8AETiaA6QRDNAsKxUEusK6eXB5yIMcucWhdxTgh+wlylrr70dPbqWtB4CGCoO3Vo4wMATjxmPwDAkskG6mlVR26ZhBboBiAPDisQG3RFRAHpcoDuBdSvb+qzIN2HowAQUIWowDwiK6eeSiH/ogscm4aLDgCiIfWQGjEaQrUatCSGunxhIEOKvHIK8stQstK9explHLQLM5SlvCx63u5pDAARLsuxHSKXQNq8blRqr2HZ8+vMOosSgAwKc2hnuJnBdP7XcJVYv29h1ryr0CzT/R7ZoGd//cAwIYBDA4wY6foPgldlM0IwZmV9IvEU97UMD/0PEgsMBF5N1k+N9TV93cBgg21DcLiIsQuiwaZ7AsEA1kEPBWaJtw9ehz/STxyUsKQSErDmzw6S6QUxrL56xXD/jveFDw3K8S6S5oiGEiEDOXEqQsBNmD0wbqCuLHMoHG6wuAQAa3fyxlLSIBMKfEt9Rr/f8jaBkFGEUgiuHBZ6s7Ac0WgAAsMQ9+97sB5IIWCw2EJwWdwwP5AnIAm8UneDt4EAT6wK35GZBgWQNg67Ani1sJaADakh0NFtiA23Fgg0hYgAQQFCHzmYBeJNxHAyKgrbAkcAaX89rdzHcAChIHhCr8lwDJZSMJOJE43tNaiA5wAG1BIHcRxMH8bBi3/xQk0XNLBBwGruYB6vnAgDcsXgmGqK0yRvFvUwRImQJwu2JV7nBbPCMak3eEA7DwDgC4oAngSLqVLEBv0/KhDrrjPvi1IwEYuJ0b77iyKeYqAZCDIS4GIElAxuVWnMMF6XZXhCEiiALBG0glb+KR+Q0OKWF0gfQupL+zPLIzhOSkyjwJDIkdUnSKbAHCmna7AkBQCZDT1kRKuUvMpdEEY6vmB2cHMSAmbI6oq2AKhZkqYopgbHz8zh9RgLAUDQ4C11TCAZqpOmx6M5kj0CGC9LQDD1aiedTC5gG0mSAokrNJnjQG5AIwMFIeDDP60Zb2DBqEBVyxFVXxCCT/6bz4mf/AhPDgZgwYeauL8W9I6zwokxJKgiH6s2L4TEE7BYkHBchxCQ1w4CVuSoITDg2KHQRm2GbgrWeiAKQVyplKy3lICtSTBGRjIzFSClWIBkCVtGQC5HDRyxIUdQDjFEEDJKAt8LCyBmzMXClH0EShHnGpaWEpCRY6sPZMCTMDuGglLDkhStYtmGzUXjCxucZcDJZlJ1vlCuh4C7PClakFeWpLI6BTZylVprnoUgaz5by1FqGLBQlmeRS7gl/eYgNUdcFL7/A+yY4pk4b17GN3I1evSoCg/0wtwgawWdF11gmgnR4hV+syvrIvA4PjWw4GxziKCsC0seDnbDdU27lGbnL/bZGbtzTbv642YQESlewJ7ebcbALTozSgqeiC2ILeSdO5011KdScLgJgZdLsDwG22vMsE8EY2fuodHVilpM+CxBQGgY3nCsxLHjvGNz3zJYEO9ZuolOKXwgXg7xL8O72sCoCNAB1qCZCqncO+YHmYaOQLTjq9Az8YLREO4HVxUTDMxqVLFOaAhpXA4WRlFcX4+KlI2UpcFOZyBHplrVFbQOKzyfbFLInxCHpHgJhdtqo3HsAxd5yEmjSWlglIMoTACAMdehG+JyirglkwNjEbGcq8kXI+y4YLB4wTvxutxN2GTBMa38RFg8vXn8scAak6GAYBbq6IAXeAACBIuXCO/4ycz9noExLMXXh+pYmNcMyOLuCq5HHAZ5jjHFI4wjoVkOomXXC7PSzHOdszdXWck5rJuTbS8m3qrU8wP56l7iy7ZWg0Nl2E3joTb33UCkqIjTFlK3vVuE7IpPMZgd66LItYZsUEuiS5XOzaCAhoZuP86uys2C8G1i63P5gd7WNMm9LpVBOb2hlv7WRUq9K8WwYorG6YoFkEA+s3jcPabnfrGr0q6HUMHeJViNL5tC4egjAKQrSACxylR8zzxf3N54ID4t3UdnOwltVOHOEClh3ngZuHFoeN8wTlL7C0y6OLcI8jA+Tw7iMC2YqZ/UjgnQFFQgJO6ExGTW7mRwq6Cv/IjfRsQNvmqMC5XTAgylswXABj67nTuEpwIDB9KxrzddPtgaEW8HvskYQ6OqQuAmPiojLhW6Z/iuyAf+uA33ZTV2c4wHcOFIDvf/c74Acv+MIH/u/MYLcAYtf3wxPe8YZ/PN971nW1u4HtWI+qOhvAqy1GKoaHJgKQnUUybSkHNKFJvepXz3rWK2By0j0BcZGE+tbb/vahsUZsLX/zg7916sK2+ooysMUFFNbAZw2Ctd9HgN7mQzTNgbX0p0/96m+A33Wf3Qm1ROrqe//7BqoA7FPLezVgXqwRKHKEihaiFpHNF0pXHqJI9s5F6UcC+M+//vfP//wTiACgN1TEUiz/JHN//XeAB0gg1pZ95Rd1vjdDEqBqAQBqrFAcDNQ7fTQBEccDUjVCzvEdTLMiCzCCJFiCJniCJGgmHPMdLsZGo3M35YKCMjiDNYIjsRN/DXh5DzgDdOVn4JN5GucxNecDYnchttJMP2UuOMArFKgd5dNs05M704ADEVNoLJh8OWh+OzgDLhUzxFUcP9E7llYAV1dRC0MyCAJQ5PcCvTM552YCYhZHdkc7QQghipeFZHB+UHVbWfKDmVdlzIBtP2Bp+WArzidkbyFyaJNcS6YDtkRja4iHYaCHI7BQR3dKIjJEUgUhkThSPEJ/EtGIOnA8nXFr6lUsayY2B7B8cyiJ/1xAiVOWAfW1E9+CTgGnIEe2AjL3LF1SZGr1A5DTR5vkgqTVAwXWYEPoimUAi3N2dhX4TfIji4gCIRKQiyewAEdXiIyChDzVAwdQh/4TQGnYWb/nAmN0chuojE2xhTWASQEgc8QBjc/1cL5QASmHAoA4DF+jH+MYejvAWKfFLYdYUkEQVP3zhOqYBswYi0UIE/KITsnGWh1RAxEAjy5DMnw4MgQJBJjEb2ahMLlAZrxDRANnjQmpHuxoAzqUbvEhj9E4ABHJWgQQRu9oD5vSJfzhRexVUafoAc+TAKf4i6pyfHHRiidJBQsZQJVWFi75XOkSk3fAAQ7QLgazABvgAP/TaCcNQ3Unl4qPgwGWlhgswo3peAM65AvfdpRekJSVmAENGSJKsTNQ2Qp6QQF2OQeIpxGbwjZcRJZYKBxuRoYCMo5puQOPiHxqmYcpaZbVZg/fwmjp4oxIVytUVCOPZlxCMFYDowErmD9GOSyBeYeJKRmLaQMQKRRNSWnpUgEWd3EFsCU46SMIwG84JHpE+Yw6OWBFQIoBaZKjuQRsaV1VxwypeU47AwBz6
Wzch5PTcBe5sJNEAF3T05WiKUGwlQuf+ZvAWZqM6WawUpz5dCYEIH5jpwn2BxAjUIeC9WSGGQGToy3EU5Y68I3DYETauZbcaZqNlmwToJsswCsSMAD/ACAJWZkRmfBqA4Ary3JCcSSfqiiZ2oaZROBe6PiX90kFh6QA1QkDPWgTaxgx4gkAFeAArekPWiIazxEAANEOadg45bgrEVCgl4CIfTJQb+egF+oEEkMAp5F6zDEX98iGbimirPej5fNW6BIA1XEagVGiUbkXy4EfjVISO9OjPsqcSYBOPJoaq4carNccz5MEvSOgXHql5GKhORoFCyWgBuIcKNKJMnA6SmodpVFqKoqm2FQmOFIgPfoXgQEBgBqoe2EfoUFq+VEuYYICTDhrdQod5fKiN8CEbGoa1hd9jCKIR7AzS1qnXYIrvpmmRYAuAzKB+6GEQaCnozqB+qEi/3g6JmVyI/+3qagxq6jRqIe6IomqAsYnIKq6HxgwhUqAqqo6rMRKqog6IbuaqqsKJqCqFjTCRSySq6L3rNAarZ8qP68aIBMoHZ8gHRPIH7h6P6ZTI1zUItdqmtRareoKrS0CqYaZrtbarJEGSiNoIxiQAQFSNPzRHyLIQO4qrwAbVwgwsARbsP8asAibsAq7sAzbsA77sBB7Ao4wABGrIRngCBFwDkqaCDuwsYhwBRigAPSyCTKwRndJASegLie7sizbsgN0AiubsS1wsgQwA6lmlzUrAyprl8rgsj7bsjlrsj/rszk7JSdrAgewsgowAyz7AisbpCKwAaxxJAzzZP9D+7Ma0AI7e7VEmwLVQC+wxIW2oB11Zzg0C4wxiSQRtC4A0A6b6HQHoVMU8COek0udMrcy0CyJ+rZQESaVhRj340C7U1kE8FaVhadym0uwkw1S+RhaUQAZwrf66BENADPZICdHZLnYkBIPU7cQ4aQuA6wrEBIQYKqSW2fuUSeugKb/ZKotYBQ9EkbN8iULcLrRwCLHxCOLpgyVkCskUFmlez+ZgqknoLqR2ACX6CwTeQKP67rCU2498gyOZqD6wALBhxhY2LrnKgDIUTEvY5eBsTeiiwJ6RUrGYLsVEhyDogAphQD/dGUqkACwC78q0CyNkpxJhQHeWWdraDNhKgL//tQQuysAg/K/KOANmEN+3eu9dzkBeYkHDVFLj0u8IwK9wdE5mSBqFOAAU2sJ4KAC09sKEPC9qzEYEzC+8vO+UHsCafKanNqmFVAn9sgCnVIPDnBBkjprBiIH/0Qa0fccb6oruJUSfRMRmoKDKSC/RzzA55Qmt9Ife6rDpdEsU8kcP3yoGeBEfGDF4JcfvtsCT3KmI4A9FEDBJpBB3fi8ZyNSeDEBXCx9V+ksP2PEsOLD4Jei9DtlkVMgdErFdlxqQSwCTegBSEKpsHZ9ePHBJxDCUdk8L+wcGtDGbELHLoPEcVox3Qcd1FEdaRITf3k5FmIetAApAzIKzuHHlnoI/yhSPsCAYs0CAOvkZazlYe1lFP0ZieGmZyp6r6WMCKfcw6mcH02TxcBcarK2ys57AuuxkavCAQgZv9nBZeKIB2kCy7CRJoWIH5pMHSJKELErYQkCfbFmagn6xSlAyrE6a6hszMIMJmNDECPUqNMxa3UCACuxwBACLUDMrZzswe4hyy5DyzagxfKyqvmaI9LSHgZDIppQCY4lVmYiIKOaXz3MbffHH8wKwBAsspoCQsMxAffmAgvcn5VXidWsoiwS0fi3rdU0lRaNf+BqIwT90geI0a26Hte0HvXwhikQLgThlSJAQflgy1DkQBxwH0CcIvxRNAE6nvUgQzxHKAVtgP/9pyIobAJlEiArTSAtXYAX/SUuggD0MTonYtBajSYVg6nNQtYpuh/4yquG0Cy0wUF2EtI30CwY+athracB4EQKME6uPLaic6SVmK69Uk3aU87s6iIB1CnKEcr+KAIfDZ6GYsuUPVcnnSsDa9gZKQuduiLlGtYW5TkhuK7Vaq4n9iQ5/SSdAmkpIBv08Vso0NoaIBvqYzBfkS+dCtYE+6oBitjcMhxflKBfYtrlykAvMCPkWkWI/dmL/SJAicm2Qi56vdk1qKTSEhUiEKMQjNTlbK7KHSlKSh92Zg6TLSLtGMos57y9w8g3fClCPRrNwtMf1VbZAzZ1cTmF6jkHdt7/99NjII3e7HPSyQwpWozfx3HgrMwDOL0k62G/ySgA5PbKJqbfpZI9B6Y3bhybJYEuYkfEUQ0rjSOtPmDgnoPg2EQfCmArKorCZBI5NgPiItAsut3iHvHivkIQCJl1mnLZbJYmNkW/vbMaE6Al9Au7plEPrp3gngNWf8nQVtwp4Yi0dS3gumrZVn7OBO61WuzkR/UkTmXG7ajaDg7mRvF0ArBLz6LLpQTlpSHlz1wCQM42V62atv3M573CbNjlZjwcL3inCRc59DES0FMPL+jl59xozfLXu8DjMpnlMTCbrZBIJrWnXnJWlyMvBADZEf7ghenKttJ8e5VC/r1iWC6u/1s+JmCelp4e4SxD5m6xHn+RB5t25tXklaB+CPtC3+dk6LZC2PFLNo2QoPJQ6kPQ6qHiRM0jxsf1JHICJg7UPEDKAgtVUAwEKVX+qUBJEEgiAckHokWzoixMKCweADZTxliI7Ckg1NNNIEqONsbeAgDu43aR6sq86sno6SUNAw0e608CGjudQopzQ/vileyeH2K35CIwcYkikuxDNqSKbXkuevgeKngN1CZgZrT+Jc3COJZsKCQZlX9WJtn+Hk+zB37wUTTSrrxmwzgpIEYx5dhU8WlDKJ0qIGnij/HOAvMO6bxm7zPf5PkO5mBzsNfD5uf0JKIB22hjIW7sRF7Z2v83LwE5j3A2I5RMZiP9AawfvUTbq+pCLyZKzFnMTu0ZQB/Ebdu2Ip9j48TxUCMljwM6NIaKIgEy8iIYBMG/PnzzDdg0bzmVoAGMsiIYcO74tPOlderJDfRHnz2s/iR+Vwd6IPl0MPmhkNpIj3VPwhzIctuAUw/QkvkkoN+DbyOGvzvzncYJR67IHeKELPmTH/t68Sw3oO4fZRRfFNns5MQRlQeiqKv7ogHlY3xxb50BoHv+4AAd8TC43yUAgS52Ejy2r8aETOemE06iM1iIf+U97vMfxfgkMP2NvxMbcD/9nvSVwCUAcObJhCxf9IF4kIoW4kzkwkASE/PBlCa/f7P/LAsCDuWIknAKh7eyres6CDrT9ZxsLTVgid20JoDAwWYULCotwCBQYEGGxWOtoWQpApgDJkiIyKji2SIyAFAKnNdrEvDZJC0NgehDZCAsjhZOw+nw+N2srTgMYcggYFC0bGSEoaiwTHxFji08rVSCjaEgOCwRHQGy7PQYlbKtslQcDFKpekidyG4AEDSuQEhMoeitUOASiPrWFnocaikyOkJ+hq4USMEKALOubIQlTGKvOhxcet7kmAraLAQBSCyMn1gtNWl6CLG7CyBcBWtxeXXeU1mQIQABABsUTFDDioMGcSeieeCwgUCvOxj0eaAQoYENWaeqCdAwR4qiAwOC/9ib0Y2eJXeZKLUcB6oYqXLBznW0
6e2FhkTjZNESYIvAgAHICPg8IWcFB1xFac4QyUIDSXwmUbY7MZNpUBrzsFVY4IPbzm8ZOAJE4REnjXQwN97L54joPAgUs8rESCFAhn5vHaad0QCDBIIADG6oQAECMkdo1SJzMFFLAwQCjbIoMKrGWlQ15hW4pWUBgsEBrnlgm8IfYCMvN8X0tHVFV846P6bS6UCBhgobfgMPzmTZT51BbTUJMIGFgwh4ddEbHgAqCtCiMZBegOF0iz5aIc7a7LWshrMJEKyEwNt38Pa4+D5O2xnkaw924bpDAN3DxAGgv+AlG0YKDNBXF38FRv+FZRhEUNgABBRkEEIucIBBfAqU9cIjDs1Xw1IZmoLfCStx0ho685SYH3i1/XGbauS04EBvtxxWY40QBhDBK8VBBdQAEkgAwB5EKIIMHQEASZ0AH4KYEX76tVCBeDMQUIEGCihAgZZaIqNALwig14J6vtlYJoQ/JhVYhzXUV8AX8Y2BwHIs9HcNB3fFtd8GfPkF2z8JplIaFxk0GMCDEjZGQVIJfAUiLwHW4qJnM4AHIh/GkBgbJihqKsZs4RnTYiCTispcBdIFkKqqSErQl4m29WjcABFkIME8FTgnlzRMtBqBkpVmqNmOuu4T6jsDHXrYb/NkEU6YUJxK1KqrAhn/gYWACiUpfeDh6U4DoA0H3gYpyWQnIn2y9CegA2ypwR+lLXAAoYYCgCEUzp1wUpN7vLjmJ401WV4kmaorRn0pyrTilKXeRCqMpvC6xQETUzzxAg2AVFOsomSAQQbQQcDPnBkN0XEG1CEAMIiPYLyImC9aVWuqRRU0amXpbfBjXxVXfPGrnvgrmL3F3jMdJcNh5MDCBiMzDT8H+vmzJ9AV0Np5DchLEGiUybkHBF9DMIHYY08AwVcawRl0SGKCTfbYX4tJLsFSo3BwpwoqbKxa2uamA0XhYAsrC8cZx88BEhxV2BJ/a0edVLu07XbZqPHiLAaoAVBwLVhz4bEEevFw/7OYiGAcuBhqa2X0LjAfAZ4CQxTWNA90nzB0MoxDnS7tVDxeIZxVbAfarAdEgMwE69Fo428YCctwRi+Chnzyyib2FVIykIXgOHZrfsSnLD7ccMZrnWW684SLYqFAI+8AURQFXrxSV9HPOD1iFVhvIda2T0BuR+dpBzWnEN1bIGU+wYmvI5dzhIjEgAFkSERnEXBdA2PRmIk4J16sMZ2+VtCT39GgARH4jwQwYDsFkIkoRVkhhACAmg2F73mkYpIDUrjCGxYEPBoRy7Oi5hJOdc8I31va3kY1Pr4dMFIbGxyR0GOTOxmpDs6Snx0E0MFk2PCGNAOADnWEteJFKU2kIP+GzXqouyRqzBwOk4ROmucJ8CjDQELaAwF2NIb9vE9/czPdAmRnRyFesRI5Ak20kBSBQyKyQQN4HD38FzTw+KYOEkgkIgtDRqY0cY/bA+Luhqg3JarxiEZEY7aWSBs7JECE84DINGbFkeyd0lkj40AkDUlJRV4yIqPRDiMLMIA/GuEAjRqgGRFGSufh5gcRoFwFbcBIifytAWUADwTwJYY5TmUdU9xg4BYAHg78MmMC+FYLFFAH3UykhD4DE5iwNpDGIMUP/joAMiAwGexUhp3w2s7IPFCBDCygcQX84UDH0YC8pTGBOQlE+dAIlM08dEcJaMYLlIEKWIJqMPW85zr/2SnN7aAGV/GTAGpCI5YjjPAFxCSYAY+JulpgIJf2aagRGEmy2VkFM1AIAAhRsIHGSKZA2GMpH3XKFAUArgYA+Io96/BCKZpIIF18jL9OSLojDAYjvNhC7qJAGXGqRZhH+6onvKkkZLKulKb42+6AVrgpRLRuRm1Ft1bDROxYlTJYvYiYiCDNDBCgMRxojkMWQAGVyZCAuyAdWE330hOUwXYeKEBYqpHSGKUTLw3IAEYiogDS1CAC/bTPZOCaHsZiaxGdjcgEDnEYDTigUQxhglHtKVQxXGVIvvDXPKBpzSOUwXrWOtm+pJFKsRbXA2A6AEL7FkrnJpcC7oirWmP5/4k8uKCVzxgRx5A7WQy2lAyIawEBAIqeCAAAYByAgGvRwJjsqlGxyW1sLJDYkQMEYD9M2c1hFNMoLCLiEofTgHpF0N80vCA0TNiuXedrMAkQeF+zrYPrwFcDPHzln9tA4hUPkVa1YACOOfJVcgswMQz8N0Mnbu5Cn9vi5DqgpwgEFSitW4sD2FQYU6IiemOEiMYuooukQU+QULMTDigAIitNLguSKh/72gAPAYhwkySzYDgtYsqIxcYEbjGrAK1kX06O8nZcCKIuv2e80shZMzlzANtVEy2yIJAd+1mBugbkinw4JJMLMNwUl2W4K/Kf8+gsSiY7QIwJnUVKgOK/Mv9A8M4NdMsK6iDaKOEZuHr+Ej4Ik4stu2ACvXlcs5bL5BX8FiBzDqcYNjuADRiZFerxcqpRYBoCjJYVBaAArYFJ6eLWGngEqUBCVsGBAkjGy63yTwGaTQEp0necGWg2ta2ZAAJQuwAaqKI1qC0EnMYpAwmhduZGmO1zozvb/cuAuNPt7nPzJQMKyDaAqIBtam/7kye497v7TQGaioHfbvqtwOsN4jQ026KDyAS1LW22ZguBOOEed7MBYMdFSOAMizG2jGgE62Zr4EsJYHi/+12RBAk832UdSJWKzYb1KuAWlg7vOLdDgA3E1tiz7o+OYEHykqf75FQYTMYLcpAtaUn/A785DFGsFVDCPKgOAI/F4aIOP1ub4UwiKoPVgw3crEuL3WYwSAXK3p6zO6WEDCJIYsx+duEQpVUeMxSOFB1CsLty6GNvO3vevvT3TB2rZiiKxMc5+CYomutMz/sMskx4azGo6zRnE95TfV7DAEADSBcGjVR4Q/hY5Ax89zvcfwTMe4iw6zIOIdSNniWk/z3sk6950SW0efYcpgkArcYiRF/2vvs97aePctaijgumD6MJ6kQAAAmDJIDGxflIShM3GjTJP17++tHWCvFS1aoTR4DuEBo/+clflO/HK/xRLz/7z5SjA8TL+ZOcffWrNfwbVL39+peWjmhX/1YFyP9B/98flIGqOEc1CIT9xV/4aV9+dN/0pY12OMj4Hd+Z/AitiB2tnJRQFOD67Z/58cXsUZ319ZwDIgv5HV/yvR/doEet0EwLRYj7WYuMdeAHlp/uiaCtcUGhTIshYcclHNfErJ4QxYsQ0oBlWIw4oIfFbJ9axB/8sZN2HBKQUGEVViGtQCEAyYv1WWEXHtIPXg3FbKCCFCFoecoOcmEXWqG1jFmclCEsICH8gcTIieEccg5phCETqloRHkBPLQgPUgsb5pM0XcwgjByDpKEaUuEXjmFgxKEZot4OFsa0TFJflI4DMsgkUgv6gdUhTqEiXqHTNaFWSNOJYYDE+EwqJMAo/pHBKqqiKrLiDTDfETYAIQbULeJiQFUGLFxNLvqiLlYGZ4yiK+aHLf5iLu6imogTMZICM06XM6raLMZCKXaOxASjTNTiMfpiLbZVLMRi41HjKaJiN4JjEYpjEm5faRijNnJj4KziOx5TPMrjPNJjPdrjPc4jPOLjPj4jNDqWPvJ
jQArkQBJkQRrkQSJkQiqkQYYAADs=\")\n self.open_img = PhotoImage(format='gif', data=\"R0lGODlhEAAQAIcAADFKY0L/QplnAZpoApxqBJ5sBqBuCKJwCqNxC6RyDKVzDad1D6x6FLB+GLOBG7WCHbeEH7qHIr2KJcaaGcaaGsKPKsiVMMmWMcuYM8yZNMmgIc+iJte4QNq/bOKzQ+LBUP3VcP/bdfDkev/kf5SlvZylvbe3t5ytxqW11qm92r3GxrnK5P/XhP/rhP/viffwif/4k///mf//nP//pcTExMXFxc3NzdHR0cbW69jh8efv9+vz//r7/P///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAMAAAEALAAAAAAQABAAAAiZAAMIHEiwoMGDBzNkwHDBAkKBGXpI5MGjAsKIMjJm7CEhAoQHDhoIxNBDo0mJEhncCHChB4yXMGPKWFBjgs2bOG1+aIGAxoQYJk3G6DCBhQGfQGPClPFiAogCNAL8dEG1KtUZGjwQiPpTxoivYEfM4LBhQFSpMUKoXatWBAUBNQROUECXboIDBgoQGGDCxkAbNAILHuz34cGAADs=\")\n \n # Define text styles\n title_padding = 30\n title_fonts = (\"Helvetica\", 18)\n\n ############### main frame ##############\n self.fFrame = Frame(self.root, padx=5, pady=15)\n self.fFrame.pack()\n tool_title = Label(self.fFrame, text='NOAA Satellite Image Tool', image=self.program_title_gif)\n tool_title.pack(side=LEFT)\n self.frame_settings = Frame(self.root, padx=0)\n self.frame_settings.pack()\n\n #### Location Settings\n setting_location = Label(self.frame_settings, text='LOCATION', padx=title_padding, font=(title_fonts))\n setting_location.grid(row=0, column=0)\n\n #### Location Listbox\n frame_location = Frame(self.frame_settings, padx=5, pady=5)\n frame_location.grid(row=1, column=0)\n\n scrollBar_loc = Scrollbar(frame_location)\n scrollBar_loc.pack(side=RIGHT, fill=Y)\n frame_location.listBox = Listbox(frame_location, selectmode=EXTENDED , exportselection=0, width=35)\n frame_location.listBox.pack(side=LEFT, fill=Y)\n scrollBar_loc.config(command=frame_location.listBox.yview)\n frame_location.listBox.config(yscrollcommand=scrollBar_loc.set)\n area.sort()\n for place in area:\n frame_location.listBox.insert(END, place)\n frame_location.listBox.select_set(2) # Set Default selection\n\n #### Image Type Settings\n setting_imgtype = Label(self.frame_settings, text='IMAGE TYPES', padx=title_padding, font=(title_fonts))\n setting_imgtype.grid(row=0, column=1)\n\n #### Location Listbox\n frame_imgtype = Frame(self.frame_settings, padx=5, pady=5)\n frame_imgtype.grid(row=1, column=1)\n\n frame_imgtype.listBox = Listbox(frame_imgtype, selectmode=EXTENDED , exportselection=0, width=35)\n frame_imgtype.listBox.pack(side=LEFT, fill=Y)\n imgtype.sort()\n for place in imgtype:\n frame_imgtype.listBox.insert(END, place)\n frame_imgtype.listBox.select_set(0) # Set Default selection\n\n #### Start File browser button\n frame_directory = Frame(self.frame_settings)\n self.svDir = StringVar(value='C:/') # Directory string for button \n frame_directory.grid(row=2, column=0, pady=5)\n print(\"self.svDir is this: %s\" % self.svDir.get())\n\n frame_directory.run_button = Button(self.frame_settings, image=self.open_img, width=20, pady=20, command = self.load_dir)\n 
frame_directory.run_button.grid(row=3, column=0, sticky=W)\n \n self.eDir = Entry(self.frame_settings, width=30, textvariable=self.svDir)\n self.eDir.grid(row=4, column=0)\n\n #### Run Button\n frame_run_button = Frame(self.frame_settings)\n frame_run_button.grid(row=2, column=1, pady=5)\n def check_selected_options():\n # areas = frame_location.listBox.curselection()\n # areas = [area.[int(item)] for item in areas]\n # imgtypes = frame_imgtype.listBox.curselection()\n # imgtypes = [imgtype[int(item)] for item in imgtypes]\n # return areas, imgtypes\n\n frame_run_button.run_button = Button(self.frame_settings, text='Run!', bg=\"LightSkyBlue\", width=10, pady=10, command=lambda : run_satellite_function(self.svDir.get(), check_selected_options()))\n frame_run_button.run_button.grid(row=3, column=1, sticky=E)", "def create_widget(self):\n item = QNodeItem(self)\n self.widget = item", "def init_gui(self):\n # Choose a layout.\n main_vb = QtGui.QVBoxLayout(self)\n\n # Add a list or tree view.\n self.list_view = QtGui.QListWidget()\n\n # Add the buttons.\n load_btn = QtGui.QPushButton('Load Selected')\n cancel_btn = QtGui.QPushButton('Cancel')\n load_btn.clicked.connect(self.update_list_view)\n cancel_btn.clicked.connect(self.close)\n\n # Connect the list/tree view with a method appropriate for user interaction.\n self.list_view.currentItemChanged['QListWidgetItem*', 'QListWidgetItem*'].connect(self.set_current_name)\n self.list_view.itemChanged['QListWidgetItem*'].connect(self.change_name)\n\n # Add the widgets to the layout.\n btn_hb = QtGui.QHBoxLayout()\n btn_hb.addWidget(load_btn)\n btn_hb.addWidget(cancel_btn)\n main_vb.addWidget(self.list_view)\n main_vb.addLayout(btn_hb)\n\n # Show the GUI.\n self.setGeometry(300, 300, 450, 300)\n self.setWindowTitle('Hello World')\n img_icon = 'C:/Users/caj150430/code/so_much_win.png'\n self.setWindowIcon(QtGui.QIcon(img_icon))\n self.show()", "def build(self):\n self.title = 'Processamento Digital de Imagens'\n self.main_layout = MainLayout()\n return self.main_layout", "def __createWidgets(self):\n # Widget canvas, used to draw rubik's cube\n self.cv = Canvas(self.master)\n self.cv['bg'] = 'white' # Background color\n self.cv['height'] = '440' # Height of canvas\n self.cv['width'] = '560' # Width of canvas\n self.cv.place(x=0, y=0)\n self.__drawCube()", "def init_UI(self):\n\n self.master.title(\"Search for different companies\")\n self.master.geometry(\"400x400\")\n\n self.label_combobox = Label(self, text=\"Search by\")\n self.label_combobox.pack()\n\n self.combo_searching_options = Combobox(self, state=\"readonly\")\n self.combo_searching_options['values'] = self.combobox_values\n self.combo_searching_options.pack()\n\n self.label_input = Label(self, text=\"Entry the value\")\n self.label_input.pack()\n\n self.user_input = Entry(self, width=40)\n self.user_input.pack()\n\n self.btn_submit = Button(self, text=\"Submit\", command=self.submit)\n self.btn_submit.pack()\n\n self.text_area = scrolledtext.ScrolledText(self)\n self.text_area.pack()\n\n sys.stdout = RedirectOutputText(self.text_area)\n\n self.btn_back = Button(self, text=\"Back\", command=self.go_back)\n self.btn_back.pack()" ]
[ "0.6719459", "0.6683136", "0.6647989", "0.6631315", "0.6631315", "0.66061366", "0.6587179", "0.6580063", "0.6485855", "0.64484394", "0.643607", "0.6428566", "0.6412553", "0.6378195", "0.63732135", "0.6308009", "0.6247851", "0.62427694", "0.62391776", "0.6239147", "0.6238216", "0.62343806", "0.6214879", "0.6210052", "0.6202873", "0.61964786", "0.6188289", "0.61797446", "0.6174954", "0.61723095", "0.6165965", "0.6164565", "0.61553663", "0.6154863", "0.615029", "0.61482227", "0.61458987", "0.61361885", "0.6126042", "0.6118736", "0.609209", "0.6087924", "0.6046619", "0.6040278", "0.6028973", "0.60238135", "0.6020823", "0.60168964", "0.6007825", "0.6003025", "0.5999335", "0.59910554", "0.5986237", "0.5981642", "0.5981285", "0.5969733", "0.5967421", "0.596454", "0.5960594", "0.59556305", "0.5952845", "0.5947923", "0.59437865", "0.5943036", "0.593827", "0.59366906", "0.5931331", "0.5930133", "0.5925795", "0.5923951", "0.5920523", "0.5915696", "0.5914192", "0.59038186", "0.5902031", "0.5896402", "0.5893902", "0.5890224", "0.5887733", "0.5885885", "0.58830875", "0.5882494", "0.5882022", "0.5880183", "0.58784795", "0.58779585", "0.5874282", "0.5869207", "0.5859832", "0.5854873", "0.5853521", "0.5844288", "0.5843909", "0.5840471", "0.58315855", "0.58265996", "0.58264697", "0.58261144", "0.5822966", "0.5817718", "0.5816931" ]
0.0
-1
Render the image represented by (rgbobj) at dst_x, dst_y in the offscreen pixmap.
def render_image(self, rgbobj, dst_x, dst_y):
    self.logger.debug("redraw pixmap=%s" % (self.pixmap))
    if self.pixmap is None:
        return
    self.logger.debug("drawing to pixmap")

    # Prepare array for rendering
    arr = rgbobj.get_array(self.rgb_order, dtype=np.uint8)
    (height, width) = arr.shape[:2]

    return self._render_offscreen(self.pixmap, arr, dst_x, dst_y,
                                  width, height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw(self):\r\n self.screen.blit(self.image, self.image.get_rect())", "def draw(self, surface):\r\n surface.blit(self.image, self.rect)", "def draw(self):\n self.screen.blit(self.image, self.rect)", "def draw(self, surface):\n surface.blit(self.image, self.rect)", "def draw(self, surface):\n surface.blit(self.image, self.rect)", "def paint(self, game_screen: pygame.Surface) -> None:\r\n game_screen.blit(self._img, (self._x - self._l_w / 2, self._y - self._l_w / 2,\r\n self._l_w, self._l_w))", "def draw(self, screen):\n\n if self.exist:\n screen.blit(self._img, self._rect)", "def blit(self, screen):\r\n self.image.blit(screen, (self.rect.x, self.rect.y))\r\n pygame.display.update()", "def blitme(self):\r\n #draw the image to the screen at the position specifid by self.rect.\r\n self.screen.blit(self.image,self.rect)", "def blit(self):\n self.screen.blit(self.image, self.rect)", "def draw(self):\n if self.dirty or (self.image is None):\n self._render()\n self.screen.blit(self.image, self.rect)", "def draw(self, screen):\n pg.draw.rect(screen, self.bg_color, self.rect)\n\n for y, surf in enumerate(self.images):\n # Don't blit below the rect area.\n if y * self.font_height + self.font_height > self.rect.h:\n break\n screen.blit(surf, (self.rect.x, self.rect.y+y*self.font_height))", "def draw(self, surface):\n\n\t\tsurface.blit(self.image, self.rect.topleft)", "def draw(self):\n self.game.screen.blit(self.image, self.game.off(self.pos))", "def blitme(self):\n self.screen.blit(self.image, self.rect)\n # print('y = ' + str(self.rect.centery))\n # print('x = ' + str(self.rect.centerx))", "def blitme(self):\r\n self.screen.blit(self.image, self.rect)", "def blitme(self):\r\n self.screen.blit(self.image, self.rect)", "def blitme(self):\r\n self.screen.blit(self.image, self.rect)", "def blitme(self):\r\n self.screen.blit(self.image, self.rect)", "def draw(self, frame):\n xpos = OFS + self.x * TILE_SIZE\n ypos = OFS + self.y * TILE_SIZE\n frame[ypos:ypos+TILE_SIZE, xpos:xpos+TILE_SIZE] = self.image", "def draw(self, surface):\r\n if self.visible:\r\n surface.blit(self.image, (self.x, self.y))", "def render_image(self,\n frame=None,\n factor=4,\n antialias=True,\n trim=False,\n transparent=False):\n if frame is not None:\n self.frame = frame\n params = dict(\n factor=factor,\n antialias=antialias,\n trim=trim,\n transparent=transparent)\n self._remote_call('_exportImage', target='Widget', kwargs=params)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def draw(self, screen):\n halfScale = int(self.screenScale / 2)\n\n x = int(self.x)\n y = int(self.y)\n for i in range(-halfScale, halfScale):\n for j in range(-halfScale, halfScale):\n\n pygame.Surface.set_at(\n screen, (x * self.screenScale + i, y * self.screenScale + j), self.color)", "def draw_offscreen(context):\n offscreen = SprytileGui.offscreen\n target_img = SprytileGui.texture_grid\n tex_size = SprytileGui.tex_size\n\n offscreen.bind()\n 
glClear(GL_COLOR_BUFFER_BIT)\n glDisable(GL_DEPTH_TEST)\n glEnable(GL_BLEND)\n glMatrixMode(GL_PROJECTION)\n glLoadIdentity()\n gluOrtho2D(0, tex_size[0], 0, tex_size[1])\n\n def draw_full_quad():\n texco = [(0, 0), (0, 1), (1, 1), (1, 0)]\n verco = [(0, 0), (0, tex_size[1]), (tex_size[0], tex_size[1]), (tex_size[0], 0)]\n glBegin(bgl.GL_QUADS)\n for i in range(4):\n glTexCoord2f(texco[i][0], texco[i][1])\n glVertex2f(verco[i][0], verco[i][1])\n glEnd()\n\n glColor4f(0.0, 0.0, 0.0, 0.5)\n draw_full_quad()\n\n if target_img is not None:\n glColor4f(1.0, 1.0, 1.0, 1.0)\n target_img.gl_load(0, GL_NEAREST, GL_NEAREST)\n glBindTexture(GL_TEXTURE_2D, target_img.bindcode[0])\n # We need to backup and restore the MAG_FILTER to avoid messing up the Blender viewport\n old_mag_filter = Buffer(GL_INT, 1)\n glGetTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, old_mag_filter)\n glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)\n glEnable(GL_TEXTURE_2D)\n draw_full_quad()\n glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, old_mag_filter)\n\n # Translate the gl context by grid matrix\n grid_matrix = sprytile_utils.get_grid_matrix(SprytileGui.loaded_grid)\n matrix_vals = [grid_matrix[j][i] for i in range(4) for j in range(4)]\n grid_buff = bgl.Buffer(bgl.GL_FLOAT, 16, matrix_vals)\n\n glMatrixMode(GL_MODELVIEW)\n glPushMatrix()\n glLoadIdentity()\n glLoadMatrixf(grid_buff)\n\n glDisable(GL_TEXTURE_2D)\n\n # Get data for drawing additional overlays\n grid_size = SprytileGui.loaded_grid.grid\n padding = SprytileGui.loaded_grid.padding\n margin = SprytileGui.loaded_grid.margin\n curr_sel = SprytileGui.loaded_grid.tile_selection\n is_pixel_grid = sprytile_utils.grid_is_single_pixel(SprytileGui.loaded_grid)\n is_use_mouse = context.scene.sprytile_ui.use_mouse\n is_selecting = SprytileGui.is_selecting\n\n glLineWidth(1)\n\n # Draw box for currently selected tile(s)\n # Pixel grid selection is drawn in draw_tile_select_ui\n sprytile_data = context.scene.sprytile_data\n is_not_base_layer = sprytile_data.work_layer != \"BASE\"\n draw_outline = sprytile_data.outline_preview or is_not_base_layer\n if draw_outline and is_selecting is False and not is_pixel_grid:\n if is_not_base_layer:\n glColor4f(0.98, 0.94, 0.12, 1.0)\n elif SprytileGui.is_moving:\n glColor4f(1.0, 0.0, 0.0, 1.0)\n else:\n glColor4f(1.0, 1.0, 1.0, 1.0)\n curr_sel_min, curr_sel_max = SprytileGui.get_sel_bounds(\n grid_size, padding, margin,\n curr_sel[0], curr_sel[1],\n curr_sel[2], curr_sel[3]\n )\n SprytileGui.draw_selection(curr_sel_min, curr_sel_max)\n\n # Inside gui, draw appropriate selection for under mouse\n if is_use_mouse and is_selecting is False and SprytileGui.cursor_grid_pos is not None:\n\n cursor_pos = SprytileGui.cursor_grid_pos\n # In pixel grid, draw cross hair\n if is_pixel_grid and SprytileGui.is_moving is False:\n glColor4f(1.0, 1.0, 1.0, 0.5)\n glBegin(GL_LINE_STRIP)\n glVertex2i(0, int(cursor_pos.y + 1))\n glVertex2i(tex_size[0], int(cursor_pos.y + 1))\n glEnd()\n\n glBegin(GL_LINE_STRIP)\n glVertex2i(int(cursor_pos.x + 1), 0)\n glVertex2i(int(cursor_pos.x + 1), tex_size[1])\n glEnd()\n # Draw box around selection\n elif SprytileGui.is_moving is False:\n glColor4f(1.0, 0.0, 0.0, 1.0)\n cursor_min, cursor_max = SprytileGui.get_sel_bounds(grid_size, padding, margin,\n int(cursor_pos.x), int(cursor_pos.y),)\n SprytileGui.draw_selection(cursor_min, cursor_max)\n\n glPopMatrix()\n offscreen.unbind()", "def blitme(self):\n self.screen.blit(self.image,self.rect)", "def blitme(self):\n 
self.screen.blit(self.image,self.rect)", "def blitme(self):\n self.screen.blit(self.image,self.rect)", "def CreateSubBitmap(*args, **kwargs):\n return _gdi_.GraphicsRenderer_CreateSubBitmap(*args, **kwargs)", "def _show_rgb(self):\n R, G, B = self._rgb_frames()\n image = numpy.dstack((R, G, B))\n imageItem = self.parent.image.getImageItem()\n imageItem.updateImage(image)", "def draw(self, screen: pygame.Surface) -> None:\n screen.blit(PIPE_SPRITES[PipesEnum.Bottom], (int(self.x), self.bottom_pipe_y))\n screen.blit(PIPE_SPRITES[PipesEnum.Top], (int(self.x), self.top_pipe_y))", "def draw(self):\n self.write_image()\n self.update()", "def draw(self):\n return ImageDraw.Draw(self.buffer)", "def blitme(self):\n\t\tself.screen.blit(self.image, self.rect)", "def blitme(self):\n\t\tself.screen.blit(self.image, self.rect)", "def blitme(self):\n self.screen.blit(self.image, self.rect)", "def draw(self, background):\n background.blit(self.image, (self.x_pos, self.y_pos))", "def draw(self, screen):\n screen.blit(self.surface, self.rect)", "def blitme(self):\n\t\tself.screen.blit(self.image,self.rect)", "def Draw(self):\n\t\tGameImage.Draw(self, self.coords)", "def grabWindowPixmap(self):\n return QtGui.QPixmap.grabWidget(self)", "def save_to_buffer(self) -> io.BytesIO:\n image = get_screenshot_as_png(self._layout)\n buffer = io.BytesIO()\n image.save(buffer, \"png\")\n return buffer", "def render_image(self, arr, order, win_coord):\n self.logger.debug(\"redraw surface\")\n if self.surface is None:\n return\n\n dst_x, dst_y = win_coord[:2]\n\n daht, dawd, depth = arr.shape\n self.logger.debug(\"arr shape is %dx%dx%d\" % (dawd, daht, depth))\n\n cr = cairo.Context(self.surface)\n # TODO: is it really necessary to hang on to this context?\n self.cr = cr\n\n # fill surface with background color\n imgwin_wd, imgwin_ht = self.viewer.get_window_size()\n cr.rectangle(0, 0, imgwin_wd, imgwin_ht)\n r, g, b = self.viewer.get_bg()\n cr.set_source_rgba(r, g, b)\n cr.fill()\n\n stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32,\n dawd)\n img_surface = cairo.ImageSurface.create_for_data(arr,\n cairo.FORMAT_ARGB32,\n dawd, daht, stride)\n\n cr.set_source_surface(img_surface, dst_x, dst_y)\n cr.set_operator(cairo.OPERATOR_SOURCE)\n\n cr.mask_surface(img_surface, dst_x, dst_y)\n cr.fill()", "def draw_img(self, i, j, k):\n if k < len(self.images):\n img = self.images[k]\n r = self.get_rect(i, j)\n self.screen.blit(img, r)", "def draw_pixel_to_display(self):\n register = self.return_middle_registers(self.opcode)\n x = self.registers[register[0]]\n y = self.registers[register[1]]\n height = self.opcode & 0xF\n\n self.registers[0xF] = 0\n\n x = bit_utils.wrap_around(x, self.display.width)\n y = bit_utils.wrap_around(y, self.display.height)\n\n for yline in range(0, height):\n pixels = self.memory[self.I + yline]\n y1 = bit_utils.wrap_around(y + yline, self.display.height)\n for xline in range(0, 8):\n x1 = bit_utils.wrap_around(x + xline, self.display.width)\n if pixels & (0x80 >> xline) != 0:\n if self.display.set_pixel(x1, y1):\n self.registers[0xF] = 1\n\n self.display.draw_flag = True\n logger.info(\"Drawing sprite from {} to {} at {}, {}\".format(\n hex(self.I),\n hex(self.I + height),\n x, y))", "def DrawBitmap(*args):\n return _gdi_.GraphicsContext_DrawBitmap(*args)", "def draw(self, screen, rotate=0):\n sub_img = pygame.image.load(self.filename_img)\n sub_img = pygame.transform.rotate(sub_img, rotate)\n sub_img_rect = sub_img.get_rect().move(*self.loc)\n \n\n # draw ports\n for port_rel_loc in 
self.input_ports + self.output_ports:\n self.draw_port(screen, (255, 182, 193), port_rel_loc) # make constant?\n\n return sub_img_rect, sub_img", "def drawPixmap(self, stack_type):\r\n if stack_type==\"xy\":\r\n im=Image.fromarray(to_rgb(self.img[self.z_stack]))\r\n image= ImageQt.ImageQt(im)\r\n image2= QtGui.QImage(image)\r\n pixmap=QtGui.QPixmap.fromImage(image2).scaled(250,250)\r\n return pixmap\r\n \r\n elif stack_type==\"xz\":\r\n im=Image.fromarray(to_rgb(self.img[:,self.y_stack,:]))\r\n image= ImageQt.ImageQt(im)\r\n image2= QtGui.QImage(image)\r\n pixmap=QtGui.QPixmap.fromImage(image2).scaled(250,250)\r\n return pixmap\r\n \r\n else:\r\n im=Image.fromarray(to_rgb(self.img[:,:,self.x_stack]))\r\n image= ImageQt.ImageQt(im)\r\n image2= QtGui.QImage(image)\r\n pixmap=QtGui.QPixmap.fromImage(image2).scaled(250,250)\r\n return pixmap\r\n \r\n #self.lbl2.setPixmap(self.pixmap2)\r", "def _greenscreen(self, obs):\n\t\tif 'video' in self._mode:\n\t\t\tbg = self._data[self._current_frame % len(self._data)] # select frame\n\t\t\tbg = self._interpolate_bg(bg, obs.shape[1:]) # scale bg to observation size\n\t\t\treturn do_green_screen(obs, bg) # apply greenscreen\n\t\treturn obs", "def draw_piece(self):\n self.screen.blit(self.image, self.rect)", "def draw(self, screen):\n if self.state == self.S_ACTIVE:\n screen.blit(self.image, self.rect)", "def draw_a50(self):\r\n\t\tpg.draw.rect(self.image, (100, 200, 100), self.rect)\r\n\t\r\n\t\t#self.display_surface.blit(self.image, self.rect)\r", "def draw(self):\n self.screen.blit(self.msg_image, self.msg_image_rect)", "def draw(self):\n self.screen.blit(self.image, (self.x_pos1, self.y_pos))\n self.screen.blit(self.image, (self.x_pos2, self.y_pos))", "def draw(self,surface):\n surface.blit(self.image, self.rect)\n for moving in self.shots.values():\n moving.draw()", "def draw_image(self):\n self.PDF.saveState()\n self.PDF.scale(1, -1)\n # self.PDF.drawImage(\n # LOGO, 490, -78, width=80, preserveAspectRatio=True, mask=\"auto\"\n # )\n self.PDF.restoreState()", "def draw(self, frame):\n frame[OFS:OFS+self.image.shape[0], OFS:OFS+self.image.shape[1]] = self.image", "def display(self):\n\t\tself.imgDisplay.set_from_pixbuf(self.getVisible())\n\t\tgc.collect()", "def __make_png(self, abspath_img_rgb):\n if not os.path.exists(DIR_PNG):\n os.makedirs(DIR_PNG)\n\n outsize = '{}%'.format(OUTSIZE_RGB)\n img_name_rgb = os.path.basename(abspath_img_rgb)\n suffix_extension_tif = Utils.get_suffix_tif(img_name_rgb)\n img_png = img_name_rgb.replace(suffix_extension_tif, '.png')\n path_img_png = os.path.join(DIR_PNG, img_png)\n\n command = \"gdal_translate -ot byte -of PNG -outsize {} {} \" \\\n \"-a_nodata 0 -q {} {}\".format(\n outsize, outsize, abspath_img_rgb, path_img_png\n )\n os.system(command)\n return os.path.join(DIR_PNG_TO_DB, img_png)", "def draw(self, screen, size_block):\n pos = self.board.coordinate_to_position(self.coordinate)\n screen.blit(pygame.transform.scale(self.image, (size_block, size_block)), (pos[0], pos[1]))", "def _repr_png_(self):\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoimg(\n mol, size, self.aix, \"\", returnPNG=True, drawOptions=opts,\n kekulize=keku, highlightBonds=self.bix\n )", "def _repr_png_(self):\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoimg(\n mol, size, self.aix, \"\", returnPNG=True, drawOptions=opts,\n 
kekulize=keku, highlightBonds=self.bix\n )", "def draw(self, surface, force=False):\n if self.redraw or force:\n surface.blit(self.image, self.loc)\n self.redraw = False", "def __draw_board_texture(self, texture):\n\n textureWidth, textureHeight = texture.size\n\n for x in range(0, self.width, textureWidth):\n for y in range(0, self.height, textureHeight):\n self.baseImage.paste(texture, (x, y))", "def draw(self, screen):", "def __draw(self, screen):\n\n pygame.draw.rect(screen, (200, 255, 200), (self.x, self.y, self.width, self.height))", "def screenshot(self):\n self.context.draw.window.screenshot(self.filename)", "def draw(self, surface):\n temp = pygame.Surface(self.renderer.pixel_size)\n self.renderer.render_map(temp)\n pygame.transform.smoothscale(temp, surface.get_size(), surface)", "def draw_onscreen(self):\n for spr in self.all_sprites:\n if spr.x >= self.cam_pos[0] and spr.x <= (self.cam_pos[0] +\n self.spr_width):\n if spr.y >= self.cam_pos[1] and spr.y <= (self.cam_pos[1]\n + self.spr_height):\n self.screen.blit(spr.image, spr.rect)", "def show(self, screen):\n x_display = self.xy_position[0] * constants.CELL_SIZE\n y_display = self.xy_position[1] * constants.CELL_SIZE\n screen.blit(self.image, (x_display, y_display))", "def draw(self, screen):\n screen.blit(self.image, (self.rect.x, self.rect.y))\n if self.boss.treasureCaptured:\n screen.blit(pygame.transform.scale(self.image, (7, 7)), (self.rect.x + self.boss.width -4, self.rect.y - 15))", "def draw_sprite(self, buf, x, y, w, h):\n x2 = x + w - 1\n y2 = y + h - 1\n if self.is_off_grid(x, y, x2, y2):\n return\n self.set_window(x, y, x2, y2, buf)", "def _draw_overlay(self, gc, view_bounds=None, mode='normal'):\n self._draw_component(gc, view_bounds, mode)\n return", "def process(self,pixmap):", "def draw_end(self, screen):\n screen.fill(BLACK) \n game_over_pic = pygame.transform.scale(pygame.image.load('game_over_mushroom.jpg').convert(), [350, 350])\n screen.blit(game_over_pic, (SCREEN_W_MID-175, SCREEN_H_MID-175))", "def TakeScreenShot(rect):\r\n\r\n # Create a DC for the whole screen area\r\n dcScreen = wx.ScreenDC()\r\n\r\n # Create a Bitmap that will later on hold the screenshot image\r\n # Note that the Bitmap must have a size big enough to hold the screenshot\r\n # -1 means using the current default colour depth\r\n bmp = wx.EmptyBitmap(rect.width, rect.height)\r\n\r\n # Create a memory DC that will be used for actually taking the screenshot\r\n memDC = wx.MemoryDC()\r\n\r\n # Tell the memory DC to use our Bitmap\r\n # all drawing action on the memory DC will go to the Bitmap now\r\n memDC.SelectObject(bmp)\r\n\r\n # Blit (in this case copy) the actual screen on the memory DC\r\n # and thus the Bitmap\r\n memDC.Blit( 0, # Copy to this X coordinate\r\n 0, # Copy to this Y coordinate\r\n rect.width, # Copy this width\r\n rect.height, # Copy this height\r\n dcScreen, # From where do we copy?\r\n rect.x, # What's the X offset in the original DC?\r\n rect.y # What's the Y offset in the original DC?\r\n )\r\n\r\n # Select the Bitmap out of the memory DC by selecting a new\r\n # uninitialized Bitmap\r\n memDC.SelectObject(wx.NullBitmap)\r\n\r\n return bmp", "def draw_kame(self):\r\n #pygame.draw.rect(self.screen, self.color, self.rect, self.image)\r\n self.screen.blit(self.image, self.rect)", "def draw(self):\n if self.state == 'alive':\n for i in range(len(self.tail)):\n pygame.draw.rect(display, black, (squareToXPix(self.tail[-(i + 1)][0], objectSize), squareToYPix(self.tail[-(i + 1)][1], objectSize), objectSize, 
objectSize))\n\n pygame.draw.rect(display, black, (squareToXPix(self.x, objectSize), squareToYPix(self.y, objectSize), objectSize, objectSize))\n\n else:\n for i in range(len(self.tail)):\n pygame.draw.rect(display, red, (squareToXPix(self.tail[-(i + 1)][0], objectSize), squareToYPix(self.tail[-(i + 1)][1], objectSize), objectSize, objectSize))\n\n pygame.draw.rect(display, red, (squareToXPix(self.x, objectSize), squareToYPix(self.y, objectSize), objectSize, objectSize))", "def on_draw(self, widget, cr):\n #print \"starting to draw\"\n if self.double_buffer is not None:\n self.draw_tiles()\n cr.set_source_surface(self.double_buffer, 0.0, 0.0)\n cr.paint()\n else:\n print('Invalid double buffer')\n #print \"done drawing\"\n return False", "def CopyToBuffer(*args, **kwargs):\n return _gdi_.Bitmap_CopyToBuffer(*args, **kwargs)", "def draw_object(self, img):\n cv2.drawChessboardCorners(img, self.patternSize, self.corners, self.patternFound)", "def on_draw_over_image(self):", "def get_image(self, frame):\n self.surface.fill((0, 0, 0, 0))\n for sprite in self.sprites:\n self.surface.blit(sprite.get_image(frame), (0, 0))\n return self.surface", "def draw(self, drawDC=None):\n FigureCanvasAgg.draw(self)\n self.bitmap = _rgba_to_wx_bitmap(self.get_renderer().buffer_rgba())\n self._isDrawn = True\n self.gui_repaint(drawDC=drawDC)", "def draw(self,screen):\n for tile in self.tile_list:\n screen.blit(tile[0],tile[1])\n # pygame.draw.rect(screen,(255,255,255),tile[1],2)\n\n for tile in self.objList:\n screen.blit(tile[0],tile[1])\n # pygame.draw.rect(screen,(255,255,255),tile[1],2)\n # rectangle print for tiles", "def pixmap(self):\n return self._pixmap", "def draw_image(self, image, src_coor, src_size, dest_coor, dest_size, angle = 0):\n img = Image_process.update(image, src_coor, src_size, dest_size, angle)\n self.canvas.create_image(dest_coor, image=img)", "def draw(self, surface, camera=None):\n if camera:\n surface.blit(self.image, camera.apply(self.rect))\n else:\n surface.blit(self.image, self.rect)", "def render(self, src_color_tex, src_depth_tex, dst_color_tex, dst_format):\n # NOTE: cannot actually use src_depth_tex as a sample texture (BindingCollision)\n assert src_depth_tex is None\n assert isinstance(src_color_tex, wgpu.base.GPUTextureView)\n assert isinstance(dst_color_tex, wgpu.base.GPUTextureView)\n\n # Recreate pipeline? Use ._internal as a true identifier of the texture view\n hash = src_color_tex.size, src_color_tex._internal\n stored_hash = self._pipelines.get(dst_format, [\"invalidhash\"])[0]\n if hash != stored_hash:\n bind_group, render_pipeline = self._create_pipeline(\n src_color_tex, dst_format\n )\n self._pipelines[dst_format] = hash, bind_group, render_pipeline\n\n self._update_uniforms(src_color_tex, dst_color_tex)\n self._render(dst_color_tex, dst_format)", "def drawItemPixmap(self, QPainter, QRect, p_int, QPixmap): # real signature unknown; restored from __doc__\n pass", "def paint(self) -> None:\n pix = QPixmap(0, 0)\n pix.convertFromImage(self.image)\n self.scene.addPixmap(pix)" ]
[ "0.61686844", "0.5998365", "0.5990171", "0.59325355", "0.59325355", "0.5924451", "0.5898981", "0.5876628", "0.58228207", "0.5799751", "0.5746103", "0.5726643", "0.5718437", "0.56835777", "0.565392", "0.5641117", "0.5641117", "0.5641117", "0.5641117", "0.56196475", "0.5606678", "0.5602619", "0.55967903", "0.55967903", "0.55967903", "0.55967903", "0.55967903", "0.55967903", "0.55967903", "0.55967903", "0.55967903", "0.55967903", "0.5591539", "0.55915225", "0.5564483", "0.5564483", "0.5564483", "0.5552927", "0.55472827", "0.5540241", "0.5513973", "0.5510207", "0.5490904", "0.5490904", "0.5488586", "0.5460954", "0.5455277", "0.5442916", "0.54262877", "0.54135436", "0.54052866", "0.5400333", "0.53857666", "0.5384934", "0.5381335", "0.5376951", "0.53748226", "0.536458", "0.53575885", "0.5352096", "0.5342265", "0.534185", "0.5337163", "0.5314422", "0.53089446", "0.5305598", "0.53047067", "0.52971655", "0.52926725", "0.5289114", "0.5289114", "0.5278036", "0.52592736", "0.52560407", "0.52502483", "0.52422166", "0.5230348", "0.522127", "0.52201945", "0.5220052", "0.5219632", "0.5212883", "0.52120334", "0.5206943", "0.5201661", "0.5190134", "0.51884073", "0.51875865", "0.5180037", "0.517478", "0.5167866", "0.5167591", "0.5165622", "0.5165458", "0.5160903", "0.5156159", "0.5154619", "0.5154377", "0.51521033", "0.5149716" ]
0.8753144
0
This method is called by the event handler when the size of the window changes (or it can be called manually). We allocate an offscreen pixmap of the appropriate size and inform the superclass of our window size.
def configure_window(self, width, height):
    self.configure_surface(width, height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __ev_resize(self, event):\n\n new_size = event.dict['size']\n surface_size = self.__screen.get_size()\n old_center = self.__screen.get_rect().center\n if new_size != surface_size:\n self.__screen = pygame.display.set_mode(new_size,\n self.__screen.get_flags(),\n self.__screen.get_bitsize())\n self.init(offset=vect_diff(self.__screen.get_rect().center,\n old_center))\n self.__screen_width, self.__screen_height = self.__screen.get_size()", "def ev_windowsizechanged(self, event: WindowResized) -> None:", "def ev_windowresized(self, event: WindowResized) -> None:", "def on_canvas_resize(self, event) -> None:\r\n\r\n self.painter.adjust_to_canvas()\r\n self.painter.draw_board()", "def _resize_image(self, event):\n self.window_width = event.width\n self.window_height = event.height", "def resizeEvent(self, event):\n size = self.size()\n size = QSize(int(size.width()),int(size.height()))\n\n scaledPix = self.pixmap.scaled(size, Qt.KeepAspectRatio, transformMode = Qt.FastTransformation )\n self.setPixmap(scaledPix)", "def resizeEvent(self, event):\n self.refresh_images(resize=True)\n QMainWindow.resizeEvent(self, event)", "def on_size(self, event):\n # Forces reconfiguration of the viewport, modelview and projection\n # matrices on the next paint event\n self.init = False", "def on_size(self, event):\n # Forces reconfiguration of the viewport, modelview and projection\n # matrices on the next paint event\n self.init = False", "def ev_windowsizechanged(self, event: tcod.event.WindowResized) -> T | None:", "def resizeEvent(self, event):\n self.resized.emit()\n return super(PiWndow, self).resizeEvent(event)", "def OnSize(self, event):\r\n\r\n self.Layout()", "def __window_resizeTo(self, iWidth, iHeight):\n pass", "def on_resize(self, _: int = 0) -> None:\n assert CursesMenu.stdscr is not None\n screen_rows, screen_cols = CursesMenu.stdscr.getmaxyx()\n curses.resizeterm(screen_rows, screen_cols)\n self.draw()", "def onSize(self,event=None):\n if self.app.DEBUG:\n print 'Event: Parent: %s.onSize'%self.__class__\n if self.redraw:self.redraw()", "def _changed_size(self, **kw):\n\t\tself._clear_matrix()\n\t\t\n\t\tself._recalc_adjustments()\n\t\t\n\t\tif self.flags() & gtk.REALIZED:\n\t\t\tif kw.get('resize', True): self.queue_resize()\n\t\t\tif kw.get('draw', True): self.queue_draw()", "def resizeEvent(self, event):\n self.updateViewer()", "def _set_size(self):\n if self.width_key is not None:\n width = config.get(self.width_key)\n height = config.get(self.height_key)\n self.window.resize(width, height)", "def resize(self, event=None):\n #self.render()\n self.__resize_background(event)\n #self.__delete_background()\n #self.__create_background(self._imfname)\n for sym in self.itersymbols():\n sym.sym.resize(event)", "def autoResize(self):\n\t\t#self.infoLabelBox.set_size_request(1,1)\n\t\timgSize = [self.currentPixbuf.get_width() * self.scaleFactor, self.currentPixbuf.get_height() * self.scaleFactor]\n\t\timgSize = map(lambda x: max(int(x), 1), imgSize)\n\t\tif not self.fullscreenToggle:\n\t\t\tself.resize(imgSize[0], imgSize[1])\n\t\t\tposition = ( int(0.5 * (self.get_screen().get_width() - imgSize[0])),\n\t\t\t\tint(0.5 * (self.get_screen().get_height() - imgSize[1])))\n\t\t\tself.move(position[0], position[1])\n\t\t\tself.fixed.move(self.imgDisplay, 0, 0)\n\t\t\tif not self.hideTransparent and self.imgTrans.bgOn:\n\t\t\t\tself.imgTrans.set_size_request(imgSize[0], imgSize[1])\n\t\t\t# make eventbox the same size as image\n\t\t\t# this will not be correct when infoLabelBox is 
visible\n\t\t\tself.eventBox.set_size_request(imgSize[0], imgSize[1])\n\t\telse:\n\t\t\tself.fixed.move(self.imgDisplay, max(0, int((self.get_size()[0] - imgSize[0]) / 2)),\n\t\t\t\tmax(0, int((self.get_size()[1] - imgSize[1]) / 2)))\n\t\t\tif not self.hideTransparent and self.imgTrans.bgOn:\n\t\t\t\tself.imgTrans.set_size_request(int(self.get_size()[0]), int(self.get_size()[1]))\n\t\t\t# make eventbox the same size as screen\n\t\t\tself.eventBox.set_size_request(self.get_size()[0],self.get_size()[1])", "def update_dimensions(self):\r\n # stores the old screen height for cleaning the screen\r\n old_w_height = self.w_height\r\n\r\n self.w_width, self.w_height = get_terminal_size()\r\n # see __init__\r\n self.w_width -= self.w_width % 2\r\n self.w_height -= self.w_height % 2\r\n\r\n # no need to clear screen if window size hasn't changed\r\n if old_w_height != self.w_height:\r\n self.clear_screen(old_w_height)", "def paintEvent(self, event):\n pixmap = self._pixmap\n if pixmap is None:\n return\n\n pm_size = pixmap.size()\n pm_width = pm_size.width()\n pm_height = pm_size.height()\n if pm_width == 0 or pm_height == 0:\n return \n\n evt_rect = event.rect()\n evt_x = evt_rect.x()\n evt_y = evt_rect.y()\n evt_width = evt_rect.width()\n evt_height = evt_rect.height()\n\n if not self._scaled_contents:\n # If the image isn't scaled, it is centered if possible.\n # Otherwise, it's painted at the origin and clipped.\n paint_x = max(0, int((evt_width / 2. - pm_width / 2.) + evt_x))\n paint_y = max(0, int((evt_height / 2. - pm_height / 2.) + evt_y))\n paint_width = pm_width\n paint_height = pm_height\n else:\n # If the image *is* scaled, it's scaled size depends on the \n # size of the paint area as well as the other scaling flags.\n if self._preserve_aspect_ratio:\n pm_ratio = float(pm_width) / pm_height\n evt_ratio = float(evt_width) / evt_height\n if evt_ratio >= pm_ratio:\n if self._allow_upscaling:\n paint_height = evt_height\n else:\n paint_height = min(pm_height, evt_height)\n paint_width = int(paint_height * pm_ratio)\n else:\n if self._allow_upscaling:\n paint_width = evt_width\n else:\n paint_width = min(pm_width, evt_width)\n paint_height = int(paint_width / pm_ratio)\n else:\n if self._allow_upscaling:\n paint_height = evt_height\n paint_width = evt_width\n else:\n paint_height = min(pm_height, evt_height)\n paint_width = min(pm_width, evt_width)\n # In all cases of scaling, we know that the scaled image is\n # no larger than the paint area, and can thus be centered.\n paint_x = int((evt_width / 2. - paint_width / 2.) + evt_x)\n paint_y = int((evt_height / 2. - paint_height / 2.) 
+ evt_y)\n \n # Finally, draw the pixmap into the calculated rect.\n painter = QPainter(self)\n painter.setRenderHint(QPainter.SmoothPixmapTransform)\n painter.drawPixmap(paint_x, paint_y, paint_width, paint_height, pixmap)", "def on_scale (self):\n\t\tif self.has_started:\n\t\t\tself.init_buffers()\n\t\t\tself.redraw_foreground()\n\t\t\tself.redraw_background()\n\n\t\tif self.expand2 == _('Use a scrollbar'):\n\t\t\tself.width = int((self.icon_size * 2 * self.rows + ((self.border_size+self.shadow_size)*2)+15 ) + 24/self.scale)\n\t\t\tself.update_scrollbar()", "def resize(self):\r\n del self.win\r\n self.__create_win()", "def SetUniformBitmapSize(self, size):\r\n\r\n self._requested_bmp_size = wx.Size(*size)\r\n\r\n # if window is already initialized, recalculate the tab height\r\n if self._dummy_wnd:\r\n self.UpdateTabCtrlHeight()", "def OnSize(self, event):\r\n \r\n skipped = False\r\n if isinstance(self._frame, AuiFloatingFrame) and self._frame.IsShownOnScreen():\r\n skipped = True\r\n event.Skip()\r\n\r\n if self._frame:\r\n \r\n self.DoFrameLayout()\r\n if wx.Platform == \"__WXMAC__\":\r\n self._frame.Refresh()\r\n else:\r\n self.Repaint()\r\n \r\n if isinstance(self._frame, wx.MDIParentFrame) or isinstance(self._frame, tabmdi.AuiMDIClientWindow) \\\r\n or isinstance(self._frame, tabmdi.AuiMDIParentFrame):\r\n # for MDI parent frames, this event must not\r\n # be \"skipped\". In other words, the parent frame\r\n # must not be allowed to resize the client window\r\n # after we are finished processing sizing changes\r\n return\r\n\r\n if not skipped:\r\n event.Skip()\r\n\r\n # For the snap to screen...\r\n self.OnMove(None)", "def OnSize(self, event):\r\n\r\n if self._owner_mgr and self._send_size:\r\n self._owner_mgr.OnFloatingPaneResized(self._pane_window, event.GetSize())", "def on_resize(self, *args):\n\n self.page_current.width = terminal.width # Give page new terminal width\n self.render_buffer = []\n\n self.render() # Re-render buffer", "def resize(self, *args):\n if self.parent is None: # when deleted\n return\n if self.parent.render_window is None: # BasePlotter\n return\n\n if self._prior_window_size != self.parent.window_size:\n self._prior_window_size = self.parent.window_size\n\n actor = self._actors['background']\n image_data = actor.GetInput()\n origin = image_data.GetOrigin()\n extent = image_data.GetExtent()\n spacing = image_data.GetSpacing()\n xc = origin[0] + 0.5 * (extent[0] + extent[1]) * spacing[0]\n yc = origin[1] + 0.5 * (extent[2] + extent[3]) * spacing[1]\n yd = (extent[3] - extent[2] + 1) * spacing[1]\n dist = self.camera.distance\n\n # make the longest dimensions match the plotting window\n img_dim = np.array(image_data.dimensions[:2])\n self.camera.focus = np.array([xc, yc, 0.0])\n self.camera.position = np.array([xc, yc, dist])\n\n ratio = img_dim / np.array(self.parent.window_size)\n scale_value = 1\n if ratio.max() > 1:\n # images are not scaled if larger than the window\n scale_value = ratio.max()\n\n if self._scale is not None:\n scale_value /= self._scale\n\n self.camera.parallel_scale = 0.5 * yd / self._scale", "def resizeEvent(self, event):\n event.accept()\n self.overlay.resize(event.size())\n # Move gif to the center of the widget\n self.overlay.move(self.rect().center() - self.overlay.rect().center())", "def signal_handler(self,sig,data):\n self.resize_child_window()", "def init_window(self, size, screen=None):\n # enforce minimum size\n (mw, mh), (w, h) = config.minsize, size\n if w < mw or h < mh:\n size = mw, mh\n\n # init view surface and pass 
it to screen\n self.view = pygame.display.set_mode(size, pygame.RESIZABLE)\n self.view.fill((0, 0, 0))\n if screen is not None:\n screen.resize_view()", "def adjust_screen_size(self) -> None:\n if self.screen:\n max_row, max_cols = self.screen.getmaxyx()\n if max_row < MIN_SIZE + len(self.all_items):\n self.screen.resize(self.menu_height, max_cols)\n self.draw()", "def resize(self, dims):\n width, height = dims[:2]\n self.logger.debug(\"renderer reconfigured to %dx%d\" % (\n width, height))\n\n # create cairo surface the size of the window\n #surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)\n depth = len(self.rgb_order)\n self.surface_arr = np.zeros((height, width, depth), dtype=np.uint8)\n\n stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32,\n width)\n surface = cairo.ImageSurface.create_for_data(self.surface_arr,\n cairo.FORMAT_ARGB32,\n width, height, stride)\n self.surface = surface\n\n # fill surface with background color;\n # this reduces unwanted garbage in the resizing window\n cr = cairo.Context(self.surface)\n\n # fill surface with background color\n cr.rectangle(0, 0, width, height)\n r, g, b = self.viewer.get_bg()\n cr.set_source_rgba(r, g, b)\n cr.fill()\n\n super(CanvasRenderer, self).resize(dims)", "def initialise_window(self):\n self.imageLabel.setBackgroundRole(QtGui.QPalette.Base)\n self.imageLabel.setScaledContents(True)\n self.scrollArea.setWidget(self.imageLabel)\n self.setCentralWidget(self.scrollArea)\n self.scrollArea.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) # Disable horizontal scrollbar.\n self.scrollArea.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) # Disable vertical scrollbar.\n self.setWindowTitle(\"Robot Map\") # Set title.\n self.showFullScreen() # Make fullscreen.", "def rescale(self, event: tkinter.Event) -> None:\n # the properties which are linked to the event of reconfiguration\n # contain all the new sizes of the panel :\n self.width, self.height = event.width - 4, event.height - 4\n # The subtraction of 4 pixels is here to compensate the width\n # of the 'highlight bordure' rolling the canvas)\n self.draw_board()", "def __onResize(self, ev):\n ev.Skip()\n self.__calcCanvasSizes()", "def grabWindowPixmap(self):\n return QtGui.QPixmap.grabWidget(self)", "def resize_display(self, (w, h)):\n self.surface = pygame.display.set_mode((w, h), pygame.RESIZABLE)", "def handleResize(self):\n pass", "def OnResize(self, event):\n self._resizing = True\n self._resize_timer.Start(60, True)", "def OnResizeEnd(self, event):\n self._resizing = False\n self.Refresh()", "def _on_start(self):\n desktop = QtGui.QApplication.instance().desktop()\n available_geometry = desktop.screenGeometry(QtGui.QCursor().pos())\n self.setGeometry(available_geometry.x(), 0, 100, 100)", "def resizeEvent(self, event):\n super().resizeEvent(event)\n self.resized.emit()", "def OnSize(self, event):\r\n \r\n self.UpdateHintWindowSize()\r\n event.Skip()", "def ev_windowminimized(self, event: WindowEvent) -> None:", "def _prep_window(self, parent=None):\n self.toolkit.app.initialize()\n if not self.initialized:\n self.setup(parent)\n self.resize_to_initial()\n self.update_minimum_size()\n self.update_maximum_size()", "def on_parent_resize(self, event):\n #self.resize()\n #self.resize_scaled(drag_rootx=self.resize_frame.winfo_rootx())\n self.resize_scaled(current=MathStat.lerp(0,\n self.prop_frame.winfo_width(), self.last_right_bias))", "def resizeEvent(self, *args, **kwargs):\n self.windowMoved.emit()", "def _request_redraw(self):\n 
self.screen_manager.req(Screens.PRODUCTENTRY)", "def ev_windowresized(self, event: tcod.event.WindowResized) -> T | None:", "def OnSize(self, event):\r\n\r\n s = event.GetSize()\r\n self.SetTabRect(wx.Rect(0, 0, s.GetWidth(), s.GetHeight()))", "def resizeEvent(self, event: 'QResizeEvent'):\n newSize = event.size()\n # self.setMask(QRegion(self.rect()))\n\n if self._movedToCenter < 3: # 2 size adjustments are done before self reaches its final assigned size.\n self.move(self.screenSize.width() / 2 - newSize.width() / 2,\n self.screenSize.height() / 2 - newSize.height() / 2)\n self._movedToCenter += 1", "def do_expose_event(self, widget, event):\n\n self.set_up_pangocairo(widget, event)\n\n self.draw(*self.window.get_size())", "def setwinsize(self, rows, cols):", "def set_screen(self, size):\r\n self.screen = size", "def expose (self,widget,event):\n #Creating Cairo drawing context\n self.ctx = self.bin_window.cairo_create()\n #Setting context size to available size\n self.ctx.rectangle(event.area.x, event.area.y, event.area.width, event.area.height)\n self.ctx.clip()\n self.ctx.translate(0.5,-0.5)\n #Obtaining available width and height\n self.available_width = event.area.width\n self.available_height = event.area.height\n #Drawing\n self.draw(self.ctx)\n return False", "def on_resize_parent(self,event):\n #print(\"parent event size=\"+str(event.width)+\" X \"+str(event.height))\n self.canvas_width = event.width\n self.canvas_height = event.height\n self.canvas.get_tk_widget().config(width=self.canvas_width, height=self.canvas_height)\n self.show_image()", "def resize(self):\n h, w = self.win.getmaxyx()\n self.maxh, self.maxw = h, w\n if w == 0 or h == 2:\n return\n self.win.resize(h, w)\n self.lpane.do_resize(h, w)\n self.rpane.do_resize(h, w)\n self.statusbar.resize(h, w)\n self.tabbar.resize(1,w)\n self.regenerate()\n self.display()", "def do_relayout(self):\n # This method is called whenever a relayout is requested. By\n # default, this is when the layout children change. In that case\n # we just need to update the min and max sizes. We are a top\n # level window, so no one really cares about our size hint. 
\n self.update_minimum_size()\n self.update_maximum_size()", "def __init__(self):\n self.app = qt.QApplication(sys.argv)\n self.window = qt.QMainWindow()\n self.screenSize = qt.QDesktopWidget().screenGeometry(-1)\n self.window.setGeometry(self.getDims()[1]/4, self.getDims()[0]/4, self.getDims()[1]/2, self.getDims()[0]/2)", "def expose (self,widget,event):\n #Creating Cairo drawing context\n self.ctx = self.bin_window.cairo_create()\n #Setting context size to available size\n self.ctx.rectangle(event.area.x, event.area.y, event.area.width, event.area.height)\n self.ctx.clip()\n self.ctx.translate(20.5,-0.5)\n #Obtaining available width and height\n self.available_width = event.area.width\n self.available_height = event.area.height\n #Drawing\n self.draw(self.ctx)\n return False", "def resizeEvent(self, event):\n self.ui.main_edit.setGeometry(QtCore.QRect(0, 0, event.size().width(),\n event.size().height()-73))\n self.ui.dialog_map.setGeometry(QtCore.QRect(0, 0, event.size().width(),\n event.size().height()-73))", "def setPixmap(self, pixmap: QPixmap):\n self.pixmap = pixmap\n self.resize(self.pixmap.size())\n self.sRequestFitInView.emit(QRectF(self.pixmap.rect()))", "def ev_windowmaximized(self, event: WindowEvent) -> None:", "def resizeEvent(self, event):\n\n self.settings.setValue(\"geometry\", self.saveGeometry())\n\n super().resizeEvent(event)", "def resizeEvent(self, event):\r\n QDialog.resizeEvent(self, event)\r\n self.emit(SIGNAL(\"size_change(QSize)\"), self.size())", "def draw_final_screen(self):\r\n root = Tk()\r\n MapGUI(root, self)\r\n root.geometry('710x540')\r\n root.mainloop()", "def OnSize(self, event):\n wx.CallAfter(self.DoSetViewport)\n event.Skip()", "def relayout(self): \n\t\t#self.urmaswin.Layout()\n\t\t#wx.CallAfter(self.urmaswin.Layout)\n\t\t#wx.CallAfter(self.visualizer.OnSize)", "def OnSize(self, event):\r\n\r\n if self._blindMode or not self.CanSetTransparent():\r\n self.MakeVenetianBlinds()", "def resize(self, rows, cols, minecount, event=None):\n self.clearFrame()\n #reset relevant instance variables\n self.rows = rows\n self.cols = cols\n self.numMines = minecount\n self.numChecked = 0\n self.numFlags = 0\n self.minesArmed = False\n self.startTime = None\n\n #re-add all elements on the board\n self.setUpFrame()\n self.addTiles(rows,cols,minecount)\n\n #resize window to fit the new board size\n windowWidth = str(20*cols+40)\n windowHeight = str(20*rows+60)\n self.parent.minsize(windowWidth, windowHeight)\n self.parent.maxsize(windowWidth, windowHeight)\n self.parent.geometry(windowWidth+'x'+windowHeight)", "def resize(self):\r\n Win.resize(self)\r\n self.write(\"### console has been resized\")", "def _update_screen(self) -> None:\n # setup the source image with an alpha channel\n alpha = 255 * np.ones_like(self.image[..., 0:1])\n image = np.concatenate([self._image, alpha], axis=-1).astype(np.uint8)\n # setup the super pixel segmentations\n super_pixels = np.zeros_like(self.image)\n super_pixels = mark_boundaries(\n super_pixels,\n self._super_pixel_segments,\n self._super_pixel_color\n )\n # concatenate the first channel of sup as the alpha channel\n super_pixels = [super_pixels, super_pixels[..., 0:1]]\n super_pixels = np.concatenate(super_pixels, axis=-1).astype(np.uint8)\n # setup the segmentation image with an alpha channel scaled by the\n # opacity parameter of the application [0, 9]\n intensity = 255 * (self._opacity / 9)\n intensity = intensity * np.ones_like(self._segmentation[..., 0:1])\n segmentation = np.concatenate([self._segmentation, intensity], 
axis=-1)\n segmentation = segmentation.astype(np.uint8)\n # send the images to the window\n self._view.show([image, segmentation, super_pixels])", "def resizeEvent(self, event):\n self.autosize()\n super().resizeEvent(event)", "def update_resize(self, viewer, dims, xy_lim):\n # adjust bg to window size, in case it changed\n x_lo, y_lo, x_hi, y_hi = xy_lim\n wd, ht = dims[:2]\n\n self.bg.x1, self.bg.y1 = x_lo, y_lo\n self.bg.x2, self.bg.y2 = x_hi, y_hi\n\n # adjust warning/alert lines\n if self.warn_y is not None:\n x, y = self.get_canvas_xy(viewer, (0, self.warn_y))\n self.ln_warn.x1, self.ln_warn.x2 = x_lo, x_hi\n self.ln_warn.y1 = self.ln_warn.y2 = y\n\n if self.alert_y is not None:\n x, y = self.get_canvas_xy(viewer, (0, self.alert_y))\n self.ln_alert.x1, self.ln_alert.x2 = x_lo, x_hi\n self.ln_alert.y1 = self.ln_alert.y2 = y", "def on_resize(event):\n gloo.set_viewport(0, 0, *event.physical_size)", "def size_with_window(self, size_with_window):\n\n self.container['size_with_window'] = size_with_window", "def _onPaint(self, evt):\n if not self._isRealized:\n self.realize()\n if self._drawn < 2:\n self.draw(repaint = False)\n self._drawn += 1\n self.gui_repaint(drawDC=wx.PaintDC(self))", "def on_user_resize_start(self, event):\n # Save offset for use while dragging.\n self._mouse_drag_offset = self.resize_frame.winfo_rootx() - event.x_root", "def Pane_Resized( self, new_sizes ):\r\n if(new_sizes[0] > 200 ):\r\n cb.xtotal = new_sizes[0]-100\r\n self.canvas_one.config(width = new_sizes[0])\r\n self.canvas_scale.config(width = new_sizes[0])\r\n else:\r\n cb.xtotal = 200-100\r\n self.canvas_one.config(width = 200)\r\n self.canvas_scale.config(width = 200)\r\n if (len(new_sizes) > 1 ):\r\n self.canvas_two.config(width=new_sizes[1])\r\n self.system.Draw()", "def _request_redraw(self):\n self.screen_manager.req(self.screen_id)", "def __window_resizeBy(self, xDelta, yDelta):\n pass", "def display(self):\n\t\tself.imgDisplay.set_from_pixbuf(self.getVisible())\n\t\tgc.collect()", "def on_size(self, event):\n size = self.GetSize()\n self.SetSize(size)\n gauge_pos, gauge_size = self.get_gauge_dimensions()\n self.gauge.SetSize(gauge_size)\n event.Skip()\n self.Update()", "def OnPaint(self, event=None):\r\n dc = wx.PaintDC(self)\r\n dc.SetBackground(wx.MEDIUM_GREY_BRUSH)\r\n if self.scaled:\r\n if self.GetSizeTuple() != self.oldSize:\r\n self.Rescale()\r\n panelWidth,panelHeight = self.GetSizeTuple()\r\n xPos = max(0,(panelWidth - self.scaled.GetWidth())/2)\r\n yPos = max(0,(panelHeight - self.scaled.GetHeight())/2)\r\n dc.Clear()\r\n dc.DrawBitmap(self.scaled,xPos,yPos,False)\r\n else:\r\n dc.Clear()\r\n #dc.SetPen(wx.Pen(\"BLACK\", 1))\r\n #dc.SetBrush(wx.TRANSPARENT_BRUSH)\r\n #(width,height) = self.GetSize()\r\n #dc.DrawRectangle(0,0,width,height)\r", "def configure_cb(self, darea, event):\n self.width, self.height = darea.window.get_size()\n self.surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, self.width,\n self.height)\n self.cr = cairo.Context(self.surface)\n self.draw(self.cr, self.width, self.height)\n\n return True", "def on_window_resized(self):\n self._compute_jitters()", "def __create_win(self):\r\n self.__calc_size()\r\n try:\r\n self.win = curses.newwin(self.height, self.width, self.posy, self.posx)\r\n self.panel = curses.panel.new_panel(self.win)\r\n self.win.scrollok(True)\r\n self.win.keypad(1)\r\n self.do_paint()\r\n except Exception:\r\n self.win = None\r\n self.panel = None", "def on_resize(self, width, height):\n self.gamestatemanager.peek().on_resize(width, height)", "def 
_configureWindow(self):\n if self._win_type == WindowType.IMMERSIVE:\n pg.setConfigOptions(\n foreground='d',\n background=(_DARK_COLOUR if self._dark else _LIGHT_COLOUR))\n self._win = pg.plot(title=\"Abstact Map Visualisation\")\n self._plt = self._win.plotItem\n self._plt.setAspectLocked(True, 1)\n self._plt.hideAxis('left')\n self._plt.hideAxis('bottom')\n else: # DEFAULT\n pg.setConfigOptions(foreground='k', background='w')\n self._win = pg.plot(title=\"Abstact Map Visualisation\")\n self._plt = self._win.plotItem\n\n # Set up the overlay objects as they are static\n self._overlay_items = [\n QtWidgets.QGraphicsRectItem(-_OVERLAY_WIDTH / 2,\n -_OVERLAY_HEIGHT / 2, _OVERLAY_WIDTH,\n _OVERLAY_HEIGHT)\n ]\n self._overlay_items[0].setBrush(pg.mkBrush(_OVERLAY_COLOUR))\n self._overlay_items[0].setZValue(1000)\n self._win.addItem(self._overlay_items[0])\n self.toggleOverlay(enable=False)\n\n # Do any last settings in the window\n # self._win.parentWidget().showMaximized()\n limit = 30\n self._win.setRange(xRange=[-limit, limit], yRange=[-limit, limit])", "def resize_child_window(self):\n s = struct.pack('HHHH', 0, 0, 0, 0)\n x = fcntl.ioctl(0,termios.TIOCGWINSZ,s)\n fcntl.ioctl(self.child_fd,termios.TIOCSWINSZ,x)", "def on_resize_parentx(self,event):\n ##print(\"parent event size=\"+str(event.width)+\" X \"+str(event.height))\n self.canvas_width = event.width\n self.canvas.get_tk_widget().config(width=self.canvas_width)\n self.show_image()", "def DoSetSize(self, x, y, width, height, flags=wx.SIZE_AUTO):\r\n\r\n self._rect = wx.Rect(x, y, max(1, width), max(1, height))\r\n self.DoSizing()", "def update(self):\n cv2.imshow(self.window_name, self.map.get_crop())", "def notifyResized(self, function, **kwargs):\n self._sig_resized.subscribe(function, **kwargs)", "def resize(self, yx=None):\n if yx == None:\n yx = self.screen.getmaxyx()\n self.screen.clear()\n curses.resizeterm(yx[0], yx[1])\n self.setup_windows(resize = True)\n self.screen.refresh()", "def OnStateChange(self, sz):\r\n\r\n # minimal size has priority over the best size so set here our min size\r\n self.SetMinSize(sz)\r\n self.SetSize(sz)\r\n\r\n if self.HasFlag(wx.CP_NO_TLW_RESIZE):\r\n # the user asked to explicitely handle the resizing itself...\r\n return\r\n \r\n # NB: the following block of code has been accurately designed to\r\n # as much flicker-free as possible be careful when modifying it!\r\n\r\n top = wx.GetTopLevelParent(self)\r\n if top:\r\n # NB: don't Layout() the 'top' window as its size has not been correctly\r\n # updated yet and we don't want to do an initial Layout() with the old\r\n # size immediately followed by a SetClientSize/Fit call for the new\r\n # size that would provoke flickering!\r\n\r\n if top.GetSizer():\r\n if (wx.Platform == \"__WXGTK__\" and self.IsCollapsed()) or wx.Platform != \"__WXGTK__\":\r\n # FIXME: the SetSizeHints() call would be required also for GTK+ for\r\n # the expanded.collapsed transition. Unfortunately if we\r\n # enable this line, then the GTK+ top window won't always be\r\n # resized by the SetClientSize() call below! As a side effect\r\n # of this dirty fix, the minimal size for the pane window is\r\n # not set in GTK+ and the user can hide it shrinking the \"top\"\r\n # window...\r\n\r\n top.GetSizer().SetSizeHints(top)\r\n\r\n\r\n # we shouldn't attempt to resize a maximized window, whatever happens\r\n if not top.IsMaximized():\r\n \r\n if self.IsCollapsed():\r\n # expanded . 
collapsed transition\r\n if top.GetSizer():\r\n # we have just set the size hints...\r\n sz = top.GetSizer().CalcMin()\r\n\r\n # use SetClientSize() and not SetSize() otherwise the size for\r\n # e.g. a wxFrame with a menubar wouldn't be correctly set\r\n top.SetClientSize(sz)\r\n \r\n else:\r\n \r\n top.Layout()\r\n \r\n else:\r\n \r\n # collapsed . expanded transition\r\n\r\n # force our parent to \"fit\", i.e. expand so that it can honour\r\n # our minimal size\r\n top.Fit()", "def resize(self):\n\t\tself.win.erase()\n\t\tfor c in self.components:\n\t\t\tc.resize()\n\t\tself.draw(True)", "def on_user_resize(self, event):\n self.resize_scaled(drag_rootx=event.x_root + self._mouse_drag_offset)" ]
[ "0.72156596", "0.7100522", "0.6702604", "0.6601584", "0.65673465", "0.65583104", "0.65517545", "0.6497365", "0.6497365", "0.647053", "0.643673", "0.63569677", "0.6356837", "0.6324894", "0.6282427", "0.6277227", "0.62645644", "0.6249151", "0.6226418", "0.62170535", "0.6205427", "0.62017554", "0.61746866", "0.6172087", "0.61392725", "0.61387414", "0.61380386", "0.61299115", "0.6119867", "0.610742", "0.60796195", "0.6051426", "0.60496074", "0.6048114", "0.6047191", "0.59839755", "0.597899", "0.59687555", "0.5944601", "0.594121", "0.5880627", "0.5866386", "0.5864627", "0.5864453", "0.5863559", "0.5862763", "0.5855931", "0.58460647", "0.58375174", "0.58257836", "0.5822383", "0.5814103", "0.5812492", "0.5805309", "0.5798912", "0.5796352", "0.5785686", "0.57702076", "0.57670134", "0.5763183", "0.57548654", "0.5745018", "0.5740516", "0.57295245", "0.5727805", "0.5726564", "0.5724321", "0.57081187", "0.5693691", "0.5685868", "0.5683889", "0.5679884", "0.56718856", "0.5671672", "0.5664499", "0.56631505", "0.5660821", "0.56520635", "0.5640127", "0.563682", "0.56274766", "0.5625327", "0.56196284", "0.56146026", "0.5602939", "0.55949825", "0.5594246", "0.5593722", "0.55563164", "0.554995", "0.5537107", "0.55324405", "0.5525097", "0.5522796", "0.5519444", "0.5517368", "0.5517129", "0.5516227", "0.55125475", "0.5486879" ]
0.56322706
80
Used for generating thumbnails. Does not include overlaid graphics.
def get_plain_image_as_widget(self):
    arr = self.getwin_array(order=self.rgb_order)

    # convert numpy array to native image widget
    image_w = self._get_wimage(arr)
    return image_w
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_thumbnail(self, target, format=None):", "def generate_image(self):\n pass", "def _prepareImage(self):\n painter = QPainter(self)\n if len(self.thumbs) == 0:\n return\n destwidth = self.width()\n division = len(self.thumbs)\n NF = division\n slit_width = destwidth // division + 1\n if slit_width < self.minwidth:\n slit_width = self.minwidth\n division = destwidth // slit_width - 1\n for slit in range(division):\n point = QPoint(slit*destwidth // division,0)\n i = slit*NF // division\n thumb = self.transformer(self.thumbs[i])\n w = thumb.width()\n h = thumb.height()\n if w > slit_width:\n w0 = (w-slit_width)//2\n cropped = thumb.copy(w0,0,slit_width,h)\n painter.drawImage(point, cropped)\n else:\n painter.drawImage(point, thumb)", "def generate_thumbnail(progress_controller=None):\n # TODO: For now skip if this is Maya2017\n import pymel\n\n if pymel.versions.current() >= 201700:\n return\n\n # skip this if maya is running in batch mode\n if pm.general.about(batch=1):\n return\n\n from anima.dcc.mayaEnv import auxiliary\n\n auxiliary.generate_thumbnail()", "def bigThumbnail(self):\n\t\tfileCount = len(self.fileList)\n\t\tthumbSize = (200, 200)\n\t\timgHoriz = int(self.get_screen().get_width() / (thumbSize[1] + 20))\n\t\timgSize = (self.get_screen().get_width(), (thumbSize[1] + 20) * (int(fileCount / imgHoriz) + 2))\n\n\t\tpixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, imgSize[0], imgSize[1])\n\t\tfor file in range(len(self.fileList)):\n\t\t\ttry:\n\t\t\t\ttimg = gtk.gdk.pixbuf_new_from_file(self.fileList[file])\n\t\t\texcept:\n\t\t\t\tprint >> sys.stderr, \"Failed to load image %s\" % self.fileList[file]\n\t\t\t\tcontinue\n\t\t\ttimgSize = [timg.get_width(), timg.get_height()]\n\t\t\tif timgSize[0] > thumbSize[0] or timgSize[1] > thumbSize[1]:\n\t\t\t\tscaleFactor = 1.0 * thumbSize[0] / timgSize[0]\n\t\t\t\tif timgSize[1] * scaleFactor > thumbSize[1]:\n\t\t\t\t\tscaleFactor = 1.0 * thumbSize[1] / timgSize[1]\n\t\t\t\tself.scaleFactor = scaleFactor\n\t\t\t\ttimgSize[0] = int(timgSize[0] * scaleFactor)\n\t\t\t\ttimgSize[1] = int(timgSize[1] * scaleFactor)\n\t\t\t\ttimg = timg.scale_simple(timgSize[0], timgSize[1], gtk.gdk.INTERP_BILINEAR)\n\t\t\tpos = ( (file % imgHoriz) * (thumbSize[0] + 20) + 10 + (thumbSize[0] - timgSize[0]) / 2,\n\t\t\t\tint(file / imgHoriz) * (thumbSize[1] + 20) + 10)\n\n\t\t\tprint \" Rendering thumbnails; %d of %d\\r\" % (file, len(self.fileList)),\n\t\t\tsys.stdout.flush()\n\n\t\t\ttimg.copy_area(0, 0, timgSize[0], timgSize[1], pixbuf, pos[0], pos[1])\n\t\t\tdel timg\n\t\t\tgc.collect()\n\t\tprint\n\t\tself.currentPixbuf = pixbuf\n\t\tself.fileList = [ \"#\" ]\n\t\tself.fileName = \"#\"\n\t\tself.autoScale()\n\t\tself.display()", "def generate_thumbnail():\n import tempfile\n import glob\n from anima.env import mayaEnv\n m_env = mayaEnv.Maya()\n v = m_env.get_current_version()\n\n if not v:\n return\n\n # do not generate a thumbnail from a Repr\n if '@' in v.take_name:\n return\n\n task = v.task\n project = task.project\n # repo = project.repository\n imf = project.image_format\n width = int(imf.width * 0.5)\n height = int(imf.height * 0.5)\n\n temp_output = tempfile.mktemp()\n\n current_frame = pm.currentTime(q=1)\n output_file = pm.playblast(\n fmt='image',\n startTime=current_frame,\n endTime=current_frame,\n sequenceTime=1,\n forceOverwrite=1,\n filename=temp_output,\n clearCache=1,\n showOrnaments=1,\n percent=100,\n wh=(width, height),\n offScreen=1,\n viewer=0,\n compression='PNG',\n quality=70,\n framePadding=0\n )\n 
pm.currentTime(current_frame)\n\n output_file = output_file.replace('####', '*')\n found_output_file = glob.glob(output_file)\n if found_output_file:\n output_file = found_output_file[0]\n\n from anima.ui import utils\n utils.upload_thumbnail(task, output_file)\n\n return found_output_file", "def get_thumbnail(format):", "def plot_thumb(self, data_fname):\n thumbnail = self.controller.plot_thumb(data_fname, self.bitmap_width, self.bitmap_height)\n if thumbnail is not None:\n self.figure_bmp.SetBitmap(thumbnail)\n else:\n self.plot_blank()", "def write_thumbnails(self, appstruct):\n slugser = slugify(appstruct[\"serial\"])\n pdf_filename = \"thumbnails/%s/uploaded.pdf\" % slugser\n top_file = \"thumbnails/%s/top.png\" % slugser\n mos_file = \"thumbnails/%s/mosaic.png\" % slugser\n \n thumg = ThumbnailGenerator(pdf_filename)\n self.save_blob(thumg.top_thumbnail(), top_file)\n self.save_blob(thumg.mosaic_thumbnail(), mos_file)", "def get_thumbnail_url():", "def setThumbnailImage(*args):", "def small_image(self):\n pass", "def create_image_caption_pairs(self):", "def generateThumbnail(img):\n\n if not img._thumbfn:\n return\n\n aimgfn = join(opts.root, img._filename)\n if not opts.fast:\n img._size = imageSize(aimgfn)\n\n athumbfn = join(opts.root, img._thumbfn)\n\n if opts.thumb_force:\n if opts.quiet: print \"forced regeneration of '%s'\" % img._thumbfn\n elif not exists(athumbfn):\n if opts.quiet: print \"thumbnail absent '%s'\" % img._thumbfn\n else:\n # Check if thumbsize has changed\n if not opts.fast:\n img._thumbsize = imageSize(athumbfn)\n if not checkThumbSize(img._size, \\\n img._thumbsize, \\\n opts.thumb_size):\n if opts.quiet: print \"thumbnail '%s size has changed\" % img._thumbfn\n try:\n # Clear cache for thumbnail size.\n del imageSizeCache[ athumbfn ]\n except:\n pass\n else:\n# pass\n# if opts.quiet: print \"thumbnail '%s' already generated (size ok)\" \\\n# % img._thumbfn\n return\n else:\n if opts.quiet: print \"thumbnail '%s' already generated\" % img._thumbfn\n return\n\n if opts.no_magick:\n if opts.quiet: print \"ImageMagick tools disabled, can't create thumbnail\"\n return\n\n # create necessary directories\n d = dirname(athumbfn)\n if not exists(d):\n os.makedirs(d)\n\n if opts.pil:\n\n try:\n im = PilImage.open(aimgfn)\n im.thumbnail((opts.thumb_size, opts.thumb_size), config.Thumbnails[\"Interpolation\"])\n im.save(athumbfn)\n\n img._thumbsize = im.size\n except IOError, e:\n raise SystemExit(\\\n \"Error: identifying file '%s'\" % aimgfn + str(e))\n\n else:\n\n cmd = getMagickProg('convert') + ' -border 2x2 '\n # FIXME check if this is a problem if not specified\n #cmd += '-interlace NONE '\n\n cmd += '-geometry %dx%d ' % (opts.thumb_size, opts.thumb_size)\n\n if opts.thumb_quality:\n cmd += '-quality %d ' % opts.thumb_quality\n\n # This doesn't add text into the picture itself, just the comment in\n # the header.\n if opts.copyright:\n cmd += '-comment \\\"%s\\\" ' % opts.copyright\n\n # We use [1] to extract the thumbnail when there is one.\n # It is harmless otherwise.\n subimg = \"\"\n if img._ext.lower() in [ \".jpg\", \".tif\", \".tiff\" ]:\n subimg = \"[1]\"\n\n cmd += '\"%s%s\" \"%s\"' % (aimgfn, subimg, athumbfn)\n\n if opts.quiet: print \"generating thumbnail '%s'\" % img._thumbfn\n\n (chin, chout, cherr) = os.popen3(cmd)\n errs = cherr.readlines()\n chout.close()\n cherr.close()\n if errs:\n print >> sys.stderr, \\\n \"Error: running convert program on %s:\" % aimgfn\n errs = string.join(errs, '\\n')\n print errs\n\n if subimg and \\\n 
re.compile('Unable to read subimage').search(errs):\n if opts.quiet: print \"retrying without subimage\"\n cmd = string.replace(cmd, subimg, \"\")\n\n (chin, chout, cherr) = os.popen3(cmd)\n errs = cherr.readlines()\n chout.close()\n cherr.close()\n if errs:\n print >> sys.stderr, \\\n \"Error: running convert program on %s:\" % aimgfn\n print string.join(errs, '\\n')\n\n else:\n img._thumbsize = imageSize(athumbfn)", "def generate_thumbnail(self, img_path):\n\n thumb_path = self.thumbnail_path(img_path)\n dirpath = os.path.dirname(thumb_path)\n try:\n os.makedirs(dirpath)\n except OSError: # path exists\n pass\n\n cmd = [\n '/usr/local/bin/gm',\n 'convert',\n '-thumbnail', '256x256>',\n '-background', 'transparent',\n '-gravity', 'center',\n '-extent', '256x256',\n img_path, thumb_path\n ]\n\n retcode = subprocess.call(cmd)\n\n if retcode:\n log.error('convert exited with %d : %s', retcode, img_path)\n return False\n\n log.debug('Wrote thumbnail for `%s` to `%s`.', img_path, thumb_path)\n\n return True", "def thumbnail(self, fnameIn, fnameOut):\n cmd = \"convert -define jpeg:size=500x150 \"\n cmd += '\"%s\" ' % os.path.join(self.downloadFolder, fnameIn)\n cmd += \"-auto-orient -thumbnail 250x150 \"\n cmd += '\"%s\" ' % os.path.join(self.thumbnailFolder, fnameOut)\n self.log(\"creating thumbnail ...\")\n self.log(cmd)\n process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n process.wait()", "def clear_thumbnails(self):", "def build_filler_images(self):", "def image_preview(self):\r\n h = '<img src=\"%s\" alt=\"%s\"/>' % (self.image_resized_url, self.title)\r\n return mark_safe(h)", "def large_image(self):\n pass", "def generate_thumbnail(image_data, min_source_height, max_source_height, min_source_width, max_source_width, content_type, width, height, overlay_path, valign, top_crop_pct=None, bottom_crop_pct=None, left_crop_pct=None, right_crop_pct=None, crop_x=None, crop_y=None, post_crop_uniform_scale_pct=None):\n # figure out the width/height of the image from the datastore\n \n# img = images.Image(image_data=image_data)\n# img.crop(left_x=0.25, top_y=0.25, right_x=0.25, bottom_y=0.25)\n# img.resize(width=width, height=height)\n# logging.info('(b) w=%i, h=%i' % (img.width, img.height))\n# output = img.execute_transforms(output_encoding=img.format)\n \n image = images.Image(image_data)\n \n if min_source_height is not None and image.height < min_source_height:\n return None\n if max_source_height is not None and image.height > max_source_height:\n return None\n \n if min_source_width is not None and image.width < min_source_width:\n return None\n if max_source_width is not None and image.width > max_source_width:\n return None\n \n \n if content_type == 'image/png':\n output_encoding = images.PNG\n else:\n output_encoding = images.JPEG\n if crop_x is not None and crop_y is not None and valign is None and top_crop_pct is None and bottom_crop_pct is None and (image.width >= crop_x + width) and (image.height >= crop_y + height):\n fw = float(image.width)\n fh = float(image.height)\n try:\n output = images.crop(image_data, float(crop_x) / fw, float(crop_y) / fh, float(crop_x + width) / fw, float(crop_y + height) / fh, output_encoding=output_encoding)\n except:\n output = image_data\n else:\n if width > image.width and height > image.height:\n output = image_data\n# # this would result in scaling the image UP, that's no good\n# if image.width > image.height:\n# width = image.width\n# else:\n# height = image.height\n# \n# output = images.resize(image_data, width, height, 
output_encoding)\n else:\n output = rescale(image, width, height, halign='middle', valign=valign, top_crop_pct=top_crop_pct, bottom_crop_pct=bottom_crop_pct, left_crop_pct=left_crop_pct, right_crop_pct=right_crop_pct)\n \n if post_crop_uniform_scale_pct is not None:\n output = images.resize(output, width=int(width * post_crop_uniform_scale_pct), output_encoding=output_encoding)\n \n if overlay_path is not None:\n # read the overlay into memory\n overlay_data = open(overlay_path,'r').read()\n # composite the overlay onto the rescaled output\n if content_type == 'image/png':\n output_encoding = images.PNG\n else:\n output_encoding = images.JPEG\n output = images.composite(\n inputs=[\n (output,0,0,1.0,images.CENTER_CENTER),\n (overlay_data,0,0,1.0,images.CENTER_CENTER),\n ],\n width=width,\n height=height,\n output_encoding=output_encoding\n )\n return output", "def create_thumbnail(image_name):\n try:\n # SMALL\n write_thumbnail(image_name, 'small')\n # MEDIUM\n write_thumbnail(image_name, 'medium')\n # LARGE\n write_thumbnail(image_name, 'large')\n\n except IOError:\n print('create thumbnail error')\n pass", "def createThumbnail(self, useCursorPosition=False, dbPath = None, versionInt = None):\n\n return \"\"\n # logger.debug(\"Func: createThumbnail\")\n # projectPath = self.projectDir\n # if useCursorPosition:\n # versionInt = self.currentVersionIndex\n # dbPath = self.currentDatabasePath\n # else:\n # if not dbPath or not versionInt:\n # msg = \"Both dbPath and version must be defined if useCursorPosition=False\"\n # raise Exception ([360, msg])\n #\n # versionStr = \"v%s\" % (str(versionInt).zfill(3))\n # dbDir, shotNameWithExt = os.path.split(dbPath)\n # shotName = os.path.splitext(shotNameWithExt)[0]\n #\n # thumbPath = \"{0}_{1}_thumb.jpg\".format(os.path.join(dbDir, shotName), versionStr)\n # relThumbPath = os.path.relpath(thumbPath, projectPath)\n #\n # # create a thumbnail using playblast\n # thumbDir = os.path.split(thumbPath)[0]\n # if os.path.exists(thumbDir):\n # # frame = pm.currentTime(query=True)\n # frame = cmds.currentTime(query=True)\n # # store = pm.getAttr(\"defaultRenderGlobals.imageFormat\")\n # store = cmds.getAttr(\"defaultRenderGlobals.imageFormat\")\n # # pm.setAttr(\"defaultRenderGlobals.imageFormat\", 8) # This is the value for jpeg\n # cmds.setAttr(\"defaultRenderGlobals.imageFormat\", 8) # This is the value for jpeg\n # # pm.playblast(completeFilename=thumbPath, forceOverwrite=True, format='image', width=221, height=124, showOrnaments=False, frame=[frame], viewer=False, percent=100)\n # cmds.playblast(completeFilename=thumbPath, forceOverwrite=True, format='image', width=221, height=124, showOrnaments=False, frame=[frame], viewer=False, percent=100)\n # # pm.setAttr(\"defaultRenderGlobals.imageFormat\", store) #take it back\n # cmds.setAttr(\"defaultRenderGlobals.imageFormat\", store) #take it back\n # else:\n # # pm.warning(\"something went wrong with thumbnail. Skipping thumbnail\")\n # cmds.warning(\"something went wrong with thumbnail. 
Skipping thumbnail\")\n # return \"\"\n # # return thumbPath\n # return relThumbPath", "def img_url_thumbnail(self):\n url = '%s=s%s-c' % (self.img_url, self.THUMBNAIL_SIZE_PX)\n if self.img_rot in Plaque.ALLOWED_ROTATIONS:\n url = \"%s-r%s\" % (url, self.img_rot)\n return url", "def top_thumbnail(self):\n serial = slugify(self.request.matchdict[\"serial\"])\n filename = \"thumbnails/%s/top.png\" % serial\n return FileResponse(filename)", "def generate_thumb_filename(instance, filename):\n return _generate_filename(instance, filename, 'thumbs')", "def RescaleScreenShot(bmp, thumbnail_size=200):\r\n\r\n bmpW, bmpH = bmp.GetWidth(), bmp.GetHeight()\r\n img = bmp.ConvertToImage()\r\n\r\n newW, newH = bmpW, bmpH\r\n \r\n if bmpW > bmpH:\r\n if bmpW > thumbnail_size:\r\n ratio = bmpW/float(thumbnail_size)\r\n newW, newH = int(bmpW/ratio), int(bmpH/ratio)\r\n img.Rescale(newW, newH, wx.IMAGE_QUALITY_HIGH)\r\n else:\r\n if bmpH > thumbnail_size:\r\n ratio = bmpH/float(thumbnail_size)\r\n newW, newH = int(bmpW/ratio), int(bmpH/ratio)\r\n img.Rescale(newW, newH, wx.IMAGE_QUALITY_HIGH)\r\n\r\n newBmp = img.ConvertToBitmap()\r\n otherBmp = wx.EmptyBitmap(newW+5, newH+5) \r\n\r\n memDC = wx.MemoryDC()\r\n memDC.SelectObject(otherBmp)\r\n memDC.SetBackground(wx.WHITE_BRUSH)\r\n memDC.Clear()\r\n \r\n memDC.SetPen(wx.TRANSPARENT_PEN)\r\n\r\n pos = 0\r\n for i in xrange(5, 0, -1):\r\n brush = wx.Brush(wx.Colour(50*i, 50*i, 50*i))\r\n memDC.SetBrush(brush)\r\n memDC.DrawRoundedRectangle(0, 0, newW+5-pos, newH+5-pos, 2)\r\n pos += 1\r\n\r\n memDC.DrawBitmap(newBmp, 0, 0, True)\r\n \r\n # Select the Bitmap out of the memory DC by selecting a new\r\n # uninitialized Bitmap\r\n memDC.SelectObject(wx.NullBitmap)\r\n\r\n return otherBmp", "def GetThumbnail(self, type, maxsize): # real signature unknown; restored from __doc__\n pass", "def thumbnail_url(self):\n return None", "def clear_thumbnail(self):\n from anima.ui import utils\n utils.clear_thumbnail(self.thumbnail_graphics_view)", "def admin_photo(self, obj=None, size='default'):\n self = obj if obj else self\n if hasattr(self, 'get_thumbnail_url'):\n return '<a class=\"thumb-'+size+'\" href=\"{}\"><img src=\"{}\"></a>'.format(\n self.admin_url, self.get_thumbnail_url(size))", "def create_full_pic(self):\n self.create_half_pic()\n mirror_update(self.flag)", "def create_strip(self, resolution_ratio=None):\n\n if not resolution_ratio:\n resolution_ratio = self.strip_resolution_ratio\n\n padding = 40\n photo_width = int(self.photo_resolution[0] * resolution_ratio)\n photo_height = int(self.photo_resolution[1] * resolution_ratio)\n width = (photo_width * 2) + (padding * 4)\n height = (photo_height * self.picture_count) + (padding * (self.picture_count + 1))\n\n strip = Image.new('RGB', (width, height))\n canvas = ImageDraw.Draw(strip)\n canvas.rectangle((0, 0, width, height), fill=ImageColor.getcolor('#ffffff', 'RGB'))\n\n for i in range(0, self.picture_count):\n image = Image.open(self.pictures_taken[i])\n image = image.convert(mode='RGB')\n image = image.resize((photo_width, photo_height), resample=Image.LANCZOS)\n strip.paste(image, box=(\n padding,\n padding + (padding * i) + (photo_height * i)\n ))\n strip.paste(image, box=(\n padding + photo_width + padding + padding,\n padding + (padding * i) + (photo_height * i)\n ))\n del image\n\n strip = strip.transpose(Image.FLIP_LEFT_RIGHT)\n strip = strip.filter(ImageFilter.DETAIL)\n strip = strip.filter(ImageFilter.SHARPEN)\n\n (handle, file_name) = mkstemp(suffix='.jpg', prefix='photoberry-strip')\n 
os.close(handle)\n handle = open(file_name, 'wb')\n strip.save(handle, format='jpeg', quality=95, optimize=True)\n handle.close()\n handle.close()\n del strip\n return file_name", "def GET_link_thumb(self, *a, **kw):\r\n return \"nothing to see here.\"", "def main():\r\n original = SimpleImage(\"images/poppy.png\")\r\n original.show()\r\n # shrink function\r\n after_shrink = shrink('images/poppy.png')\r\n after_shrink.show()", "def make_thumbnail(image, size=(100, 100)):\n logging.debug(image)\n\n im = create_colorblind_image(image)\n\n thumb_io = BytesIO() # create a BytesIO object\n\n im.save(thumb_io, 'PNG', quality=85) # save image to BytesIO object\n\n thumbnail = File(thumb_io, name=image.name) # create a django friendly File object\n\n return thumbnail", "def create_base_image(self, builder, template, parameters):", "def generatePreview(self):\n self.saveParameters()\n image=self.simulation.generatePreview()\n # convert pil image to a tkinter image\n self.photo = ImageTk.PhotoImage(image)\n\n # display image\n self.preview.create_image(0, 0, anchor='nw', image=self.photo)", "def _repr_png_(self):\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoimg(\n mol, size, self.aix, \"\", returnPNG=True, drawOptions=opts,\n kekulize=keku, highlightBonds=self.bix\n )", "def _repr_png_(self):\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoimg(\n mol, size, self.aix, \"\", returnPNG=True, drawOptions=opts,\n kekulize=keku, highlightBonds=self.bix\n )", "def get_thumbnail_name(self, thumbnail_name, with_size=None):", "def make_image(self, path):\n\t\treturn None", "def test_create_thumbnails(self):\n \n logging.info('create_thumbnail')\n \n for size in self.article.sizes.keys():\n assert not self.article.thumbnail_exists(size)\n\n self.article.image.save('avatar.png', ContentFile(self.image.read()))\n self.article.create_thumbnails()\n \n for size in self.article.sizes.keys():\n assert self.article.thumbnail_exists(size)", "def mosaic_thumbnail(self):\n serial = slugify(self.request.matchdict[\"serial\"])\n filename = \"thumbnails/%s/mosaic.png\" % serial\n return FileResponse(filename)", "def thumbnail(im, config):\n\n im.thumbnail(\n (config['width'], config['height']),\n ANTIALIAS,\n )\n\n return im", "def _gen_thumbs_filename(instance, filename):\n return _unique_path(instance.owner.pk, filename, category='thumbs')", "def create_thumb(source_fame, target_fame, target_w = 260, target_h=205):\r\n size = target_w, target_h\r\n im = Image.open(source_fame)\r\n width = im.size[0]\r\n height = im.size[1]\r\n newwidth = int(size[0])\r\n newheight = int(height*(newwidth/float(width)))\r\n if newheight > int(size[1]):\r\n newheight = int(size[1])\r\n newwidth = int(width*(newheight/float(height)))\r\n size = newwidth, newheight\r\n # Resize and save the image\r\n im.thumbnail(size, Image.ANTIALIAS)\r\n im.save(target_fame)", "def exportImg(self):\n if self.superSampling:\n print(\"Exporting with size adjusted\")\n self.img = self.img.resize((int(self.width/2),int(self.height/2)),Image.NEAREST)\n self.img.save(self.fileName,\"PNG\")", "def genThumbnail(filename,thumbnailType,config,regen=False):\n # define the sizes of the various thumbnails\n thumbnailTypeDefinitions={\n 's': (75,75), #should be square eventually\n 'q': (150,150), #should be square eventually\n 't': (100,100),\n 'm': 
(240,240),\n 'n': (320,230),\n 'k': (500,500),\n 'c': (800,800),\n 'b': (1024,1024)}\n size = thumbnailTypeDefinitions[thumbnailType]\n thumbFilename = filename.split('.')[0] + '_' + thumbnailType + '.' + filename.split('.')[1]\n if os.path.isfile(config['LOCALARCHIVEPATH']+'/'+thumbFilename) and regen == False:\n return(thumbFilename)\n else:\n try:\n logger.info('Generating thumbnail: %s' %(config['LOCALARCHIVEPATH']+'/'+thumbFilename))\n img = Image.open(config['LOCALARCHIVEPATH']+'/'+filename)\n icc_profile = img.info.get('icc_profile')\n img.thumbnail(size,Image.ANTIALIAS)\n img.save(config['LOCALARCHIVEPATH']+'/'+thumbFilename, 'JPEG', icc_profile=icc_profile, quality=95)\n return(thumbFilename)\n except IOError as e:\n raise e", "def make_visual(self,\r\n ruler = None,\r\n options: list = ['rows',\r\n 'centers',\r\n 'distances']\r\n ) -> 'Image':\r\n \r\n original = self.get_picture().get_photo()\r\n\r\n # Copy the original image for drawing\r\n img = original.copy()\r\n draw = ImageDraw.Draw(img)\r\n\r\n # check all the choices provided by the user\r\n for i in options:\r\n \r\n if i == 'clusters':\r\n # Color all cluster pixels red\r\n \r\n for j in self.get_clusters():\r\n for k in j:\r\n img.putpixel(k, (25,275,25))\r\n\r\n elif i == 'row_ids':\r\n # Make row id numbers\r\n\r\n # Font specifications\r\n size = 75\r\n font = ImageFont.truetype('ariblk.ttf', size)\r\n color = (88, 214, 216)\r\n num = 1\r\n\r\n # Draw the ids\r\n for j in self.rows:\r\n draw.text((j[0].get_center()[0],\r\n j[0].get_center()[1] - 0.25 * size),\r\n str(num),\r\n fill = color,\r\n font = font)\r\n num += 1\r\n\r\n elif i == 'boxes':\r\n # Show all bounding boxes\r\n \r\n for i in self.get_boxes():\r\n draw.rectangle(i, outline=(255, 0, 255))\r\n\r\n elif i == 'dirt':\r\n # Remove Background\r\n \r\n img = Image.new('RGB', img.size, (130, 90, 50))\r\n draw = ImageDraw.Draw(img)\r\n\r\n elif i == 'centers':\r\n # Show all centers\r\n \r\n rad = 9\r\n for i in self.get_centers():\r\n draw.arc([(i[0] - rad, i[1] - rad),\r\n (i[0] + rad, i[1] + rad)],\r\n 0, 360, (0, 0, 255))\r\n\r\n elif i == 'ditches':\r\n # Show ditches between plants\r\n\r\n # Line attribute settings\r\n width = 10\r\n color = (55,65,65)\r\n\r\n # Iterate over all ditches\r\n for line in self.ditches:\r\n line = [line[0], line[1]]\r\n\r\n # Point in ditch on left border of picture\r\n start_point = (0, line[1])\r\n\r\n # Point in ditch on right border of picture\r\n end_point = (self.picture.get_size()[0] - 1,\r\n line[0]\r\n * (self.picture.get_size()[0] - 1)\r\n + line[1])\r\n\r\n ## Check if the end point is within the picture\r\n if end_point[1] < 0:\r\n if start_point[1] < 0:\r\n continue\r\n \r\n # Point in ditch on top border of picture\r\n end_point = (-1 * line[1] / line[0], 0)\r\n\r\n elif end_point[1] > self.picture.get_size()[1] - 1:\r\n if start_point[1] > self.picture.get_size()[1] - 1:\r\n continue\r\n \r\n # Point in ditch on bottom border of picture\r\n end_point = (-1\r\n * (self.picture.get_size()[1] - 1)\r\n / line[0],\r\n self.picture.get_size()[1] - 1)\r\n\r\n # Draw the ditches\r\n for i in self.get_rows():\r\n draw.line((start_point, end_point), color, width)\r\n\r\n elif i == 'lines':\r\n # Show row line approximations\r\n\r\n # Line attribute settings\r\n width = 1\r\n color = (255, 255, 75)\r\n\r\n # Iterate over all the lines\r\n for line in self.lines:\r\n line = [line[0], line[1]]\r\n\r\n # Point on line on left border of picture\r\n start_point = (0, line[1])\r\n\r\n # Point on line on right border of 
picture\r\n end_point = (self.picture.get_size()[0] - 1,\r\n line[0]\r\n * (self.picture.get_size()[0] - 1)\r\n + line[1])\r\n\r\n\r\n ## Check if the end point is within the picture\r\n if end_point[1] < 0:\r\n if start_point[1] < 0:\r\n continue\r\n\r\n # Point on line on top border of picture\r\n end_point = (-1 * line[1] / line[0], 0)\r\n \r\n\r\n elif end_point[1] > self.picture.get_size()[1] - 1:\r\n if start_point[1] > self.picture.get_size()[1] - 1:\r\n continue\r\n\r\n # Point on line on bottom border of picture\r\n end_point = (-1\r\n * (self.picture.get_size()[1] - 1)\r\n / line[0],\r\n self.picture.get_size()[1] - 1)\r\n\r\n # Draw the lines\r\n for i in self.get_rows():\r\n draw.line((start_point, end_point), color, width)\r\n\r\n elif i == 'rows':\r\n if self.get_rows():\r\n # Show lines between rows\r\n \r\n width = 3\r\n color = (255,0,0)\r\n\r\n for i in self.get_rows():\r\n draw.line([j.get_center() for j in i], color, width)\r\n else:\r\n print('Rows have not been made for this field')\r\n\r\n elif i == 'numbers':\r\n # Display numbers between plants\r\n\r\n # Find where to put the numbers\r\n midpoints = [(int(round((row[c].get_center()[0]\r\n + row[c + 1].get_center()[0]) / 2)),\r\n int(round((row[c].get_center()[1]\r\n + row[c + 1].get_center()[1]) / 2)))\r\n \r\n for row in self.get_rows()\r\n for c in range(len(row) - 1)]\r\n\r\n # Font specifications\r\n size = 10\r\n font = ImageFont.truetype('ariblk.ttf', size)\r\n num = 1\r\n\r\n # Write numbers\r\n for i in midpoints:\r\n draw.text((i[0] - 3 * len(str(round(num, 1))),\r\n i[1]),\r\n str(round(num,1)), font = font)\r\n \r\n num += 1\r\n\r\n elif i == 'tight':\r\n # Display tight boxes\r\n\r\n for i in self.get_tight_boxes():\r\n draw.rectangle(i, outline=(100, 255, 255))\r\n\r\n elif i == 'distances':\r\n # display distances between plants\r\n\r\n # find where to put the distances\r\n midpoints = [(int(round((row[c].get_center()[0]\r\n + row[c + 1].get_center()[0]) / 2)),\r\n int(round((row[c].get_center()[1]\r\n + row[c + 1].get_center()[1]) / 2)))\r\n for row in self.get_rows()\r\n for c in range(len(row) - 1)]\r\n\r\n # Font specifications\r\n size = 10\r\n font = ImageFont.truetype('arial.ttf', size)\r\n num = 1\r\n\r\n # Write numbers\r\n for i in midpoints:\r\n draw.text((i[0] - 3 * len(str(ruler.get_distances()[num])),\r\n i[1]),\r\n str(ruler.get_distances()[num]) + '\"',\r\n font = font)\r\n \r\n num += 1\r\n\r\n # If the user inputs something that isn't an option \r\n else:\r\n raise Exception(i + ' is not a valid option.\\n')\r\n\r\n return img", "def getimage(self):", "def CreateBitmap(self, notebook, page, button_state, tabArt):\r\n\r\n control = page.control\r\n memory = wx.MemoryDC(wx.EmptyBitmap(1, 1))\r\n\r\n tab_size, x_extent = tabArt.GetTabSize(memory, notebook, page.caption, page.bitmap, page.active,\r\n button_state, control)\r\n \r\n tab_width, tab_height = tab_size\r\n rect = wx.Rect(0, 0, tab_width, tab_height)\r\n\r\n bitmap = wx.EmptyBitmap(tab_width+1, tab_height+1)\r\n memory.SelectObject(bitmap)\r\n\r\n if wx.Platform == \"__WXMAC__\":\r\n memory.SetBackground(wx.TRANSPARENT_BRUSH)\r\n else:\r\n memory.SetBackground(wx.Brush(self._backgroundColour))\r\n \r\n memory.SetBackgroundMode(wx.TRANSPARENT)\r\n memory.Clear()\r\n\r\n paint_control = wx.Platform != \"__WXMAC__\"\r\n tabArt.DrawTab(memory, notebook, page, rect, button_state, paint_control=paint_control)\r\n \r\n memory.SetBrush(wx.TRANSPARENT_BRUSH)\r\n memory.SetPen(wx.BLACK_PEN)\r\n memory.DrawRoundedRectangle(0, 0, 
tab_width+1, tab_height+1, 2)\r\n\r\n memory.SelectObject(wx.NullBitmap)\r\n \r\n # Gtk and Windows unfortunatly don't do so well with transparent\r\n # drawing so this hack corrects the image to have a transparent\r\n # background.\r\n if wx.Platform != '__WXMAC__':\r\n timg = bitmap.ConvertToImage()\r\n if not timg.HasAlpha():\r\n timg.InitAlpha()\r\n for y in xrange(timg.GetHeight()):\r\n for x in xrange(timg.GetWidth()):\r\n pix = wx.Colour(timg.GetRed(x, y),\r\n timg.GetGreen(x, y),\r\n timg.GetBlue(x, y))\r\n if pix == self._backgroundColour:\r\n timg.SetAlpha(x, y, 0)\r\n bitmap = timg.ConvertToBitmap()\r\n return bitmap", "def make_thumbnails(self, exp_id):\n n = 99999\n\n return \"{n} number of images were processed!\".format(n=n)", "def thumbnail_generator():\n website_url = json.loads(request.data.decode())['url']\n try:\n webpage, message = url_preview.send_request(website_url)\n if webpage is not None:\n #Construct the soup object\n soup_object = url_preview.get_soup_object(webpage)\n #Get the title of the artcile\n title = url_preview.get_title(soup_object)\n #Get the website of the article\n website_name = url_preview.get_url(soup_object).rsplit(\".\", 1)[0]\n if website_name is None:\n website_name = website_url.split(\"//\", 1)[1].split(\"/\", 1)[0].rsplit(\".\", 1)[0]\n\n #Get the description of the article\n description = url_preview.get_description(soup_object)\n\n #Get the published date and time of the article\n date_time = url_preview.get_date_time(website_url)\n\n #Get the link to the preview image\n image_url = url_preview.get_preview_image(soup_object)['content']\n\n #Get the link to the favicon\n favicon_url = url_preview. get_favicon(soup_object)\n\n return render_template(\n \"success.html\",\n urlx=website_url,\n title=title,\n site_name=website_name,\n description=description,\n date_time=date_time,\n preview_image=image_url,\n favicon=favicon_url\n )\n except Exception as exp:\n return render_template('error.html', msg=str(exp))", "def genThumbnails(sha1,fileType,config,regen=False):\n (sha1Path,filename) = getSha1Path(sha1)\n relativeFilename = '%s/%s.%s' % (sha1Path,filename,fileType)\n\n thumbnailTypes = ['t','m','n','c','b']\n thumbnailFilenames = []\n for thumbnailType in thumbnailTypes:\n thumbFilename = genThumbnail(relativeFilename,thumbnailType,config,regen=regen)\n thumbnailFilenames.append(thumbFilename)\n return thumbnailFilenames", "def show_picture(self, data):\n raise NotImplementedError", "def make_image(self, frame, filename, **kwds):\n p = plot.plot(frame, **kwds)\n p.save_image(filename)", "def make_image():\n click.echo(\"make_image\")", "def on_draw_over_image(self):", "def generate_image( now ):\n cmd = \"csh mwplot.csh %s\" % (\n now.strftime(\"%Y %m %d %H %M\"),)\n subprocess.call(cmd, shell=True)", "def create_image(self):\n # how many categories?\n aspect_ratio = float(4) / 3\n self.width = int(math.sqrt(aspect_ratio * self.total))\n self.height = int(self.width / aspect_ratio)\n\n img = Image.new(\"RGB\", (self.width, self.height))\n return img", "def thumbnails_urls(request):\n return {'thumbnails_urls': ThumbnailsUrls()}", "def pibooth_setup_picture_factory(cfg, opt_index, factory):", "def _generate_pileups(self):\n pass", "def prepare_thumbnail_url(self, object):\n if object.media is not None:\n return os.path.join(settings.MEDIA_URL, object.media.media_thumb_file.name)\n else:\n return ''", "def show_me():\n # Scumbag thumbnail code\n try:\n from PIL import Image\n except ImportError:\n pass\n else:\n filename = 
os.path.join(app.static_folder, 'img', 'badumtss.png')\n image = Image.open(filename)\n\n return render_template('show_me.html')", "def generateImage(self):\n self.image = self.font.render(self.text, True, self.color)\n self.rect = self.image.get_rect()\n self.rect.center = self.xy", "def process(self,pixmap):", "def generate_thumb(origin, size, fn):\n assert isinstance(size, int), 'Integers are expected'\n img = Image.open(origin)\n path = os.path.dirname(origin)\n\n new_img = img.resize((size, size), Image.ANTIALIAS)\n thumb_path = os.path.join(path, fn)\n new_img.save(thumb_path)\n return thumb_path", "def create_png(image, label):\n sv = \"/home/avojtekova/Desktop/final_results/star_det/generated_images/\" \n \n for i in range(len(image)):\n data = fits.getdata(image[i][0], ext = 0)\n norm = ImageNormalize(data,interval = ZScaleInterval(), stretch = LinearStretch())\n \n print(image[i][0])\n plt.imshow(data, cmap='Greys_r', origin='lower', norm=norm)#[1250:1750, 2000:2500] add this when you want just part of image \n plt.title(label[i])\n plt.axis('off')\n plt.tight_layout()\n plt.legend\n if i<2:\n if not os.path.isdir(sv + image[i][0][-33:-25] + \"/\") :\n os.makedirs(sv + image[i][0][-33:-25] + \"/\")\n plt.savefig(sv + image[i][0][-33:-25] + \"/\" + label[i]+ \"_\" + image[i][0][-33:-25] + \"_big.png\", dpi = 1000)#,bbox_inches='tight', pad_inches = 0) \n else:\n if not os.path.isdir(sv + image[i][0][-40:-32] + \"/\") :\n os.makedirs(sv + image[i][0][-40:-32] + \"/\")\n plt.savefig(sv + image[i][0][-40:-32] + \"/\" + label[i]+image[i][0][-40:-32] + \"_big.png\", dpi = 1000)#,bbox_inches='tight', pad_inches = 0)\n plt.close()", "def make_thumbnail(self):\n # https://gist.github.com/valberg/2429288\n\n # make sure image data is set\n if not self.image_data:\n return False\n\n if self.proxy_data:\n return True\n\n # Create a resized version of the image\n image = Image.open(self.image_data)\n image.thumbnail(THUMBNAIL_SIZE, Image.BICUBIC)\n\n # Save the thumbnail to in-memory 'file'\n temp_thumb = BytesIO()\n image.save(temp_thumb, 'jpeg')\n temp_thumb.seek(0) # rewinds the file\n\n # Save image to a SimpleUploadFile which can be saved\n # into ImageField\n # TODO figure out how to pass base image's UUID before\n # image is committed to DB\n basename = os.path.basename(self.image_data.name)\n uuidname = os.path.splitext(basename)[0]\n suf = SimpleUploadedFile(uuidname,\n temp_thumb.read(), content_type='image/jpeg')\n thumb_filename = '{}_thumb.jpeg'.format(suf.name)\n\n # set save=False, or else it will infinite loop\n self.proxy_data.save(thumb_filename,\n suf,\n save=False)\n\n # Also store the real dimensions for the Pillow thumbnail\n self.proxy_width, self.proxy_height = image.size\n\n temp_thumb.close()\n\n return True", "def make_figure(self, traces):\n pass", "def thumb_profil(log):\n\tpath = get_plato_path()\n\tfrom PIL import Image\n\tsize = 100,200\n\tim = Image.open('/%s/plato_users/%s/profil_BIG.jpg'%(path,log))\n\tim.thumbnail(size, Image.ANTIALIAS)\n\tim.save('/%s/plato_users/%s/profil.jpg'%(path,log),\"JPEG\")", "def _create_placeholder(self, thumbnail_size):\n logger.debug(\"Creating placeholder. thumbnail_size: %s\", thumbnail_size)\n placeholder = Image.new(\"RGB\", (thumbnail_size, thumbnail_size))\n draw = ImageDraw.Draw(placeholder)\n draw.rectangle(((0, 0), (thumbnail_size, thumbnail_size)), outline=\"#E5E5E5\", width=1)\n placeholder = np.array(placeholder)\n self._previewcache[\"placeholder\"] = placeholder\n logger.debug(\"Created placeholder. 
shape: %s\", placeholder.shape)", "def printImage(imageObject):\n # TODO\n pass", "def getimgs():", "def build_thumbnail_image(self, dataset_metadata_dict, bounding_box, visibility=True):\n logger.debug(\"Building WMS thumbnail...\")\n\n #=======================================================================\n # grid_utils = NetCDFGridUtils(dataset_metadata_dict['netcdf_path'],\n # debug=self.debug\n # ) \n #=======================================================================\n \n dataset_folder_kml = self.dataset_type_folder.newfolder(name=dataset_metadata_dict['dataset_title'], visibility=True)\n\n transparent_polygon = self.build_polygon(dataset_metadata_dict,\n bounding_box, visibility=True, \n parent_folder=dataset_folder_kml,\n polygon_name=dataset_folder_kml.name\n )\n logger.debug('transparent_polygon: {}'.format(transparent_polygon))\n #transparent_polygon.color =\n transparent_polygon.style.polystyle.color = '03000000' # 99% transparent black\n transparent_polygon.style.polystyle.outline = 0 # remove the outline\n #transparent_polygon.style.linestyle.color = '80f8f8ff' # 50% transparent white\n\n try:\n logger.debug(\"Dataset WEST extent: {}\".format(dataset_metadata_dict['longitude_min']))\n logger.debug(\"BBOX WEST extent: {}\".format(bounding_box[0]))\n logger.debug(\"Dataset EAST extent: {}\".format(dataset_metadata_dict['longitude_max']))\n logger.debug(\"BBOX EAST extent: {}\".format(bounding_box[2]))\n logger.debug(\"Dataset SOUTH extent: {}\".format(dataset_metadata_dict['latitude_min']))\n logger.debug(\"BBOX SOUTH extent: {}\".format(bounding_box[1]))\n logger.debug(\"Dataset NORTH extent: {}\".format(dataset_metadata_dict['latitude_max']))\n logger.debug(\"BBOX NORTH extent: {}\".format(bounding_box[3]))\n\n wms_url = dataset_metadata_dict['distribution_url'].replace('/dodsC/', '/wms/') #TODO: Replace this hack\n\n if self.cache_images and self.url_root:\n # Retrieve image for entire dataset\n north = dataset_metadata_dict['latitude_max']\n south = dataset_metadata_dict['latitude_min']\n east = dataset_metadata_dict['longitude_max']\n west = dataset_metadata_dict['longitude_min']\n else: \n # Retrieve image for portion of dataset in view bounding box \n west = max(bounding_box[0], dataset_metadata_dict['longitude_min'])\n east = min(bounding_box[2], dataset_metadata_dict['longitude_max'])\n south = max(bounding_box[1], dataset_metadata_dict['latitude_min'])\n north = min(bounding_box[3], dataset_metadata_dict['latitude_max'])\n\n wms_url = wms_url + \"?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&BBOX={0},{1},{2},{3}&CRS=EPSG:4326&WIDTH={4}&HEIGHT={5}&LAYERS={6}&STYLES=&FORMAT=image/png\" \\\n \"&DPI=120&MAP_RESOLUTION=120&FORMAT_OPTIONS=dpi:120&TRANSPARENT=TRUE\" \\\n \"&COLORSCALERANGE={7}%2C{8}&NUMCOLORBANDS=127\".format(south, \n west, \n north, \n east, \n int((east - west) / self.wms_pixel_size), \n int((north - south) / self.wms_pixel_size), \n self.wms_layer_name,\n self.wms_color_range[0],\n self.wms_color_range[1]\n )\n logger.debug('wms_url: {}'.format(wms_url))\n\n #mag_tmi_anomaly\n\n # wms_url = \"http://dapds00.nci.org.au/thredds/wms/rr2/airborne_geophysics/NSW/P1027/magnetics/grid/mNSW1027/\" \\\n # \"mNSW1027.nc?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap\" \\\n # \"&BBOX={0},{1},{2},{3}\" \\\n # \"&CRS=EPSG:4326&WIDTH=206&HEIGHT=269&LAYERS=mag_tmi_anomaly&STYLES=&FORMAT=image/png\" \\\n # \"&DPI=120&MAP_RESOLUTION=120&FORMAT_OPTIONS=dpi:120&TRANSPARENT=TRUE\" \\\n # \"&COLORSCALERANGE=-2781%2C2741&NUMCOLORBANDS=10\".format(south, west, north, east)\n\n # 
dataset_kml = self.dataset_type_folder.newfolder(name='overlay_test',\n # visibility=visibility)\n\n # dataset_kml.style = self.point_style\n \n if self.cache_images and self.url_root:\n # Cache image and mModify URL for cached image file\n wms_url = '{}{}'.format(self.url_root,\n cache_image_file(dataset_type=self.dataset_type, \n image_basename=os.path.splitext(dataset_metadata_dict['netcdf_basename'])[0]+'.png', \n image_source_url=wms_url)\n )\n logger.debug('wms_url: {}'.format(wms_url))\n logger.debug('wms_url: {}'.format(wms_url))\n\n ground_overlay_kml = dataset_folder_kml.newgroundoverlay(name=\"Survey Thumbnail Image\")\n ground_overlay_kml.icon.href = wms_url\n \n ground_overlay_kml.latlonbox.north = dataset_metadata_dict['latitude_max']\n ground_overlay_kml.latlonbox.south = dataset_metadata_dict['latitude_min']\n ground_overlay_kml.latlonbox.east = dataset_metadata_dict['longitude_max']\n ground_overlay_kml.latlonbox.west = dataset_metadata_dict['longitude_min']\n ground_overlay_kml.color = 'aaffffff'\n\n logger.debug('ground_overlay_kml.latlonbox: {}'.format(ground_overlay_kml.latlonbox))\n logger.debug('ground_overlay_kml: {}'.format(ground_overlay_kml))\n\n if self.timestamp_detail_view:\n self.set_timestamps(ground_overlay_kml, dataset_metadata_dict)\n\n logger.debug('ground_overlay_kml: {}'.format(ground_overlay_kml))\n return dataset_folder_kml\n \n except Exception as e:\n logger.debug('Unable to display thumbnail \"{}\": {}'.format(wms_url, e))\n pass", "def test_plot_images(self):\n save_file(self.quart.plot_images)", "def thumbnail(self, size, resample=BICUBIC):\r\n # preserve aspect ratio\r\n x, y = self.size\r\n if x > size[0]:\r\n y = int(max(y * size[0] / x, 1))\r\n x = int(size[0])\r\n if y > size[1]:\r\n x = int(max(x * size[1] / y, 1))\r\n y = int(size[1])\r\n size = x, y\r\n if size == self.size:\r\n return\r\n self.draft(None, size)\r\n self._instance = self.resize(size, resample, image=self._instance)\r\n self.readonly = 0\r\n self.pyaccess = None", "def draw_image(self):\n self.PDF.saveState()\n self.PDF.scale(1, -1)\n # self.PDF.drawImage(\n # LOGO, 490, -78, width=80, preserveAspectRatio=True, mask=\"auto\"\n # )\n self.PDF.restoreState()", "def plot_preview_png():\n name = request.args.get('prev_instance')\n name = str(name)\n fig = create_preview(name)\n output = io.BytesIO()\n FigureCanvas(fig).print_png(output)\n return Response(output.getvalue(), mimetype='image/png')", "def main():\n me = SimpleImage(\"images/me.JPG\")\n dinosaur = SimpleImage(\"images/dinosaur.jpg\")\n\n dinosaur.make_as_big_as(me)\n combine = magic(me, dinosaur)\n combine.show()", "def create_output_image(building_footprint, parcel_footprint, file_path):\n fig, ax = plt.subplots(figsize=(10, 10))\n gpd.overlay(building_footprint, parcel_footprint, how=\"symmetric_difference\").plot(\n ax=ax, color=\"lightgray\"\n )\n parcel_footprint.geometry.exterior.buffer(0.25).plot(ax=ax, color=\"black\")\n building_footprint.plot(ax=ax, color=\"black\")\n\n ax.patch.set_facecolor(\"white\")\n ax.patch.set_edgecolor(\"white\")\n fig.patch.set_visible(False)\n ax.axis(\"off\")\n fig.savefig(\n file_path,\n bbox_inches=\"tight\",\n pad_inches=0,\n facecolor=\"white\",\n edgecolor=\"white\",\n quality=IMG_QUALITY,\n )\n plt.close()", "def ShowLongitBackgroundinPDF(spectra,spectraUp,spectraDown,spectraAv,all_titles,object_name,dir_top_images,all_filt,date,figname,right_edge = 1900,NBIMGPERROW=2,vmin=0,vmax=2000,downsampling=1,verbose=False):\n NBSPEC=len(spectra)\n 
MAXIMGROW=max(2,m.ceil(NBSPEC/NBIMGPERROW))\n \n # fig file specif\n NBIMGROWPERPAGE=5 # number of rows per pages\n PageNum=0 # page counter\n figfilename=os.path.join(dir_top_images,figname)\n pp = PdfPages(figfilename) # create a pdf file\n \n titlepage='Longitudinal background Up/Down for obj : {} date : {} '.format(object_name,date)\n \n \n spec_index_min=100 # cut the left border\n spec_index_max=right_edge # cut the right border\n star_halfwidth=70\n \n for index in np.arange(0,NBSPEC):\n \n \n if index%(NBIMGPERROW*NBIMGROWPERPAGE) == 0:\n f, axarr = plt.subplots(NBIMGROWPERPAGE,NBIMGPERROW,figsize=(25,30))\n f.suptitle(titlepage,size=20)\n \n # index of image in the page \n indexcut=index-PageNum*(NBIMGROWPERPAGE*NBIMGPERROW) \n ix=indexcut%NBIMGPERROW\n iy=indexcut/NBIMGPERROW\n \n # plot what is wanted\n axarr[iy,ix].plot(spectra[index],'r-')\n axarr[iy,ix].plot(spectraUp[index],'b-')\n axarr[iy,ix].plot(spectraDown[index],'g-')\n axarr[iy,ix].plot(spectraAv[index],'m-')\n thetitle=\"{} : {} : {} \".format(index,all_titles[index],all_filt[index])\n axarr[iy,ix].set_title(thetitle,color='blue',fontweight='bold',fontsize=16)\n axarr[iy,ix].grid(True)\n \n star_pos=np.where(spectra[index][:spec_index_max]==spectra[index][:spec_index_max].max())[0][0]\n max_y_to_plot=(spectra[index][star_pos+star_halfwidth:spec_index_max]).max()*1.2\n \n \n axarr[iy,ix].set_ylim(0.,max_y_to_plot)\n #axarr[iy,ix].text(spec_index_min,max_y_to_plot*1.1/1.2, all_filt[index],verticalalignment='top', horizontalalignment='center',color='blue',fontweight='bold', fontsize=20)\n \n \n # save a new page\n if (index+1)%(NBIMGPERROW*NBIMGROWPERPAGE) == 0:\n PageNum+=1 # increase page Number\n f.savefig(pp, format='pdf')\n f.show()\n \n \n \n \n f.savefig(pp, format='pdf') \n f.show()\n pp.close()", "def _repr_png_(self):\n return self.tree._repr_png_()", "def add_art(self,path,size=\"500\"):\n error=False\n if path:\n print(\"processing %s to %s\" % (path,self.uid))\n f=open(path,'rb') \n filedata=f.read()\n extension=(imghdr.what('',filedata) or path.rsplit(\".\")[-1].lower()).replace('jpeg','jpg')\n if not filedata:\n error= \"NO IMAGE FOUND AT '%s'\" % path\n print(error)\n elif extension in ('bmp','png'):\n filedata=self.Image.convert(filedata)\n extension='jpg' \n elif extension not in ('gif','png','jpg','jpeg'):\n error=\"only JPEG, GIF, PNG, and BMP are supported\"\n print(error)\n if not error:\n # create a new image page\n image=self.Image.new()\n image.parent=self.uid\n image.kind='image'\n image.seq=0xFFFFFF#place at end of siblings\n # set default size \n image.stage='right full %sx%s' % (size,size) #rest of stage data will be added on the fly later by get_stage_data() \n image.set_lineage()\n image.code=\"%s.%s\" % (image.uid,extension)\n image.when=DATE()\n image.flush() #store the image page\n image.renumber_siblings_by_kind()#keep them in order\n # save the image file\n image.save_file(filedata)\n # return\n print('image \"%s\" added' % image.code)\n return image\n return None", "def draw_nonogram(self):\n image = Image.new(\"RGB\", (self.nonogram_size * 50, self.nonogram_size * 50), (255, 255, 255))\n draw = ImageDraw.Draw(image)\n\n for index, square in enumerate(reduce(lambda x, y: x+y, self.grid), 0):\n\n #print(square)\n x = index % self.nonogram_size\n y = index // self.nonogram_size\n coord = [(x * 50, y * 50), ((x + 1) * 50, (y + 1) * 50)]\n if square == EMPTY:\n draw.rectangle(coord, fill=(255, 255, 255))\n if square == FILLED:\n draw.rectangle(coord, fill=(0, 0, 0))\n return image", "def 
NotebookPreview(self, thumbnail_size=200):\r\n\r\n if wx.Platform == \"__WXMAC__\":\r\n return False\r\n\r\n tabCtrl = self.GetActiveTabCtrl()\r\n activePage = tabCtrl.GetActivePage()\r\n pages = tabCtrl.GetPages()\r\n\r\n pageStatus, pageText = [], []\r\n\r\n for indx, page in enumerate(pages):\r\n\r\n pageStatus.append(page.enabled)\r\n\r\n if not page.enabled:\r\n continue\r\n \r\n self.SetSelectionToPage(page) \r\n pageText.append(page.caption)\r\n\r\n rect = page.window.GetScreenRect()\r\n bmp = RescaleScreenShot(TakeScreenShot(rect), thumbnail_size)\r\n\r\n page.enabled = False\r\n if indx == 0:\r\n il = wx.ImageList(bmp.GetWidth(), bmp.GetHeight(), True)\r\n\r\n il.Add(bmp) \r\n\r\n # create the list control\r\n listCtrl = wx.ListCtrl(self, style=wx.LC_ICON|wx.LC_AUTOARRANGE|wx.LC_HRULES|wx.LC_VRULES,\r\n name=\"__fake__page__\")\r\n\r\n # assign the image list to it\r\n listCtrl.AssignImageList(il, wx.IMAGE_LIST_NORMAL)\r\n listCtrl.__previousStatus = [activePage, pageStatus]\r\n\r\n # create some items for the list\r\n for indx, text in enumerate(pageText):\r\n listCtrl.InsertImageStringItem(10000, text, indx)\r\n \r\n self.AddPage(listCtrl, \"AuiNotebook Preview\", True, bitmap=auinotebook_preview.GetBitmap(), disabled_bitmap=wx.NullBitmap)\r\n return True", "def write_thumbnail(image_name, size):\n # TODO : use something else instead of image.thumbnail\n sizes = {\n 'small' : [30,40],\n 'medium' : [70,70],\n 'large' : [120,120]\n }\n image = Image.open(f'{WRITE_FOLDER}/{USER_NAME}/original/{image_name}')\n image.thumbnail((sizes[size][0], sizes[size][1]))\n image.save(f'{WRITE_FOLDER}/{USER_NAME}/{size}/{image_name}')", "def RefreshThumbnail(self):\n if not self.property:\n self.bmp = None\n return\n\n path = self.property.DoGetValue()\n\n if not os.path.isfile(path):\n self.bmp = None\n return\n\n image = wx.Image(path)\n image.Rescale(64, 64)\n self.bmp = wx.BitmapFromImage(image)", "def resize_profile_pic(sender, instance, **kwargs):\n profile_pic = instance.profile_picture\n if profile_pic.name != \"default.png\":\n img = Image.open(profile_pic.path)\n if img.height > 300 or img.width > 300:\n output_size = (300, 300)\n img.thumbnail(output_size)\n img.save(profile_pic.path)", "def test_create_image(self):\n pass", "def delete_thumbnail(self, thumbnail_name):", "def thumbnail(self):\n return self.get_thumbnail_url()", "def get_thumbnail_url(self):\n raise NotImplementedError(\"Subclass must implement abstract method get_thumbnail_url\")", "def on_draw_over_backgroundimage(self):", "def get_thumbnail_size(self, thumbnail_name, forced=False):", "def get_picture(self):\n if self.width>50 or self.height>50:\n return \"Too big for picture.\"\n\n br = '\\n'\n s = ''\n se = ('{:*>'+str(self.width)+'}').format('')\n for i in range(self.height):\n s += se + br\n return s", "def style_snapshot(figure: Figure) -> Figure:\n figure.axis.visible = False\n figure.xgrid.visible = False\n figure.ygrid.visible = False\n figure.toolbar_location = None\n figure.toolbar.logo = None\n figure.outline_line_width = 0\n figure.outline_line_alpha = 0\n\n return figure", "def plot_blank(self):\n self.figure_bmp.SetBitmap(self.controller.plot_blank())", "def test_save_matplotlib_figures_hidpi(gallery_conf):\n ext = \"png\"\n gallery_conf[\"image_srcset\"] = [\"2x\"]\n\n import matplotlib.pyplot as plt # nest these so that Agg can be set\n\n plt.plot(1, 1)\n fname_template = os.path.join(gallery_conf[\"gallery_dir\"], \"image{0}.png\")\n image_path_iterator = ImagePathIterator(fname_template)\n block = 
(\"\",) * 3\n block_vars = dict(image_path_iterator=image_path_iterator)\n image_rst = save_figures(block, block_vars, gallery_conf)\n\n fname = f\"/image1.{ext}\"\n assert fname in image_rst\n assert f\"/image1_2_00x.{ext} 2.00x\" in image_rst\n\n assert len(image_path_iterator) == 1\n fname = gallery_conf[\"gallery_dir\"] + fname\n fnamehi = gallery_conf[\"gallery_dir\"] + f\"/image1_2_00x.{ext}\"\n\n assert os.path.isfile(fname)\n assert os.path.isfile(fnamehi)\n\n # Test capturing 2 images with shifted start number\n image_path_iterator.next()\n image_path_iterator.next()\n plt.plot(1, 1)\n plt.figure()\n plt.plot(1, 1)\n image_rst = save_figures(block, block_vars, gallery_conf)\n assert len(image_path_iterator) == 5\n for ii in range(4, 6):\n fname = f\"/image{ii}.{ext}\"\n assert fname in image_rst\n\n fname = gallery_conf[\"gallery_dir\"] + fname\n assert os.path.isfile(fname)\n fname = f\"/image{ii}_2_00x.{ext}\"\n assert fname in image_rst\n fname = gallery_conf[\"gallery_dir\"] + fname\n assert os.path.isfile(fname)" ]
[ "0.7301337", "0.6821647", "0.68065804", "0.6654889", "0.66077906", "0.65570223", "0.6476974", "0.64667434", "0.64605975", "0.64199996", "0.64156866", "0.63598025", "0.63443893", "0.63439167", "0.63422894", "0.63266927", "0.6324976", "0.62868947", "0.61534524", "0.60603607", "0.6002906", "0.5996318", "0.5973992", "0.59597445", "0.59584236", "0.5955276", "0.5952915", "0.594197", "0.5935294", "0.58997846", "0.5899383", "0.58738214", "0.5865264", "0.5845318", "0.5838944", "0.58189154", "0.5817616", "0.58171546", "0.5807494", "0.5807494", "0.57960767", "0.5786129", "0.576133", "0.5753485", "0.5748482", "0.572421", "0.57221663", "0.57219535", "0.57109743", "0.5693949", "0.56777316", "0.56662965", "0.56405455", "0.563841", "0.56226337", "0.5619181", "0.5613813", "0.55877864", "0.5570461", "0.55568767", "0.555592", "0.5536707", "0.55305445", "0.5522348", "0.5518817", "0.5514372", "0.5509965", "0.5506493", "0.5505423", "0.5499874", "0.5494379", "0.54924613", "0.54919255", "0.548991", "0.54885715", "0.548008", "0.54753274", "0.54733336", "0.547327", "0.5467997", "0.546768", "0.54673785", "0.54648674", "0.54625195", "0.5449673", "0.54463875", "0.54364026", "0.54268473", "0.5426706", "0.5425338", "0.54087585", "0.540721", "0.5406739", "0.5406051", "0.53886616", "0.5387059", "0.5386056", "0.53858536", "0.538375", "0.53803164", "0.53801227" ]
0.0
-1
Used for generating thumbnails. Does not include overlaid graphics.
def save_plain_image_as_file(self, filepath, format='png', quality=90):\n        img_w = self.get_plain_image_as_widget()\n        # assumes that the image widget has some method for saving to\n        # a file\n        img_w.save(filepath, format=format, quality=quality)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_thumbnail(self, target, format=None):", "def generate_image(self):\n pass", "def _prepareImage(self):\n painter = QPainter(self)\n if len(self.thumbs) == 0:\n return\n destwidth = self.width()\n division = len(self.thumbs)\n NF = division\n slit_width = destwidth // division + 1\n if slit_width < self.minwidth:\n slit_width = self.minwidth\n division = destwidth // slit_width - 1\n for slit in range(division):\n point = QPoint(slit*destwidth // division,0)\n i = slit*NF // division\n thumb = self.transformer(self.thumbs[i])\n w = thumb.width()\n h = thumb.height()\n if w > slit_width:\n w0 = (w-slit_width)//2\n cropped = thumb.copy(w0,0,slit_width,h)\n painter.drawImage(point, cropped)\n else:\n painter.drawImage(point, thumb)", "def generate_thumbnail(progress_controller=None):\n # TODO: For now skip if this is Maya2017\n import pymel\n\n if pymel.versions.current() >= 201700:\n return\n\n # skip this if maya is running in batch mode\n if pm.general.about(batch=1):\n return\n\n from anima.dcc.mayaEnv import auxiliary\n\n auxiliary.generate_thumbnail()", "def bigThumbnail(self):\n\t\tfileCount = len(self.fileList)\n\t\tthumbSize = (200, 200)\n\t\timgHoriz = int(self.get_screen().get_width() / (thumbSize[1] + 20))\n\t\timgSize = (self.get_screen().get_width(), (thumbSize[1] + 20) * (int(fileCount / imgHoriz) + 2))\n\n\t\tpixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, imgSize[0], imgSize[1])\n\t\tfor file in range(len(self.fileList)):\n\t\t\ttry:\n\t\t\t\ttimg = gtk.gdk.pixbuf_new_from_file(self.fileList[file])\n\t\t\texcept:\n\t\t\t\tprint >> sys.stderr, \"Failed to load image %s\" % self.fileList[file]\n\t\t\t\tcontinue\n\t\t\ttimgSize = [timg.get_width(), timg.get_height()]\n\t\t\tif timgSize[0] > thumbSize[0] or timgSize[1] > thumbSize[1]:\n\t\t\t\tscaleFactor = 1.0 * thumbSize[0] / timgSize[0]\n\t\t\t\tif timgSize[1] * scaleFactor > thumbSize[1]:\n\t\t\t\t\tscaleFactor = 1.0 * thumbSize[1] / timgSize[1]\n\t\t\t\tself.scaleFactor = scaleFactor\n\t\t\t\ttimgSize[0] = int(timgSize[0] * scaleFactor)\n\t\t\t\ttimgSize[1] = int(timgSize[1] * scaleFactor)\n\t\t\t\ttimg = timg.scale_simple(timgSize[0], timgSize[1], gtk.gdk.INTERP_BILINEAR)\n\t\t\tpos = ( (file % imgHoriz) * (thumbSize[0] + 20) + 10 + (thumbSize[0] - timgSize[0]) / 2,\n\t\t\t\tint(file / imgHoriz) * (thumbSize[1] + 20) + 10)\n\n\t\t\tprint \" Rendering thumbnails; %d of %d\\r\" % (file, len(self.fileList)),\n\t\t\tsys.stdout.flush()\n\n\t\t\ttimg.copy_area(0, 0, timgSize[0], timgSize[1], pixbuf, pos[0], pos[1])\n\t\t\tdel timg\n\t\t\tgc.collect()\n\t\tprint\n\t\tself.currentPixbuf = pixbuf\n\t\tself.fileList = [ \"#\" ]\n\t\tself.fileName = \"#\"\n\t\tself.autoScale()\n\t\tself.display()", "def generate_thumbnail():\n import tempfile\n import glob\n from anima.env import mayaEnv\n m_env = mayaEnv.Maya()\n v = m_env.get_current_version()\n\n if not v:\n return\n\n # do not generate a thumbnail from a Repr\n if '@' in v.take_name:\n return\n\n task = v.task\n project = task.project\n # repo = project.repository\n imf = project.image_format\n width = int(imf.width * 0.5)\n height = int(imf.height * 0.5)\n\n temp_output = tempfile.mktemp()\n\n current_frame = pm.currentTime(q=1)\n output_file = pm.playblast(\n fmt='image',\n startTime=current_frame,\n endTime=current_frame,\n sequenceTime=1,\n forceOverwrite=1,\n filename=temp_output,\n clearCache=1,\n showOrnaments=1,\n percent=100,\n wh=(width, height),\n offScreen=1,\n viewer=0,\n compression='PNG',\n quality=70,\n framePadding=0\n )\n 
pm.currentTime(current_frame)\n\n output_file = output_file.replace('####', '*')\n found_output_file = glob.glob(output_file)\n if found_output_file:\n output_file = found_output_file[0]\n\n from anima.ui import utils\n utils.upload_thumbnail(task, output_file)\n\n return found_output_file", "def get_thumbnail(format):", "def plot_thumb(self, data_fname):\n thumbnail = self.controller.plot_thumb(data_fname, self.bitmap_width, self.bitmap_height)\n if thumbnail is not None:\n self.figure_bmp.SetBitmap(thumbnail)\n else:\n self.plot_blank()", "def write_thumbnails(self, appstruct):\n slugser = slugify(appstruct[\"serial\"])\n pdf_filename = \"thumbnails/%s/uploaded.pdf\" % slugser\n top_file = \"thumbnails/%s/top.png\" % slugser\n mos_file = \"thumbnails/%s/mosaic.png\" % slugser\n \n thumg = ThumbnailGenerator(pdf_filename)\n self.save_blob(thumg.top_thumbnail(), top_file)\n self.save_blob(thumg.mosaic_thumbnail(), mos_file)", "def get_thumbnail_url():", "def setThumbnailImage(*args):", "def small_image(self):\n pass", "def create_image_caption_pairs(self):", "def generateThumbnail(img):\n\n if not img._thumbfn:\n return\n\n aimgfn = join(opts.root, img._filename)\n if not opts.fast:\n img._size = imageSize(aimgfn)\n\n athumbfn = join(opts.root, img._thumbfn)\n\n if opts.thumb_force:\n if opts.quiet: print \"forced regeneration of '%s'\" % img._thumbfn\n elif not exists(athumbfn):\n if opts.quiet: print \"thumbnail absent '%s'\" % img._thumbfn\n else:\n # Check if thumbsize has changed\n if not opts.fast:\n img._thumbsize = imageSize(athumbfn)\n if not checkThumbSize(img._size, \\\n img._thumbsize, \\\n opts.thumb_size):\n if opts.quiet: print \"thumbnail '%s size has changed\" % img._thumbfn\n try:\n # Clear cache for thumbnail size.\n del imageSizeCache[ athumbfn ]\n except:\n pass\n else:\n# pass\n# if opts.quiet: print \"thumbnail '%s' already generated (size ok)\" \\\n# % img._thumbfn\n return\n else:\n if opts.quiet: print \"thumbnail '%s' already generated\" % img._thumbfn\n return\n\n if opts.no_magick:\n if opts.quiet: print \"ImageMagick tools disabled, can't create thumbnail\"\n return\n\n # create necessary directories\n d = dirname(athumbfn)\n if not exists(d):\n os.makedirs(d)\n\n if opts.pil:\n\n try:\n im = PilImage.open(aimgfn)\n im.thumbnail((opts.thumb_size, opts.thumb_size), config.Thumbnails[\"Interpolation\"])\n im.save(athumbfn)\n\n img._thumbsize = im.size\n except IOError, e:\n raise SystemExit(\\\n \"Error: identifying file '%s'\" % aimgfn + str(e))\n\n else:\n\n cmd = getMagickProg('convert') + ' -border 2x2 '\n # FIXME check if this is a problem if not specified\n #cmd += '-interlace NONE '\n\n cmd += '-geometry %dx%d ' % (opts.thumb_size, opts.thumb_size)\n\n if opts.thumb_quality:\n cmd += '-quality %d ' % opts.thumb_quality\n\n # This doesn't add text into the picture itself, just the comment in\n # the header.\n if opts.copyright:\n cmd += '-comment \\\"%s\\\" ' % opts.copyright\n\n # We use [1] to extract the thumbnail when there is one.\n # It is harmless otherwise.\n subimg = \"\"\n if img._ext.lower() in [ \".jpg\", \".tif\", \".tiff\" ]:\n subimg = \"[1]\"\n\n cmd += '\"%s%s\" \"%s\"' % (aimgfn, subimg, athumbfn)\n\n if opts.quiet: print \"generating thumbnail '%s'\" % img._thumbfn\n\n (chin, chout, cherr) = os.popen3(cmd)\n errs = cherr.readlines()\n chout.close()\n cherr.close()\n if errs:\n print >> sys.stderr, \\\n \"Error: running convert program on %s:\" % aimgfn\n errs = string.join(errs, '\\n')\n print errs\n\n if subimg and \\\n 
re.compile('Unable to read subimage').search(errs):\n if opts.quiet: print \"retrying without subimage\"\n cmd = string.replace(cmd, subimg, \"\")\n\n (chin, chout, cherr) = os.popen3(cmd)\n errs = cherr.readlines()\n chout.close()\n cherr.close()\n if errs:\n print >> sys.stderr, \\\n \"Error: running convert program on %s:\" % aimgfn\n print string.join(errs, '\\n')\n\n else:\n img._thumbsize = imageSize(athumbfn)", "def generate_thumbnail(self, img_path):\n\n thumb_path = self.thumbnail_path(img_path)\n dirpath = os.path.dirname(thumb_path)\n try:\n os.makedirs(dirpath)\n except OSError: # path exists\n pass\n\n cmd = [\n '/usr/local/bin/gm',\n 'convert',\n '-thumbnail', '256x256>',\n '-background', 'transparent',\n '-gravity', 'center',\n '-extent', '256x256',\n img_path, thumb_path\n ]\n\n retcode = subprocess.call(cmd)\n\n if retcode:\n log.error('convert exited with %d : %s', retcode, img_path)\n return False\n\n log.debug('Wrote thumbnail for `%s` to `%s`.', img_path, thumb_path)\n\n return True", "def clear_thumbnails(self):", "def thumbnail(self, fnameIn, fnameOut):\n cmd = \"convert -define jpeg:size=500x150 \"\n cmd += '\"%s\" ' % os.path.join(self.downloadFolder, fnameIn)\n cmd += \"-auto-orient -thumbnail 250x150 \"\n cmd += '\"%s\" ' % os.path.join(self.thumbnailFolder, fnameOut)\n self.log(\"creating thumbnail ...\")\n self.log(cmd)\n process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n process.wait()", "def build_filler_images(self):", "def image_preview(self):\r\n h = '<img src=\"%s\" alt=\"%s\"/>' % (self.image_resized_url, self.title)\r\n return mark_safe(h)", "def large_image(self):\n pass", "def generate_thumbnail(image_data, min_source_height, max_source_height, min_source_width, max_source_width, content_type, width, height, overlay_path, valign, top_crop_pct=None, bottom_crop_pct=None, left_crop_pct=None, right_crop_pct=None, crop_x=None, crop_y=None, post_crop_uniform_scale_pct=None):\n # figure out the width/height of the image from the datastore\n \n# img = images.Image(image_data=image_data)\n# img.crop(left_x=0.25, top_y=0.25, right_x=0.25, bottom_y=0.25)\n# img.resize(width=width, height=height)\n# logging.info('(b) w=%i, h=%i' % (img.width, img.height))\n# output = img.execute_transforms(output_encoding=img.format)\n \n image = images.Image(image_data)\n \n if min_source_height is not None and image.height < min_source_height:\n return None\n if max_source_height is not None and image.height > max_source_height:\n return None\n \n if min_source_width is not None and image.width < min_source_width:\n return None\n if max_source_width is not None and image.width > max_source_width:\n return None\n \n \n if content_type == 'image/png':\n output_encoding = images.PNG\n else:\n output_encoding = images.JPEG\n if crop_x is not None and crop_y is not None and valign is None and top_crop_pct is None and bottom_crop_pct is None and (image.width >= crop_x + width) and (image.height >= crop_y + height):\n fw = float(image.width)\n fh = float(image.height)\n try:\n output = images.crop(image_data, float(crop_x) / fw, float(crop_y) / fh, float(crop_x + width) / fw, float(crop_y + height) / fh, output_encoding=output_encoding)\n except:\n output = image_data\n else:\n if width > image.width and height > image.height:\n output = image_data\n# # this would result in scaling the image UP, that's no good\n# if image.width > image.height:\n# width = image.width\n# else:\n# height = image.height\n# \n# output = images.resize(image_data, width, height, 
output_encoding)\n else:\n output = rescale(image, width, height, halign='middle', valign=valign, top_crop_pct=top_crop_pct, bottom_crop_pct=bottom_crop_pct, left_crop_pct=left_crop_pct, right_crop_pct=right_crop_pct)\n \n if post_crop_uniform_scale_pct is not None:\n output = images.resize(output, width=int(width * post_crop_uniform_scale_pct), output_encoding=output_encoding)\n \n if overlay_path is not None:\n # read the overlay into memory\n overlay_data = open(overlay_path,'r').read()\n # composite the overlay onto the rescaled output\n if content_type == 'image/png':\n output_encoding = images.PNG\n else:\n output_encoding = images.JPEG\n output = images.composite(\n inputs=[\n (output,0,0,1.0,images.CENTER_CENTER),\n (overlay_data,0,0,1.0,images.CENTER_CENTER),\n ],\n width=width,\n height=height,\n output_encoding=output_encoding\n )\n return output", "def create_thumbnail(image_name):\n try:\n # SMALL\n write_thumbnail(image_name, 'small')\n # MEDIUM\n write_thumbnail(image_name, 'medium')\n # LARGE\n write_thumbnail(image_name, 'large')\n\n except IOError:\n print('create thumbnail error')\n pass", "def createThumbnail(self, useCursorPosition=False, dbPath = None, versionInt = None):\n\n return \"\"\n # logger.debug(\"Func: createThumbnail\")\n # projectPath = self.projectDir\n # if useCursorPosition:\n # versionInt = self.currentVersionIndex\n # dbPath = self.currentDatabasePath\n # else:\n # if not dbPath or not versionInt:\n # msg = \"Both dbPath and version must be defined if useCursorPosition=False\"\n # raise Exception ([360, msg])\n #\n # versionStr = \"v%s\" % (str(versionInt).zfill(3))\n # dbDir, shotNameWithExt = os.path.split(dbPath)\n # shotName = os.path.splitext(shotNameWithExt)[0]\n #\n # thumbPath = \"{0}_{1}_thumb.jpg\".format(os.path.join(dbDir, shotName), versionStr)\n # relThumbPath = os.path.relpath(thumbPath, projectPath)\n #\n # # create a thumbnail using playblast\n # thumbDir = os.path.split(thumbPath)[0]\n # if os.path.exists(thumbDir):\n # # frame = pm.currentTime(query=True)\n # frame = cmds.currentTime(query=True)\n # # store = pm.getAttr(\"defaultRenderGlobals.imageFormat\")\n # store = cmds.getAttr(\"defaultRenderGlobals.imageFormat\")\n # # pm.setAttr(\"defaultRenderGlobals.imageFormat\", 8) # This is the value for jpeg\n # cmds.setAttr(\"defaultRenderGlobals.imageFormat\", 8) # This is the value for jpeg\n # # pm.playblast(completeFilename=thumbPath, forceOverwrite=True, format='image', width=221, height=124, showOrnaments=False, frame=[frame], viewer=False, percent=100)\n # cmds.playblast(completeFilename=thumbPath, forceOverwrite=True, format='image', width=221, height=124, showOrnaments=False, frame=[frame], viewer=False, percent=100)\n # # pm.setAttr(\"defaultRenderGlobals.imageFormat\", store) #take it back\n # cmds.setAttr(\"defaultRenderGlobals.imageFormat\", store) #take it back\n # else:\n # # pm.warning(\"something went wrong with thumbnail. Skipping thumbnail\")\n # cmds.warning(\"something went wrong with thumbnail. 
Skipping thumbnail\")\n # return \"\"\n # # return thumbPath\n # return relThumbPath", "def img_url_thumbnail(self):\n url = '%s=s%s-c' % (self.img_url, self.THUMBNAIL_SIZE_PX)\n if self.img_rot in Plaque.ALLOWED_ROTATIONS:\n url = \"%s-r%s\" % (url, self.img_rot)\n return url", "def top_thumbnail(self):\n serial = slugify(self.request.matchdict[\"serial\"])\n filename = \"thumbnails/%s/top.png\" % serial\n return FileResponse(filename)", "def generate_thumb_filename(instance, filename):\n return _generate_filename(instance, filename, 'thumbs')", "def RescaleScreenShot(bmp, thumbnail_size=200):\r\n\r\n bmpW, bmpH = bmp.GetWidth(), bmp.GetHeight()\r\n img = bmp.ConvertToImage()\r\n\r\n newW, newH = bmpW, bmpH\r\n \r\n if bmpW > bmpH:\r\n if bmpW > thumbnail_size:\r\n ratio = bmpW/float(thumbnail_size)\r\n newW, newH = int(bmpW/ratio), int(bmpH/ratio)\r\n img.Rescale(newW, newH, wx.IMAGE_QUALITY_HIGH)\r\n else:\r\n if bmpH > thumbnail_size:\r\n ratio = bmpH/float(thumbnail_size)\r\n newW, newH = int(bmpW/ratio), int(bmpH/ratio)\r\n img.Rescale(newW, newH, wx.IMAGE_QUALITY_HIGH)\r\n\r\n newBmp = img.ConvertToBitmap()\r\n otherBmp = wx.EmptyBitmap(newW+5, newH+5) \r\n\r\n memDC = wx.MemoryDC()\r\n memDC.SelectObject(otherBmp)\r\n memDC.SetBackground(wx.WHITE_BRUSH)\r\n memDC.Clear()\r\n \r\n memDC.SetPen(wx.TRANSPARENT_PEN)\r\n\r\n pos = 0\r\n for i in xrange(5, 0, -1):\r\n brush = wx.Brush(wx.Colour(50*i, 50*i, 50*i))\r\n memDC.SetBrush(brush)\r\n memDC.DrawRoundedRectangle(0, 0, newW+5-pos, newH+5-pos, 2)\r\n pos += 1\r\n\r\n memDC.DrawBitmap(newBmp, 0, 0, True)\r\n \r\n # Select the Bitmap out of the memory DC by selecting a new\r\n # uninitialized Bitmap\r\n memDC.SelectObject(wx.NullBitmap)\r\n\r\n return otherBmp", "def GetThumbnail(self, type, maxsize): # real signature unknown; restored from __doc__\n pass", "def thumbnail_url(self):\n return None", "def clear_thumbnail(self):\n from anima.ui import utils\n utils.clear_thumbnail(self.thumbnail_graphics_view)", "def admin_photo(self, obj=None, size='default'):\n self = obj if obj else self\n if hasattr(self, 'get_thumbnail_url'):\n return '<a class=\"thumb-'+size+'\" href=\"{}\"><img src=\"{}\"></a>'.format(\n self.admin_url, self.get_thumbnail_url(size))", "def create_full_pic(self):\n self.create_half_pic()\n mirror_update(self.flag)", "def create_strip(self, resolution_ratio=None):\n\n if not resolution_ratio:\n resolution_ratio = self.strip_resolution_ratio\n\n padding = 40\n photo_width = int(self.photo_resolution[0] * resolution_ratio)\n photo_height = int(self.photo_resolution[1] * resolution_ratio)\n width = (photo_width * 2) + (padding * 4)\n height = (photo_height * self.picture_count) + (padding * (self.picture_count + 1))\n\n strip = Image.new('RGB', (width, height))\n canvas = ImageDraw.Draw(strip)\n canvas.rectangle((0, 0, width, height), fill=ImageColor.getcolor('#ffffff', 'RGB'))\n\n for i in range(0, self.picture_count):\n image = Image.open(self.pictures_taken[i])\n image = image.convert(mode='RGB')\n image = image.resize((photo_width, photo_height), resample=Image.LANCZOS)\n strip.paste(image, box=(\n padding,\n padding + (padding * i) + (photo_height * i)\n ))\n strip.paste(image, box=(\n padding + photo_width + padding + padding,\n padding + (padding * i) + (photo_height * i)\n ))\n del image\n\n strip = strip.transpose(Image.FLIP_LEFT_RIGHT)\n strip = strip.filter(ImageFilter.DETAIL)\n strip = strip.filter(ImageFilter.SHARPEN)\n\n (handle, file_name) = mkstemp(suffix='.jpg', prefix='photoberry-strip')\n 
os.close(handle)\n handle = open(file_name, 'wb')\n strip.save(handle, format='jpeg', quality=95, optimize=True)\n handle.close()\n handle.close()\n del strip\n return file_name", "def GET_link_thumb(self, *a, **kw):\r\n return \"nothing to see here.\"", "def main():\r\n original = SimpleImage(\"images/poppy.png\")\r\n original.show()\r\n # shrink function\r\n after_shrink = shrink('images/poppy.png')\r\n after_shrink.show()", "def create_base_image(self, builder, template, parameters):", "def generatePreview(self):\n self.saveParameters()\n image=self.simulation.generatePreview()\n # convert pil image to a tkinter image\n self.photo = ImageTk.PhotoImage(image)\n\n # display image\n self.preview.create_image(0, 0, anchor='nw', image=self.photo)", "def make_thumbnail(image, size=(100, 100)):\n logging.debug(image)\n\n im = create_colorblind_image(image)\n\n thumb_io = BytesIO() # create a BytesIO object\n\n im.save(thumb_io, 'PNG', quality=85) # save image to BytesIO object\n\n thumbnail = File(thumb_io, name=image.name) # create a django friendly File object\n\n return thumbnail", "def _repr_png_(self):\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoimg(\n mol, size, self.aix, \"\", returnPNG=True, drawOptions=opts,\n kekulize=keku, highlightBonds=self.bix\n )", "def _repr_png_(self):\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoimg(\n mol, size, self.aix, \"\", returnPNG=True, drawOptions=opts,\n kekulize=keku, highlightBonds=self.bix\n )", "def get_thumbnail_name(self, thumbnail_name, with_size=None):", "def make_image(self, path):\n\t\treturn None", "def test_create_thumbnails(self):\n \n logging.info('create_thumbnail')\n \n for size in self.article.sizes.keys():\n assert not self.article.thumbnail_exists(size)\n\n self.article.image.save('avatar.png', ContentFile(self.image.read()))\n self.article.create_thumbnails()\n \n for size in self.article.sizes.keys():\n assert self.article.thumbnail_exists(size)", "def mosaic_thumbnail(self):\n serial = slugify(self.request.matchdict[\"serial\"])\n filename = \"thumbnails/%s/mosaic.png\" % serial\n return FileResponse(filename)", "def thumbnail(im, config):\n\n im.thumbnail(\n (config['width'], config['height']),\n ANTIALIAS,\n )\n\n return im", "def _gen_thumbs_filename(instance, filename):\n return _unique_path(instance.owner.pk, filename, category='thumbs')", "def exportImg(self):\n if self.superSampling:\n print(\"Exporting with size adjusted\")\n self.img = self.img.resize((int(self.width/2),int(self.height/2)),Image.NEAREST)\n self.img.save(self.fileName,\"PNG\")", "def create_thumb(source_fame, target_fame, target_w = 260, target_h=205):\r\n size = target_w, target_h\r\n im = Image.open(source_fame)\r\n width = im.size[0]\r\n height = im.size[1]\r\n newwidth = int(size[0])\r\n newheight = int(height*(newwidth/float(width)))\r\n if newheight > int(size[1]):\r\n newheight = int(size[1])\r\n newwidth = int(width*(newheight/float(height)))\r\n size = newwidth, newheight\r\n # Resize and save the image\r\n im.thumbnail(size, Image.ANTIALIAS)\r\n im.save(target_fame)", "def genThumbnail(filename,thumbnailType,config,regen=False):\n # define the sizes of the various thumbnails\n thumbnailTypeDefinitions={\n 's': (75,75), #should be square eventually\n 'q': (150,150), #should be square eventually\n 't': (100,100),\n 'm': 
(240,240),\n 'n': (320,230),\n 'k': (500,500),\n 'c': (800,800),\n 'b': (1024,1024)}\n size = thumbnailTypeDefinitions[thumbnailType]\n thumbFilename = filename.split('.')[0] + '_' + thumbnailType + '.' + filename.split('.')[1]\n if os.path.isfile(config['LOCALARCHIVEPATH']+'/'+thumbFilename) and regen == False:\n return(thumbFilename)\n else:\n try:\n logger.info('Generating thumbnail: %s' %(config['LOCALARCHIVEPATH']+'/'+thumbFilename))\n img = Image.open(config['LOCALARCHIVEPATH']+'/'+filename)\n icc_profile = img.info.get('icc_profile')\n img.thumbnail(size,Image.ANTIALIAS)\n img.save(config['LOCALARCHIVEPATH']+'/'+thumbFilename, 'JPEG', icc_profile=icc_profile, quality=95)\n return(thumbFilename)\n except IOError as e:\n raise e", "def make_visual(self,\r\n ruler = None,\r\n options: list = ['rows',\r\n 'centers',\r\n 'distances']\r\n ) -> 'Image':\r\n \r\n original = self.get_picture().get_photo()\r\n\r\n # Copy the original image for drawing\r\n img = original.copy()\r\n draw = ImageDraw.Draw(img)\r\n\r\n # check all the choices provided by the user\r\n for i in options:\r\n \r\n if i == 'clusters':\r\n # Color all cluster pixels red\r\n \r\n for j in self.get_clusters():\r\n for k in j:\r\n img.putpixel(k, (25,275,25))\r\n\r\n elif i == 'row_ids':\r\n # Make row id numbers\r\n\r\n # Font specifications\r\n size = 75\r\n font = ImageFont.truetype('ariblk.ttf', size)\r\n color = (88, 214, 216)\r\n num = 1\r\n\r\n # Draw the ids\r\n for j in self.rows:\r\n draw.text((j[0].get_center()[0],\r\n j[0].get_center()[1] - 0.25 * size),\r\n str(num),\r\n fill = color,\r\n font = font)\r\n num += 1\r\n\r\n elif i == 'boxes':\r\n # Show all bounding boxes\r\n \r\n for i in self.get_boxes():\r\n draw.rectangle(i, outline=(255, 0, 255))\r\n\r\n elif i == 'dirt':\r\n # Remove Background\r\n \r\n img = Image.new('RGB', img.size, (130, 90, 50))\r\n draw = ImageDraw.Draw(img)\r\n\r\n elif i == 'centers':\r\n # Show all centers\r\n \r\n rad = 9\r\n for i in self.get_centers():\r\n draw.arc([(i[0] - rad, i[1] - rad),\r\n (i[0] + rad, i[1] + rad)],\r\n 0, 360, (0, 0, 255))\r\n\r\n elif i == 'ditches':\r\n # Show ditches between plants\r\n\r\n # Line attribute settings\r\n width = 10\r\n color = (55,65,65)\r\n\r\n # Iterate over all ditches\r\n for line in self.ditches:\r\n line = [line[0], line[1]]\r\n\r\n # Point in ditch on left border of picture\r\n start_point = (0, line[1])\r\n\r\n # Point in ditch on right border of picture\r\n end_point = (self.picture.get_size()[0] - 1,\r\n line[0]\r\n * (self.picture.get_size()[0] - 1)\r\n + line[1])\r\n\r\n ## Check if the end point is within the picture\r\n if end_point[1] < 0:\r\n if start_point[1] < 0:\r\n continue\r\n \r\n # Point in ditch on top border of picture\r\n end_point = (-1 * line[1] / line[0], 0)\r\n\r\n elif end_point[1] > self.picture.get_size()[1] - 1:\r\n if start_point[1] > self.picture.get_size()[1] - 1:\r\n continue\r\n \r\n # Point in ditch on bottom border of picture\r\n end_point = (-1\r\n * (self.picture.get_size()[1] - 1)\r\n / line[0],\r\n self.picture.get_size()[1] - 1)\r\n\r\n # Draw the ditches\r\n for i in self.get_rows():\r\n draw.line((start_point, end_point), color, width)\r\n\r\n elif i == 'lines':\r\n # Show row line approximations\r\n\r\n # Line attribute settings\r\n width = 1\r\n color = (255, 255, 75)\r\n\r\n # Iterate over all the lines\r\n for line in self.lines:\r\n line = [line[0], line[1]]\r\n\r\n # Point on line on left border of picture\r\n start_point = (0, line[1])\r\n\r\n # Point on line on right border of 
picture\r\n end_point = (self.picture.get_size()[0] - 1,\r\n line[0]\r\n * (self.picture.get_size()[0] - 1)\r\n + line[1])\r\n\r\n\r\n ## Check if the end point is within the picture\r\n if end_point[1] < 0:\r\n if start_point[1] < 0:\r\n continue\r\n\r\n # Point on line on top border of picture\r\n end_point = (-1 * line[1] / line[0], 0)\r\n \r\n\r\n elif end_point[1] > self.picture.get_size()[1] - 1:\r\n if start_point[1] > self.picture.get_size()[1] - 1:\r\n continue\r\n\r\n # Point on line on bottom border of picture\r\n end_point = (-1\r\n * (self.picture.get_size()[1] - 1)\r\n / line[0],\r\n self.picture.get_size()[1] - 1)\r\n\r\n # Draw the lines\r\n for i in self.get_rows():\r\n draw.line((start_point, end_point), color, width)\r\n\r\n elif i == 'rows':\r\n if self.get_rows():\r\n # Show lines between rows\r\n \r\n width = 3\r\n color = (255,0,0)\r\n\r\n for i in self.get_rows():\r\n draw.line([j.get_center() for j in i], color, width)\r\n else:\r\n print('Rows have not been made for this field')\r\n\r\n elif i == 'numbers':\r\n # Display numbers between plants\r\n\r\n # Find where to put the numbers\r\n midpoints = [(int(round((row[c].get_center()[0]\r\n + row[c + 1].get_center()[0]) / 2)),\r\n int(round((row[c].get_center()[1]\r\n + row[c + 1].get_center()[1]) / 2)))\r\n \r\n for row in self.get_rows()\r\n for c in range(len(row) - 1)]\r\n\r\n # Font specifications\r\n size = 10\r\n font = ImageFont.truetype('ariblk.ttf', size)\r\n num = 1\r\n\r\n # Write numbers\r\n for i in midpoints:\r\n draw.text((i[0] - 3 * len(str(round(num, 1))),\r\n i[1]),\r\n str(round(num,1)), font = font)\r\n \r\n num += 1\r\n\r\n elif i == 'tight':\r\n # Display tight boxes\r\n\r\n for i in self.get_tight_boxes():\r\n draw.rectangle(i, outline=(100, 255, 255))\r\n\r\n elif i == 'distances':\r\n # display distances between plants\r\n\r\n # find where to put the distances\r\n midpoints = [(int(round((row[c].get_center()[0]\r\n + row[c + 1].get_center()[0]) / 2)),\r\n int(round((row[c].get_center()[1]\r\n + row[c + 1].get_center()[1]) / 2)))\r\n for row in self.get_rows()\r\n for c in range(len(row) - 1)]\r\n\r\n # Font specifications\r\n size = 10\r\n font = ImageFont.truetype('arial.ttf', size)\r\n num = 1\r\n\r\n # Write numbers\r\n for i in midpoints:\r\n draw.text((i[0] - 3 * len(str(ruler.get_distances()[num])),\r\n i[1]),\r\n str(ruler.get_distances()[num]) + '\"',\r\n font = font)\r\n \r\n num += 1\r\n\r\n # If the user inputs something that isn't an option \r\n else:\r\n raise Exception(i + ' is not a valid option.\\n')\r\n\r\n return img", "def getimage(self):", "def CreateBitmap(self, notebook, page, button_state, tabArt):\r\n\r\n control = page.control\r\n memory = wx.MemoryDC(wx.EmptyBitmap(1, 1))\r\n\r\n tab_size, x_extent = tabArt.GetTabSize(memory, notebook, page.caption, page.bitmap, page.active,\r\n button_state, control)\r\n \r\n tab_width, tab_height = tab_size\r\n rect = wx.Rect(0, 0, tab_width, tab_height)\r\n\r\n bitmap = wx.EmptyBitmap(tab_width+1, tab_height+1)\r\n memory.SelectObject(bitmap)\r\n\r\n if wx.Platform == \"__WXMAC__\":\r\n memory.SetBackground(wx.TRANSPARENT_BRUSH)\r\n else:\r\n memory.SetBackground(wx.Brush(self._backgroundColour))\r\n \r\n memory.SetBackgroundMode(wx.TRANSPARENT)\r\n memory.Clear()\r\n\r\n paint_control = wx.Platform != \"__WXMAC__\"\r\n tabArt.DrawTab(memory, notebook, page, rect, button_state, paint_control=paint_control)\r\n \r\n memory.SetBrush(wx.TRANSPARENT_BRUSH)\r\n memory.SetPen(wx.BLACK_PEN)\r\n memory.DrawRoundedRectangle(0, 0, 
tab_width+1, tab_height+1, 2)\r\n\r\n memory.SelectObject(wx.NullBitmap)\r\n \r\n # Gtk and Windows unfortunatly don't do so well with transparent\r\n # drawing so this hack corrects the image to have a transparent\r\n # background.\r\n if wx.Platform != '__WXMAC__':\r\n timg = bitmap.ConvertToImage()\r\n if not timg.HasAlpha():\r\n timg.InitAlpha()\r\n for y in xrange(timg.GetHeight()):\r\n for x in xrange(timg.GetWidth()):\r\n pix = wx.Colour(timg.GetRed(x, y),\r\n timg.GetGreen(x, y),\r\n timg.GetBlue(x, y))\r\n if pix == self._backgroundColour:\r\n timg.SetAlpha(x, y, 0)\r\n bitmap = timg.ConvertToBitmap()\r\n return bitmap", "def make_thumbnails(self, exp_id):\n n = 99999\n\n return \"{n} number of images were processed!\".format(n=n)", "def thumbnail_generator():\n website_url = json.loads(request.data.decode())['url']\n try:\n webpage, message = url_preview.send_request(website_url)\n if webpage is not None:\n #Construct the soup object\n soup_object = url_preview.get_soup_object(webpage)\n #Get the title of the artcile\n title = url_preview.get_title(soup_object)\n #Get the website of the article\n website_name = url_preview.get_url(soup_object).rsplit(\".\", 1)[0]\n if website_name is None:\n website_name = website_url.split(\"//\", 1)[1].split(\"/\", 1)[0].rsplit(\".\", 1)[0]\n\n #Get the description of the article\n description = url_preview.get_description(soup_object)\n\n #Get the published date and time of the article\n date_time = url_preview.get_date_time(website_url)\n\n #Get the link to the preview image\n image_url = url_preview.get_preview_image(soup_object)['content']\n\n #Get the link to the favicon\n favicon_url = url_preview. get_favicon(soup_object)\n\n return render_template(\n \"success.html\",\n urlx=website_url,\n title=title,\n site_name=website_name,\n description=description,\n date_time=date_time,\n preview_image=image_url,\n favicon=favicon_url\n )\n except Exception as exp:\n return render_template('error.html', msg=str(exp))", "def genThumbnails(sha1,fileType,config,regen=False):\n (sha1Path,filename) = getSha1Path(sha1)\n relativeFilename = '%s/%s.%s' % (sha1Path,filename,fileType)\n\n thumbnailTypes = ['t','m','n','c','b']\n thumbnailFilenames = []\n for thumbnailType in thumbnailTypes:\n thumbFilename = genThumbnail(relativeFilename,thumbnailType,config,regen=regen)\n thumbnailFilenames.append(thumbFilename)\n return thumbnailFilenames", "def show_picture(self, data):\n raise NotImplementedError", "def make_image(self, frame, filename, **kwds):\n p = plot.plot(frame, **kwds)\n p.save_image(filename)", "def make_image():\n click.echo(\"make_image\")", "def on_draw_over_image(self):", "def generate_image( now ):\n cmd = \"csh mwplot.csh %s\" % (\n now.strftime(\"%Y %m %d %H %M\"),)\n subprocess.call(cmd, shell=True)", "def create_image(self):\n # how many categories?\n aspect_ratio = float(4) / 3\n self.width = int(math.sqrt(aspect_ratio * self.total))\n self.height = int(self.width / aspect_ratio)\n\n img = Image.new(\"RGB\", (self.width, self.height))\n return img", "def thumbnails_urls(request):\n return {'thumbnails_urls': ThumbnailsUrls()}", "def pibooth_setup_picture_factory(cfg, opt_index, factory):", "def _generate_pileups(self):\n pass", "def prepare_thumbnail_url(self, object):\n if object.media is not None:\n return os.path.join(settings.MEDIA_URL, object.media.media_thumb_file.name)\n else:\n return ''", "def show_me():\n # Scumbag thumbnail code\n try:\n from PIL import Image\n except ImportError:\n pass\n else:\n filename = 
os.path.join(app.static_folder, 'img', 'badumtss.png')\n image = Image.open(filename)\n\n return render_template('show_me.html')", "def generateImage(self):\n self.image = self.font.render(self.text, True, self.color)\n self.rect = self.image.get_rect()\n self.rect.center = self.xy", "def process(self,pixmap):", "def generate_thumb(origin, size, fn):\n assert isinstance(size, int), 'Integers are expected'\n img = Image.open(origin)\n path = os.path.dirname(origin)\n\n new_img = img.resize((size, size), Image.ANTIALIAS)\n thumb_path = os.path.join(path, fn)\n new_img.save(thumb_path)\n return thumb_path", "def create_png(image, label):\n sv = \"/home/avojtekova/Desktop/final_results/star_det/generated_images/\" \n \n for i in range(len(image)):\n data = fits.getdata(image[i][0], ext = 0)\n norm = ImageNormalize(data,interval = ZScaleInterval(), stretch = LinearStretch())\n \n print(image[i][0])\n plt.imshow(data, cmap='Greys_r', origin='lower', norm=norm)#[1250:1750, 2000:2500] add this when you want just part of image \n plt.title(label[i])\n plt.axis('off')\n plt.tight_layout()\n plt.legend\n if i<2:\n if not os.path.isdir(sv + image[i][0][-33:-25] + \"/\") :\n os.makedirs(sv + image[i][0][-33:-25] + \"/\")\n plt.savefig(sv + image[i][0][-33:-25] + \"/\" + label[i]+ \"_\" + image[i][0][-33:-25] + \"_big.png\", dpi = 1000)#,bbox_inches='tight', pad_inches = 0) \n else:\n if not os.path.isdir(sv + image[i][0][-40:-32] + \"/\") :\n os.makedirs(sv + image[i][0][-40:-32] + \"/\")\n plt.savefig(sv + image[i][0][-40:-32] + \"/\" + label[i]+image[i][0][-40:-32] + \"_big.png\", dpi = 1000)#,bbox_inches='tight', pad_inches = 0)\n plt.close()", "def make_figure(self, traces):\n pass", "def make_thumbnail(self):\n # https://gist.github.com/valberg/2429288\n\n # make sure image data is set\n if not self.image_data:\n return False\n\n if self.proxy_data:\n return True\n\n # Create a resized version of the image\n image = Image.open(self.image_data)\n image.thumbnail(THUMBNAIL_SIZE, Image.BICUBIC)\n\n # Save the thumbnail to in-memory 'file'\n temp_thumb = BytesIO()\n image.save(temp_thumb, 'jpeg')\n temp_thumb.seek(0) # rewinds the file\n\n # Save image to a SimpleUploadFile which can be saved\n # into ImageField\n # TODO figure out how to pass base image's UUID before\n # image is committed to DB\n basename = os.path.basename(self.image_data.name)\n uuidname = os.path.splitext(basename)[0]\n suf = SimpleUploadedFile(uuidname,\n temp_thumb.read(), content_type='image/jpeg')\n thumb_filename = '{}_thumb.jpeg'.format(suf.name)\n\n # set save=False, or else it will infinite loop\n self.proxy_data.save(thumb_filename,\n suf,\n save=False)\n\n # Also store the real dimensions for the Pillow thumbnail\n self.proxy_width, self.proxy_height = image.size\n\n temp_thumb.close()\n\n return True", "def thumb_profil(log):\n\tpath = get_plato_path()\n\tfrom PIL import Image\n\tsize = 100,200\n\tim = Image.open('/%s/plato_users/%s/profil_BIG.jpg'%(path,log))\n\tim.thumbnail(size, Image.ANTIALIAS)\n\tim.save('/%s/plato_users/%s/profil.jpg'%(path,log),\"JPEG\")", "def printImage(imageObject):\n # TODO\n pass", "def _create_placeholder(self, thumbnail_size):\n logger.debug(\"Creating placeholder. 
thumbnail_size: %s\", thumbnail_size)\n placeholder = Image.new(\"RGB\", (thumbnail_size, thumbnail_size))\n draw = ImageDraw.Draw(placeholder)\n draw.rectangle(((0, 0), (thumbnail_size, thumbnail_size)), outline=\"#E5E5E5\", width=1)\n placeholder = np.array(placeholder)\n self._previewcache[\"placeholder\"] = placeholder\n logger.debug(\"Created placeholder. shape: %s\", placeholder.shape)", "def getimgs():", "def build_thumbnail_image(self, dataset_metadata_dict, bounding_box, visibility=True):\n logger.debug(\"Building WMS thumbnail...\")\n\n #=======================================================================\n # grid_utils = NetCDFGridUtils(dataset_metadata_dict['netcdf_path'],\n # debug=self.debug\n # ) \n #=======================================================================\n \n dataset_folder_kml = self.dataset_type_folder.newfolder(name=dataset_metadata_dict['dataset_title'], visibility=True)\n\n transparent_polygon = self.build_polygon(dataset_metadata_dict,\n bounding_box, visibility=True, \n parent_folder=dataset_folder_kml,\n polygon_name=dataset_folder_kml.name\n )\n logger.debug('transparent_polygon: {}'.format(transparent_polygon))\n #transparent_polygon.color =\n transparent_polygon.style.polystyle.color = '03000000' # 99% transparent black\n transparent_polygon.style.polystyle.outline = 0 # remove the outline\n #transparent_polygon.style.linestyle.color = '80f8f8ff' # 50% transparent white\n\n try:\n logger.debug(\"Dataset WEST extent: {}\".format(dataset_metadata_dict['longitude_min']))\n logger.debug(\"BBOX WEST extent: {}\".format(bounding_box[0]))\n logger.debug(\"Dataset EAST extent: {}\".format(dataset_metadata_dict['longitude_max']))\n logger.debug(\"BBOX EAST extent: {}\".format(bounding_box[2]))\n logger.debug(\"Dataset SOUTH extent: {}\".format(dataset_metadata_dict['latitude_min']))\n logger.debug(\"BBOX SOUTH extent: {}\".format(bounding_box[1]))\n logger.debug(\"Dataset NORTH extent: {}\".format(dataset_metadata_dict['latitude_max']))\n logger.debug(\"BBOX NORTH extent: {}\".format(bounding_box[3]))\n\n wms_url = dataset_metadata_dict['distribution_url'].replace('/dodsC/', '/wms/') #TODO: Replace this hack\n\n if self.cache_images and self.url_root:\n # Retrieve image for entire dataset\n north = dataset_metadata_dict['latitude_max']\n south = dataset_metadata_dict['latitude_min']\n east = dataset_metadata_dict['longitude_max']\n west = dataset_metadata_dict['longitude_min']\n else: \n # Retrieve image for portion of dataset in view bounding box \n west = max(bounding_box[0], dataset_metadata_dict['longitude_min'])\n east = min(bounding_box[2], dataset_metadata_dict['longitude_max'])\n south = max(bounding_box[1], dataset_metadata_dict['latitude_min'])\n north = min(bounding_box[3], dataset_metadata_dict['latitude_max'])\n\n wms_url = wms_url + \"?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&BBOX={0},{1},{2},{3}&CRS=EPSG:4326&WIDTH={4}&HEIGHT={5}&LAYERS={6}&STYLES=&FORMAT=image/png\" \\\n \"&DPI=120&MAP_RESOLUTION=120&FORMAT_OPTIONS=dpi:120&TRANSPARENT=TRUE\" \\\n \"&COLORSCALERANGE={7}%2C{8}&NUMCOLORBANDS=127\".format(south, \n west, \n north, \n east, \n int((east - west) / self.wms_pixel_size), \n int((north - south) / self.wms_pixel_size), \n self.wms_layer_name,\n self.wms_color_range[0],\n self.wms_color_range[1]\n )\n logger.debug('wms_url: {}'.format(wms_url))\n\n #mag_tmi_anomaly\n\n # wms_url = \"http://dapds00.nci.org.au/thredds/wms/rr2/airborne_geophysics/NSW/P1027/magnetics/grid/mNSW1027/\" \\\n # 
\"mNSW1027.nc?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap\" \\\n # \"&BBOX={0},{1},{2},{3}\" \\\n # \"&CRS=EPSG:4326&WIDTH=206&HEIGHT=269&LAYERS=mag_tmi_anomaly&STYLES=&FORMAT=image/png\" \\\n # \"&DPI=120&MAP_RESOLUTION=120&FORMAT_OPTIONS=dpi:120&TRANSPARENT=TRUE\" \\\n # \"&COLORSCALERANGE=-2781%2C2741&NUMCOLORBANDS=10\".format(south, west, north, east)\n\n # dataset_kml = self.dataset_type_folder.newfolder(name='overlay_test',\n # visibility=visibility)\n\n # dataset_kml.style = self.point_style\n \n if self.cache_images and self.url_root:\n # Cache image and mModify URL for cached image file\n wms_url = '{}{}'.format(self.url_root,\n cache_image_file(dataset_type=self.dataset_type, \n image_basename=os.path.splitext(dataset_metadata_dict['netcdf_basename'])[0]+'.png', \n image_source_url=wms_url)\n )\n logger.debug('wms_url: {}'.format(wms_url))\n logger.debug('wms_url: {}'.format(wms_url))\n\n ground_overlay_kml = dataset_folder_kml.newgroundoverlay(name=\"Survey Thumbnail Image\")\n ground_overlay_kml.icon.href = wms_url\n \n ground_overlay_kml.latlonbox.north = dataset_metadata_dict['latitude_max']\n ground_overlay_kml.latlonbox.south = dataset_metadata_dict['latitude_min']\n ground_overlay_kml.latlonbox.east = dataset_metadata_dict['longitude_max']\n ground_overlay_kml.latlonbox.west = dataset_metadata_dict['longitude_min']\n ground_overlay_kml.color = 'aaffffff'\n\n logger.debug('ground_overlay_kml.latlonbox: {}'.format(ground_overlay_kml.latlonbox))\n logger.debug('ground_overlay_kml: {}'.format(ground_overlay_kml))\n\n if self.timestamp_detail_view:\n self.set_timestamps(ground_overlay_kml, dataset_metadata_dict)\n\n logger.debug('ground_overlay_kml: {}'.format(ground_overlay_kml))\n return dataset_folder_kml\n \n except Exception as e:\n logger.debug('Unable to display thumbnail \"{}\": {}'.format(wms_url, e))\n pass", "def test_plot_images(self):\n save_file(self.quart.plot_images)", "def thumbnail(self, size, resample=BICUBIC):\r\n # preserve aspect ratio\r\n x, y = self.size\r\n if x > size[0]:\r\n y = int(max(y * size[0] / x, 1))\r\n x = int(size[0])\r\n if y > size[1]:\r\n x = int(max(x * size[1] / y, 1))\r\n y = int(size[1])\r\n size = x, y\r\n if size == self.size:\r\n return\r\n self.draft(None, size)\r\n self._instance = self.resize(size, resample, image=self._instance)\r\n self.readonly = 0\r\n self.pyaccess = None", "def draw_image(self):\n self.PDF.saveState()\n self.PDF.scale(1, -1)\n # self.PDF.drawImage(\n # LOGO, 490, -78, width=80, preserveAspectRatio=True, mask=\"auto\"\n # )\n self.PDF.restoreState()", "def plot_preview_png():\n name = request.args.get('prev_instance')\n name = str(name)\n fig = create_preview(name)\n output = io.BytesIO()\n FigureCanvas(fig).print_png(output)\n return Response(output.getvalue(), mimetype='image/png')", "def main():\n me = SimpleImage(\"images/me.JPG\")\n dinosaur = SimpleImage(\"images/dinosaur.jpg\")\n\n dinosaur.make_as_big_as(me)\n combine = magic(me, dinosaur)\n combine.show()", "def create_output_image(building_footprint, parcel_footprint, file_path):\n fig, ax = plt.subplots(figsize=(10, 10))\n gpd.overlay(building_footprint, parcel_footprint, how=\"symmetric_difference\").plot(\n ax=ax, color=\"lightgray\"\n )\n parcel_footprint.geometry.exterior.buffer(0.25).plot(ax=ax, color=\"black\")\n building_footprint.plot(ax=ax, color=\"black\")\n\n ax.patch.set_facecolor(\"white\")\n ax.patch.set_edgecolor(\"white\")\n fig.patch.set_visible(False)\n ax.axis(\"off\")\n fig.savefig(\n file_path,\n bbox_inches=\"tight\",\n 
pad_inches=0,\n facecolor=\"white\",\n edgecolor=\"white\",\n quality=IMG_QUALITY,\n )\n plt.close()", "def ShowLongitBackgroundinPDF(spectra,spectraUp,spectraDown,spectraAv,all_titles,object_name,dir_top_images,all_filt,date,figname,right_edge = 1900,NBIMGPERROW=2,vmin=0,vmax=2000,downsampling=1,verbose=False):\n NBSPEC=len(spectra)\n MAXIMGROW=max(2,m.ceil(NBSPEC/NBIMGPERROW))\n \n # fig file specif\n NBIMGROWPERPAGE=5 # number of rows per pages\n PageNum=0 # page counter\n figfilename=os.path.join(dir_top_images,figname)\n pp = PdfPages(figfilename) # create a pdf file\n \n titlepage='Longitudinal background Up/Down for obj : {} date : {} '.format(object_name,date)\n \n \n spec_index_min=100 # cut the left border\n spec_index_max=right_edge # cut the right border\n star_halfwidth=70\n \n for index in np.arange(0,NBSPEC):\n \n \n if index%(NBIMGPERROW*NBIMGROWPERPAGE) == 0:\n f, axarr = plt.subplots(NBIMGROWPERPAGE,NBIMGPERROW,figsize=(25,30))\n f.suptitle(titlepage,size=20)\n \n # index of image in the page \n indexcut=index-PageNum*(NBIMGROWPERPAGE*NBIMGPERROW) \n ix=indexcut%NBIMGPERROW\n iy=indexcut/NBIMGPERROW\n \n # plot what is wanted\n axarr[iy,ix].plot(spectra[index],'r-')\n axarr[iy,ix].plot(spectraUp[index],'b-')\n axarr[iy,ix].plot(spectraDown[index],'g-')\n axarr[iy,ix].plot(spectraAv[index],'m-')\n thetitle=\"{} : {} : {} \".format(index,all_titles[index],all_filt[index])\n axarr[iy,ix].set_title(thetitle,color='blue',fontweight='bold',fontsize=16)\n axarr[iy,ix].grid(True)\n \n star_pos=np.where(spectra[index][:spec_index_max]==spectra[index][:spec_index_max].max())[0][0]\n max_y_to_plot=(spectra[index][star_pos+star_halfwidth:spec_index_max]).max()*1.2\n \n \n axarr[iy,ix].set_ylim(0.,max_y_to_plot)\n #axarr[iy,ix].text(spec_index_min,max_y_to_plot*1.1/1.2, all_filt[index],verticalalignment='top', horizontalalignment='center',color='blue',fontweight='bold', fontsize=20)\n \n \n # save a new page\n if (index+1)%(NBIMGPERROW*NBIMGROWPERPAGE) == 0:\n PageNum+=1 # increase page Number\n f.savefig(pp, format='pdf')\n f.show()\n \n \n \n \n f.savefig(pp, format='pdf') \n f.show()\n pp.close()", "def _repr_png_(self):\n return self.tree._repr_png_()", "def add_art(self,path,size=\"500\"):\n error=False\n if path:\n print(\"processing %s to %s\" % (path,self.uid))\n f=open(path,'rb') \n filedata=f.read()\n extension=(imghdr.what('',filedata) or path.rsplit(\".\")[-1].lower()).replace('jpeg','jpg')\n if not filedata:\n error= \"NO IMAGE FOUND AT '%s'\" % path\n print(error)\n elif extension in ('bmp','png'):\n filedata=self.Image.convert(filedata)\n extension='jpg' \n elif extension not in ('gif','png','jpg','jpeg'):\n error=\"only JPEG, GIF, PNG, and BMP are supported\"\n print(error)\n if not error:\n # create a new image page\n image=self.Image.new()\n image.parent=self.uid\n image.kind='image'\n image.seq=0xFFFFFF#place at end of siblings\n # set default size \n image.stage='right full %sx%s' % (size,size) #rest of stage data will be added on the fly later by get_stage_data() \n image.set_lineage()\n image.code=\"%s.%s\" % (image.uid,extension)\n image.when=DATE()\n image.flush() #store the image page\n image.renumber_siblings_by_kind()#keep them in order\n # save the image file\n image.save_file(filedata)\n # return\n print('image \"%s\" added' % image.code)\n return image\n return None", "def draw_nonogram(self):\n image = Image.new(\"RGB\", (self.nonogram_size * 50, self.nonogram_size * 50), (255, 255, 255))\n draw = ImageDraw.Draw(image)\n\n for index, square in 
enumerate(reduce(lambda x, y: x+y, self.grid), 0):\n\n #print(square)\n x = index % self.nonogram_size\n y = index // self.nonogram_size\n coord = [(x * 50, y * 50), ((x + 1) * 50, (y + 1) * 50)]\n if square == EMPTY:\n draw.rectangle(coord, fill=(255, 255, 255))\n if square == FILLED:\n draw.rectangle(coord, fill=(0, 0, 0))\n return image", "def NotebookPreview(self, thumbnail_size=200):\r\n\r\n if wx.Platform == \"__WXMAC__\":\r\n return False\r\n\r\n tabCtrl = self.GetActiveTabCtrl()\r\n activePage = tabCtrl.GetActivePage()\r\n pages = tabCtrl.GetPages()\r\n\r\n pageStatus, pageText = [], []\r\n\r\n for indx, page in enumerate(pages):\r\n\r\n pageStatus.append(page.enabled)\r\n\r\n if not page.enabled:\r\n continue\r\n \r\n self.SetSelectionToPage(page) \r\n pageText.append(page.caption)\r\n\r\n rect = page.window.GetScreenRect()\r\n bmp = RescaleScreenShot(TakeScreenShot(rect), thumbnail_size)\r\n\r\n page.enabled = False\r\n if indx == 0:\r\n il = wx.ImageList(bmp.GetWidth(), bmp.GetHeight(), True)\r\n\r\n il.Add(bmp) \r\n\r\n # create the list control\r\n listCtrl = wx.ListCtrl(self, style=wx.LC_ICON|wx.LC_AUTOARRANGE|wx.LC_HRULES|wx.LC_VRULES,\r\n name=\"__fake__page__\")\r\n\r\n # assign the image list to it\r\n listCtrl.AssignImageList(il, wx.IMAGE_LIST_NORMAL)\r\n listCtrl.__previousStatus = [activePage, pageStatus]\r\n\r\n # create some items for the list\r\n for indx, text in enumerate(pageText):\r\n listCtrl.InsertImageStringItem(10000, text, indx)\r\n \r\n self.AddPage(listCtrl, \"AuiNotebook Preview\", True, bitmap=auinotebook_preview.GetBitmap(), disabled_bitmap=wx.NullBitmap)\r\n return True", "def write_thumbnail(image_name, size):\n # TODO : use something else instead of image.thumbnail\n sizes = {\n 'small' : [30,40],\n 'medium' : [70,70],\n 'large' : [120,120]\n }\n image = Image.open(f'{WRITE_FOLDER}/{USER_NAME}/original/{image_name}')\n image.thumbnail((sizes[size][0], sizes[size][1]))\n image.save(f'{WRITE_FOLDER}/{USER_NAME}/{size}/{image_name}')", "def RefreshThumbnail(self):\n if not self.property:\n self.bmp = None\n return\n\n path = self.property.DoGetValue()\n\n if not os.path.isfile(path):\n self.bmp = None\n return\n\n image = wx.Image(path)\n image.Rescale(64, 64)\n self.bmp = wx.BitmapFromImage(image)", "def test_create_image(self):\n pass", "def resize_profile_pic(sender, instance, **kwargs):\n profile_pic = instance.profile_picture\n if profile_pic.name != \"default.png\":\n img = Image.open(profile_pic.path)\n if img.height > 300 or img.width > 300:\n output_size = (300, 300)\n img.thumbnail(output_size)\n img.save(profile_pic.path)", "def delete_thumbnail(self, thumbnail_name):", "def thumbnail(self):\n return self.get_thumbnail_url()", "def on_draw_over_backgroundimage(self):", "def style_snapshot(figure: Figure) -> Figure:\n figure.axis.visible = False\n figure.xgrid.visible = False\n figure.ygrid.visible = False\n figure.toolbar_location = None\n figure.toolbar.logo = None\n figure.outline_line_width = 0\n figure.outline_line_alpha = 0\n\n return figure", "def get_thumbnail_url(self):\n raise NotImplementedError(\"Subclass must implement abstract method get_thumbnail_url\")", "def get_picture(self):\n if self.width>50 or self.height>50:\n return \"Too big for picture.\"\n\n br = '\\n'\n s = ''\n se = ('{:*>'+str(self.width)+'}').format('')\n for i in range(self.height):\n s += se + br\n return s", "def get_thumbnail_size(self, thumbnail_name, forced=False):", "def plot_blank(self):\n self.figure_bmp.SetBitmap(self.controller.plot_blank())", "def 
test_save_matplotlib_figures_hidpi(gallery_conf):\n ext = \"png\"\n gallery_conf[\"image_srcset\"] = [\"2x\"]\n\n import matplotlib.pyplot as plt # nest these so that Agg can be set\n\n plt.plot(1, 1)\n fname_template = os.path.join(gallery_conf[\"gallery_dir\"], \"image{0}.png\")\n image_path_iterator = ImagePathIterator(fname_template)\n block = (\"\",) * 3\n block_vars = dict(image_path_iterator=image_path_iterator)\n image_rst = save_figures(block, block_vars, gallery_conf)\n\n fname = f\"/image1.{ext}\"\n assert fname in image_rst\n assert f\"/image1_2_00x.{ext} 2.00x\" in image_rst\n\n assert len(image_path_iterator) == 1\n fname = gallery_conf[\"gallery_dir\"] + fname\n fnamehi = gallery_conf[\"gallery_dir\"] + f\"/image1_2_00x.{ext}\"\n\n assert os.path.isfile(fname)\n assert os.path.isfile(fnamehi)\n\n # Test capturing 2 images with shifted start number\n image_path_iterator.next()\n image_path_iterator.next()\n plt.plot(1, 1)\n plt.figure()\n plt.plot(1, 1)\n image_rst = save_figures(block, block_vars, gallery_conf)\n assert len(image_path_iterator) == 5\n for ii in range(4, 6):\n fname = f\"/image{ii}.{ext}\"\n assert fname in image_rst\n\n fname = gallery_conf[\"gallery_dir\"] + fname\n assert os.path.isfile(fname)\n fname = f\"/image{ii}_2_00x.{ext}\"\n assert fname in image_rst\n fname = gallery_conf[\"gallery_dir\"] + fname\n assert os.path.isfile(fname)" ]
[ "0.7300164", "0.68219376", "0.6806696", "0.6653911", "0.6605065", "0.655592", "0.6475331", "0.64640075", "0.64590347", "0.64180756", "0.64131033", "0.63602334", "0.6345334", "0.63405406", "0.6339203", "0.6324948", "0.6323947", "0.62881386", "0.6151722", "0.6059803", "0.60021156", "0.59946644", "0.5972815", "0.59568286", "0.59559184", "0.5952423", "0.59519553", "0.5938504", "0.59332705", "0.5898628", "0.5896204", "0.58740777", "0.5865508", "0.5843672", "0.5838816", "0.5817503", "0.58166695", "0.58162856", "0.5807421", "0.5807421", "0.57943356", "0.5786515", "0.5759443", "0.57503855", "0.57454765", "0.57223755", "0.5721807", "0.5720911", "0.5708329", "0.56945646", "0.5677849", "0.56663394", "0.5639186", "0.5637331", "0.562053", "0.5618158", "0.56149733", "0.5588332", "0.5572268", "0.5557235", "0.5556316", "0.55348235", "0.55300516", "0.5523326", "0.5516695", "0.55147433", "0.55108863", "0.55062497", "0.55021703", "0.5501144", "0.5494076", "0.54907864", "0.5489768", "0.54886484", "0.54882616", "0.5480472", "0.5474242", "0.5473928", "0.54704475", "0.54688704", "0.5467678", "0.546763", "0.5466941", "0.54646415", "0.5449515", "0.5445808", "0.5438337", "0.54256696", "0.542457", "0.54223156", "0.5407564", "0.5407322", "0.5405446", "0.5402823", "0.53891337", "0.53861994", "0.53860223", "0.53859323", "0.538347", "0.53820497", "0.5380798" ]
0.0
-1
Convert the numpy array (which is in our expected order) to a native image object in this widget set.
def _get_wimage(self, arr_np): #return result raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def convertNumpy2Image(self, array):\n cv2image = cv2.cvtColor(array, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n imgtk = ImageTk.PhotoImage(image=img)\n return imgtk", "def newimagefromarray(self, *args, **kwargs):\n return _image.image_newimagefromarray(self, *args, **kwargs)", "def fromarray(self, *args, **kwargs):\n return _image.image_fromarray(self, *args, **kwargs)", "def get_plain_image_as_widget(self):\n arr = self.getwin_array(order=self.rgb_order)\n\n # convert numpy array to native image widget\n image_w = self._get_wimage(arr)\n return image_w", "def _arr_to_img(arr, verbose=False):\n return Image.fromarray(arr)", "def img_from_array(array):\n return Image.fromarray(array)", "def render(self):\n np_img = np.array(self.prev_img, dtype=np.uint8)\n np_img = np.swapaxes(np_img, 0, 2)\n return np_img", "def reconstructImage(self,arr):\n\t\tarr = arr * 256\n\t\tarr = np.array(np.round(arr),dtype=np.uint8)\n\t\t#arr = np.array(arr,dtype=np.uint8)\n\n\t\t# We need to transpose the array because we flatten X by columns\n\t\t#arr = arr.T\n\t\t#a = arr.reshape((self.width, self.height,3))\n\t\t\n\t\tif self.mode == 'L':\n\t\t\ta = arr.reshape((self.width, self.height))\n\t\telse:\n\t\t\ta = arr.reshape((self.width, self.height,3))\n\n\t\t#a = arr.reshape((3,self.width, self.height))\t\t\n\t\t#a = arr.transpose(0, 3, 1, 2)\n\n\t\tim = Image.fromarray(a,mode=self.mode)\n\n\t\treturn im", "def makearray(self, *args, **kwargs):\n return _image.image_makearray(self, *args, **kwargs)", "def makeImageFromArray(array):\n if array is None: return None\n cls = globals()[\"Image%s\" % suffixes[str(array.dtype.type)]]\n return cls(array)", "def set_image_from_numpy_array(self, numpy_data):\n\n if self.variables.scale_dynamic_range:\n min_data = numpy.min(numpy_data)\n dynamic_range = numpy.max(numpy_data) - min_data\n numpy_data = numpy.asanyarray(\n 255*(numpy_data - min_data)/dynamic_range, dtype=numpy.uint8)\n pil_image = PIL.Image.fromarray(numpy_data)\n self._set_image_from_pil_image(pil_image)", "def arr2img(ar):\n return Image.fromstring('L', (ar.shape[1], ar.shape[0]), ar.astype('b').tostring())", "def __translate(self, img):\n if not isinstance(img, Image):\n raise InvalidImageTypeException(\"display_images only accepts objects of type Image\")\n\n w = img.width()\n h = img.height()\n tkimg = Tkinter.PhotoImage(width=w, height=h)\n for x in range(w):\n for y in range(h):\n tkimg.put('#%02x%02x%02x' % img.get_rgb(x, y), (x, y))\n return tkimg", "def numpy_to_qimage(np_array: np.ndarray, show_age: bool):\n\n # Only support 2D array of bytes\n assert len(np_array.shape) == 2 and np_array.dtype == np.uint8\n\n width = np_array.shape[1]\n height = np_array.shape[0]\n bytes_per_line = width\n image = QImage(np_array, width, height, bytes_per_line, QImage.Format_Indexed8)\n\n # Maps array values to color\n if show_age:\n image.setColorTable(colors.AGE_COLOR_TABLE)\n else:\n image.setColorTable(colors.BINARY_COLOR_TABLE)\n\n return image", "def array2img(array):\n if len(array.shape) == 2:\n return Image.fromarray(np.clip(array, 0, 255).astype('uint8'), mode='L')\n elif len(array.shape) == 3:\n return Image.fromarray(np.clip(array, 0, 255).astype('uint8'), mode='RGB')\n else:\n print('Income array is not at appropriate shape!')", "def to_array(self):\n return np.array(self.to_image())", "def __call__(self, image):\n\n image = np.array(image)\n image = self.transform(image=image)['image']\n return image", "def convert_img(self):\r\n self.img = self.img.convert('RGB')", "def 
imageToArray(i):\r\n a=gdalnumeric.numpy.fromstring(i.tostring(),'b')\r\n a.shape=i.im.size[1], i.im.size[0]\r\n return a", "def get_image(self):\n image = np.frombuffer(self.image, dtype=np.uint8)\n return image.reshape(*self.size, self.channels)", "def create_from_array(cls, array: np.ndarray) -> \"TensorImage\":\n if array.dtype != np.uint8:\n raise ValueError(\"Expect numpy array with dtype=uint8.\")\n\n image_data = image_utils.ImageData(np.squeeze(array))\n return cls(image_data)", "def fit_array_to_image(self, array: np.ndarray) -> np.ndarray:\n return self._fit_array_to_image(self._channel_arrays[0].shape, array)", "def get_array(self, scale=1):\n array = cv2.imread(str(self.path), self.read_type)\n\n # resize original image so it can be be scaled without fractions\n x_extra = array.shape[0] % self.scaling\n y_extra = array.shape[1] % self.scaling\n\n x_extra = self.scaling - x_extra if x_extra != 0 else x_extra\n y_extra = self.scaling - y_extra if y_extra != 0 else y_extra\n\n padded_array = cv2.resize(\n array, (int(array.shape[1] + y_extra), int(array.shape[0] + x_extra))\n )\n\n # scale image\n resized_array = cv2.resize(\n padded_array,\n (int(padded_array.shape[1] * scale), int(padded_array.shape[0] * scale)),\n )\n\n # cv2 reads in array as BGR, tensorboard shows as RGB\n if not self.greyscale:\n x = np.copy(resized_array)\n resized_array[:, :, 0] = x[:, :, 2]\n resized_array[:, :, 2] = x[:, :, 0]\n\n # cv2.imshow('image',array)\n # cv2.waitKey(0)\n # cv2.destroyAllWindows()\n\n if self.greyscale:\n resized_array = np.expand_dims(resized_array, 2)\n return resized_array", "def fromarray(obj, mode=None):\r\n if isinstance(obj, np.ndarray):\r\n _mode = Image()._get_mode(obj.shape, obj.dtype)\r\n if _mode == 'RGB':\r\n obj = cv2.cvtColor(obj, cv2.COLOR_RGB2BGR)\r\n elif mode == \"RGBA\":\r\n obj = cv2.cvtColor(obj, cv2.COLOR_RGBA2BGRA)\r\n return Image(obj)\r\n else: \r\n raise TypeError(\"Cannot handle this data type\")", "def update_image(self):\n self.image = Image.fromarray(self.img)", "def update_canvas_display_from_numpy_array(self, image_data):\n\n if len(self.drop_bands) > 0:\n zeros_image = numpy.zeros_like(image_data[:, :, 0])\n for drop_band in self.drop_bands:\n image_data[:, :, drop_band] = zeros_image\n self.canvas_decimated_image = image_data\n if self.scale_to_fit_canvas:\n scale_factor = self.compute_display_scale_factor(image_data)\n self.display_rescaling_factor = scale_factor\n self.display_image = self.get_scaled_display_data(image_data)\n else:\n self.display_image = image_data", "def array2pil(x):\n if x.ndim == 2:\n mode = \"L\"\n elif x.ndim == 3 and x.shape[2] == 1:\n mode = \"L\"\n x = x.squeeze()\n elif x.ndim == 3:\n mode = \"RGB\"\n return PIL.Image.fromarray(x, mode=mode)", "def np_to_pil(img_np):\n img = np.clip(img_np * 255, 0, 255).astype(np.uint8)\n\n if img_np.shape[0] == 1:\n img = img[0]\n else:\n img = img.transpose((1, 2, 0))\n\n return Image.fromarray(img)", "def convert_image_np(inp):\n inp = inp.numpy().transpose((1, 2, 0))\n inp = (inp*255).astype(np.uint8)\n return inp", "def update_img(self):\n self.img = np.array(self.image)", "def get_array(self) -> numpy.array:\r\n \r\n return self.pic_array", "def np_to_pil(np_img: np.ndarray) -> PIL.Image.Image:\n\n def _transform_bool(image_array):\n return image_array.astype(np.uint8) * 255\n\n def _transform_float(image_array):\n return (image_array * 255).astype(np.uint8)\n\n types_factory = {\n \"bool\": _transform_bool(np_img),\n \"float64\": _transform_float(np_img),\n }\n image_array = 
types_factory.get(str(np_img.dtype), np_img.astype(np.uint8))\n return PIL.Image.fromarray(image_array)", "def get_rendered_image(self) -> np.ndarray:\n return np.transpose(self.state['observation'], [1, 2, 0])", "def get_img_from_array(img):\n a = np.uint8(np.clip(img, 0, 1) * 255)\n return PIL.Image.fromarray(a)", "def __call__(self, results):\n # Image is bgr\n img = results['img'][..., ::-1]\n img = Image.fromarray(img)\n img = self.transform(img)\n img = np.asarray(img)\n img = img[..., ::-1]\n results['img'] = img\n return results", "def to_pillow(self) -> PILImage:\n return PILImage.fromarray(self.rgb().to_numpy())", "def raw_image(self):\n return self.data16.transpose()", "def _convert_images(raw):\n # Convert the raw images from the data-files to floating-points.\n #raw_float = np.array(raw, dtype=float) / 255.0\n\n # Reshape the array to 4-dimensions.\n images = raw.reshape([-1, num_channels, img_size, img_size])\n\n # Reorder the indices of the array.\n images = images.transpose([0, 2, 3, 1])\n\n return images", "def save_array_as_image(array, save_location):\n image = Image.fromarray(array)\n image.save(save_location)\n image.close()", "def save_array_as_image(arr, filename):\n arr = arr.copy().clip(0, 255).astype('uint8')\n im = Image.fromarray(arr)\n im.save(filename)", "def image(self):\n return self.pixels.get_array()", "def get_np_image(self, save_image=False, filename=\"curr_image.png\"):\n responses = client.simGetImages([airsim.ImageRequest(\"front_left\", airsim.ImageType.Scene, False, False)])\n response = responses[0]\n\n # get numpy array\n img1d = np.fromstring(response.image_data_uint8, dtype=np.uint8)\n\n # reshape array to 4 channel image array H X W X 4\n img_rgb = img1d.reshape(response.height, response.width, 3)\n\n # # original image is fliped vertically\n # img_rgb = np.flipud(img_rgb)\n\n if save_image:\n cv2.imwrite(filename, img_rgb)\n\n return img_rgb", "def img_array_to_layout_image_fig(ia):\n img = img_array_to_pil_image(ia)\n return pil_image_to_layout_image_fig(img)", "def bytes_to_img(bytes_array):\n stream = BytesIO(bytes_array)\n image = Image.open(stream).convert(\"RGBA\")\n\n return image", "def get_correction_array(self):\n import numpy\n\n # Select the first datablock and rewind all the categories\n self.cbf_handle.select_datablock(0)\n self.cbf_handle.select_category(0)\n self.cbf_handle.select_column(2)\n self.cbf_handle.select_row(0)\n\n # Check the type of the element to ensure it's a binary\n # otherwise raise an exception\n type = self.cbf_handle.get_typeofvalue()\n if type.find('bnry') > -1:\n\n # Read the image data into an array\n image_string = self.cbf_handle.get_integerarray_as_string()\n image = numpy.fromstring(image_string, numpy.int32)\n\n # Get the array parameters\n parameters = self.cbf_handle.get_integerarrayparameters_wdims()\n image_size = (parameters[10], parameters[9])\n\n # Resize the image\n image.shape = (image_size)\n\n else:\n raise TypeError('Can\\'t find image')\n\n # Return the image\n return image", "def __array__(self):\n return self.to_array()", "def save_npimg(array: np.ndarray, path: str) -> None:\r\n img = Image.fromarray(array.squeeze())\r\n img.save(path)", "def image(self):\n if self.ndim == 2:\n # NAXIS=2: [Y, X]\n image = self.data[:, :].copy()\n elif self.ndim == 3 and self.shape[0] == 1:\n # NAXIS=3: [FREQ=1, Y, X]\n image = self.data[0, :, :].copy()\n elif self.ndim == 4 and self.shape[0] == 1 and self.shape[1] == 1:\n # NAXIS=4: [STOKES=1, FREQ=1, Y, X]\n image = self.data[0, 0, :, :].copy()\n 
else:\n raise ValueError(\"invalid data shape: {1}\".format(self.shape))\n return image", "def transform(self, X, y=None):\n if self.out == \"pil\":\n return [Image.open(x).convert(self.convert) for x in X]\n if self.out == \"numpy\":\n return np.array([np.array(Image.open(x).convert(self.convert)) for x in X])", "def __array__(self):\n return self.array", "def array_to_image(\n array,\n image_scale=1.0,\n image_shape=None,\n image_colormap=None,\n image_minmax=None,\n image_channels=None,\n mode=None,\n):\n # type: (Any, float, Optional[Sequence[int]], Optional[str], Optional[Sequence[float]], Optional[str], Optional[str]) -> Optional[Any]\n try:\n import PIL.Image\n import numpy\n from matplotlib import cm\n except ImportError:\n LOGGER.error(\n \"The Python libraries PIL, numpy, and matplotlib are required for converting a numpy array into an image\"\n )\n return None\n\n array = numpy.array(array)\n\n ## Handle image transformations here\n ## End up with a 0-255 PIL Image\n if image_minmax is not None:\n minmax = image_minmax\n else: # auto minmax\n flatten_array = flatten(array)\n min_array = min(flatten_array)\n max_array = max(flatten_array)\n if min_array == max_array:\n min_array = min_array - 0.5\n max_array = max_array + 0.5\n min_array = math.floor(min_array)\n max_array = math.ceil(max_array)\n minmax = [min_array, max_array]\n\n ## if a shape is given, try to reshape it:\n if image_shape is not None:\n try:\n ## array shape is opposite of image size(width, height)\n array = array.reshape(image_shape[1], image_shape[0])\n except Exception:\n LOGGER.info(\"WARNING: invalid image_shape; ignored\", exc_info=True)\n\n ## If 3D, but last array is flat, make it 2D:\n if len(array.shape) == 3 and array.shape[-1] == 1:\n array = array.reshape((array.shape[0], array.shape[1]))\n elif len(array.shape) == 1:\n ## if 1D, make it 2D:\n array = numpy.array([array])\n if image_channels == \"first\" and len(array.shape) == 3:\n array = numpy.moveaxis(array, 0, -1)\n\n ### Ok, now let's colorize and scale\n if image_colormap is not None:\n ## Need to be in range (0,1) for colormapping:\n array = rescale_array(array, minmax, (0, 1), \"float\")\n try:\n cm_hot = cm.get_cmap(image_colormap)\n array = cm_hot(array)\n except Exception:\n LOGGER.info(\"WARNING: invalid image_colormap; ignored\", exc_info=True)\n ## rescale again:\n array = rescale_array(array, (0, 1), (0, 255), \"uint8\")\n ## Convert to RGBA:\n image = PIL.Image.fromarray(array, \"RGBA\")\n else:\n ## Rescale (0, 255)\n array = rescale_array(array, minmax, (0, 255), \"uint8\")\n image = PIL.Image.fromarray(array)\n\n if image_scale != 1.0:\n image = image.resize(\n (int(image.size[0] * image_scale), int(image.size[1] * image_scale))\n )\n\n ## Put in a standard mode:\n if mode:\n image = image.convert(mode)\n elif image.mode not in [\"RGB\", \"RGBA\"]:\n image = image.convert(\"RGB\")\n return image", "def get_input(self, idx):\r\n img_filename = self.root / \"images\" / self._image_array[idx]\r\n x = Image.open(img_filename)\r\n return x", "def image_from_ndarray(array, format, size = None):\n\tif array.itemsize <> 1:\n\t\traise ValueError(\"Color component size must be 1 byte\")\n\tif size is not None:\n\t\twidth, height = size\n\t\tdata_size = array.size\n\t\tpixel_size = data_size // (width * height)\n\t\tif pixel_size <> len(format):\n\t\t\traise ValueError(\"Array has wrong shape for specified size and format\")\n\telse:\n\t\theight, width, pixel_size = array.shape\n\t\tif pixel_size <> len(format):\n\t\t\traise 
ValueError(\"Array has wrong shape for specified format\")\n\tbps = 8\n\tspp = pixel_size\n\talpha = format.endswith(\"A\")\n\tcsp = NSCalibratedRGBColorSpace\n\tbpp = bps * spp\n\tbpr = width * pixel_size\n\tfmt = NSAlphaNonpremultipliedBitmapFormat\n\tns_rep = NSBitmapImageRep.alloc()\n\tplanes = planes_t(array.ctypes.data, 0, 0, 0, 0)\n\tns_rep.initWithBitmapDataPlanes_pixelsWide_pixelsHigh_bitsPerSample_samplesPerPixel_hasAlpha_isPlanar_colorSpaceName_bitmapFormat_bytesPerRow_bitsPerPixel_(\n\t\tctypes.addressof(planes), width, height, bps, spp, alpha, False, csp, fmt, bpr, bpp)\n\timage = Image.__new__(Image)\n\timage._init_from_ns_rep(ns_rep)\n\timage._data = array\n\treturn image", "def _apply_transform(self, img: np.ndarray): \n img = self.transform(image=img)[\"image\"]\n return img", "def _apply_transform(self, img: np.ndarray): \n img = self.transform(image=img)[\"image\"]\n return img", "def fromarray(self, array):\n\n raise NotImplementedError", "def make_itk_image(arr, proto_image=None):\n\n image = itk.GetImageFromArray(arr)\n if proto_image != None:\n image.CopyInformation(proto_image)\n\n return image", "def _encode_image(image_array, fmt):\n from PIL import Image # pylint: disable=g-import-not-at-top\n pil_image = Image.fromarray(image_array)\n image_io = io.BytesIO()\n pil_image.save(image_io, format=fmt)\n return image_io.getvalue()", "def data(self, arr):\n self.bitmap(arr, 1)", "def newimagefromshape(self, *args, **kwargs):\n return _image.image_newimagefromshape(self, *args, **kwargs)", "def adapt_array(self,array):\n import io\n import array,numpy\n out = io.BytesIO()\n numpy.save(out, array)\n out.seek(0)\n \n return out.read()", "def get_image_and_prep(self,file_path):\r\n img = np.array(Image.open(file_path).convert('1'))\r\n img = img.reshape(28,28,1)\r\n return img", "def make_image(self, mode=\"L\") -> Image:\r\n return Image.fromarray(self.fb, mode=\"L\")", "def display(self):\n rows = [(self.views[0].display, len(self.views))]\n fig, axes = plt.subplots(1, len(self.views),\n figsize=self._figsize(rows),\n squeeze=True)\n for ax, view in zip(axes.ravel(), self.views):\n ax.imshow(view.display)\n ax.axis('off')\n ax.xaxis.set_visible(False)\n ax.yaxis.set_visible(False)\n ax.set(title=view.position.id)\n fig.tight_layout()\n fig.canvas.draw()\n img_array = np.array(fig.canvas.renderer._renderer)\n plt.close('all')\n return img_array", "def native_image_type(self) -> Type[Any]:\n pass", "def to_ImageTk(self, image_PIL):\r\n pic = ImageTk.PhotoImage(image_PIL)\r\n self.images.append(pic)\r\n return pic", "def process_screen(screen):\n\n # Indexing convention varies between PIL and numpy\n screen = np.swapaxes(screen, 0, 1)\n # Load the array in PIL\n im = Image.fromarray(screen, 'RGB')\n # Convert to grayscale\n im = im.convert(mode='L')\n # Crop\n im = im.crop((0, 0, 288, 405))\n # Downscale and resize\n im = im.resize((84, 84))\n # Normalise\n im = np.array(im) / 255\n\n return im", "def from_numpy(self, a):\n raise NotImplementedError(\"from_numpy\")", "def de_project(np_arr):\n item = (np_arr +1)*255 / 2\n return item.astype(np.int32, copy=True)", "def get_image(self, pvname):\n if self.protocol == \"ca\":\n pvname = pvname.replace(\":ArrayData_RBV\", \"\")\n nx = self.get(f\"{pvname}:ArraySizeX_RBV\")\n ny = self.get(f\"{pvname}:ArraySizeY_RBV\")\n dw = self.get(f\"{pvname}:dw\")\n dh = self.get(f\"{pvname}:dh\")\n image = self.get(f\"{pvname}:ArrayData_RBV\")\n image = image.reshape(int(nx), int(ny))\n\n elif self.protocol == \"pva\":\n # context returns 
np array with WRITEABLE=False\n # copy to manipulate array below\n output = self.get(pvname)\n attrib = output.attrib\n dw = attrib[\"dw\"]\n dh = attrib[\"dh\"]\n nx, ny = output.shape\n image = copy.copy(output)\n\n return {\n \"image\": [image],\n \"x\": [-dw / 2],\n \"y\": [-dh / 2],\n \"dw\": [dw],\n \"dh\": [dh],\n }", "def alloc2img(self):\n\t\t#NOTE: self.allocation is relative to the window object, so we ignore X and Y\n\t\talloc = self.allocation\n\t\t# This is ripped from rect2img()\n\t\tx,y = self.widget2imgcoords(0, 0)\n\t\t# Doesn't check _w2i_matrix since widget2imgcoords() does that\n\t\tw,h = self._w2i_matrix.transform_distance(alloc.width, alloc.height)\n\t\treturn frect(x,y,w,h)", "def getimage(self):", "def image_to_array(self, img):\n x = np.asarray(img, dtype=self.dtype)\n if len(x.shape) == 3:\n if self.channels_first:\n x = x.transpose(2, 0, 1)\n elif len(x.shape) == 2:\n if self.channels_first:\n x = x.reshape((1, x.shape[0], x.shape[1]))\n else:\n x = x.reshape((x.shape[0], x.shape[1], 1))\n else:\n raise ValueError('Unsupported image shape: ', x.shape)\n return x", "def _load(self) -> np.ndarray:\n with self._fs.open(self._filepath, mode=\"r\") as f:\n image = Image.open(f).convert(\"RGBA\")\n return np.asarray(image)", "def newimagefromimage(self, *args, **kwargs):\n return _image.image_newimagefromimage(self, *args, **kwargs)", "def array(self):\n return np.asarray(self)", "def save_numpy_array_as_image(narray, path, mode=\"uint8\", image_mode=\"L\"):\n img = Image.fromarray(np.asarray(np.clip(narray, 0, 255), dtype=mode), image_mode)\n img.save(path)", "def fromshape(self, *args, **kwargs):\n return _image.image_fromshape(self, *args, **kwargs)", "def __array__(self, *args, **kwargs):\n\n return self.data", "def convert_array(self,blob):\n import io\n import array,numpy\n out = io.BytesIO(blob)\n out.seek(0)\n\n return numpy.load(out)", "def create_image(self, shapes):\n img = image.IMG()\n img.draw_shapes(shapes)\n img = np.transpose(img.array(), (2, 0, 1))\n return img", "def _convert_to_yolo_img(self, img):\n\n img = img / 255.0\n h, w, c = img.shape\n img = img.transpose(2, 0, 1)\n outimg = make_image(w, h, c)\n img = img.reshape((w*h*c))\n data = c_array(c_float, img)\n outimg.data = data\n rgbgr_image(outimg)\n return outimg", "def convert_to_vector(img_arr):\n img = img_arr[0:248, 0:248, 0]\n img = img.flatten()\n return img", "def _pillow2array(img, flag='color', channel_order='bgr'):\n channel_order = channel_order.lower()\n if channel_order not in ['rgb', 'bgr']:\n raise ValueError('channel order must be either \"rgb\" or \"bgr\"')\n\n if flag == 'unchanged':\n array = np.array(img)\n if array.ndim >= 3 and array.shape[2] >= 3: # color image\n array[:, :, :3] = array[:, :, (2, 1, 0)] # RGB to BGR\n else:\n # If the image mode is not 'RGB', convert it to 'RGB' first.\n if img.mode != 'RGB':\n if img.mode != 'LA':\n # Most formats except 'LA' can be directly converted to RGB\n img = img.convert('RGB')\n else:\n # When the mode is 'LA', the default conversion will fill in\n # the canvas with black, which sometimes shadows black objects\n # in the foreground.\n #\n # Therefore, a random color (124, 117, 104) is used for canvas\n img_rgba = img.convert('RGBA')\n img = Image.new('RGB', img_rgba.size, (124, 117, 104))\n img.paste(img_rgba, mask=img_rgba.split()[3]) # 3 is alpha\n if flag == 'color':\n array = np.array(img)\n if channel_order != 'rgb':\n array = array[:, :, ::-1] # RGB to BGR\n elif flag == 'grayscale':\n img = img.convert('L')\n array = 
np.array(img)\n else:\n raise ValueError(\n 'flag must be \"color\", \"grayscale\" or \"unchanged\", '\n f'but got {flag}')\n return array", "def display(array):\n plt.figure()\n plt.imshow(array)\n plt.show()", "def test_fromarray_rgb_fail():\n arr = numpy.zeros((20, 10, 3), dtype='float')\n\n parameters = {'data': [arr]}\n\n images.fromarray(parameters).convert('RGB')", "def read(self):\n\n # Obtém frame da câmera.\n status , frame = super().read()\n\n if not status: return\n\n # Obtém a imagem.\n frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)\n frame = Image.fromarray(frame)\n \n # Se a opção de efeito espelho estiver ativa, a imagem será invertida.\n if self.__mirror:\n frame = frame.transpose(Image.FLIP_LEFT_RIGHT)\n \n return ImageTk.PhotoImage(frame) , frame.size", "def observation(self, img):\r\n img = img.transpose(1, 2, 0)\r\n return img", "def __array__(self):\n return np.asarray(self.data)", "def _serialize_array(self, array):\n buffer = io.BytesIO()\n np.save(buffer, array)\n return buffer.getvalue()", "def ConvertToImage(*args, **kwargs):\n return _gdi_.Bitmap_ConvertToImage(*args, **kwargs)", "def get_tkimage(self):\n self.drawer.flush()\n return PIL.ImageTk.PhotoImage(self.img)", "def image_processor(self, img_arr):\n assert img_arr.dtype == np.uint8, \\\n f\"image_processor requires uint8 array but not {img_arr.dtype}\"\n img_arr = self.transformation.run(img_arr)\n if self.is_train:\n img_arr = self.augmentation.run(img_arr)\n img_arr = self.post_transformation.run(img_arr)\n\n return img_arr", "def mat2im(X, shape):\n return X.reshape(shape)", "def __init__(self, image: np.ndarray) -> None:\n self.image = image", "def __init__(self, original_image_numpy: np.ndarray):\n self.original_image = original_image_numpy\n self.detected_objects:List[DetectedObject] = []", "def render_array(self, resolution=300, channel=\"GRAYSCALE\"):\n # Method below returns a cairocffi.ImageSurface object\n # https://cairocffi.readthedocs.io/en/latest/api.html#cairocffi.ImageSurface\n surface, width, height = self._document.write_image_surface(\n resolution=resolution\n )\n img_format = surface.get_format()\n\n # This is BGRA channel in little endian (reverse)\n if img_format != FORMAT_ARGB32:\n raise RuntimeError(\n f\"Expect surface format to be 'cairocffi.FORMAT_ARGB32', but got {img_format}.\" +\n \"Please check the underlining implementation of 'weasyprint.document.Document.write_image_surface()'\"\n )\n\n img_buffer = surface.get_data()\n # Returns image array in \"BGRA\" channel\n img_array = np.ndarray(\n shape=(height, width, 4), dtype=np.uint8, buffer=img_buffer\n )\n if channel == \"GRAYSCALE\":\n return cv2.cvtColor(img_array, cv2.COLOR_BGRA2GRAY)\n elif channel == \"RGBA\":\n return cv2.cvtColor(img_array, cv2.COLOR_BGRA2RGBA)\n elif channel == \"RGB\":\n return cv2.cvtColor(img_array, cv2.COLOR_BGRA2RGB)\n elif channel == \"BGRA\":\n return np.copy(img_array)\n elif channel == \"BGR\":\n return cv2.cvtColor(img_array, cv2.COLOR_BGRA2BGR)\n else:\n valid_channels = [\"GRAYSCALE\", \"RGB\", \"RGBA\", \"BGR\", \"BGRA\"]\n raise ValueError(\n f\"Invalid channel code {channel}. 
Valid values are: {valid_channels}.\"\n )", "def image(self) -> PIL.Image.Image:\n try:\n data = io.BytesIO(self.data)\n return PIL.Image.open(data)\n except Exception: # Image data is incorrect, fix as a simple transparent image\n return PIL.Image.new('RGBA', Image.MAX_IMAGE_SIZE)", "def fromimage(self, *args, **kwargs):\n return _image.image_fromimage(self, *args, **kwargs)", "def create_image(self):\n\n self._image = 255 * np.ones((self._height, self._width, 3), np.uint8)" ]
[ "0.76605815", "0.7366861", "0.7267543", "0.7149821", "0.7017206", "0.69027084", "0.6662344", "0.6589255", "0.6467619", "0.6431062", "0.63332164", "0.63316214", "0.62858456", "0.62591535", "0.6197975", "0.61858237", "0.6156995", "0.61221844", "0.6117985", "0.60852593", "0.5992304", "0.5991537", "0.5968067", "0.5946177", "0.58999383", "0.5885561", "0.58689284", "0.5863524", "0.5838236", "0.5833646", "0.5833351", "0.58037066", "0.57625145", "0.5732509", "0.5702829", "0.569919", "0.5692075", "0.56858414", "0.5669731", "0.5636183", "0.56075335", "0.55917263", "0.55790585", "0.5566917", "0.55637133", "0.55592126", "0.5557293", "0.55544823", "0.5537547", "0.5535617", "0.55348873", "0.5531662", "0.55219287", "0.55055565", "0.55055565", "0.5503814", "0.5497813", "0.5488844", "0.54887253", "0.5485559", "0.54825145", "0.5479756", "0.54783016", "0.54647255", "0.5454347", "0.54503405", "0.54484177", "0.5447272", "0.5440688", "0.5438468", "0.5436395", "0.5412904", "0.5405162", "0.53998685", "0.5392402", "0.5387981", "0.5380907", "0.53752", "0.53734857", "0.5360219", "0.53534275", "0.5351868", "0.53502935", "0.5345738", "0.53455067", "0.53437585", "0.5341189", "0.53384095", "0.533475", "0.5327865", "0.5307813", "0.52996224", "0.529823", "0.5298202", "0.52977675", "0.5294157", "0.5287857", "0.5278058", "0.5274323", "0.52738875" ]
0.5590651
42
Convert red, green and blue values specified in floats with range 01 to whatever the native widget color object is.
def _get_color(self, r, g, b): clr = (r, g, b) return clr
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_color(self, value):\n value = min(max(0,value), 1) * 510\n\n if value < 255:\n redValue = 255\n greenValue = math.sqrt(value) * 16\n greenValue = int(greenValue)\n else:\n greenValue = 255\n value = value - 255\n redValue = 255 - (value * value / 255)\n redValue = int(redValue)\n return '#' + f\"{redValue:0{2}x}\" + f\"{greenValue:0{2}x}\" + '00'", "def IntToColor(number):\n color = COLORS_INDEX.get(number)\n return color if color else 'default'", "def get_color_in_rgb_decimal():\n\n # Grabbing custom colormap from matplotlib\n a = cm.get_cmap('cool', 32)\n b = cm.get_cmap('spring', 32)\n c = cm.get_cmap('autumn_r', 64)\n d = cm.get_cmap('bwr_r', 192)\n e = cm.get_cmap('Greens', 192)\n\n # Adding the colormaps into one stack to have a more comprehensive color spectrum \n newcolors = np.vstack((a(np.linspace(0, 1, 32)), \n b(np.linspace(0, 1, 32)), \n c(np.linspace(0, 1, 64)),\n d(np.linspace(0, 0.5, 192)),\n e(np.linspace(0, 1, 192)),\n ))\n return newcolors", "def scale_to_01(color: C3I) -> C3F:\n r, g, b = color\n return r / 255, g / 255, b / 255", "def fromInts(r, g, b):\n return IColor(r/255.,g/255.,b/255.)", "def Color(red, green, blue, white = 0):\n\treturn (white << 24) | (red << 16)| (green << 8) | blue", "def int2color(x):\n # r = int(1000 * x % 255)\n # g = int(10000 * x % 255)\n # b = int(100000 * x % 255)\n x = 0 if x == 0 else int(1/x)\n b = x & 0xff\n g = (x >> 8) & 0xff\n r = (x >> 16) & 0xff\n return [r, g, b]", "def floatRgb(mag, cmin, cmax):\n\n try:\n # normalize to [0,1]\n x = float(mag-cmin)/float(cmax-cmin)\n except:\n # cmax = cmin\n x = 0.5\n blue = min((max((4*(0.75-x), 0.)), 1.))\n red = min((max((4*(x-0.25), 0.)), 1.))\n green= min((max((4*math.fabs(x-0.5)-1., 0.)), 1.))\n return (red, green, blue)", "def Color(red, green, blue, white = 0):\n return (white << 24) | (red << 16)| (green << 8) | blue", "def color_negative_red_positive_green(val):\n if val < 0:\n color = 'red'\n elif val > 0:\n color = 'green'\n else:\n color = 'black'\n\n return 'color: %s' % color", "def ramp_color_rgb(values, feature, parent): \r\n ramp_name = values[0]\r\n ramp_position = values[1]\r\n \r\n ramp = QgsStyleV2.defaultStyle().colorRampRef(ramp_name)\r\n if not ramp:\r\n parent.setEvalErrorString( QObject.tr( '\"{}\" is not a valid color ramp'.format(ramp_name)))\r\n return QColor(0,0,0).name()\r\n \r\n value, error = getFloat(ramp_position)\r\n if error:\r\n parent.setEvalErrorString(error)\r\n \r\n color = ramp.color(value)\r\n return \"{},{},{}\".format(color.red(), color.green(), color.blue())", "def floatRgb(mag, cmin, cmax):\n # Normalize to 0-1\n try: x = float(mag-cmin)/(cmax-cmin)\n except ZeroDivisionError: x = 0.5 # cmax == cmin\n blue = min((max((4*(0.75-x), 0.)), 1.))\n red = min((max((4*(x-0.25), 0.)), 1.))\n green = min((max((4*math.fabs(x-0.5)-1., 0.)), 1.))\n return red, green, blue", "def unconvert_from_RGB_255(colors):\n un_rgb_color = (colors[0]/(255.0),\n colors[1]/(255.0),\n colors[2]/(255.0))\n\n return un_rgb_color", "def color(self, data):\n\n red = np.interp(data, self.range, self.r)\n blue = np.interp(data, self.range, self.b)\n green = np.interp(data, self.range, self.g)\n # Style plot to return a grey color when value is 'nan'\n red[np.isnan(red)] = 240\n blue[np.isnan(blue)] = 240\n green[np.isnan(green)] = 240\n colors = np.dstack([red.astype(np.uint8),\n green.astype(np.uint8),\n blue.astype(np.uint8),\n np.full_like(data, 255, dtype=np.uint8)])\n #return colors.view(dtype=np.uint32).reshape(data.shape)\n c=[]\n for i in range(len(data)):\n 
c.append([red[i],green[i],blue[i]])\n return c", "def getColor(self,number):\n if number >= 0:\n if self.inverse:\n ret = cs.hsv_to_rgb(0,0,abs(number/self.maxp))\n else:\n ret = cs.hsv_to_rgb(0,0,1-abs(number/self.maxp))\n else:\n if self.inverse:\n ret = cs.hsv_to_rgb(0,1-abs(number/self.maxn),1)\n else:\n ret = cs.hsv_to_rgb(0,abs(number/self.maxn),1)\n return [ret[0]*255.0,ret[1]*255.0,ret[2]*255.0]", "def Amber_to_Green(val):\n\tif val == 1 :\n\t\treturn \"GREEN\"\n\telif val == -1:\n\t\treturn \"RED\"", "def convert_to_RGB_255(colors):\n return (colors[0]*255.0, colors[1]*255.0, colors[2]*255.0)", "def color_from_value(self, value):\n \n return ImageColor.getrgb(\"hsl(%d,%d%%,%d%%)\" % (int( (1.0 - value) * 360 ), 80, 50))", "def _to_color(indx, base):\n base2 = base * base\n b = 2 - indx / base2\n r = 2 - (indx % base2) / base\n g = 2 - (indx % base2) % base\n return b * 127, r * 127, g * 127", "def color_map(val):\n color_code = remap_interval(val, -1, 1, 0, 255)\n return int(color_code)", "def to_color(self):\n return (int(self.r * 255), int(self.g * 255), int(self.b * 255))", "def getColor(self,number):\n if number >= 0:\n ret = cs.hsv_to_rgb(0,0,1-abs(number/self.maxp))\n else:\n ret = cs.hsv_to_rgb(0,abs(number/self.maxn),1)\n hexcolor = '#%02x%02x%02x' % (ret[0]*255,ret[1]*255,ret[2]*255)\n return hexcolor", "def colors(self):\n\t\treturn [(0, 30, 255),(0, 30, 120)]", "def int2color_tuple(x):\n red_val = int(1000 * x % 255)\n green_val = int(10000 * x % 255)\n blue_val = int(100000 * x % 255)\n return red_val, green_val, blue_val", "def _proc_color(self, tokens):\n\n keys = tokens.keys()\n if \"red\" in keys: # RGB(A)\n rr, gg, bb = tokens[\"red\"], tokens[\"green\"], tokens[\"blue\"]\n hex2int = lambda h: int(h, 16)\n if \"alpha\" in keys:\n a = tokens[\"alpha\"]\n c = str((hex2int(rr), hex2int(gg), hex2int(bb), hex2int(a)))\n else:\n c = str((hex2int(rr), hex2int(gg), hex2int(bb)))\n elif \"hue\" in keys: # HSV\n r, g, b = hsv_to_rgb(tokens[\"hue\"],\n tokens[\"saturation\"],\n tokens[\"value\"])\n c = str((int(r*255), int(g*255), int(b*255)))\n else:\n c = tokens[\"color\"]\n\n return c", "def color_map(val):\n # NOTE: This relies on remap_interval, which you must provide\n color_code = remap_interval(val, -1, 1, 0, 255)\n return int(color_code)", "def color_map(val):\n # NOTE: This relies on remap_interval, which you must provide\n color_code = remap_interval(val, -1, 1, 0, 255)\n return int(color_code)", "def color_map(val):\n # NOTE: This relies on remap_interval, which you must provide\n color_code = remap_interval(val, -1, 1, 0, 255)\n return int(color_code)", "def color_map(val):\n # NOTE: This relies on remap_interval, which you must provide\n color_code = remap_interval(val, -1, 1, 0, 255)\n return int(color_code)", "def format_color(\n color: Union[ColorInputType, Any],\n warn_if_invalid: bool = True\n) -> Union[ColorType, Any]:\n if not isinstance(color, ColorInputInstance):\n return color\n if not isinstance(color, pygame.Color):\n try:\n if isinstance(color, VectorInstance) and 3 <= len(color) <= 4:\n if PYGAME_V2:\n for j in color:\n if not isinstance(j, int):\n raise ValueError('color cannot contain floating point values')\n c = pygame.Color(*color)\n else:\n c = pygame.Color(color)\n except ValueError:\n if warn_if_invalid:\n warn(f'invalid color value \"{color}\"')\n else:\n raise\n return color\n else:\n c = color\n return c.r, c.g, c.b, c.a", "def test_color__int_arg(self):\n for value in (0x0, 0xFFFFFFFF, 0xAABBCCDD):\n color = pygame.Color(value)\n\n 
self.assertEqual(color.r, (value >> 24) & 0xFF)\n self.assertEqual(color.g, (value >> 16) & 0xFF)\n self.assertEqual(color.b, (value >> 8) & 0xFF)\n self.assertEqual(color.a, value & 0xFF)", "def my_color_function(field):\n if field > 100000000:\n return \"#ff0000\"\n else:\n return \"#008000\"", "def color(value):\r\n return 'RGB({}, {}, {})'.format(value.red(), value.blue(), value.green())", "def parseColor(c):\n if c in baseColors:\n return baseColors[c]\n if len(c) == 6:\n return tuple(map(lambda x: int(x, 16), (c[:2], c[2:4], c[4:])))\n if len(c) == 3:\n return tuple(map(lambda x: 16*int(x, 16), c))\n raise ValueError(\"Can't find color '{}'\".format(c))", "def get_color(in_val, min_val=0, max_val=100):\n width = max_val - min_val\n unit = width / len(continuum)\n return continuum[min(int(in_val / unit), 19)]", "def glColor(self, r, g, b):\n if 0 <= r <= 1 or 0 <= g <= 1 or 0 <= b <= 1:\n self.vr = ceil(r * 255)\n self.vg = ceil(g * 255)\n self.vb = ceil(b * 255)\n else:\n print(\"Please insert numbers between 0 and 1\")\n sys.exit()", "def getColor(self, _color):\n c = _color.split()\n \n for n in range(len(c)):\n c[n] = float(c[n])\n \n return c", "def setColors(self):\n #productive\n profprint()\n self.color= [[0,0,0] for i in range(205)]\n self.color255= self.setColors255()\n for i in range(205):\n for j in range(3):\n self.color[i][j] = self.color255[i][j]/float(255)\n\n return self.color", "def reformatColor(self, colorStr):\n if type(colorStr) is str:\n if colorStr.startswith('#'):\n colorStr = colorStr.replace('#', '')\n else:\n raise Exception('color is not hex format')\n r = int(colorStr[:2], 16)\n g = int(colorStr[2:4], 16)\n b = int(colorStr[4:6], 16)\n return r, g, b", "def convColor(colorString):\n if len(colorString) != 6:\n return None\n r, g, b = colorString[:2], colorString[2:4], colorString[4:]\n r, g, b = [int(n, 16) for n in (r, g, b)]\n return (r, g, b)", "def led(red: int, green: int, blue: int, /) -> None:", "def test_conversion_through_rgb(self):\r\n\r\n xyz = convert_color(self.color, XYZColor)\r\n hsl = convert_color(xyz, HSLColor, through_rgb_type=AdobeRGBColor)\r\n # Notice how we don't have to pass through_rgb_type explicitly.\r\n xyz2 = convert_color(hsl, XYZColor)\r\n self.assertColorMatch(xyz, xyz2)", "def get_change_color(self, change_percentage, color_range: int = 5):\n change_percentage = str(change_percentage).split('.')[0] # before the dot\n red = colour.Color(\"#D50000\")\n white = colour.Color(\"#FFFFFF\")\n green = colour.Color(\"#1B5E20\")\n\n int_perc = int(change_percentage)\n\n if int_perc is 0:\n return self.bot.hex_to_int(\"#ffffff\")\n elif change_percentage.startswith('-'):\n colors = list(red.range_to(white, color_range))\n int_perc = int_perc * -1 # make it positive\n int_perc = color_range - int_perc\n int_perc = int_perc if int_perc > 0 else 0 # limit\n return self.bot.hex_to_int(colors[int_perc].hex_l)\n\n int_perc -= 1\n colors = list(white.range_to(green, color_range))\n int_perc = int_perc if int_perc < (color_range - 1) else (color_range - 1) # limit\n return self.bot.hex_to_int(colors[int_perc].hex_l)", "def color_conversion(string):\n if (string == 'J'):\n return 0.14\n if (string == 'I'):\n return 0.28\n if (string == 'H'):\n return 0.42\n if (string == 'G'):\n return 0.56\n if (string == 'F'):\n return 0.70\n if (string == 'E'):\n return 0.84\n if (string == 'D'):\n return 1", "def revert_color(cls, colors):\n # 0.5 is to map the color to the center of the range\n return [int((c+0.5) / cls.color_level * 256) for c in 
colors]", "def color_rgb(r,g,b):\n return \"#%02x%02x%02x\" % (r,g,b)", "def color_rgb(r,g,b):\n return \"#%02x%02x%02x\" % (r,g,b)", "def color_rgb(r,g,b):\n return \"#%02x%02x%02x\" % (r,g,b)", "def _color(self, args):", "def create_color_gradient():\n colors = []\n step = 10\n for red, green in zip(range(255,-step, -step), range(0, 255, step)):\n colors.append({'red': red, 'green': green, 'blue': 0})\n for green, blue in zip(range(255,-step, -step), range(0, 255, step)):\n colors.append({'red': 0, 'green': green, 'blue': blue})\n for blue, red in zip(range(255,-step, -step), range(0, 255, step)):\n colors.append({'red': red, 'green': 0, 'blue': blue})\n return colors", "def RGB_to_color(r: int, g: int, b: int, a: int=255):\n return np.clip(np.array([r, g, b, a], dtype='u1'), 0, 255)", "def _from_rgb(self, rgb):\r\n return \"#%02x%02x%02x\" % rgb", "def updateColors(self):\n self.negativeColor = (int(self.negativeRedTextField.get(\"1.0\", tk.END)),\n int(self.negativeGreenTextField.get(\"1.0\", tk.END)),\n int(self.negativeBlueTextField.get(\"1.0\", tk.END)))\n self.positiveColor = (int(self.positiveRedTextField.get(\"1.0\", tk.END)),\n int(self.positiveGreenTextField.get(\"1.0\", tk.END)),\n int(self.positiveBlueTextField.get(\"1.0\", tk.END)))\n # Update the positive and negative labels\n self.negativeLabel.config(background=self.negativeColorHex())\n self.positiveLabel.config(background=self.positiveColorHex())\n\n print(f\"Negative: {self.negativeColor}\")\n print(f\"Positive: {self.positiveColor}\")", "def __value2color(self, v):\n if np.isscalar(v):\n r = self.cmap(self.norm(np.asarray([v])))\n else:\n r = self.cmap(self.norm(v))\n return r.flatten()", "def wx_to_enable_color(color):\n \n enable_color = array((1.0,1.0,1.0,1.0))\n enable_color[:3] = asarray(color.Get())/255.\n\n return tuple(enable_color)", "def fl_mapcolor(colr, red, green, blue):\n _fl_mapcolor = library.cfuncproto(\n library.load_so_libforms(), \"fl_mapcolor\",\\\n cty.c_ulong, [xfdata.FL_COLOR, cty.c_int, cty.c_int, cty.c_int],\n \"\"\"unsigned long fl_mapcolor(FL_COLOR col, int r, int g, int b)\"\"\")\n library.check_if_flinitialized()\n #library.checknonfatal_allowed_value_in_list(colr, xfdata.COLOR_list)\n ul_colr = library.convert_to_FL_COLOR(colr)\n i_red = library.convert_to_intc(red)\n i_green = library.convert_to_intc(green)\n i_blue = library.convert_to_intc(blue)\n library.keep_elem_refs(colr, ul_colr, red, green, blue, i_red, \\\n i_green, i_blue)\n retval = _fl_mapcolor(ul_colr, i_red, i_green, i_blue)\n return retval", "def red2blue(self):\r\n for x in range(self.xspan):\r\n for y in range(self.yspan):\r\n if (self.cells[x][y] == 1):\r\n self.cells[x][y] = 2", "def _rgb(color):\n warnings.warn('Use color.rgba instead of color._rgb', FutureWarning, stacklevel=2)\n return (int(color[-6:-4], 16), int(color[-4:-2], 16), int(color[-2:], 16))", "def rgbcolor(h, f):\n # q = 1 - f\n # t = f\n if h == 0:\n return v, f, p\n elif h == 1:\n return 1 - f, v, p\n elif h == 2:\n return p, v, f\n elif h == 3:\n return p, 1 - f, v\n elif h == 4:\n return f, p, v\n elif h == 5:\n return v, p, 1 - f", "def setColors(self):\r\n # productive\r\n profprint()\r\n self.color = [[0, 0, 0] for i in range(MAXCOL)]\r\n self.color255 = self.setColors255()\r\n for i in range(MAXCOL):\r\n for j in range(3):\r\n self.color[i][j] = self.color255[i][j] / float(255)\r\n\r\n return self.color", "def color565(red, green=0, blue=0):\n try:\n red, green, blue = red # see if the first var is a tuple/list\n except TypeError:\n pass\n 
return (red & 0xf8) << 8 | (green & 0xfc) << 3 | blue >> 3", "def default_render_color_maker(num:int):\n if num < 7:\n log.warn('Fewer than 7 rendering colors are being generated. This may cause issues if a URDF with a 6+ axis robot is loaded.')\n\n b = np.linspace(0,255,num).astype(int) # Blue values are always unique\n\n g = [0] * b.size\n r = np.abs(255 - 2*b)\n\n colors = []\n for idx in range(num):\n colors.append([b[idx],g[idx],r[idx]])\n return colors", "def color_negative_red(value):\n\n if value == 1:\n color = 'red'\n else:\n color = 'black'\n\n return 'color: %s' % color", "def color(*args, rgbColor: List[float, float, float]=None, userDefined: int=0, **kwargs)->None:\n pass", "def getColor(n, total = 255, decimal = False):\n\n value = round(255*n/(total * 1.0))\n\n #red value\n if value < 96:\n red = 0\n elif value < 160:\n red = 255/((160 - 96)*1.0) * (value - 96)\n elif value < 224:\n red = 255\n else:\n red = 255 - ((255 - 128)/((255 - 224) * 1.0) * (value - 224))\n\n\n #Green value\n if value < 32:\n green = 0\n elif value < 96:\n green = 255/((96 - 32)*1.0) * (value - 32)\n elif value < 160:\n green = 255\n elif value < 224:\n green = 255 - (255/((224 - 160) * 1.0) * (value - 160))\n else:\n green = 0\n\n\n #Blue value\n if value < 32:\n blue = 128 + (255 - 128)/((32 - 0) * 1.0) * (value - 0)\n elif value < 96:\n blue = 255\n elif value < 160:\n blue = 255 - ((255 - 0)/((160 - 96) * 1.0) * (value - 96))\n else:\n blue = 0\n\n if decimal:\n return (red / 255.0, green / 255.0, blue / 255.0)\n return (int(red), int(green), int(blue))", "def color_positive_green(val):\n\tif val > 0: \n\t\tcolor = 'green'\n\telse: \n\t\tcolor = 'red'\n\treturn 'background-color: %s' % color", "def _color(self,c):\n return self.colorlist[c%len(self.colorlist)]", "def color(c):\n\n if isinstance(c, tuple) and len(c) == 4:\n return c\n\n if c is None:\n return c\n\n if isinstance(c, basestring):\n if c[0] == '#':\n c = c[1:]\n\n if len(c) == 6:\n r = int(c[0]+c[1], 16)\n g = int(c[2]+c[3], 16)\n b = int(c[4]+c[5], 16)\n a = 255\n elif len(c) == 8:\n r = int(c[0]+c[1], 16)\n g = int(c[2]+c[3], 16)\n b = int(c[4]+c[5], 16)\n a = int(c[6]+c[7], 16)\n elif len(c) == 3:\n r = int(c[0], 16) * 0x11\n g = int(c[1], 16) * 0x11\n b = int(c[2], 16) * 0x11\n a = 255\n elif len(c) == 4:\n r = int(c[0], 16) * 0x11\n g = int(c[1], 16) * 0x11\n b = int(c[2], 16) * 0x11\n a = int(c[3], 16) * 0x11\n else:\n raise Exception(\"Color string must be 3, 4, 6, or 8 hex digits long.\")\n\n return (r, g, b, a)\n\n raise Exception(\"Not a color: %r\" % (c,))", "def assigning_colors():\n rgb_colors = {}\n for name, hex in matplotlib.colors.cnames.items():\n color = []\n # So the values are from 0-255 and not 0-1\n for i in matplotlib.colors.to_rgb(hex):\n color.append(int(i * 255))\n\n color = tuple(color)\n rgb_colors[name] = color\n\n return rgb_colors", "def create_unique_color_float(tag, hue_step=0.41):\n h, v = (tag * hue_step) % 1, 1. 
- (int(tag * hue_step) % 4) / 5.\n r, g, b = colorsys.hsv_to_rgb(h, 1., v)\n return r, g, b", "def HexColor(val):\n if isinstance(val, str):\n val = int(val, 16)\n factor = 1.0 / 255\n return Color(factor * ((val >> 16) & 0xFF), factor * ((val >> 8) & 0xFF), factor * (val & 0xFF))", "def colorManagementConvert(*args, toDisplaySpace: List[float, float, float]=None,\n **kwargs)->None:\n pass", "def get_color(activePerMillion):\n activePer100k = activePerMillion / 10.0\n if activePer100k < 100:\n return \"#aaf0d1\"\n elif activePer100k < 500:\n return \"#a3f7bf\"\n elif activePer100k < 1000:\n return \"#90EE90\"\n elif activePer100k < 1500:\n return \"#00ff7f\"\n elif activePer100k < 2000:\n return \"#77dd77\"\n elif activePer100k < 2500:\n return \"#32cd32\"\n elif activePer100k < 3000:\n return \"#4cbb17\"\n elif activePer100k < 3500:\n return \"#228b22\"\n elif activePer100k < 4000:\n return \"#355e3b \"\n else:\n return \"#006400\"", "def rgb(r, g, b):\n return \"\".join([\"%02X\" % max(0, min(x, 255)) for x in [r, g, b]])", "def fl_color(colr):\n _fl_color = library.cfuncproto(\n library.load_so_libforms(), \"fl_color\",\\\n None, [xfdata.FL_COLOR],\\\n \"\"\"void fl_color(FL_COLOR col)\"\"\")\n library.check_if_flinitialized()\n #library.checknonfatal_allowed_value_in_list(colr, xfdata.COLOR_list)\n ul_colr = library.convert_to_FL_COLOR(colr)\n library.keep_elem_refs(colr, ul_colr)\n _fl_color(ul_colr)", "def convert_color(self, color):\n return [color[0]*16, color[1]*16, color[2]*16]", "def GetRGB(self, *args):\n return _XCAFDoc.XCAFDoc_Color_GetRGB(self, *args)", "def rgb_to_color(*rgb):\n if(len(rgb)==1):\n r,g,b = rgb[0]\n else:\n r,g,b = rgb\n return \"#%02x%02x%02x\" % (r,g,b)", "def _calcColor(self, colorTuple):\n return milight.color_from_rgb(*colorTuple)", "def example_lab_to_rgb():\r\n\r\n print(\"=== RGB Example: Lab->RGB ===\")\r\n # Instantiate an Lab color object with the given values.\r\n lab = LabColor(0.903, 16.296, -2.217)\r\n # Show a string representation.\r\n print(lab)\r\n # Convert to XYZ.\r\n rgb = convert_color(lab, sRGBColor)\r\n print(rgb)\r\n print(\"=== End Example ===\\n\")", "def change( p ):\n red = p[0]\n green = p[1]\n blue = p[2]\n return [ 255-red, 255-green, 255-blue ]", "def _update_color(self, rgb_tuple):\n for color in rgb_tuple._fields:\n pin = getattr(PINS, color)\n value = getattr(rgb_tuple, color)\n # Ensure color between 0 and 255\n value = max(min(value, 255), 0)\n # print(pin, value)\n self.pi.set_PWM_dutycycle(pin, value)", "def hex_to_rgb(self,value):\n value = value.lstrip('#')\n lv = len(value)\n return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3))", "def rgb(r, g, b):\n return (r/255, g/255, b/255)", "def _cc(self, args):\n if isinstance(args, str):\n return args\n try:\n r, g, b = args\n except (TypeError, ValueError):\n raise TurtleGraphicsError(\"bad color arguments: %s\" % str(args))\n if self.screen._colormode == 1.0:\n r, g, b = [round(255.0*x) for x in (r, g, b)]\n if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)):\n raise TurtleGraphicsError(\"bad color sequence: %s\" % str(args))\n return \"#%02x%02x%02x\" % (r, g, b)", "def unlabel_rgb(colors):\n str_vals = ''\n for index in range(len(colors)):\n try:\n float(colors[index])\n str_vals = str_vals + colors[index]\n except ValueError:\n if colors[index] == ',' or colors[index] == '.':\n str_vals = str_vals + colors[index]\n\n str_vals = str_vals + ','\n numbers = []\n str_num = ''\n for char in str_vals:\n if char != ',':\n str_num = str_num 
+ char\n else:\n numbers.append(float(str_num))\n str_num = ''\n return (numbers[0], numbers[1], numbers[2])", "def HTMLColorToRGB(colorstring):", "def example_rgb_to_xyz():\r\n\r\n print(\"=== RGB Example: RGB->XYZ ===\")\r\n # Instantiate an Lab color object with the given values.\r\n rgb = sRGBColor(120, 130, 140)\r\n # Show a string representation.\r\n print(rgb)\r\n # Convert RGB to XYZ using a D50 illuminant.\r\n xyz = convert_color(rgb, XYZColor, target_illuminant='D50')\r\n print(xyz)\r\n print(\"=== End Example ===\\n\")", "def change(widget, colors): \n\t\n new_val = '#'\n for name in ('red', 'green', 'blue'):\n new_val += colors[name].get()\n widget['bg'] = new_val", "def float_to_rgb(self, float_rgb):\n s = struct.pack('>f', float_rgb)\n i = struct.unpack('>l', s)[0]\n pack = ctypes.c_uint32(i).value\n\n r = (pack & 0x00FF0000) >> 16\n g = (pack & 0x0000FF00) >> 8\n b = (pack & 0x000000FF)\n\n color = [r, g, b]\n\n return color", "def _indexTupleToColor(self, index):\n coltuple = self.graphColors[index]\n color = wx.Colour()\n color.Set(coltuple[0] * 255, coltuple[1] * 255, coltuple[2] * 255)\n return color", "def test_conversion_to_rgb_zero_div(self):\r\n\r\n lchab = LCHabColor(0.0, 0.0, 0.0)\r\n rgb = convert_color(lchab, sRGBColor)\r\n self.assertColorMatch(rgb, sRGBColor(0.0, 0.0, 0.0))", "def color_rgb(self):\n return tuple(int(self.color[i : i + 2], 16) for i in (0, 2, 4))", "def _bool_to_color(value) -> int:\n if value is True:\n return RED\n return BLACK", "def _from_rgb(rgb):\n return \"#%02x%02x%02x\" % rgb", "def rgb_slider_moved(self, event):\n slider_red = int(self.slider_r.get_value())\n slider_green = int(self.slider_g.get_value())\n slider_blue = int(self.slider_b.get_value())\n\n self.change_color((slider_red, slider_green, slider_blue))", "def matplotlib_rgb_color(rgb_color):\r\n return tuple([i / 255. for i in rgb_color])", "def color_negative_red(val):\n if val == 'k':\n color = 'red' \n else:\n color = 'yellow'\n return ['color: %s' % color]*3", "def test_color(self):\n self._calibration_test(\"color_full\")", "def get_rgb(self, r,g,b):\n return \"#%02x%02x%02x\" % (r,g,b)" ]
[ "0.6687373", "0.66278064", "0.6625339", "0.6615278", "0.65561914", "0.64596033", "0.64479345", "0.6447677", "0.6441892", "0.64142656", "0.6391666", "0.6331374", "0.6328157", "0.6308797", "0.6283234", "0.62787604", "0.624464", "0.6223854", "0.6213597", "0.6211883", "0.6205541", "0.6171425", "0.6166079", "0.6159253", "0.6139879", "0.6121332", "0.6121332", "0.6121332", "0.6121332", "0.6120204", "0.61072344", "0.6106902", "0.61025554", "0.6085604", "0.60804313", "0.60768354", "0.6067291", "0.6048135", "0.603985", "0.6025673", "0.6018406", "0.6006659", "0.59860814", "0.59751856", "0.5974249", "0.5972353", "0.5972353", "0.5972353", "0.59680825", "0.59559166", "0.59543854", "0.5953285", "0.5933454", "0.592953", "0.5923576", "0.59169996", "0.5915104", "0.5891211", "0.58907676", "0.5879183", "0.5876974", "0.5876216", "0.58728915", "0.5866013", "0.58641946", "0.5851625", "0.5848478", "0.5828442", "0.58225346", "0.58176726", "0.5812567", "0.58000934", "0.5799214", "0.57983893", "0.57857203", "0.57797146", "0.57754457", "0.5774757", "0.57606214", "0.57596874", "0.575613", "0.57528645", "0.57410544", "0.5738148", "0.57336086", "0.57320964", "0.57166445", "0.57090193", "0.5707036", "0.57068676", "0.5692427", "0.5690982", "0.56881773", "0.56835043", "0.56826174", "0.5678353", "0.56776077", "0.56734693", "0.56729394", "0.5663843" ]
0.6279571
15
Translate a keycode/keyname in the widget set to a ginga standard ASCII symbol.
def transkey(self, keycode, keyname):
    self.logger.debug("keycode=%d keyname='%s'" % (
        keycode, keyname))
    try:
        return self._keytbl[keyname.lower()]
    except KeyError:
        return keyname
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _translate_keyname(inp):\n convert = {'Equal': '=', 'Escape': 'Esc', 'Delete': 'Del', 'Return': 'Enter',\n 'Page_up': 'PgUp', 'Page_down': 'PgDn'}\n if inp in convert:\n out = convert[inp]\n else:\n out = inp\n return out", "def process_key(key):\n print(chr(key))", "def key(event):\n nonlocal button_save_string\n self.is_binding = False\n try:\n if event.keysym == 'space':\n initialise_control_button(button, button_save_string)\n return\n button['text'] = str(event.keysym)\n set_command_for(button_save_string, event.keysym)\n\n except Exception:\n if event.char == 'space':\n initialise_control_button(button, button_save_string)\n return\n button['text'] = str(event.char)\n set_command_for(button_save_string, event)\n\n unbind_keys(button)", "def player_to_symbol(player):\n symbol = '_'\n if player == 1:\n symbol = 'x'\n elif player == 2:\n symbol = 'o'\n return symbol", "def label(self) -> str:\n return self.keysym.label", "def label(mi_, ma_):\n\treturn \"caractères Unicode des points de code {} à {}\".format(mi_, ma_)", "def key_press_event(self, widget, event):\n # get keyname or keycode and translate to ginga standard\n # keyname =\n # keycode =\n keyname = '' # self.transkey(keyname, keycode)\n self.logger.debug(\"key press event, key=%s\" % (keyname))\n return self.make_ui_callback('key-press', keyname)", "def FSMLetterSymbol(letter):\n return FSMEmptyWordSymbol if letter is None else repr(letter)", "def arrow_key(event):\n set_command_for(button_save_string, event.keysym)\n button['text'] = event.keysym + ' arrow key'\n unbind_keys(button)", "def say_letter(self, keyboard, keycode, char, modifiers):\n\n if keycode[1] in ('shift', 'rshift'):\n return # ignore.. shifted keys will have their Shift modifier set\n elif keycode[1] == 'tab':\n self.play_sound('tab')\n elif keycode[1] == 'delete':\n self.play_sound('delete')\n elif keycode[1] == 'backspace':\n self.textbox.text = self.textbox.text[:-1]\n self.play_sound('backspace')\n elif keycode[1] == 'enter':\n self.textbox.text += '\\n'\n self.play_sound('enter')\n elif char == ' ':\n self.textbox.text += ' '\n self.play_sound('space') \n elif char is None:\n self.play_sound('error')\n else:\n if 'shift' in modifiers or 'rshift' in modifiers:\n self.textbox.text += char.upper()\n else:\n self.textbox.text += char\n if RENAMED_CHAR.get(char):\n self.play_sound(RENAMED_CHAR[char])\n else: \n self.play_sound(char)", "def _transformed_name(key: Text) -> Text:\n return key + \"_xf\"", "def _key_name(self, key):\n if type(key) == type(\"\"):\n return str(curses.keyname(ord(key)).decode(\"utf-8\"))\n return False", "def setChar(self, char):\n self.label.setText(str(char))", "def _key_func_2(entry: tuple[str, list]) -> str:\n key = unicodedata.normalize('NFD', entry[0].lower())\n if key.startswith('\\N{RIGHT-TO-LEFT MARK}'):\n key = key[1:]\n if key[0:1].isalpha() or key.startswith('_'):\n key = chr(127) + key\n return key", "def unicode_ion_symbol(self) -> str:\n superscripts = {\n \"+\": \"\\u207A\",\n \"-\": \"\\u207B\",\n \"0\": \"\\u2070\",\n \"1\": \"\\u00B9\",\n \"2\": \"\\u00B2\",\n \"3\": \"\\u00B3\",\n \"4\": \"\\u2074\",\n \"5\": \"\\u2075\",\n \"6\": \"\\u2076\",\n \"7\": \"\\u2077\",\n \"8\": \"\\u2078\",\n \"9\": \"\\u2079\",\n }\n table = str.maketrans(superscripts)\n template = \"+\" if self.charge > 0 else \"-\"\n\n if abs(self.charge) != 1:\n template = str(abs(self.charge)) + template\n\n return self.symbol + template.translate(table)", "def int_21H_1(self):\r\n\r\n ascii_char = self.GUI.get_key_value() # ten do w 
wczytania\r\n self.registers['AX'].move_into(ascii_char, 0, is_int=True) # zapisanie kodu ascii do AXL\r", "def enlabel(mi_, ma_):\n\treturn \"Unicode characters from {} to {} codepoints\".format(mi_, ma_)", "def keypad_key(m) -> str:\n return f\"keypad_{m.digit}\"", "def keysym_to_str(keysym):\n return keysymdef.names.get(keysym, '')", "def translate_key(self, mod, c):\n\n alt = keyboard.alt in mod\n ctrl = keyboard.ctrl in mod\n shift = keyboard.shift in mod\n\n if alt:\n meta = '\\x1b'\n else:\n meta = ''\n\n if isinstance(c, str):\n if shift:\n raise KaaError(\n 'Cannot use shift key for character: {!r}'.format((mod, c)))\n if ctrl:\n c = c.upper()\n if not (0x40 <= ord(c) <= 0x5f):\n raise KaaError(\n 'Cannot use control key for character: {!r}'.format((mod, c)))\n return meta + chr(ord(c) - 0x40)\n else:\n return meta + c\n else:\n ret = keydef.keyfromname(c, ctrl, shift)\n if ret is None:\n raise KaaError(\n 'Cannot convert character: {!r}'.format((mod, c)))\n\n return [ret] if not meta else [meta, ret]", "def replace(letter, key):\n return letter", "def convert_case(sym):\n lower = sym\n upper = sym\n\n enc = sym >> 8\n\n if enc == 0: # latin1\n if ((sym >= keysymdef.keysyms[\"A\"]) and (sym <= keysymdef.keysyms[\"Z\"])):\n lower += (keysymdef.keysyms[\"a\"] - keysymdef.keysyms[\"A\"])\n elif ((sym >= keysymdef.keysyms[\"a\"]) and (sym <= keysymdef.keysyms[\"z\"])):\n upper -= (keysymdef.keysyms[\"a\"] - keysymdef.keysyms[\"A\"])\n elif ((sym >= keysymdef.keysyms[\"Agrave\"])\n and (sym <= keysymdef.keysyms[\"Odiaeresis\"])):\n lower += (keysymdef.keysyms[\"agrave\"] - keysymdef.keysyms[\"Agrave\"])\n elif ((sym >= keysymdef.keysyms[\"agrave\"])\n and (sym <= keysymdef.keysyms[\"odiaeresis\"])):\n upper -= (keysymdef.keysyms[\"agrave\"] - keysymdef.keysyms[\"Agrave\"])\n elif ((sym >= keysymdef.keysyms[\"Ooblique\"]) and (sym <= keysymdef.keysyms[\"Thorn\"])):\n lower += (keysymdef.keysyms[\"oslash\"] - keysymdef.keysyms[\"Ooblique\"])\n elif ((sym >= keysymdef.keysyms[\"oslash\"]) and (sym <= keysymdef.keysyms[\"thorn\"])):\n upper -= (keysymdef.keysyms[\"oslash\"] - keysymdef.keysyms[\"Ooblique\"])\n elif enc == 1: # latin2\n # Assume the KeySym is a legal value (ignore discontinuities)\n if (sym == keysymdef.keysyms[\"Aogonek\"]):\n lower = keysymdef.keysyms[\"aogonek\"]\n elif (sym >= keysymdef.keysyms[\"Lstroke\"] and sym <= keysymdef.keysyms[\"Sacute\"]):\n lower += (keysymdef.keysyms[\"lstroke\"] - keysymdef.keysyms[\"Lstroke\"])\n elif (sym >= keysymdef.keysyms[\"Scaron\"] and sym <= keysymdef.keysyms[\"Zacute\"]):\n lower += (keysymdef.keysyms[\"scaron\"] - keysymdef.keysyms[\"Scaron\"])\n elif (sym >= keysymdef.keysyms[\"Zcaron\"] and sym <= keysymdef.keysyms[\"Zabovedot\"]):\n lower += (keysymdef.keysyms[\"zcaron\"] - keysymdef.keysyms[\"Zcaron\"])\n elif (sym == keysymdef.keysyms[\"aogonek\"]):\n upper = keysymdef.keysyms[\"Aogonek\"]\n elif (sym >= keysymdef.keysyms[\"lstroke\"] and sym <= keysymdef.keysyms[\"sacute\"]):\n upper -= (keysymdef.keysyms[\"lstroke\"] - keysymdef.keysyms[\"Lstroke\"])\n elif (sym >= keysymdef.keysyms[\"scaron\"] and sym <= keysymdef.keysyms[\"zacute\"]):\n upper -= (keysymdef.keysyms[\"scaron\"] - keysymdef.keysyms[\"Scaron\"])\n elif (sym >= keysymdef.keysyms[\"zcaron\"] and sym <= keysymdef.keysyms[\"zabovedot\"]):\n upper -= (keysymdef.keysyms[\"zcaron\"] - keysymdef.keysyms[\"Zcaron\"])\n elif (sym >= keysymdef.keysyms[\"Racute\"] and sym <= keysymdef.keysyms[\"Tcedilla\"]):\n lower += (keysymdef.keysyms[\"racute\"] - 
keysymdef.keysyms[\"Racute\"])\n elif (sym >= keysymdef.keysyms[\"racute\"] and sym <= keysymdef.keysyms[\"tcedilla\"]):\n upper -= (keysymdef.keysyms[\"racute\"] - keysymdef.keysyms[\"Racute\"])\n elif enc == 2: # latin3\n # Assume the KeySym is a legal value (ignore discontinuities)\n if (sym >= keysymdef.keysyms[\"Hstroke\"] and sym <= keysymdef.keysyms[\"Hcircumflex\"]):\n lower += (keysymdef.keysyms[\"hstroke\"] - keysymdef.keysyms[\"Hstroke\"])\n elif (sym >= keysymdef.keysyms[\"Gbreve\"] and sym <= keysymdef.keysyms[\"Jcircumflex\"]):\n lower += (keysymdef.keysyms[\"gbreve\"] - keysymdef.keysyms[\"Gbreve\"])\n elif (sym >= keysymdef.keysyms[\"hstroke\"] and sym <= keysymdef.keysyms[\"hcircumflex\"]):\n upper -= (keysymdef.keysyms[\"hstroke\"] - keysymdef.keysyms[\"Hstroke\"])\n elif (sym >= keysymdef.keysyms[\"gbreve\"] and sym <= keysymdef.keysyms[\"jcircumflex\"]):\n upper -= (keysymdef.keysyms[\"gbreve\"] - keysymdef.keysyms[\"Gbreve\"])\n elif (sym >= keysymdef.keysyms[\"Cabovedot\"]\n and sym <= keysymdef.keysyms[\"Scircumflex\"]):\n lower += (keysymdef.keysyms[\"cabovedot\"] - keysymdef.keysyms[\"Cabovedot\"])\n elif (sym >= keysymdef.keysyms[\"cabovedot\"]\n and sym <= keysymdef.keysyms[\"scircumflex\"]):\n upper -= (keysymdef.keysyms[\"cabovedot\"] - keysymdef.keysyms[\"Cabovedot\"])\n elif enc == 3: # latin4\n # Assume the KeySym is a legal value (ignore discontinuities)\n if (sym >= keysymdef.keysyms[\"Rcedilla\"] and sym <= keysymdef.keysyms[\"Tslash\"]):\n lower += (keysymdef.keysyms[\"rcedilla\"] - keysymdef.keysyms[\"Rcedilla\"])\n elif (sym >= keysymdef.keysyms[\"rcedilla\"] and sym <= keysymdef.keysyms[\"tslash\"]):\n upper -= (keysymdef.keysyms[\"rcedilla\"] - keysymdef.keysyms[\"Rcedilla\"])\n elif (sym == keysymdef.keysyms[\"ENG\"]):\n lower = keysymdef.keysyms[\"eng\"]\n elif (sym == keysymdef.keysyms[\"eng\"]):\n upper = keysymdef.keysyms[\"ENG\"]\n elif (sym >= keysymdef.keysyms[\"Amacron\"] and sym <= keysymdef.keysyms[\"Umacron\"]):\n lower += (keysymdef.keysyms[\"amacron\"] - keysymdef.keysyms[\"Amacron\"])\n elif (sym >= keysymdef.keysyms[\"amacron\"] and sym <= keysymdef.keysyms[\"umacron\"]):\n upper -= (keysymdef.keysyms[\"amacron\"] - keysymdef.keysyms[\"Amacron\"])\n elif enc == 6: # cyrillic\n # Assume the KeySym is a legal value (ignore discontinuities)\n if (sym >= keysymdef.keysyms[\"Serbian_DJE\"]\n and sym <= keysymdef.keysyms[\"Serbian_DZE\"]):\n lower -= (keysymdef.keysyms[\"Serbian_DJE\"] - keysymdef.keysyms[\"Serbian_dje\"])\n elif (sym >= keysymdef.keysyms[\"Serbian_dje\"]\n and sym <= keysymdef.keysyms[\"Serbian_dze\"]):\n upper += (keysymdef.keysyms[\"Serbian_DJE\"] - keysymdef.keysyms[\"Serbian_dje\"])\n elif (sym >= keysymdef.keysyms[\"Cyrillic_YU\"]\n and sym <= keysymdef.keysyms[\"Cyrillic_HARDSIGN\"]):\n lower -= (keysymdef.keysyms[\"Cyrillic_YU\"] - keysymdef.keysyms[\"Cyrillic_yu\"])\n elif (sym >= keysymdef.keysyms[\"Cyrillic_yu\"]\n and sym <= keysymdef.keysyms[\"Cyrillic_hardsign\"]):\n upper += (keysymdef.keysyms[\"Cyrillic_YU\"] - keysymdef.keysyms[\"Cyrillic_yu\"])\n elif enc == 7: # greek\n if (sym >= keysymdef.keysyms[\"Greek_ALPHAaccent\"]\n and sym <= keysymdef.keysyms[\"Greek_OMEGAaccent\"]):\n lower += (keysymdef.keysyms[\"Greek_alphaaccent\"] -\n keysymdef.keysyms[\"Greek_ALPHAaccent\"])\n elif (sym >= keysymdef.keysyms[\"Greek_alphaaccent\"]\n and sym <= keysymdef.keysyms[\"Greek_omegaaccent\"] and\n sym != keysymdef.keysyms[\"Greek_iotaaccentdieresis\"] and\n sym != 
keysymdef.keysyms[\"Greek_upsilonaccentdieresis\"]):\n upper -= (keysymdef.keysyms[\"Greek_alphaaccent\"] -\n keysymdef.keysyms[\"Greek_ALPHAaccent\"])\n elif (sym >= keysymdef.keysyms[\"Greek_ALPHA\"]\n and sym <= keysymdef.keysyms[\"Greek_OMEGA\"]):\n lower += (keysymdef.keysyms[\"Greek_alpha\"] - keysymdef.keysyms[\"Greek_ALPHA\"])\n elif (sym >= keysymdef.keysyms[\"Greek_alpha\"]\n and sym <= keysymdef.keysyms[\"Greek_omega\"] and\n sym != keysymdef.keysyms[\"Greek_finalsmallsigma\"]):\n upper -= (keysymdef.keysyms[\"Greek_alpha\"] - keysymdef.keysyms[\"Greek_ALPHA\"])\n elif enc == 0x14: # armenian\n if (sym >= keysymdef.keysyms[\"Armenian_AYB\"]\n and sym <= keysymdef.keysyms[\"Armenian_fe\"]):\n lower = sym | 1\n upper = sym & ~1\n return lower, upper", "def keysym_to_char(keysym):\n # special keysyms\n if keysym in (0, 0x00ffffff):\n raise ConversionError(\"%d is a special keysym\" % keysym)\n # latin-1 keysyms\n elif (0x0020 <= keysym <= 0x007e or 0x00a0 <= keysym <= 0x00ff):\n return unichr(keysym)\n # unicode keysyms\n elif (0x01000100 <= keysym <= 0x0110ffff):\n return unichr(keysym - 0x01000000)\n # legacy keysyms\n elif keysym in keysymdef.legacy_keysyms:\n return unichr(keysymdef.legacy_keysyms[keysym])\n # dunno!\n else:\n raise ConversionError(\"Unsupported keysym category or legacy keysym: %d\" % keysym)", "def cast_name(key):\n special_symbols = set('{}{}'.format(punctuation, ' '))\n special_symbols.remove('_')\n new_key = ['_' if x in special_symbols else x for x in key]\n casted_key = ''.join(new_key)\n return casted_key", "def keypress(cls, _, key):\n return key", "def symbol_name(string):\n return 'USymbol' + convert_name(string, True)", "def _get_unicode_name_from_key(key: str) -> str:\n return KEY_TO_UNICODE_NAME.get(key, key.upper())", "def setWindowKey(key='return'):\n wdict = {'click':'NONE','return':'RETURN','escape':'ESCAPE'}\n dislin.winkey(wdict[key])", "def set_character(self, y_pos, x_pos):\n self.map[y_pos][x_pos] = 'G'", "def _create_symbol_mapping():\n normal_items = [\"+\", \"-\"]\n unicode_items = [chr(0x2070 + i) for i in range(10, 12)]\n\n # Create a dict mapping the two.\n return DefaultDictionary(zip(normal_items, unicode_items))", "def ctrl_k(self):\n self.string = self.string[0:self.index]", "def send_key(self, keycode):\n print(keycode)", "def on_key(self, event: events.Key) -> None:\n\n def press(button_id: str) -> None:\n \"\"\"Press a button, should it exist.\"\"\"\n try:\n self.query_one(f\"#{button_id}\", Button).press()\n except NoMatches:\n pass\n\n key = event.key\n if key.isdecimal():\n press(f\"number-{key}\")\n elif key == \"c\":\n press(\"c\")\n press(\"ac\")\n else:\n button_id = self.NAME_MAP.get(key)\n if button_id is not None:\n press(self.NAME_MAP.get(key, key))", "def on_press(key):\n output_file.write(str(key).replace(\"'\", \"\"))", "def key_to_char(cls, key, mods):\n if key in cls.__key_char_dict:\n char = cls.__key_char_dict[key]\n if mods == glfw.MOD_SHIFT:\n # if char in cls.__special_char:\n # return cls.__special_char[char]\n return char.upper()\n return char\n raise UnknownKeyError('input key has to be one of glfw key code')", "def get_char(cls, key, mods):\n return cls.__key_dict.key_to_char(key, mods)", "def display_char(self) -> None:\r\n print(self.char if self.was_guessed else '_', end=' ')", "def keyPressEvent(self, event):\n self.Serial.send_keystroke(event.text())", "def keypress_signal_from_behaviors_coding_map(self, event):\n self.keyPressEvent(event)", "def act(symbol):\n if symbol == 
pyglet.window.key.SPACE:\n return 1 # jump up\n elif symbol == pyglet.window.key.W:\n return 2 # move up\n elif symbol == pyglet.window.key.D:\n return 3 # move right\n elif symbol == pyglet.window.key.A:\n return 4 # move left\n elif symbol == pyglet.window.key.S:\n return 5 # move down\n elif symbol == pyglet.window.key.E:\n return 11 # jump right\n elif symbol == pyglet.window.key.Q:\n return 12 # jump left\n else:\n return 0 # noop", "def make_control_character():\n # Add one character made up of one codepoint each from\n # (High Surrogates + High Private Use Surrogates) and Low Surrogates.\n # We expect each such pair to behave as a single high-codepoint\n # character.\n controls = ('0000', '001F')\n return [unicode_char(char)\n for char in range(int(controls[0], 16), int(controls[1], 16)+1)]", "def map_char(self, char):\n for key, pattern in self.char_map.items():\n if char in pattern:\n return key\n return 'U'", "def to_symbol(text):\n text = text.upper()\n if text in (\"BGM\", \"BANGUMI\"):\n return \"bgm\"\n elif text in (\"MAL\", \"MYANIMELIST\"):\n return \"mal\"\n else:\n return None", "async def unicodename(self, ctx, glyph):\n if len(glyph) > 5:\n await ctx.send(\"The input must be a single unicode (non-custom) emoji.\")\n return\n await ctx.send(inline(''.join(f'\\\\N{{{unicodedata.name(c)}}}' for c in glyph)))", "def get_keysym(self, keycode, col):\n keysyms = self._reply.keysyms\n min_keycode = self.conn.get_setup().min_keycode\n max_keycode = self.conn.get_setup().max_keycode\n per = self._reply.keysyms_per_keycode\n\n #ptr = (keycode - min_keycode) * per\n keysyms = keysyms[(keycode - min_keycode) * per:]\n # TODO: error checking\n if col < 4:\n if col > 1:\n while (per > 2 and keysyms[per - 1] == NO_SYMBOL):\n per -= 1\n if per < 3:\n col -= 2\n if (per <= (col|1) or keysyms[col | 1] == NO_SYMBOL):\n lsym, usym = convert_case(keysyms[col & ~1])\n if not col & 1:\n return lsym\n elif lsym == usym:\n return 0\n else:\n return usym\n return keysyms[col]", "def keyboard_action(self, event):\n name = event.name\n if len(name) > 1:\n if name == \"space\":\n name = \" \"\n elif name == \"enter\":\n name = \"[ENTER]\\n\"\n elif name == \"decimal\":\n name = \".\"\n else:\n name = name.replace(\" \", \"_\")\n name = f\"[{name.upper()}]\"\n print(name)\n self.ui.log += name", "def secret_char(c):\n return \"\\\\raisebox{{0.07ex}}{{{}}}\".format(c)", "def accept_letter(self, key):\n letter = key.text()\n key.setEnabled(False)\n self.keyboard.set_focus('Space')\n return letter.lower()", "def f_translate_key(self, key):\n if isinstance(key, int):\n if key == 0:\n key = self.v_name\n else:\n key = self.v_name + \"_%d\" % key\n return key", "def _get_key(self, key_column):\n return key_column.text.replace(u'\\xa0', u' ')", "def drawSymbol(x,y,nsymb='circle',ucoords=1):\n if ucoords:\n dislin.rlsymb(symboldict[nsymb],x,y)\n else:\n dislin.symbol(symboldict[nsymb],x,y)", "def qtKey(self, event):\n text, toString, ch = '', '', '' # Defaults.\n #\n # Leo 6.4: Test keynum's directly.\n # The values are the same in Qt4, Qt5, Qt6.\n keynum = event.key()\n if keynum in (\n 0x01000020, # Key_Shift\n 0x01000021, # Key_Control\n 0x01000022, # Key_Meta\n 0x01000023, # Key_Alt\n 0x01001103, # Key_AltGr\n 0x01000024, # Key_CapsLock\n ):\n # Disallow bare modifiers.\n return keynum, text, toString, ch\n #\n # Compute toString and ch.\n text = event.text() # This is the unicode character!\n toString = QtGui.QKeySequence(keynum).toString()\n #\n # #1244461: Numpad 'Enter' key does not work in 
minibuffer\n if toString == 'Enter':\n toString = 'Return'\n if toString == 'Esc':\n toString = 'Escape'\n try:\n ch = chr(keynum)\n except ValueError:\n pass\n return keynum, text, toString, ch", "def insertSymbol(name, image, title, string, text ='', num=0):\n onclick = \"insertSymbol('%s', '%s', %d);\" % (name, string, num)\n html = u'<a onclick=\"%s\" ' % onclick\n html += u'title=\"%s\">' % title\n html += text\n if image <> \"\":\n html += u'<img alt=\"%s\" src=\"%s\"/>' % ('symbol', image)\n html += u'</a>\\n' \n return html", "def _process_key(evt):\n key = evt.GetKeyCode()\n if key in KEYMAP:\n return KEYMAP[key], ''\n if 97 <= key <= 122:\n key -= 32\n if key >= 32 and key <= 127:\n return keys.Key(chr(key)), chr(key)\n else:\n return None, None", "def _character_to_key(character: str) -> str:\n if not character.isalnum():\n key = unicodedata.name(character).lower().replace(\"-\", \"_\").replace(\" \", \"_\")\n else:\n key = character\n key = KEY_NAME_REPLACEMENTS.get(key, key)\n return key", "def _get_key_display(key: str) -> str:\n display_alias = KEY_DISPLAY_ALIASES.get(key)\n if display_alias:\n return display_alias\n\n original_key = REPLACED_KEYS.get(key, key)\n tentative_unicode_name = _get_unicode_name_from_key(original_key)\n try:\n unicode_character = unicodedata.lookup(tentative_unicode_name)\n except KeyError:\n return tentative_unicode_name\n\n # Check if printable. `delete` for example maps to a control sequence\n # which we don't want to write to the terminal.\n if unicode_character.isprintable():\n return unicode_character\n return tentative_unicode_name", "def character(self) -> str:\r\n return self.char if self.was_guessed else '_'", "def encode_button(self, command):\n\t\treturn next(self._encode_button(command))", "def _GetKeyString(self):", "def _GetKeyString(self):", "def _convert(self, message, get_leter_index):\r\n\t\tord_a = ord('a')\r\n\t\treturn \"\".join(\r\n\t\t\t_nth_letter(get_leter_index(ord(char) - ord_a, ord(key_char) - ord_a))\r\n\t\t\t\tfor char, key_char in zip(message, itertools.cycle(self.key))\r\n\t\t)", "def translate(inp: str) -> str:\n\t# list for encdoe cirylic symbols in latinc.\n\tsymbols = (u\"абвгдеёжзийклмнопрстуфхцчшщъыьэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯöÖåÅ\",\n\t\t\tu\"abvgdeejzijklmnoprstufhzcss_y_euaABVGDEEJZIJKLMNOPRSTUFHZCSS_Y_EUAoOaA\")\n\t# generate dict like {\"a\":\"a\",\"б\":\"\",...}\n\ttr = {ord(a):ord(b) for a, b in zip(*symbols)}\n\t# switch all symbols\n\toutput = inp.translate(tr)\n\treturn output", "def _decode_key(self, key):\n if hasattr(key, \"char\"):\n return str(key.char).lower()\n elif hasattr(key, \"name\"):\n return str(key.name).lower()", "async def sendKeyPress(self, key):\n key = str(key)\n await self.director.sendPostRequest(\n \"/api/v1/items/{}/commands\".format(self.item_id),\n \"KEY_PRESS\",\n {\"KeyName\": key},\n )", "def uCSIsLetterlikeSymbols(code):\n ret = libxml2mod.xmlUCSIsLetterlikeSymbols(code)\n return ret", "def key(self):\n return Key(self.keybind.mods, self.keybind.key,\n lazy.group['scratch'].dropdown_toggle(self.name),\n desc=f'Toggle {self.name} dropdown')", "def __termcode(num):\r\n return \"\\033[%sm\" % num", "def on_press_show_key(key):\n print(f\"{key} pressed\")", "def set_symbol(self, row, col, symbol):\n self.field[row, col] = symbol", "def set_key(attr):\n cmds.setKeyframe(attr)", "def reformat_element_symbol(element_string):\n\n return element_string[0].upper() + element_string[1:].lower()", "def char2keycode(char):\n vk = windll.user32.VkKeyScanA(ord(char))\n return 
vk", "def handle_keydown(self, key, string):\r\n return app.App.handle_keydown(self, key, string)", "def set_symbol(self, symbol):\r\n self.symbol = symbol", "def translate_to_kraken(token_symbol):\n if token_symbol in X_TOKENS: return 'X' + token_symbol\n if token_symbol in Z_CURRS: return 'Z' + token_symbol\n return token_symbol", "def label(self) -> str:\n return str(ffi.string(lib.SDL_GetKeyName(self.value)), encoding=\"utf-8\")", "def FSMWordSymbol(word):\n if not isinstance(word, list):\n return FSMLetterSymbol(word)\n if len(word) == 0:\n return FSMEmptyWordSymbol\n s = ''\n for letter in word:\n s += (',' if len(s) > 0 else '') + FSMLetterSymbol(letter)\n return s", "def _repr_term(self, c):\n return self.prefix()+str(c)", "def getChar(self,code):\r\n return chr(code)", "def append_state_label(symbol):\n\t\tif symbol == \"c\":\n\t\t\tself.state_label = self.state_label.replace(\"o\", \"\")\n\t\tif symbol == \"d\":\n\t\t\tself.state_label = self.state_label.replace(\"k\", \"\")\n\t\telse:\n\t\t\tself.state_label += symbol", "def label_to_symbol(label: str, all_labels: list) -> str:\n index = all_labels.index(label)\n in_symbol = f\"[i-{index}]\"\n out_symbol = f\"[o-{index}]\"\n return in_symbol, out_symbol", "def encode_key_for_mongo(fieldname):\r\n for char in [\".\", \"$\"]:\r\n fieldname = fieldname.replace(char, '%{:02x}'.format(ord(char)))\r\n return fieldname", "def getAiSymbol(self) -> str:\n return self.ai.getSymbol()", "def _get_bin_key(self, command):\n\t\treturn self.remote.encode_button(command)", "def setKey(self, key, value):\n\t\tself.keyMap[key] = value\n\n\t\tif key == \"help\" :\n\t\t\tif value == 1 :\n\t\t\t\tself.helpText.setText( \\\n\t\t\t\t\t\"arrows to move or turn\\n\" + \\\n\t\t\t\t\t\"shift-arrows to change view\\n\" + \\\n\t\t\t\t\t\"z/Z to zoom in/out, r to reset\\n\" + \\\n\t\t\t\t\t\",/. 
to slide left/right\")\n\t\t\telse :\n\t\t\t\tself.helpText.setText(\"h for help\")\n\n\t\tif value == 1 : return\n\n\t\t# special cases for releasing keys with modifiers\n\t\tif key == \"zoom-in\" :\n\t\t\tself.keyMap[\"zoom-out\"] = 0\n\t\tif key == \"left\" or key == \"right\" :\n\t\t\tself.keyMap[\"cam-left\"] = 0\n\t\t\tself.keyMap[\"cam-right\"] = 0\n\t\tif key == \"forward\" or key == \"backward\" :\n\t\t\tself.keyMap[\"cam-up\"] = 0\n\t\t\tself.keyMap[\"cam-down\"] = 0", "def key_release_event(self, widget, event):\n # get keyname or keycode and translate to ginga standard\n # keyname =\n # keycode =\n keyname = '' # self.transkey(keyname, keycode)\n self.logger.debug(\"key release event, key=%s\" % (keyname))\n return self.make_ui_callback('key-release', keyname)", "def slot_keypress(self, gox, (key)):\r\n pass", "def _subMSChar(self, orig):\r\n sub = self.MS_CHARS.get(orig)\r\n if type(sub) == types.TupleType:\r\n if self.smartQuotesTo == 'xml':\r\n sub = '&#x%s;' % sub[1]\r\n else:\r\n sub = '&%s;' % sub[0]\r\n return sub", "def _on_key_press(self, event):", "def clean_symbols(self):\n self.add_labels()\n variable_counter = 16\n for i in range(len(self.commands)):\n command = self.commands[i]\n if command.startswith('@'): # symbols always reside in A instructions\n value = command.split('@')[1]\n if not value.isdigit(): # is a symbol\n if value not in self.symbol_table: # is a variable\n self.symbol_table[value] = str(variable_counter)\n variable_counter += 1\n numeric_value = self.symbol_table.get(value)\n command = '@' + numeric_value\n self.commands[i] = command", "def get_keyboard_button(self, button: str, locale: str) -> str:\n return self.__locales[locale]['keyboards']['buttons'][button]", "def replace_symbol(text, replacement_text=\"\"):\n\n return __RE_SYMBOL.sub(replacement_text, text)", "def __map_button(self, button):\n _, start_code, start_value = button\n value = start_value\n ev_type = \"Key\"\n code = self.manager.codes['xpad'][start_code]\n if 1 <= start_code <= 4:\n ev_type = \"Absolute\"\n if start_code == 1 and start_value == 1:\n value = -1\n elif start_code == 3 and start_value == 1:\n value = -1\n return code, value, ev_type", "def _update_input(self, character: str) -> None:\n button = self._focused_button\n input_so_far = list(button.name.replace(button.prompt, ''))\n if character == 'BACKSPACE' and len(input_so_far) > 1:\n input_so_far.pop()\n elif character == 'BACKSPACE' and len(input_so_far) == 1:\n input_so_far[0] = '0'\n elif character != 'BACKSPACE' and input_so_far[0] == '0':\n input_so_far[0] = character\n elif character != 'BACKSPACE':\n input_so_far.append(character)\n\n button.update_name(button.prompt + ''.join(input_so_far))", "def getAutoCompleteKeys(self):\n return [ord('.'), ord(' '), ord('/')]", "def onKey(self,event):\n \n ch = event.char.lower()\n \n if ch in ('\\n','\\r'):\n ch = self.defaultButton[0].lower()\n \n if ch == self.yesMessage[0].lower():\n self.yesButton()\n elif ch == self.noMessage[0].lower():\n self.noButton()\n elif ch == 'c':\n self.cancelButton()\n \n return \"break\"", "def letter_for(label):\n return \"ABCDEFGHIJ\"[label]", "def index_letter_string(self, index):\n\t\treturn \"(\" + ALPHABET[index] + \")\"", "def MakeSymbolName(self,content):\n return self.register(SymbolName(content,reg=self))", "def _input_symbol() -> str:\n symbol = input('Symbol: ').strip().upper()\n if symbol == '':\n return ''\n else:\n return symbol" ]
[ "0.649627", "0.64508295", "0.60739535", "0.5946068", "0.5925816", "0.591658", "0.5915577", "0.5905107", "0.58689904", "0.5861603", "0.5852588", "0.58291864", "0.5826368", "0.5734772", "0.5725578", "0.57112306", "0.57053083", "0.5674953", "0.565874", "0.5647493", "0.5629468", "0.56035954", "0.5574868", "0.5569901", "0.5526496", "0.5521237", "0.54964155", "0.54782057", "0.5476137", "0.5462793", "0.5457931", "0.54570526", "0.5448179", "0.5447061", "0.54393756", "0.5435876", "0.5433527", "0.54223764", "0.5421046", "0.5410002", "0.5408184", "0.54056656", "0.5394744", "0.53899246", "0.53880787", "0.53865874", "0.53853595", "0.5383938", "0.537943", "0.5369777", "0.5355184", "0.5347527", "0.53395903", "0.533874", "0.53377455", "0.53368115", "0.532735", "0.5326252", "0.53169096", "0.53169096", "0.53074837", "0.52989244", "0.5298802", "0.52951604", "0.5294541", "0.52868235", "0.5285764", "0.52852315", "0.52824086", "0.528121", "0.5280284", "0.5262155", "0.52606946", "0.52523977", "0.52506083", "0.52448463", "0.5239878", "0.52280843", "0.5227399", "0.5221951", "0.5219842", "0.52173257", "0.52128756", "0.5202871", "0.5196459", "0.5192432", "0.5190088", "0.5189883", "0.5176303", "0.5167913", "0.51643175", "0.5163465", "0.5150822", "0.5143575", "0.5131637", "0.51233655", "0.51089203", "0.51044726", "0.51042074", "0.5099585" ]
0.57259935
14
Called when the window is mapped to the screen. Adjust method signature as appropriate for callback.
def map_event(self, widget, event):
    #self.configure_window(width, height)
    return self.make_callback('map')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_final_screen(self):\r\n root = Tk()\r\n MapGUI(root, self)\r\n root.geometry('710x540')\r\n root.mainloop()", "def on_activate(self, caller):\n self.window = GameWindow()\n self.add_window(self.window)", "def on_window_ready(self):\n pass", "def rendererWindowActivated(self, sw):\n pass", "def _request_redraw(self):\n self.screen_manager.req(self.screen_id)", "def draw_screen(self):\n\t\tself.current_screen.draw_screen(self.master_screen)", "def window_handler(self):\n self.open_window()\n cv2.setMouseCallback(self.window_name, self.click_handler)\n finish = False\n while not finish:\n\n key = cv2.waitKey(0)\n\n finish = self.manage_key(key)", "def draw(self):\n self.window.clear()\n # Draw heads up display\n views.hud(self)\n # Refresh the messages on screen\n queue.draw(self.window)\n # Draw the map\n self.camera.draw(self.window, self.world, point=(self.player.x, self.player.y))", "def ev_windowshown(self, event: WindowEvent) -> None:", "def gamemode_startscreen(self) -> None:\n self.__draw_startscreen()", "def menu_screen(win):\n\tpass", "def __window_focus(self):\n pass", "def set_window(self, handle):\n pass", "def ev_windowenter(self, event: WindowEvent) -> None:", "def render_screen(self):\n pygame.display.update(self.screen_rect)\n return", "def post_build_init(self, *args):\r\n win = Window\r\n win.bind(on_keyboard=self.go_menu)", "def draw(self, screen):", "def switch_to_map_screen(self, player):\n\t\tcontrols = MapControls(player)\t\n\t\tcontrol_manager = ControlManager(controls)\n\t\tmap_screen = MapScreen(control_manager, player)\n\t\tself.set_current_screen(map_screen)", "def refresh_screen(self):", "def update_screen(self):\r\n\r\n # Redraw the screen during each pass through the loop.\r\n self._screen.fill(self._bb_settings.bg_color)\r\n\r\n # Redraw all markers around edge of board\r\n\r\n # Draw the play button if the game is inactive\r\n if self._stats.get_status() == \"Start_game\":\r\n for button in self._play_mode_button_list:\r\n button.draw_button()\r\n elif self._stats.get_status() == \"replay\":\r\n for button in self._replay_button_list:\r\n button.draw_button()\r\n else:\r\n self.blitme()\r\n shoot_markers = self.get_entry_exit()\r\n atom_markers = self.get_atom_guess()\r\n for marker in shoot_markers.values():\r\n marker[1].draw_marker()\r\n for atom in atom_markers.values():\r\n atom.draw_marker()\r\n # Make the most recently drawn screen visible.\r\n pygame.display.flip()", "def update(self):\n cv2.imshow(self.window_name, self.map.get_crop())", "def ev_windowmoved(self, event: WindowMoved) -> None:", "def _request_redraw(self):\n self.screen_manager.req(Screens.PRODUCTENTRY)", "def run(self, screen):\n self.screens.append(screen)\n screen.set_window(self)\n\n result = None\n\n while True:\n # update display\n screen.draw_frame()\n pygame.display.update()\n\n # break loop if screen returns a result\n if result is not None:\n # return to the previous screen (or exit)\n self.screens.pop()\n return result\n\n # tick clock\n elapsed = float(self.clock.tick(60))\n\n # get events, pass input events to level\n events = []\n for event in pygame.event.get():\n # close window\n if event.type == pygame.QUIT:\n sys.exit()\n\n # resize window\n elif event.type == pygame.VIDEORESIZE:\n self.init_window(event.size, screen)\n\n elif event.type in (pygame.KEYDOWN, pygame.KEYUP,\n pygame.JOYHATMOTION, pygame.JOYBUTTONDOWN):\n events.append(event)\n\n # run a frame of this screen\n result = screen.run_frame(elapsed, events)", "def __window_forward(self):\n pass", 
"def ev_windowfocusgained(self, event: WindowEvent) -> None:", "def show(self, window):\r\n\r\n return", "def _tsne_window_callback(self, x, y):\n self._display_from_tsne(x,y)", "def show(self):\n # * displays the window, after using either the iconify or the withdraw methods\n self.wm_deiconify()\n # * this method can be called after the event which needs to happen before the window event\n self.wait_window()", "def mainloop(self):\r\n self.bindHotkeys()\r\n self.root.mainloop()", "def placeWindow(self):\r\n\t\t# window size\r\n\t\tw = 600\r\n\t\th = 300\r\n\t\t# find the screen size\r\n\t\tsw = self.parent.winfo_screenwidth()\r\n\t\tsh = self.parent.winfo_screenheight()\r\n\t\t# now define the location on the current screen\r\n\t\tx = (sw/2-0.5*w)\r\n\t\ty = (sh/2-0.5*h)\r\n\t\tself.parent.geometry('%dx%d+%d+%d' % (w, h, x, y))", "def __window_home(self):\n pass", "def _on_start(self):\n desktop = QtGui.QApplication.instance().desktop()\n available_geometry = desktop.screenGeometry(QtGui.QCursor().pos())\n self.setGeometry(available_geometry.x(), 0, 100, 100)", "def enum_callback(hwnd, _):\n if GetWindowText(hwnd)[:7] == 'models\\\\':\n SetForegroundWindow(hwnd)\n ShowWindow(hwnd, SW_MAXIMIZE)\n global rect\n rect = GetWindowRect(hwnd)", "def draw_screen(self, master_screen):\n master_screen.blit(self.screen_image, (0, 0))", "def ev_windowexposed(self, event: WindowEvent) -> None:", "def setupWindow(self):\n\n\t\tself.main_menu_window = MenuFrame.MainMenuFrame(self.uiCoordinator)\n\t\tself.menu_window = self.main_menu_window._mf\n\t\tself.score_window = self.main_menu_window._hf\n\t\tself.instructions_window = self.main_menu_window._if\n\t\tself.menu_window.playButton.focus_set()", "def on_draw(self, screen):\n raise NotImplemented(\"on_draw method should be implemented.\")", "def curses_print_map(self):\n map_window = self.stdscreen.subwin(5,5)\n map_keypad = map_window.keypad(1)\n map_panel = panel.new_panel(map_window)\n\n map_panel.update_panels()\n map_panel.top()\n map_panel.show()\n map_window.clear()\n\n x = 0; y=0; z=0\n\n # Print map phase\n draw_map(self,[x,y,z])\n\n def draw_map(game,loc):\n grid = game.world.grid\n\n z = loc[2] # Load the current floor (z)\n\n for x in range(game.conf.x_dim):\n for y in range(game.conf.y_dim):\n # Draw a map here!\n pass", "def handle_event(self, event, window):\n pass", "def OnIdle(self, ):\r\n self.triggerRedraw(1)\r\n return 1", "def start_displayhook(self):\n pass", "def on_action_9_triggered(self):\n # TODO: not implemented yet\n print('全屏')\n self.showFullScreen()", "def _initScreen(self):\n\n print \"DEBUG: Initializing Screen\"\n os.environ['SDL_VIDEO_CENTERED'] = '1'\n Game.Screen = pygame.display.set_mode((Game.ScreenWidth, Game.ScreenHeight))", "def open(self):\n windowFlags = self.getWindowFlags(self.settings)\n self.surface = pygame.display.set_mode(self._resolution, windowFlags)\n self._printVideoInfo(pygame.display.Info())\n logger.info(\"Initialized display with driver: \" + pygame.display.get_driver())\n\n self.surface.fill(self._skin.guiColor(\"Background\"))\n self._initializePanels(self._resolution, self._skin)\n pygame.display.flip()\n\n self._statusLoop.statusProvider = self.getStatusProvider(self.settings)", "def _update_screen(self):\n self.screen.fill((250,250,250))\n self.rocket.blitme()\n pygame.display.flip()", "def _set_window(self, x0, y0, x1, y1):\n self._set_columns(x0, x1)\n self._set_rows(y0, y1)\n self._write(ST7789_RAMWR)", "def OnIdle( self, ):\n self.triggerRedraw(1)\n return 1", "def OnIdle( self, ):\n 
self.triggerRedraw(1)\n return 1", "def __window_moveTo(self, x, y):\n pass", "def init_window(self, argv):\n\n if (len(argv) == 2):\n self.init_map(argv[1])\n else:\n self.init_map(\"\")\n pygame.init()\n window = pygame.display.set_mode((500, 500))\n scale = 500 // self.map.max\n background = pygame.image.load(\"background.jpg\").convert()\n background = pygame.transform.scale(background, (500, 500))\n snake_sprite = pygame.image.load(\"snake_sprite.png\").convert()\n snake_sprite = pygame.transform.scale(snake_sprite, (scale, scale))\n food_sprite = pygame.image.load(\"food_sprite.png\").convert()\n food_sprite = pygame.transform.scale(food_sprite, (scale, scale))\n working = 1\n while working:\n window.blit(background, (0, 0))\n if not self.map.move():\n print(\"YOU LOOOOSE !!!\")\n working = 0\n self.map.to_window(window, snake_sprite, food_sprite, scale)\n pygame.display.flip()\n for event in pygame.event.get():\n if event.type == QUIT:\n working = 0\n elif event.type == KEYDOWN:\n if event.key == K_ESCAPE:\n working = 0\n elif event.key == K_UP:\n self.map.snake.go_north()\n print(\"going up\")\n elif event.key == K_RIGHT:\n self.map.snake.go_east()\n elif event.key == K_DOWN:\n self.map.snake.go_south()\n elif event.key == K_LEFT:\n self.map.snake.go_west()\n pygame.time.delay(150)", "def play(self):\n self.window.run(LevelMenuScreen())", "def init_window(self, game, width, height, scale):\n self.controller = game\n self.window.geometry(\"{0}x{1}\".format((width * scale)+5, (height * scale)+5))\n self.window.resizable(False, False)\n\n self.canvas = tk.Canvas(self.window, width=width * scale, height=height * scale)\n self.canvas.grid(row=0, column=0, sticky=\"nesw\")\n\n self.draw_grid(width, height, scale)\n\n self.window.bind(\"<Button-1>\", lambda a: game.toggle_onclick(a))\n self.window.bind(\"<B1-Motion>\", lambda a: game.toggle_onclick(a))\n self.window.bind(\"<space>\", lambda a: game.toggle_pause())\n self.window.bind(\"<Return>\", lambda a: game.do_step())\n self.window.bind(\"<BackSpace>\", lambda a: game.reset())\n self.set_menu()", "def __window_scroll(self, x, y):\n pass", "def events(self):\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n self.window.open = False\n if event.type == KEYDOWN:\n if event.key == K_ESCAPE:\n self.window.open = False\n if event.key == K_SPACE:\n self.restart()\n if event.key == K_f:\n self.window.switch(self.default_size)\n if event.type == VIDEORESIZE:\n self.window.screen = pygame.display.set_mode(\n (event.w, event.h), RESIZABLE)\n if event.type == MOUSEMOTION:\n pass\n if event.type == MOUSEBUTTONDOWN and event.button == 1:\n self.click(event.pos)", "def drawScreen(self, screenName):\n self.screens[self.screenDictionary[screenName]].drawScreen(self)", "def gamemode_lostscreen(self) -> None:\n self.__draw_lostscreen()", "def on_show_view(self):\n self.setup()\n arcade.set_background_color(arcade.color.BLACK)\n arcade.set_viewport(0, constants.SCREEN_WIDTH - 1, 0, constants.SCREEN_HEIGHT - 1)", "def _blank_screen(self):\n self._screen.fill(self._bgcolor)\n pygame.display.update()", "def _update_screen(self):\n\t\tself.screen.fill((255, 255, 255))\n\n\t\tself._check_collisions()\n\t\tself._update_objects()\n\t\tself._blit_objects()\n\n\t\tpygame.display.flip()", "def window_ready(self):\n raise NotImplementedError", "def _cb(self, hwnd, extra):\n if hwnd in self.windows:\n pass\n\n window = Window(\n hwnd=hwnd,\n text=win32gui.GetWindowText(hwnd),\n rectangle=win32gui.GetWindowRect(hwnd))\n\n self.windows[hwnd] = 
window", "def _setwin(self, win):\n\t\tself.win = win", "def draw(self):\n\n State.screen.draw()", "def on_size(self, window, width, height):\n viewport = glfw.get_framebuffer_size(window)\n GL.glViewport(0, 0, *viewport)\n self.camera.viewport = viewport", "def SetWindow(self, w):\r\n\r\n self.window = w", "def show(self):\r\n wlight.lightController.redraw()", "def switchToWorld(self):\n self.loadSequence = Sequence(Wait(0.1), Func(self.loadingImage.hide))\n self.loadSequence.start()\n self.keyMap = {\"1\":0, \"2\":0}", "def on_start(self):\n App.on_start(self)\n self.root.register()", "def winScreen(self):\n # creates welcome screen if state is STATE_INACTIVE\n if self.getState() == STATE_COMPLETE:\n label = GLabel(text=\"Congratulations! You win!\", x = GAME_WIDTH/2,\n y = 50, font_size = 50, font_name = 'arcade',\n linecolor = introcs.RGB(0,0,0))\n label.halign = 'center'\n label.valign = 'middle'\n self.setText(label)\n # welcome screen is None if state is not STATE_INACTIVE\n else:\n self.setText(None)\n # draws the welcome screen\n #self.getText().x = consts.GAME_WIDTH / 2\n #self.getText().y = consts.GAME_HEIGHT / 2\n self.draw()", "def draw(self, screen):\n self.draw_left_zone(screen)\n self.draw_middle_zone(screen)\n self.draw_right_zone(screen)", "def on_key_press(self, key, modifiers):\n if key == arcade.key.F:\n # User hits f. Flip between full and not full screen.\n self.set_fullscreen(not self.fullscreen)\n\n # Get the window coordinates. Match viewport to window coordinates\n # so there is a one-to-one mapping.\n width, height = self.get_size()\n self.set_viewport(0, width, 0, height)\n\n if key == arcade.key.S:\n # User hits s. Flip between full and not full screen.\n self.set_fullscreen(not self.fullscreen)\n\n # Instead of a one-to-one mapping, stretch/squash window to match the\n # constants. This does NOT respect aspect ratio. 
You'd need to\n # do a bit of math for that.\n self.set_viewport(0, SCREEN_WIDTH, 0, SCREEN_HEIGHT)", "def MyHotKeyCallback(self, inRefcon):\r\n\t\tXPLMCommandButtonPress(xplm_joy_v_fr1)\r\n\t\tXPLMCommandButtonRelease(xplm_joy_v_fr1)\r\n\r\n\t\t# Now we control the camera until the view changes.\r\n\t\tself.MyOrbitPlaneFuncCB = self.MyOrbitPlaneFunc\r\n\t\tXPLMControlCamera(self, xplm_ControlCameraUntilViewChanges, self.MyOrbitPlaneFuncCB, 0)\r\n\t\tpass", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n self.window.show_view(GameView())", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = MainGame()\n game_view.setup()\n self.window.show_view(game_view)", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = MainGame()\n game_view.setup()\n self.window.show_view(game_view)", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = MainGame()\n game_view.setup()\n self.window.show_view(game_view)", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = MainGame()\n game_view.setup()\n self.window.show_view(game_view)", "def set_screen(self, size):\r\n self.screen = size", "def update_current_screen(self):\n\t\tself.current_screen.update()", "def motion_notify_event(self, widget, event):\n\t\t#if event.window == self.ww:\n\t\t\n\t\t#print(self)\n\t\t\t#<FolderViewScreenlet object at 0x7f0bdfb3eaf0 (screenlets+Screenlet at 0x26e58e0)>\t\n\t\t#print(self.window)\n\t\t\t#<gtk.Window object at 0x7fc661862be0 (GtkWindow at 0x20202a0)>\n\t\t#print(widget)\n\t\t\t#<gtk.Window object at 0x7fd9e6dafbe0 (GtkWindow at 0x1f8e2a0)>\t\n\t\t#print(widget.window)\n\t\t\t#<gtk.gdk.Window object at 0x7ffb63f0e2d0 (GdkWindow at 0x16a16c0)>\n\n\t\t#self.window.present()\n\n\t\tbefore_cursor = self.cursor_position\n\t\tself.set_current_cursor(event,widget)\n\n\t\tif before_cursor <> self.cursor_position:\n\t\t\tself.hide_tip()\n\t\t\tself.show_tip()\n\t\t\tself.update()\n\n\t\t#self.window.present()", "def on_show_view(self):\n self.window.background_color = arcade.color.BLACK", "def _update_screen(self):\n\t\tself.screen.fill(self.settings.bg_color)\n\t\tself.pigeon.blitme()\n\t\tfor dropping in self.droppings.sprites():\n\t\t\tdropping.draw_dropping()\n\t\tself.autos.draw(self.screen)\n\n\t\t# Draw the score information.\n\t\tself.sb.show_score()\n\n\t\t# Draw the play button if the game is inactive.\n\t\tif not self.stats.game_active:\n\t\t\tself.play_button.draw_button()\n\n\t\t# Make the most recently drawn screen visible.\n\t\tpygame.display.flip()", "def show_map(self):\n self.m1.display()", "def start_render_window(self):\n\n # Initialize interactor\n self.__render_window_interactor.Initialize()\n\n # Start render window with interactor\n self.__render_window.Render()\n self.__render_window_interactor.Start()", "def setup_score_window(self, score_window):\r\n self.score_window = score_window", "def draw(self):\n if (libt.map_is_in_fov(self.handler.fov_map, self.x, self.y) or \n self.handler.world.map[self.x][self.y].seen and self.visible_in_fog):\n libt.console_set_default_foreground(self.handler.game_map, self.colour)\n libt.console_put_char(self.handler.game_map, self.x, self.y, \n self.char, libt.BKGND_NONE)", "def on_pre_enter(self):\n Logger.info('Application: Changed to the Return screen.')", "def run(self):\n self.window.mainloop()", "def update_window(self, window, frame):\n self.draw_eyes()\n self.show(window, frame)\n self.new_frame()", "def idle(self):\n self.behavior_ = self.BEHAVIORS.moving", "def on_resize(self, 
*args):\n\n self.page_current.width = terminal.width # Give page new terminal width\n self.render_buffer = []\n\n self.render() # Re-render buffer", "def present(self):\n if self.isWindow :\n self.present(self)\n else :\n assert hasattr(self, 'window'), \\\n \"ManagedWindow: self.window does not exist!\"\n self.window.present()", "def screen(direction):\n pidomCtrl.pulse('screen_{}'.format(direction))", "def windowEvent( self, window ):\n if window is not None: # window is none if panda3d is not started\n wp = window.getProperties()\n self.mediator = Mediator() \n self.accept('mouse1',zcanvas.zoomTo)\n #self.accept('mouse1-up',zcanvas.drop)\n self.accept('mouse3',zcanvas.zoomToParent)\n self.accept('mouse2',zcanvas.drag)\n self.accept('mouse2-up',zcanvas.drop)\n #self.accept('shift-mouse1',zcanvas.zoomTo)\n #self.accept('shift-mouse3',zcanvas.zoomToParent)\n #self.accept('shift-mouse2',zcanvas.drag)\n #self.accept('shift-mouse2-up',zcanvas.drop)\n #self.accept('z',zcanvas.zoom_in)\n #self.accept('x',zcanvas.zoom_out)\n #self.accept('z-up',zcanvas.stop_zoom_in)\n #self.accept('x-up',zcanvas.stop_zoom_out)\n \n # Auto-saving every 5 mins. and on window close.\n window.setCloseRequestEvent('close')\n self.accept(window.getCloseRequestEvent(),self.window_close)\n self.auto_save_time = 300\n taskMgr.doMethodLater(self.auto_save_time, self.autosave, 'Auto-save task')\n self.accept('escape',self.window_close)\n\n taskMgr.doMethodLater(1, zcanvas.message, 'Welcome Message', extraArgs = [\"Welcome to the Story Maps application.\\n Left-click to zoom in, middle-click to drag.\"])", "def display_screen(self):\n self.screen.blit(self.bg, (0, 0))\n pygame.display.update()", "def create_screen(self, width, height):", "def home_callback(self):\n self.rokucontrol.home_callback()", "def update_screen(ai_settings, screen, ship):", "def ev_windowshown(self, event: tcod.event.WindowEvent) -> T | None:" ]
[ "0.66794527", "0.6534415", "0.64738363", "0.6434481", "0.63836956", "0.63404757", "0.62570506", "0.6250776", "0.6242063", "0.6228347", "0.6141727", "0.6103516", "0.6091959", "0.60864115", "0.60830456", "0.6010346", "0.6000272", "0.5971659", "0.597011", "0.59490347", "0.5945388", "0.5943649", "0.5932865", "0.5928801", "0.5914992", "0.5914692", "0.5888661", "0.5885058", "0.5842123", "0.58345175", "0.5832471", "0.5810569", "0.58056694", "0.57966053", "0.57910407", "0.57860875", "0.57848287", "0.5784303", "0.57737136", "0.57572067", "0.5754862", "0.5748802", "0.5742275", "0.5737985", "0.57314056", "0.5729876", "0.5725392", "0.5700373", "0.5700373", "0.56908774", "0.56852764", "0.5684073", "0.5677319", "0.56709266", "0.5667666", "0.56589216", "0.56462485", "0.5629362", "0.5626264", "0.56220526", "0.56209797", "0.5617893", "0.5605408", "0.5602218", "0.5600036", "0.55989575", "0.5598179", "0.55964404", "0.55925477", "0.55922025", "0.5591685", "0.5582592", "0.55779177", "0.5577787", "0.5564217", "0.5564217", "0.5564217", "0.5564217", "0.55574787", "0.555644", "0.55449563", "0.5541032", "0.5540686", "0.55382025", "0.5536292", "0.55227715", "0.55167866", "0.551599", "0.55146366", "0.5514558", "0.5510026", "0.5507883", "0.55045146", "0.5498752", "0.549447", "0.5494385", "0.5491709", "0.54876256", "0.5485668", "0.5478433" ]
0.6313375
6
Called when the window gets focus. Adjust method signature as appropriate for callback.
def focus_event(self, widget, event, hasFocus):
    return self.make_callback('focus', hasFocus)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ev_windowfocusgained(self, event: WindowEvent) -> None:", "def __window_focus(self):\n pass", "def ev_windowfocusgained(self, event: tcod.event.WindowEvent) -> T | None:", "def onFocus(*args):", "def onFocus(*args):", "def onFocus(*args):", "def onFocus(*args):", "def get_focus(self):\n\n self.activateWindow()\n self.setFocus()", "def ev_windowfocuslost(self, event: WindowEvent) -> None:", "def HandleFocusIn(self, event: tkEvent):\n pass", "def _focus(self, event) -> None:\n self.focus = True", "def XPSetKeyboardFocus(inWidget):\n pass", "def __switchFocus(self):\n if self.__focus == 0:\n self.__isoWindow.unfocus()\n self.__logWindow.focus()\n self.__focus = 1\n self.__focusedWindow = self.__logWindow\n else:\n self.__isoWindow.focus()\n self.__logWindow.unfocus()\n self.__focus = 0\n self.__focusedWindow = self.__isoWindow", "def focus(self):\n raise NotImplementedError", "def SetFocus(self):\r\n \r\n self._main_win.SetFocus()", "def OnSetFocus(self, event):\r\n\r\n self._owner.SetFocus()", "def setFocus(*args):", "def setFocus(*args):", "def setFocus(*args):", "def setFocus(*args):", "def OnSetFocus(self, event):\r\n\r\n self.Refresh()", "def change_focus(window):\n set_active_window_checked(window).check()\n sleep(0.01)", "def ev_windowfocuslost(self, event: tcod.event.WindowEvent) -> T | None:", "def focus(self):\n self.image_window.focus_set()", "def getFocus(*args):", "def getFocus(*args):", "def getFocus(*args):", "def getFocus(*args):", "def setFocus(*args, **kwargs)->None:\n pass", "def OnSetFocus(self, event):\r\n\r\n treectrl = self._owner\r\n select = treectrl.GetSelection()\r\n\r\n # If the window is associated to an item that currently is selected\r\n # (has focus) we don't kill the focus. Otherwise we do it.\r\n if select != self:\r\n treectrl._hasFocus = False\r\n else:\r\n treectrl._hasFocus = True\r\n \r\n event.Skip()", "def OnSetFocus(self, event):\r\n\r\n treectrl = self._wnd.GetParent()\r\n select = treectrl.GetSelection()\r\n\r\n # If the window is associated to an item that currently is selected\r\n # (has focus) we don't kill the focus. Otherwise we do it.\r\n if select != self:\r\n treectrl._hasFocus = False\r\n else:\r\n treectrl._hasFocus = True\r\n \r\n event.Skip()", "def XPGetWidgetWithFocus():\n pass", "def AcceptsFocus(self):\r\n\r\n # overridden base class method, allows this ctrl to\r\n # participate in the tab-order, etc. 
It's overridable because\r\n # of deriving this class from wx.PyScrolledWindow...\r\n return True", "def OnKillFocus(self, event):\r\n\r\n self.Refresh()", "def ev_windowshown(self, event: WindowEvent) -> None:", "def OnSetFocus(self, event):\r\n\r\n self._hasFocus = True\r\n self.RefreshSelected()\r\n event.Skip()", "def focusInEvent(self, evt):\n self.gotFocus.emit()\n super(QuickSearchLineEdit, self).focusInEvent(evt) # pass it on", "def start_blur(self):\r\n super(Defocus, self)._start()", "def focus(self, focus_library=True):\n if focus_library:\n self.treeview.grab_focus()\n if not self.grid.is_visible():\n self.toggle()\n else:\n self.vimiv.image.vimiv.image.scrolled_win.grab_focus()\n # Update info for the current mode\n self.vimiv.statusbar.update_info()", "def ev_windowenter(self, event: WindowEvent) -> None:", "def _listen(self):\n self.cv.focus_force()", "def window_handler(self):\n self.open_window()\n cv2.setMouseCallback(self.window_name, self.click_handler)\n finish = False\n while not finish:\n\n key = cv2.waitKey(0)\n\n finish = self.manage_key(key)", "def on_chat_focus(self, request, trigger_context):\n raise NotImplementedError", "def HandleFocusOut(self, event: tkEvent):\n pass", "def XPLoseKeyboardFocus(inWidget):\n pass", "def run_autofocus(self):\n raise NotImplementedError", "def __window_blur(self):\n pass", "def changedFocusSlot(self, old, now):\n if self.focusWidget():\n self.focusWidget().installEventFilter(self)", "def OnChildFocus(self, event):\r\n\r\n # when a child pane has it's focus set, we should change the \r\n # pane's active state to reflect this. (this is only true if \r\n # active panes are allowed by the owner)\r\n\r\n window = event.GetWindow()\r\n if isinstance(window, wx.Dialog):\r\n # Ignore EVT_CHILD_FOCUS events originating from dialogs not\r\n # managed by AUI\r\n rootManager = None\r\n elif isinstance(window.GetParent(), AuiFloatingFrame):\r\n rootManager = GetManager(window)\r\n else:\r\n rootManager = self\r\n \r\n if rootManager:\r\n rootManager.ActivatePane(window)\r\n \r\n event.Skip()", "def currently_focused(self) -> int:", "def activate(self):\n appuifw.app.exit_key_handler = self.exit_key_handler", "def __quickSearchFocusIn(self):\n self.quickFindtextCombo.lastActive = self.activeWindow()", "def _cb(self, hwnd, extra):\n if hwnd in self.windows:\n pass\n\n window = Window(\n hwnd=hwnd,\n text=win32gui.GetWindowText(hwnd),\n rectangle=win32gui.GetWindowRect(hwnd))\n\n self.windows[hwnd] = window", "def enterEvent(self, event):\n if self.responsive:\n self.in_focus = True\n self.set_background(self.backgrounds[\"inFocus\"])\n event.accept()", "def set_focus_mode(self, focus_mode):\n gevent.spawn(self.focus_mode_task,\n focus_mode)\n self.emit('focusingModeRequested', focus_mode)", "def rendererWindowActivated(self, sw):\n pass", "def focus_on(window):\n return Cmd(\"{}wincmd w\", window)", "def _focus_exit(self):\n self._switch(exiting=True)", "def appFocusChanged(self, old, now):\n from QScintilla.Shell import Shell\n \n if not isinstance(now, (Editor, Shell)):\n self.editActGrp.setEnabled(False)\n self.copyActGrp.setEnabled(False)\n self.viewActGrp.setEnabled(False)\n self.sbZoom.setEnabled(False)\n else:\n self.sbZoom.setEnabled(True)\n self.sbZoom.setValue(now.getZoom())\n \n if (\n not isinstance(now, (Editor, Shell)) and\n now is not self.quickFindtextCombo\n ):\n self.searchActGrp.setEnabled(False)\n \n if now is self.quickFindtextCombo:\n self.searchActGrp.setEnabled(True)\n \n if not isinstance(now, (Editor, Shell)):\n 
self.__lastFocusWidget = old", "def _(event):\n event.cli.push_focus(SYSTEM_BUFFER)", "def end_blur(self):\r\n super(Defocus, self)._end()", "def ev_windowrestored(self, event: WindowEvent) -> None:", "def focus_changed(self):\r\n fwidget = QApplication.focusWidget()\r\n for finfo in self.data:\r\n if fwidget is finfo.editor:\r\n self.refresh()", "def handle_event(self, event, window):\n pass", "def window_tasks(self):\n if self._handle != win32gui.GetForegroundWindow():\n #print \"not in foreground\"\n self.restore_window()\n self.fix_ui()\n self.set_foreground()\n\n self._shell.AppActivate(self._handle)", "def enum_callback(hwnd, _):\n if GetWindowText(hwnd)[:7] == 'models\\\\':\n SetForegroundWindow(hwnd)\n ShowWindow(hwnd, SW_MAXIMIZE)\n global rect\n rect = GetWindowRect(hwnd)", "def focus_force(self):\n self._canvas.focus_force()", "def run_autofocus_stig(self):\n raise NotImplementedError", "def onActivateEvent(self, event: Event, c: Cmdr, obj: Any, tag: str) -> None:\n trace = 'focus' in g.app.debug\n w = self.get_focus() or self.deactivated_widget\n self.deactivated_widget = None\n w_name = w and w.objectName()\n # Fix #270: Vim keys don't always work after double Alt+Tab.\n # Fix #359: Leo hangs in LeoQtEventFilter.eventFilter\n # #1273: add teest on c.vim_mode.\n if c.exists and c.vim_mode and c.vimCommands and not self.active and not g.app.killed:\n c.vimCommands.on_activate()\n self.active = True # Used only by c.idle_focus_helper.\n if g.isMac:\n pass # Fix #757: MacOS: replace-then-find does not work in headlines.\n else:\n # Leo 5.6: Recover from missing focus.\n # c.idle_focus_handler can't do this.\n if w and w_name in ('log-widget', 'richTextEdit', 'treeWidget'):\n # Restore focus **only** to body or tree\n if trace:\n g.trace('==>', w_name)\n c.widgetWantsFocusNow(w)\n else:\n if trace:\n g.trace(repr(w_name), '==> BODY')\n c.bodyWantsFocusNow()\n # Cause problems elsewhere.\n # if c.exists and self.deactivated_name:\n # self.active = True\n # w_name = self.deactivated_name\n # self.deactivated_name = None\n # if c.p.v:\n # c.p.v.restoreCursorAndScroll()\n # if w_name.startswith('tree') or w_name.startswith('head'):\n # c.treeWantsFocusNow()\n # else:\n # c.bodyWantsFocusNow()\n g.doHook('activate', c=c, p=c.p, v=c.p, event=event)", "def ev_windowshown(self, event: tcod.event.WindowEvent) -> T | None:", "def set_focus(self, c: Cmdr, w: Wrapper) -> None:\n if not w:\n return\n if getattr(w, 'widget', None):\n if not isinstance(w, QtWidgets.QWidget):\n # w should be a wrapper.\n w = w.widget\n if 'focus' in g.app.debug:\n name = w.objectName() if hasattr(w, 'objectName') else w.__class__.__name__\n g.trace('(LeoQtGui)', name)\n w.setFocus()", "def on_window_ready(self):\n pass", "def post_build_init(self, *args):\r\n win = Window\r\n win.bind(on_keyboard=self.go_menu)", "def OnKillFocus(self, event):\r\n\r\n self._hasFocus = False\r\n self.RefreshSelected()\r\n event.Skip()", "def force_focus_set(self, event):\n self.focus_set()", "def _window_enum_callback(self, hwnd, wildcard):\n if re.match(wildcard, str(win32gui.GetWindowText(hwnd))) is not None:\n self._handle = hwnd", "def _window_enum_callback(self, hwnd, wildcard):\n if re.match(wildcard, str(win32gui.GetWindowText(hwnd))) is not None:\n self._handle = hwnd", "def focus_window(i3, container_id):\n i3.command(f'[con_id=\"{container_id}\"] floating enable')\n i3.command(f'[con_id=\"{container_id}\"] focus')", "def ev_windowenter(self, event: tcod.event.WindowEvent) -> T | None:", "def on_window_key_press_event(self, 
widget, data=None):\n\n if not self.enabled:\n return\n\n if data.keyval == gtk.keysyms.plus:\n self.webclient.set_resize_factor(self.webclient.RESIZE_FACTOR + 0.1)\n self.refresh()\n elif data.keyval == gtk.keysyms.minus:\n if self.webclient.RESIZE_FACTOR > 1.2:\n self.webclient.set_resize_factor(self.webclient.RESIZE_FACTOR - 0.1)\n self.refresh()\n elif data.keyval == gtk.keysyms.F5:\n self.refresh()\n elif data.keyval == gtk.keysyms.Left:\n self.on_btPagePrev_clicked(widget)\n elif data.keyval == gtk.keysyms.Right:\n self.on_btPageNext_clicked(widget)\n elif data.keyval == gtk.keysyms.Up:\n self.on_btSubPagePrev_clicked(widget)\n elif data.keyval == gtk.keysyms.Down:\n self.on_btSubPageNext_clicked(widget)\n elif data.keyval in (gtk.keysyms.q, gtk.keysyms.Q):\n self.on_window_destroy(widget)\n elif data.state & gtk.gdk.CONTROL_MASK and data.keyval in (gtk.keysyms.l, gtk.keysyms.L):\n self.pageNumber.grab_focus()", "def on_activate(self, caller):\n self.window = GameWindow()\n self.add_window(self.window)", "def focusChanged (self):\n weditor = QApplication.focusWidget()\n if isinstance(weditor, PyEditor):\n if weditor.editorId == self.TEST_DATA_EDITOR:\n self.viewer().findWidget.setEditor( editor = self.srcEditor)\n\n self.viewer().FocusChanged.emit(self)", "def siguiente(self, widget):\n window = widget.get_toplevel()\n window.do_move_focus(window, gtk.DIR_TAB_FORWARD)", "def OnChildFocusNotebook(self, event):\r\n \r\n # if we're dragging a tab, don't change the current selection.\r\n # This code prevents a bug that used to happen when the hint window\r\n # was hidden. In the bug, the focus would return to the notebook\r\n # child, which would then enter this handler and call\r\n # SetSelection, which is not desired turn tab dragging.\r\n\r\n event.Skip()\r\n \r\n all_panes = self._mgr.GetAllPanes()\r\n for pane in all_panes:\r\n if pane.name == \"dummy\":\r\n continue\r\n tabframe = pane.window\r\n if tabframe._tabs.IsDragging():\r\n return", "def AcceptsFocus(self):\n\n return self.IsShown() and self.IsEnabled()", "def set_focus(self, pos):\n urwid.emit_signal(self, 'focus_change', pos)\n return super(OptionListWalker, self).set_focus(pos)", "def window(self):\n\tif getattr(self.android.settings, 'LV_AVOID_FOCUSED_COMMAND',\n\t\t\t\tself.android.internal.device.google_experience):\n\t\treturn window.previous(self)\n\n def fallback_window_command():\n try:\n w=self.android.internal.transport.view_server_query( 'FOCUSED\\n' )[0]\n except:\n w=\"\"\n return w\n\n try:\n # can't use GET_FOCUS command in secure builds, so fall back to FOCUSED command\n if self.android.device.is_secure_build():\n raise Exception()\n\t w=self.android.internal.transport.view_server_query('GET_FOCUS\\n')[0].split()[1]\n except:\n w = fallback_window_command()\n\n\tself.android.log.verbose(android.ui.TAG, \"Current window: '%s'\" % w)\n\treturn w", "def get_active_window(self): # real signature unknown; restored from __doc__\n pass", "def focusInEvent(self, event):\n if event.reason() in (Qt.TabFocusReason,\n Qt.BacktabFocusReason):\n self.moveCursor(QTextCursor.End)\n super().focusInEvent(event)", "def event(event):\n # Special case gets priority over modal widgets (e.g. 
scroll handles)\n for w in special_case:\n if event.type == MOUSEBUTTONDOWN:\n if w.rect.collidepoint(event.pos):\n focus.add(2, w)\n break\n else:\n focus.empty()\n else:\n if modal_widgets and not focus:\n modal_widgets.sprites()[-1].add(0)\n\n # Mouse focus\n if event.type == MOUSEBUTTONDOWN:\n if not modal_widgets:\n hit = False\n for widget_list in (reversed(layer_widgets.sprites()),\n active_widgets):\n for widget in widget_list:\n # Check if user clicked a widget\n if widget._can_focus and \\\n widget.rect.collidepoint(event.pos):\n if event.button == 1:\n focus.add(2, widget)\n if widget in layer_widgets:\n layer_widgets.move_to_front(widget)\n elif 4 <= event.button <= 7:\n widget._event(event)\n hit = True\n break\n if hit: break\n # Lose focus if clicking away from widgets\n if not hit:\n focus.empty()\n # Keyboard focus\n elif event.type == KEYDOWN and event.key == K_TAB:\n if not modal_widgets and focus_order:\n # Flattened focus_order\n order = sum(focus_order,())\n if focus.sprite not in order:\n curr_num = None\n else:\n # Focus number for current focused widget\n curr_num = order[order.index(focus.sprite)-1]\n # Sorted list of the focus numbers being used\n list_num = sorted(order[::2])\n if not event.mod & KMOD_SHIFT: # Move focus to next widget\n if curr_num is None:\n # If nothing focused, focus first widget\n new_num = list_num[0]\n elif not focus.sprite._change_focus(True):\n # Don't change when not at end of container widget\n new_num = curr_num\n elif list_num.index(curr_num) == len(list_num)-1:\n # Jump back to first widget\n new_num = list_num[0]\n else:\n # Next focus number in the list\n new_num = list_num[list_num.index(curr_num)+1]\n else: # Shift key - move focus to previous widget\n if curr_num is None:\n new_num = list_num[-1]\n elif not focus.sprite._change_focus(False):\n new_num = curr_num\n elif list_num.index(curr_num) == 0:\n # Jump back to last widget\n new_num = list_num[len(list_num)-1]\n else:\n new_num = list_num[list_num.index(curr_num)-1]\n if curr_num != new_num:\n # Set widget at new focus number\n focus.add(1, order[order.index(new_num)+1])\n\n # Send event to focused widget\n if focus:\n focus.sprite._event(event)", "def maybe_focus_parent(self):\n parent_window_data = self.get_windows_with_parent_data()\n focused_node = filter(lambda w: w['window']['focused'], parent_window_data)\n if len(focused_node) <= 0:\n return\n focused_node = focused_node[0]\n\n focused_parent = focused_node['parent']\n focused_window = focused_node['window']\n if focused_parent['layout'] == 'tabbed':\n call('i3-msg focus parent')", "def focus_password(self, **kws):\r\n self.password_box.focus()", "def OnKillFocus(self, event):\r\n\r\n # We must let the native text control handle focus, too, otherwise\r\n # it could have problems with the cursor (e.g., in wxGTK).\r\n event.Skip()", "def applicationDidFinishLaunching_(self, notification):\n mask = NSKeyDownMask | NSKeyUpMask | NSFlagsChangedMask\n NSEvent.addGlobalMonitorForEventsMatchingMask_handler_(\n mask, self.handler)", "def OnActivate(self, event):\r\n\r\n if self._owner_mgr and event.GetActive():\r\n self._owner_mgr.OnFloatingPaneActivated(self._pane_window)", "def win_raise(self):\n self.raise_()\n self.activateWindow()", "def focus(self) -> bool:\n return self._has_focus", "def enter_notify_event(self, widget, event):\n enter_focus = self.t_.get('enter_focus', False)\n if enter_focus:\n # set focus on widget\n pass\n return self.make_callback('enter')", "def ev_windowexposed(self, event: WindowEvent) -> 
None:", "def mainloop(self):\r\n self.bindHotkeys()\r\n self.root.mainloop()" ]
[ "0.8171678", "0.804375", "0.75519395", "0.71898013", "0.71898013", "0.71898013", "0.71898013", "0.71747136", "0.715196", "0.6995946", "0.6974785", "0.6864648", "0.6742967", "0.67170674", "0.67143035", "0.66499484", "0.6622382", "0.6622382", "0.6622382", "0.6622382", "0.6621971", "0.64934033", "0.6488107", "0.64122355", "0.6405154", "0.6405154", "0.6405154", "0.6405154", "0.6394032", "0.63770205", "0.63214904", "0.63128763", "0.63063854", "0.6262583", "0.62542194", "0.6241978", "0.62140995", "0.62021583", "0.6167704", "0.61389416", "0.61325055", "0.61317104", "0.6101064", "0.6092407", "0.6077474", "0.60475063", "0.6043108", "0.60368174", "0.5916738", "0.5914755", "0.5864573", "0.5859517", "0.5857693", "0.58160657", "0.58159643", "0.57966536", "0.5774493", "0.574982", "0.570614", "0.5702052", "0.56957406", "0.56954944", "0.56718326", "0.566769", "0.5651882", "0.56457037", "0.56420904", "0.5641811", "0.56415385", "0.5626228", "0.56258655", "0.5581099", "0.55795425", "0.5577773", "0.5563271", "0.55473894", "0.55473894", "0.55451155", "0.5518673", "0.55168164", "0.54671097", "0.5458738", "0.5450128", "0.54492915", "0.54418874", "0.5440643", "0.54319817", "0.54295945", "0.54230326", "0.5421878", "0.5393063", "0.53924745", "0.5391458", "0.53775185", "0.53745425", "0.5372252", "0.53650475", "0.53595006", "0.53585345", "0.53510606" ]
0.68105537
12
Called when the mouse cursor enters the window. Adjust method signature as appropriate for callback.
def enter_notify_event(self, widget, event):
    enter_focus = self.t_.get('enter_focus', False)
    if enter_focus:
        # set focus on widget
        pass
    return self.make_callback('enter')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ev_windowenter(self, event: WindowEvent) -> None:", "def mouse_enter(self):\n pass", "def ev_windowenter(self, event: tcod.event.WindowEvent) -> T | None:", "def append_cursor_enter_callback(self):", "def mouse_in(event):\r\n\r\n if str(event.type) == 'Enter':\r\n about_content.config(cursor=\"hand2\")\r\n else:\r\n about_content.config(cursor=\"arrow\")", "def ev_windowshown(self, event: WindowEvent) -> None:", "def hoverEnterEvent(self, event: 'QGraphicsSceneHoverEvent'):\n QApplication.instance().setOverrideCursor(Qt.OpenHandCursor)", "def ev_windowfocusgained(self, event: WindowEvent) -> None:", "def enterEvent(self, ev):\n self.setFocus(Qt.MouseFocusReason)\n self.__pointerLeftWidget = False\n self.setCursor(self.defaultCursor)\n QGraphicsView.enterEvent(self, ev)", "def ev_windowshown(self, event: tcod.event.WindowEvent) -> T | None:", "def ev_windowfocusgained(self, event: tcod.event.WindowEvent) -> T | None:", "def hoverEnterEvent(self, event: QGraphicsSceneHoverEvent):\n self.setCursor(Qt.ArrowCursor)", "def hoverEnterEvent(self, event: QGraphicsSceneHoverEvent):\n self.setCursor(Qt.ArrowCursor)", "def __window_focus(self):\n pass", "def mouse_in(self, event):\r\n self['background'] = '#E5F3FF'", "def window_handler(self):\n self.open_window()\n cv2.setMouseCallback(self.window_name, self.click_handler)\n finish = False\n while not finish:\n\n key = cv2.waitKey(0)\n\n finish = self.manage_key(key)", "def ev_MOUSEUP(self, event):", "def handle_mouse(self, x, y):\n pass", "def handle_mouse_press(self, event):", "def ev_windowmoved(self, event: WindowMoved) -> None:", "def _hover(self, event):\n if self.ignore(event):\n return\n\n if self._active_handle is not None or not self._selection_completed:\n # Do nothing if button is pressed and a handle is active, which may\n # occur with drag_from_anywhere=True.\n # Do nothing if selection is not completed, which occurs when\n # a selector has been cleared\n return\n\n _, e_dist = self._edge_handles.closest(event.x, event.y)\n self._set_cursor(e_dist <= self.grab_range)", "def dragEnterEvent(self, event):", "def __master_cursor_pos_callback(self, glfw_window, xpos, ypos):\n # flip glfw window space to match OGL space(like texture that has bottom left origin)\n ypos = self.window.glyph.size[1] - ypos\n\n # update values\n self.__pos_instant = Vec(xpos, ypos, 0)\n self.__accel = self.__pos_instant - self.__pos_prev\n self.__pos_prev = self.__pos_instant\n\n # call registered callbacks\n self.call_cursor_pos_callback(glfw_window, *self.__pos_instant.xy, mouse=self)", "def _(event):\n system_line.cursor_left()", "def OnMouseIn( self, event ):\n self.whichChoice = 1\n event.context.triggerRedraw(1)", "def on_mouse_motion(x, y, dx, dy):\n if in_box(x, y):\n # Change the cursor if inside the box.\n self.window.set_mouse_cursor(self.hand_cursor)\n else:\n self.window.set_mouse_cursor(self.default_cursor)", "def ev_windowexposed(self, event: WindowEvent) -> None:", "def enterEvent(self, event):\n if self.responsive:\n self.in_focus = True\n self.set_background(self.backgrounds[\"inFocus\"])\n event.accept()", "def mouse_left_up(self):\n pass", "def dragEnterEvent(self, dee):\n dee.accept(hasattr(Globals.dragObject, 'trackFrame'))", "def grab(self, event):\n self.ypos = event.y\n self.xpos = event.x\n self.config(cursor='fleur')", "def ev_windowfocuslost(self, event: WindowEvent) -> None:", "def mouse_middle_up(self):\n pass", "def handle_event(self, event, window):\n pass", "def OnIdle(self, ):\r\n self.triggerRedraw(1)\r\n return 1", "def 
main(self):\n\t\t\t# Handle the mouse\n\t\t\timport bge\n\t\t\tmouse = bge.logic.mouse\n\t\t\t\n\t\t\tpos = list(mouse.position)\n\t\t\tpos[0] *= bge.render.getWindowWidth()\n\t\t\tpos[1] = bge.render.getWindowHeight() - (bge.render.getWindowHeight() * pos[1])\n\t\t\t\n\t\t\tmouse_state = bgui.BGUI_MOUSE_NONE\n\t\t\tmouse_events = mouse.events\n\t\t\t\t\t\n\t\t\tif mouse_events[bge.events.LEFTMOUSE] == bge.logic.KX_INPUT_JUST_ACTIVATED:\n\t\t\t\tmouse_state = bgui.BGUI_MOUSE_CLICK\n\t\t\telif mouse_events[bge.events.LEFTMOUSE] == bge.logic.KX_INPUT_JUST_RELEASED:\n\t\t\t\tmouse_state = bgui.BGUI_MOUSE_RELEASE\n\t\t\telif mouse_events[bge.events.LEFTMOUSE] == bge.logic.KX_INPUT_ACTIVE:\n\t\t\t\tmouse_state = bgui.BGUI_MOUSE_ACTIVE\n\t\t\t\n\t\t\tself.update_mouse(pos, mouse_state)", "def on_window_ready(self):\n pass", "def _tsne_window_callback(self, x, y):\n self._display_from_tsne(x,y)", "def mouseReleaseEvent(self, event):\n self.dragging = False\n self.parent.unsetCursor()\n if self.moved:\n self.draw_visible_area()\n self.moved = False\n else:\n if self.cur_hover:\n dialog = TileInfoDialog(self.parent, self.cur_hover, self.mainwindow.config)\n dialog.exec()\n\n # Re-focus the main window\n self.mainwindow.activateWindow()", "def dragEnterEvent(self, e):\n # TODO: Do it properly.\n # TODO: Redraw widget while dragging.\n e.accept()", "def on_enter(self):\n raise NotImplemented(\"on_enter method should be implemented.\")", "def OnIdle( self, ):\n self.triggerRedraw(1)\n return 1", "def OnIdle( self, ):\n self.triggerRedraw(1)\n return 1", "def rendererWindowActivated(self, sw):\n pass", "def handle_mouse(self, x, y):\n self.x = x\n self.y = y\n global _pending_handle_mouse\n if not _pending_handle_mouse:\n _pending_handle_mouse = True\n if self.fig.document is not None:\n self.fig.document.add_timeout_callback(self.handle_mouse_callback, 100)\n else:\n self.handle_mouse_callback()", "def ev_mousemotion(self, event: MouseMotion) -> None:", "def hoverLeaveEvent(self, event: 'QGraphicsSceneHoverEvent'):\n QApplication.instance().restoreOverrideCursor()", "def update(self):\n self.mousePos = pygame.mouse.get_pos()\n self.update_button_hover_status()", "def InitOtherEvents(self):\n\n self.Bind(wx.EVT_ENTER_WINDOW, self.OnMouse)\n self.Bind(wx.EVT_LEAVE_WINDOW, self.OnMouse)", "def mousePressEvent(self, mouse_event):\r\n return", "def mousePressEvent(self, event):\n self.begin = event.pos()\n self.end = event.pos()\n self.update()", "def OnLeaveWindow(self, event):\r\n\r\n if self._hover_button:\r\n self.RefreshButton(self._hover_button)\r\n self._hover_button = None", "def _on_start(self):\n desktop = QtGui.QApplication.instance().desktop()\n available_geometry = desktop.screenGeometry(QtGui.QCursor().pos())\n self.setGeometry(available_geometry.x(), 0, 100, 100)", "def _on_enter(self):\n last_line_num = self.LineFromPosition(self.GetLength())\n current_line_num = self.LineFromPosition(self.GetCurrentPos())\n new_line_pos = (last_line_num - current_line_num)\n if self.debug:\n print >>sys.__stdout__, repr(self.input_buffer)\n self.write('\\n', refresh=False)\n # Under windows scintilla seems to be doing funny\n # stuff to the line returns here, but the getter for\n # input_buffer filters this out.\n if sys.platform == 'win32':\n self.input_buffer = self.input_buffer\n old_prompt_num = self.current_prompt_pos\n has_executed = PrefilterFrontEnd._on_enter(self,\n new_line_pos=new_line_pos)\n if old_prompt_num == self.current_prompt_pos:\n # No execution has happened\n 
self.GotoPos(self.GetLineEndPosition(current_line_num + 1))\n return has_executed", "def move_start(event):\n nonlocal x, y\n x = event.x \n y = event.y\n window['cursor'] = utils.CURSORS['move_item']", "def onInsert(self):\n self.mainWindow.insert()", "def mouseOver(self, event):\n if self.editMode:\n self.setEditCursor(event)\n return\n x = (event.y - self.margin) // self.cellSize\n y = (event.x - self.margin) // self.cellSize\n if self.lastPosition == (x, y):\n return # I've already drawn this\n if not (0 <= x < self.rows and 0 <= y < self.cols):\n return # not on the grid\n self.lastPosition = (x, y)\n self.paintBackground(x, y, self.checkFree(x, y))", "def mouse_middle_down(self):\n pass", "def mouse_over(self):\n pass", "def __handleMouseEvents(self, event):\n if not self.enabled:\n return\n\n x, y = event.GetPosition()\n\n # First make sure we have started a box.\n if self.currentBox == None and not event.LeftDown():\n # No box started yet. Set cursor to the initial kind.\n self.__setCursor(wx.CURSOR_CROSS)\n return\n\n if event.LeftDown():\n if self.currentBox == None:\n # No RB Box, so start a new one.\n self.currentBox = (x, y, 0, 0)\n self.hasLetUp = 0\n elif self.__isSizingCursor():\n # Starting a sizing operation. Change the origin.\n position = getCursorPosition(x, y, self.currentBox, thickness=self.__THICKNESS)\n self.currentBox = self.__denormalizeBox(position, self.currentBox)\n\n elif event.Dragging() and event.LeftIsDown():\n # Use the cursor type to determine operation\n if self.__isMovingCursor():\n if self.currentlyMoving or pointInBox(x, y, self.currentBox):\n if not self.currentlyMoving:\n self.currentlyMoving = (x - self.currentBox[0], y - self.currentBox[1])\n self.__moveTo(x - self.currentlyMoving[0], y - self.currentlyMoving[1])\n elif self.__isSizingCursor():\n self.__resizeBox(x, y)\n\n elif event.LeftUp():\n self.hasLetUp = 1\n self.currentlyMoving = None\n self.__normalizeBox()\n\n elif event.Moving() and not event.Dragging():\n # Simple mouse movement event\n self.__mouseMoved(x,y)", "def OnMouse(self, event):\n\n self.Refresh()\n event.Skip()", "def left_callback(self):\n self.rokucontrol.left_callback()", "def mouseReleaseEvent(self, event):\n self.box_begin = self.begin\n self.box_end = event.pos()\n self.begin = event.pos()\n self.end = event.pos()\n if not self.permanent_show:\n self.update()", "def mousePosition(self):", "def _on_canvas_mouse(self, event):\n if event.GetEventType() in [wx.wxEVT_MOTION, wx.wxEVT_LEFT_DOWN, \n wx.wxEVT_LEFT_UP, wx.wxEVT_MOTION|wx.wxEVT_LEFT_DOWN]:\n new_event = wx.MouseEvent(event.GetEventType())\n pos = self.tc.ScreenToClient(wx.GetMousePosition())\n new_event.SetPosition(pos)\n new_event.Skip()\n self.tc.GetEventHandler().ProcessEvent(new_event)", "def ev_windowmoved(self, event: tcod.event.WindowMoved) -> T | None:", "def mouseDragged():\n if mousePressed:\n mousePressed()", "def onCursorChanged(self, view):\n if not self.settingCursor:\n row = view.get_cursor()[0]\n i = self.model.get_iter(row)\n event = self.model.get(i, 9)[0]\n self.notifyHilightChanged(event)", "def ev_mousebuttonup(self, event: MouseButtonUp) -> None:", "def on_hover(self) -> None:", "def __mouseMoved(self, x, y):\n # Are we on the bounding box?\n if pointOnBox(x, y, self.currentBox, thickness=self.__THICKNESS):\n position = getCursorPosition(x, y, self.currentBox, thickness=self.__THICKNESS)\n cursor = [\n wx.CURSOR_SIZENWSE,\n wx.CURSOR_SIZENS,\n wx.CURSOR_SIZENESW,\n wx.CURSOR_SIZEWE,\n wx.CURSOR_SIZENWSE,\n wx.CURSOR_SIZENS,\n 
wx.CURSOR_SIZENESW,\n wx.CURSOR_SIZEWE\n ] [position]\n self.__setCursor(cursor)\n elif pointInBox(x, y, self.currentBox):\n self.__setCursor(wx.CURSOR_HAND)\n else:\n self.__setCursor()", "def on_activate(self, caller):\n self.window = GameWindow()\n self.add_window(self.window)", "def OnMouse(self, event):\r\n\r\n # we want to work with logical coords\r\n x, dummy = self._owner.CalcUnscrolledPosition(event.GetX(), 0)\r\n y = event.GetY()\r\n\r\n if event.Moving():\r\n \r\n col = self.XToCol(x)\r\n if col != self._hotTrackCol:\r\n \r\n # Refresh the col header so it will be painted with hot tracking\r\n # (if supported by the native renderer.)\r\n self.RefreshColLabel(col)\r\n\r\n # Also refresh the old hot header\r\n if self._hotTrackCol >= 0:\r\n self.RefreshColLabel(self._hotTrackCol)\r\n\r\n self._hotTrackCol = col\r\n \r\n if event.Leaving() and self._hotTrackCol >= 0:\r\n \r\n # Leaving the window so clear any hot tracking indicator that may be present\r\n self.RefreshColLabel(self._hotTrackCol)\r\n self._hotTrackCol = -1\r\n \r\n if self._isDragging:\r\n\r\n self.SendListEvent(wx.wxEVT_COMMAND_LIST_COL_DRAGGING, event.GetPosition())\r\n\r\n # we don't draw the line beyond our window, but we allow dragging it\r\n # there\r\n w, dummy = self.GetClientSize()\r\n w, dummy = self._owner.CalcUnscrolledPosition(w, 0)\r\n w -= 6\r\n\r\n # erase the line if it was drawn\r\n if self._currentX < w:\r\n self.DrawCurrent()\r\n\r\n if event.ButtonUp():\r\n self._isDragging = False\r\n if self.HasCapture():\r\n self.ReleaseMouse()\r\n self._dirty = True\r\n self.SetColumnWidth(self._column, self._currentX - self._minX)\r\n self.Refresh()\r\n self.SendListEvent(wx.wxEVT_COMMAND_LIST_COL_END_DRAG, event.GetPosition())\r\n else:\r\n self._currentX = max(self._minX + 7, x)\r\n\r\n # draw in the new location\r\n if self._currentX < w:\r\n self.DrawCurrent()\r\n \r\n else: # not dragging\r\n\r\n self._minX = 0\r\n hit_border = False\r\n\r\n # end of the current column\r\n xpos = 0\r\n\r\n # find the column where this event occured\r\n countCol = self.GetColumnCount()\r\n\r\n for column in xrange(countCol):\r\n\r\n if not self.IsColumnShown(column):\r\n continue # do next if not shown\r\n\r\n xpos += self.GetColumnWidth(column)\r\n self._column = column\r\n if abs (x-xpos) < 3 and y < 22:\r\n # near the column border\r\n hit_border = True\r\n break\r\n \r\n if x < xpos:\r\n # inside the column\r\n break\r\n \r\n self._minX = xpos\r\n \r\n if event.LeftDown() or event.RightUp():\r\n if hit_border and event.LeftDown():\r\n self._isDragging = True\r\n self.CaptureMouse()\r\n self._currentX = x\r\n self.DrawCurrent()\r\n self.SendListEvent(wx.wxEVT_COMMAND_LIST_COL_BEGIN_DRAG, event.GetPosition())\r\n else: # click on a column\r\n evt = (event.LeftDown() and [wx.wxEVT_COMMAND_LIST_COL_CLICK] or [wx.wxEVT_COMMAND_LIST_COL_RIGHT_CLICK])[0]\r\n self.SendListEvent(evt, event.GetPosition())\r\n \r\n elif event.LeftDClick() and hit_border:\r\n self.SetColumnWidth(self._column, self._owner.GetBestColumnWidth(self._column))\r\n self.Refresh()\r\n\r\n elif event.Moving():\r\n \r\n if hit_border:\r\n setCursor = self._currentCursor == wx.STANDARD_CURSOR\r\n self._currentCursor = self._resizeCursor\r\n else:\r\n setCursor = self._currentCursor != wx.STANDARD_CURSOR\r\n self._currentCursor = wx.STANDARD_CURSOR\r\n \r\n if setCursor:\r\n self.SetCursor(self._currentCursor)", "def handle_mouse(self, x, y):\n self.last_x = x\n self.last_y = y\n if self.min_x is not None:\n self.last_x = max(self.last_x, self.min_x)\n if 
self.max_x is not None:\n self.last_x = min(self.last_x, self.max_x)\n # we are in region mode\n if self.region_id is not None:\n start = self.last_x\n end = self.region_edge\n self.region_model.adjust_region(self.region_id, start, end)\n return False", "def HandleFocusIn(self, event: tkEvent):\n pass", "def handle_input(self, ncode, wparam, lparam):\n x_pos = lparam.contents.x_pos\n y_pos = lparam.contents.y_pos\n data = lparam.contents.mousedata\n\n # This is how we can distinguish mouse 1 from mouse 2\n # extrainfo = lparam.contents.extrainfo\n # The way windows seems to do it is there is primary mouse\n # and all other mouses report as mouse 2\n\n # Also useful later will be to support the flags field\n # flags = lparam.contents.flags\n # This shows if the event was from a real device or whether it\n # was injected somehow via software\n\n self.emulate_mouse(wparam, x_pos, y_pos, data)\n\n # Give back control to Windows to wait for and process the\n # next event\n return ctypes.windll.user32.CallNextHookEx(\n self.hooked, ncode, wparam, lparam)", "def __window_moveTo(self, x, y):\n pass", "def __window_home(self):\n pass", "def OnLeaveWindow(self, event):\r\n \r\n if self._hover_button:\r\n self._hover_button.cur_state = AUI_BUTTON_STATE_NORMAL\r\n self._hover_button = None\r\n self.Refresh()\r\n self.Update()", "def on_mouse_motion(self, x, y, delta_x, delta_y):\r\n pass", "def OnSetCursor(self, event):\r\n \r\n # determine cursor\r\n part = self.HitTest(event.GetX(), event.GetY())\r\n cursor = wx.NullCursor\r\n\r\n if part:\r\n if part.type in [AuiDockUIPart.typeDockSizer, AuiDockUIPart.typePaneSizer]:\r\n\r\n if not self.CheckMovableSizer(part):\r\n return\r\n \r\n if part.orientation == wx.VERTICAL:\r\n cursor = wx.StockCursor(wx.CURSOR_SIZEWE)\r\n else:\r\n cursor = wx.StockCursor(wx.CURSOR_SIZENS)\r\n \r\n elif part.type == AuiDockUIPart.typeGripper:\r\n cursor = wx.StockCursor(wx.CURSOR_SIZING)\r\n\r\n event.SetCursor(cursor)", "def on_mouse_press(self, x, y, button):\n\n pass", "def ev_windowexposed(self, event: tcod.event.WindowEvent) -> T | None:", "def mousePressEvent(self, ev):\n super(PlotObject, self).mousePressEvent(ev)\n self._downpos = self.mousePos", "def window_ready(self):\n raise NotImplementedError", "def update(self):\n\n\t\tself.x = games.mouse.x\n\t\tself.y = games.mouse.y\n\t\tself.check_collide()", "def show_cursor():\n ret = mouse.SDL_ShowCursor(SDL_ENABLE)\n if ret < 0:\n raise_sdl_err(\"showing the mouse cursor\")", "def win_raise(self):\n self.raise_()\n self.activateWindow()", "def cursor_placement_thread(self):\r\n while self.editing:\r\n # pylint: disable=W0212\r\n with goxapi.Signal._lock:\r\n curses.curs_set(2)\r\n self.win.touchwin()\r\n self.win.refresh()\r\n time.sleep(0.1)\r\n curses.curs_set(0)", "def mouseReleaseEvent(self, event):\n width = self.frameGeometry().width()\n height = self.frameGeometry().height()\n cursor = QtGui.QCursor()\n new_pos = self.mapFromGlobal(cursor.pos())\n x = new_pos.x()\n y = new_pos.y()\n self.__selector_y = y/float(height) # normalized value of the y position\n \tself.__selector_x = x/float(width) #normalised value of the x position\n self.updatePixelColor()\n self.repaint()", "def OnSetCursor(self, event):\r\n \r\n cursor = wx.NullCursor\r\n\r\n if self._gripper_sizer_item:\r\n \r\n gripper_rect = self._gripper_sizer_item.GetRect()\r\n if gripper_rect.Contains((event.GetX(), event.GetY())):\r\n cursor = wx.StockCursor(wx.CURSOR_SIZING)\r\n \r\n event.SetCursor(cursor)", "def __init__(self, win):\n 
super().__init__()\n self.mouse = (0, 0)\n glfw.set_cursor_pos_callback(win, self.on_mouse_move)\n glfw.set_scroll_callback(win, self.on_scroll)", "def __init__(self, win):\n super().__init__()\n self.mouse = (0, 0)\n glfw.set_cursor_pos_callback(win, self.on_mouse_move)\n glfw.set_scroll_callback(win, self.on_scroll)", "def inn_handler(self, event):\r\n\r\n if self.inn.inn_window != None and self.inn.inn_window.is_visible:\r\n self.inn.inn_window.inn_window_handler(event, self, self.party.member)\r\n return\r\n elif self.inn.save_confirm != None and self.inn.save_confirm.is_visible:\r\n self.inn.save_confirm.confirm_window_handler(self, event, None)\r\n return\r\n elif self.inn.load_confirm != None and self.inn.load_confirm.is_visible:\r\n self.inn.load_confirm.confirm_window_handler(self, event, None)\r\n return\r\n elif self.inn.item_out_window != None and self.inn.item_out_window.is_visible:\r\n self.inn.item_out_window.system_notify_window_handler( event, self, self.party.member)\r\n return\r\n elif self.inn.item_in_window != None and self.inn.item_in_window.is_visible:\r\n self.inn.item_in_window.system_notify_window_handler(event, self, self.party.member)\r\n return\r\n\r\n \r\n #moves the cursor up\r\n if event.type == KEYDOWN and event.key == K_UP:\r\n self.cursor_se.play()\r\n self.inn.menu -= 1\r\n if self.inn.menu < 0:\r\n self.inn.menu = MENU_MAX\r\n #moves the cursor down\r\n elif event.type == KEYDOWN and event.key == K_DOWN:\r\n self.cursor_se.play\r\n self.inn.menu += 1\r\n if self.inn.menu > MENU_MAX:\r\n self.inn.menu = 0\r\n\r\n if event.type == KEYDOWN and (event.key == K_SPACE or event.key == K_z or event.key == K_RETURN):\r\n if self.inn.menu == Inn.REST:\r\n if len(self.party.member) > 0:\r\n self.inn.inn_window = inn_window.Inn_window(Rect(80, 100, 340, 200))\r\n self.inn.inn_window.is_visible = True\r\n elif self.inn.menu == Inn.ITEM_OUT:\r\n if len(self.party.member) > 0:\r\n self.inn.item_out_window = system_notify.System_notify_window(Rect(200,120,340, 240), system_notify.System_notify_window.ITEM_OUT)\r\n self.inn.item_out_window.is_visible = True\r\n elif self.inn.menu == Inn.ITEM_IN:\r\n if len(self.party.member) > 0:\r\n self.inn.item_in_window = system_notify.System_notify_window(Rect(200,120,340, 240), system_notify.System_notify_window.ITEM_IN)\r\n self.inn.item_in_window.is_visible = True\r\n elif self.inn.menu == Inn.SAVE:\r\n self.inn.save_confirm = system_notify.Confirm_window(Rect(150, 150, 200, 110), system_notify.Confirm_window.SAVE)\r\n self.inn.save_confirm.is_visible = True\r\n elif self.inn.menu == Inn.LOAD:\r\n self.inn.load_confirm = system_notify.Confirm_window(Rect(150, 150, 200, 110), system_notify.Confirm_window.LOAD)\r\n self.inn.load_confirm.is_visible = True\r\n elif self.inn.menu == Inn.BACK:\r\n self.game_state = CITY\r\n self.inn.menu = Inn.REST\r\n self.inn.music = 0\r\n self.inn = None\r\n self.city = city.City()\r\n self.city.menu = 1\r\n self.select_se.play()\r\n\r\n\r\n if event.type == KEYDOWN and (event.key ==K_x):\r\n self.cancel_se.play()\r\n self.game_state = CITY\r\n self.inn.menu = Inn.REST\r\n self.inn.music = 0\r\n self.inn = None\r\n self.city = city.City()\r\n self.city.menu = 1", "def mouseMoveEvent(self, event):\n if self._ignore_mouse_events:\n event.ignore()\n return\n\n event.accept()\n\n if self._selection_mode != SelectionMode.NONE:\n x = event.x()\n y = event.y()\n xdiff = float(x - self._selection_position_start[0])\n ydiff = float(y - self._selection_position_start[1])\n if abs(xdiff) < 0.0001:\n xdiff = 
1\n if abs(ydiff) < 0.0001:\n ydiff = 1\n xoff = float(self._selection_position_start[0]) / xdiff + 0.5\n yoff = float(self._selection_position_start[1]) / ydiff + 0.5\n self._addUpdateSelectionBox(xdiff, ydiff, xoff, yoff)\n\n elif self._use_zinc_mouse_event_handling:\n scene_input = self._sceneviewer.createSceneviewerinput()\n scene_input.setPosition(event.x(), event.y())\n scene_input.setEventType(Sceneviewerinput.EVENT_TYPE_MOTION_NOTIFY)\n if event.type() == QtCore.QEvent.Leave:\n scene_input.setPosition(-1, -1)\n self._sceneviewer.processSceneviewerinput(scene_input)", "def ev_windowfocuslost(self, event: tcod.event.WindowEvent) -> T | None:", "def moveCursor(self):\n\n\t\tself._before = self.rect.center\n\t\tself.rect.center = self._pos", "def dnd_enter(self, source, event):\n\n self._canvas_cursor = self._canvas['cursor']\n if self._dnd_target and source is not self and hasattr(source, 'rgb'):\n self._canvas['cursor'] = self._dnd_cursor or tks.dnd.CURSOR_WIDGET\n self._canvas['relief'] = tk.RAISED\n else:\n self._canvas['cursor'] = tks.dnd.CURSOR_FORBIDDEN\n # self._canvas.focus_set()", "def __window_forward(self):\n pass", "def handle_mouse(obj, event):\n if event:\n x = event.globalX()\n y = event.globalY()\n x_w = obj.offset.x()\n y_w = obj.offset.y()\n obj.move(x - x_w, y - y_w)" ]
[ "0.7506842", "0.72784686", "0.70456034", "0.69871587", "0.6757498", "0.664396", "0.6616291", "0.65236306", "0.6389402", "0.6246457", "0.61803067", "0.61756974", "0.61756974", "0.6149091", "0.61142", "0.6091212", "0.60810506", "0.6032246", "0.5978715", "0.597732", "0.5919302", "0.588005", "0.5816736", "0.57700855", "0.57634014", "0.57382405", "0.5723461", "0.5720711", "0.5720427", "0.57163876", "0.5713951", "0.570851", "0.5705813", "0.56938255", "0.5680632", "0.56526774", "0.5644869", "0.56442565", "0.564138", "0.5638186", "0.56229514", "0.5614544", "0.5614544", "0.5613029", "0.56031734", "0.56005913", "0.5591709", "0.55852956", "0.5579003", "0.5572681", "0.557165", "0.5565712", "0.55653733", "0.55452746", "0.55377096", "0.5529307", "0.55279034", "0.5521763", "0.5514291", "0.5507673", "0.5506016", "0.54956585", "0.54923695", "0.5482359", "0.5476049", "0.5470525", "0.5462131", "0.5440101", "0.5416316", "0.5411932", "0.54061294", "0.53969926", "0.5395681", "0.53894335", "0.5388239", "0.5381609", "0.5358803", "0.53474426", "0.53437567", "0.5343309", "0.53305197", "0.5323824", "0.5309236", "0.53015774", "0.53015095", "0.5300512", "0.53001726", "0.5296019", "0.52950776", "0.5292529", "0.5288187", "0.52811015", "0.52811015", "0.52722245", "0.5271051", "0.527054", "0.52694374", "0.5264608", "0.5264525", "0.52532053" ]
0.5393033
73
Called when the mouse cursor leaves the window. Adjust method signature as appropriate for callback.
def leave_notify_event(self, widget, event):
    self.logger.debug("leaving widget...")
    return self.make_callback('leave')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ev_windowleave(self, event: WindowEvent) -> None:", "def OnLeaveWindow(self, event):\r\n \r\n if self._hover_button:\r\n self._hover_button.cur_state = AUI_BUTTON_STATE_NORMAL\r\n self._hover_button = None\r\n self.Refresh()\r\n self.Update()", "def OnLeaveWindow(self, event):\r\n\r\n if self._hover_button:\r\n self.RefreshButton(self._hover_button)\r\n self._hover_button = None", "def on_mouse_leave (self, event):\n\n\t\tif not self.clicked:\n\n\t\t\tself.cursor_position = [-1,-1]\n\t\t\tself.redraw_canvas()\n\t\t\tself.hide_tip()#self.timer1 = gobject.timeout_add(2000, self.hide_tip)", "def OnLeaveWindow(self, event):\r\n\r\n self.RefreshOverflowState()\r\n self.SetHoverItem(None)\r\n self.SetPressedItem(None)\r\n\r\n self._tip_item = None\r\n self.StopPreviewTimer()", "def ev_windowleave(self, event: tcod.event.WindowEvent) -> T | None:", "def mouse_out(self):\n pass", "def ev_windowfocuslost(self, event: WindowEvent) -> None:", "def hoverLeaveEvent(self, event: 'QGraphicsSceneHoverEvent'):\n QApplication.instance().restoreOverrideCursor()", "def ev_windowclose(self, event: WindowEvent) -> None:", "def leaveEvent(self, ev):\n if (self.panning):\n # don't immediately change pointer if we're panning\n self.__pointerLeftWidget = True\n else:\n self.setCursor(Qt.ArrowCursor)\n QGraphicsView.leaveEvent(self, ev)\n self.currentKbKey = None", "def ev_windowfocuslost(self, event: tcod.event.WindowEvent) -> T | None:", "def mouse_out(self, event):\r\n self['background'] = self.defaultBackground", "def dnd_leave(self, source, event):\n\n if self._canvas_cursor:\n self._canvas['cursor'] = self._canvas_cursor\n self._canvas['relief'] = tk.SUNKEN", "def exit_on_click(self):\n self.get_mouse()\n self._close()", "def on_mouse_leave(self, event):\n global controller\n if self == controller:\n self.set_help_text(None)\n if self.task:\n self.task.stop()\n self.task = None\n controller = None", "def ev_windowhidden(self, event: WindowEvent) -> None:", "def on_unhover(self) -> None:", "def end(self, arg2=None):\n\t\t\timport bge\n\t\t\tbge.logic.mouse.visible = False\n\t\t\t\n\t\t\timport engine\n\t\t\tengine.interface.mouse.reserved -= 1\n\t\t\t\n\t\t\t#kill bgui objects\n\t\t\tself.notificationSystem.activeAlert = None\n\t\t\tself._remove_widget(self.background)", "def ev_windowclose(self, event: tcod.event.WindowEvent) -> T | None:", "def __window_close(self):\n pass", "def leaveEvent(self, event):\n self.destroy()", "def leave(event):\n if tooltip.event is not None:\n widget.after_cancel(tooltip.event)\n tooltip.event = None\n tooltip.hidetip()", "def __window_stop(self):\n pass", "def OnMouseOut( self, event ):\n self.whichChoice = 0\n event.context.triggerRedraw(1)", "def leaveEvent(self, event):\n if self.responsive:\n self.in_focus = False\n self.set_background(self.default_background)\n event.accept()", "def mouseOut(self, event):\n if self.editMode and self.lastChanged:\n self.changeColor(self.lastChanged, self.colors['pentomino'])\n return\n self.correctPending()\n self.lastPosition = None", "def hoverLeaveEvent(self, event):\n if self._hoverSpot:\n if self._hoverSpot.hoverLeaveEvent(event):\n self.update()\n \n self._hoverSpot = None\n \n super(XNode, self).hoverLeaveEvent(event)", "def ev_windowhidden(self, event: tcod.event.WindowEvent) -> T | None:", "def exit_btn_callback(evt):\n print(\"Inside exit_btn_callback. 
Event object is: \", evt)\n mainwin.destroy()", "def back(self, _event=None):\n self.on_closingWindow()", "def leave_page(self):\n self.window.destroy()", "def HandleFocusOut(self, event: tkEvent):\n pass", "def onCloseWindow(self, event):\r\n\r\n self.Destroy()", "def exit_event(self, event):\n self.root.quit()", "def frameLeave(self):\n try:\n self.contentFrame.currFrame.leave()\n except AttributeError:\n pass", "def _mouse_leave(self, event):\n\n #Task 1.2 (Tower placement): Delete the preview\n #Hint: Relevant canvas items are tagged with: 'path', 'range', 'shadow'\n # See tk.Canvas.delete (delete all with tag)\n self._view.delete(\"shadow\", \"range\", \"path\")", "def mouseReleased():\n if not game_controller.game_over:\n if game_controller.falling_disk and \\\n game_controller.falling_disk.y_vel == 0:\n game_controller.handle_mouseReleased()", "def __window_blur(self):\n pass", "def release():\n gui.mouseUp()", "def leave(self):\n p = GameOverPopup(self)\n p.open()", "def on_closing_event(self):\n self.exit_event(None)", "def mouseReleaseEvent(self, event):\n self.dragging = False\n self.parent.unsetCursor()\n if self.moved:\n self.draw_visible_area()\n self.moved = False\n else:\n if self.cur_hover:\n dialog = TileInfoDialog(self.parent, self.cur_hover, self.mainwindow.config)\n dialog.exec()\n\n # Re-focus the main window\n self.mainwindow.activateWindow()", "def OnClose(self, event):\n\n if not self.plot_deleted:\n pub.sendMessage( 'Close.%s' %self.GetLabel(), event=self )\n\n print(\"Window: '%s', closed by event: '%s'\" %( self.GetLabel(), event.__class__.__name__ ))\n self.Destroy()", "def leaveEvent(self, event):\n self.setStyleSheet('QFrame {background-color: rgb(51,51,51); border: 0px solid white; }')\n self.scene.view.defaultFrameGraph().setClearColor(QColor(51, 51, 51))", "def outCloseEvent(self):\r\n pass", "def __window_back(self):\n pass", "def hoverLeaveEvent(self, moveEvent):\n self.setCursor(Qt.ArrowCursor)\n super().hoverLeaveEvent(moveEvent)", "def __onclosing(self):\n self.window.destroy()", "def closeEvent(self, e):\n QApplication.restoreOverrideCursor()\n e.accept()", "def on_palette_close(self):\r\n pass", "def closeEvent(self, event):\n print \"Window closed\"\n event.ignore()\n print \"Hide window.\"\n self.subWindow.hide()", "def end():\n curses.endwin()", "def ev_windowrestored(self, event: WindowEvent) -> None:", "def closeEvent(self, event):\n log.info(\"Received window close event.\")\n self.main.app_is_exiting()\n super().closeEvent(event)\n return", "def window_close(self, item, e=None):\n\tif self.handler:\n self.handler.win_close(\"about\", None)\n\treturn 1", "def OnClose(self, event):\r\n pos.app.main.Exit()", "def close_window(self):\n # Window - END\n self.root.destroy()", "def end_screen(win):\n\tpass", "def leave_loose_game(self):\n self.update_json_file()\n self.end = True\n self.root.destroy()\n GameOver()", "def leave(self):\n self.pleaseQuit=1", "def OnMouseUp(self, evt):\n self.ReleaseMouse()", "def gui_event(self, evt, val):\n\n if evt == Draw.ESCKEY:\n self.callback = None\n self.gui_exit()\n\n Draw.Redraw(1)", "def onSplitterWindowUnsplit(self, event):\r\n\t\tevent.Veto()", "def exit(self):\n if self.window:\n self.window.close()", "def on_unhovered(self):\n if not self.is_selected:\n self.colour = self.normal_colour\n self.is_hovered = False\n self.redraw()", "def callback_destroy( self ):\r\n self.winRunning = False\r\n self.rootWin.destroy()\r\n exit()", "def 
received_CLOSING(self):\n\n\t\tself.player_frame.notify_rival_closing()\n\t\tself.player_frame.master.go_to_previous_screen(False)", "def ev_windowmoved(self, event: WindowMoved) -> None:", "def end_blur(self):\r\n super(Defocus, self)._end()", "def leave_win_game(self):\n self.end = True\n self.canevas.config(bg='black')\n self.canevas.itemconfig(self.ball.ball, fill='black')\n self.canevas.itemconfig(self.paddle.paddle, fill='black')\n self.canevas.update()\n time.sleep(2)\n self.canevas.config(bg='light blue')\n self.canevas.itemconfig(self.ball.ball, fill='red')\n self.canevas.itemconfig(self.paddle.paddle, fill='grey')\n self.brick.next_level()", "def on_pre_leave(self):\n Logger.info('Application: Leaving the Combat screen.')\n self.updater.cancel() # Clear the event interval.\n self.stop_soundtrack()", "def closeEvent(self, event):\n event.accept() # let the window close\n self.returnHome()", "def mouseReleaseEvent(self, event):\n self.box_begin = self.begin\n self.box_end = event.pos()\n self.begin = event.pos()\n self.end = event.pos()\n if not self.permanent_show:\n self.update()", "def OnCloseWindow(self, event):\r\n self.data.close()\r\n sizes[self.data.__class__.__name__] = self.GetSizeTuple()\r\n self.Destroy()", "def handle_close(event):\n self.fig.canvas.stop_event_loop()\n self.raiseAMessage('Closed Figure')", "def on_close(self, event):\n # Save pos and size\n x, y = self.GetPosition()\n width, height = self.GetSize()\n self.__config.set('window.x', x)\n self.__config.set('window.y', y)\n self.__config.set('window.width', width)\n self.__config.set('window.height', height)\n\n # Style\n style = self.GetWindowStyle()\n self.__config.set('window.style', style)\n\n self.__config.save()\n\n # Stop monitoring\n self.__cor.stop_monitor()\n\n # Kill graph as it seems to be stopping script from ending\n self.__graph = None\n\n # End\n event.Skip()", "def menu_quit (self,widget,data):\n\t\tself.window.delete_event()", "def quit_window(self, value=None):\n exit()", "def end(self) -> None:\n unicurses.endwin()", "def keyboard_end_game_control(self, app):\n mx, my = pg.mouse.get_pos()\n click = False\n\n game_view = self.get_view.game_view\n\n for event in pg.event.get():\n if event.type == pg.QUIT:\n pg.quit()\n sys.exit()\n\n if event.type == pg.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n\n if game_view.back_menu_button.collidepoint((mx, my)):\n if click:\n app.end_game_running = False", "def onQuit(self, event):\n\n\t\tself.onClose(None)", "def _exit_visualization(self,millis=1):\n k = cv2.waitKey(millis) & 0xFF\n if k == ord('q'): # wait for 'q' key to exit\n print(\"> User exit request\")\n self.stopped = True\n elif k == ord('s'): # wait for 's' key to save screenshot\n self.save_image()", "def end(self):\n #self.manipulator_restore()\n #self.header_text_restore()\n #self.cursor_modal_restore()\n pass", "def __exit__(self, exc_type, exc_value, exc_tb) -> None:\n lib.wlr_seat_keyboard_end_grab(self._seat)", "def exit(self):\n self.root.grab_release()\n self.root.destroy()", "def event_handler(self, event):\n if event.type == pygame.QUIT:\n self.exit()\n elif event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n self.exit()", "def leaveEvent(self, event):\n self.ga.stop()\n self.start_animation(self.SLOW_DURATION)", "def menuExit(self, event):\n \n self.onClose(event)\n return", "def hide_cursor():\n props = WindowProperties()\n props.setCursorHidden(True)\n # somehow the window gets undecorated after hiding the cursor\n # so we reset it here to the 
value we need\n #props.setUndecorated(settings.fullscreen)\n base.win.requestProperties(props)", "def mouseReleaseEvent(self, event):\n super(QIntSpinner3DS, self).mousePressEvent(event)\n super(QIntSpinner3DS, self).mouseReleaseEvent(event)\n self.unsetCursor()", "def leave(self):\n print('%r: leaving', self)\n self.telepathy_text_chan.Close()", "def EndDrawingOnTop(*args, **kwargs):\n return _gdi_.ScreenDC_EndDrawingOnTop(*args, **kwargs)", "def ev_windowfocusgained(self, event: WindowEvent) -> None:", "def finish(self):\n self.ignoreAll()\n if self.mouseTask:\n taskMgr.remove(self.mouseTask)\n self.mouseTask=None\n if self.pyga_joytask:\n taskMgr.remove(self.pyga_joytask)\n self.pyga_joytask=None\n props = WindowProperties()\n props.setCursorHidden(False)\n props.setMouseMode(WindowProperties.MAbsolute)\n base.win.requestProperties(props)", "def ev_windowrestored(self, event: tcod.event.WindowEvent) -> T | None:", "def __on_close(self):\n # Release the resource and\n # close the windows\n LOGGER.info(\"closing...\")\n self.__quit.set()\n self.__detect.end()\n self.root.quit()", "def callback_disconnect():\n # if Networking.get_instance().is_host:\n logger.warning(\"It seems that client is not connected...\")\n Networking.get_instance().disconnect()\n EventQueue.post(CustomEvent(ChangeSceneEnum.DISCONNECT))", "def end_box(self):\n self.end_hooks.fire(EndEvent())", "def quit (event=None):\n root.destroy ()" ]
[ "0.7683408", "0.7552822", "0.75221896", "0.7349047", "0.73260576", "0.72890663", "0.7207108", "0.7075997", "0.6955286", "0.6878195", "0.68585074", "0.67361873", "0.67259437", "0.66449815", "0.65735984", "0.6550799", "0.6520514", "0.646866", "0.64424396", "0.64258564", "0.6395479", "0.6319417", "0.63185894", "0.6301293", "0.62637985", "0.62527883", "0.62325126", "0.6204774", "0.619349", "0.6192217", "0.6137034", "0.6119395", "0.60914874", "0.60856706", "0.60797316", "0.60728186", "0.6068099", "0.6033728", "0.6032295", "0.602194", "0.6008509", "0.5972886", "0.5958805", "0.5958701", "0.5899707", "0.5893527", "0.5878385", "0.5877564", "0.58719945", "0.5849084", "0.584801", "0.58396673", "0.5793359", "0.5787086", "0.5773183", "0.5772032", "0.57676333", "0.57623434", "0.5754895", "0.5748786", "0.5738811", "0.57138836", "0.5698144", "0.56976277", "0.56688035", "0.563307", "0.56264067", "0.5616662", "0.5606243", "0.559866", "0.55930954", "0.5584758", "0.5584176", "0.55828613", "0.55817", "0.558087", "0.5579776", "0.5571438", "0.55582744", "0.55525964", "0.5550248", "0.554798", "0.5535286", "0.55160373", "0.5502611", "0.5501524", "0.549986", "0.5496995", "0.54965943", "0.54913455", "0.54870707", "0.5478854", "0.5473291", "0.5471571", "0.5471011", "0.546923", "0.54553884", "0.54507905", "0.54474974", "0.5445375" ]
0.6011161
40
Called when a key is pressed and the window has the focus. Adjust method signature as appropriate for callback.
def key_press_event(self, widget, event):
    # get keyname or keycode and translate to ginga standard
    # keyname =
    # keycode =
    keyname = ''  # self.transkey(keyname, keycode)
    self.logger.debug("key press event, key=%s" % (keyname))
    return self.make_ui_callback('key-press', keyname)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _on_key_press(self, event):", "def ev_windowfocusgained(self, event: WindowEvent) -> None:", "def _on_key_release(self, event):", "def on_key(self, _win, key, _scancode, action, _mods):\n if action == glfw.PRESS or action == glfw.REPEAT:\n if key == glfw.KEY_ESCAPE or key == glfw.KEY_Q:\n glfw.set_window_should_close(self.win, True)\n if key == glfw.KEY_W:\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, next(self.fill_modes))\n\n self.key_handler(key)", "def ev_KEYUP(self, event):", "def key_handler(self):\n \n self.pressed = waitKey(1) & 255 #wait for keypress for 10 ms\n if self.pressed == 27: #exit program on 'esc'\n print \"exiting...\"\n self.camera.cam.release()\n exit()\n \n for key in self.key_controls.keys():\n if chr(self.pressed) == key:\n self.key_controls[key]()", "def ev_windowfocusgained(self, event: tcod.event.WindowEvent) -> T | None:", "def HandleKeyboardInput(self):\n key = yg.getKeyPress()\n if key == \"Return\":\n self.buttons[len(self.buttons) - 1].Click()", "def key_press(self):\n self.screen.nodelay(True)\n return self.screen.getch()", "def handle_event(self, event, window):\n raise NotImplementedError('handle_key')", "def key_handler(self, event):\n if event.type == pygame.KEYUP: \n self.done = True", "def on_key_event(self, key):\n pass", "def on_key_press(self, event):\n\n #print(\"you pressed {}\".format(event.key))\n key_press_handler(event, self.canvas, self.toolbar)", "def key_press_event(self, event):\n pass", "def _keyboard_input(self, pressedKey=None):\n\n if msvcrt.kbhit():\n if pressedKey == None:\n pressedKey = msvcrt.getch()\n if pressedKey == b'x' or pressedKey == b'X':\n self._goodbye()\n if pressedKey == b'c' or pressedKey == b'C':\n self.cameraOutput = not self.cameraOutput", "def on_key_press(self, key):\n if key == 'esc':\n self.backtrack()\n elif key in ['f1', '?']:\n self.open(HelpPane(self._get_current_pane()))", "def on_key_press(self, key, modifiers):\n KeyboardController.lastKey = key;\n KeyboardController.keys.add(key);\n if key == arcade.key.ESCAPE:\n # User hits f. Flip between full and not full screen.\n self.set_fullscreen(not self.fullscreen)\n\n # Get the window coordinates. 
Match viewport to window coordinates\n # so there is a one-to-one mapping.\n width, height = self.get_size()\n self.set_viewport(0, width, 0, height)", "def on_key(self, _window, key, _scancode, action, _mods):\n is_press = action == glfw.PRESS or action == glfw.REPEAT\n if is_press and (key == glfw.KEY_ESCAPE or key == glfw.KEY_Q):\n glfw.set_window_should_close(self.window, True)\n\n if action != glfw.REPEAT:\n self.key_handler(key, is_press)", "def keyboard_on_key_up(self, window, keycode):\n if 'shift' in keycode[1]:\n self.shift_down = False", "def __master_key_callback(self, window, key, scancode, action, mods):\n self.call_key_callback(window, key, scancode, action, mods, keyboard=self)", "def on_key(window, key, scancode, action, mods):\n if action != glfw.PRESS:\n return\n \n global controller\n\n if key == glfw.KEY_SPACE:\n controller.fillPolygon = not controller.fillPolygon\n\n elif key == glfw.KEY_ESCAPE:\n glfw.set_window_should_close(window, True)\n\n # Si detecta la tecla [Q] cambia el estado del efecto 1 : zoom\n elif key == glfw.KEY_Z:\n controller.effect1 = not controller.effect1\n\n # Si detecta la tecla [W] cambia el estado del efecto 2 : corte\n elif key == glfw.KEY_C:\n controller.effect2 = not controller.effect2\n\n else:\n print('Unknown key')", "def ev_KEYDOWN(self, event):", "def on_press(key):\n try:\n # gets pressed key char value and searches it from dict with get method.\n mapped_key = key_mappings.get(key.char) # gets value and type tuple or None\n if mapped_key:\n module.pressed_key = mapped_key\n except AttributeError:\n traceback.print_exc()\n except KeyboardInterrupt:\n print(f\"\\n{module.current_time()} Application stopped\")", "def keyboard_on_key_up(self, window, keycode):\n if super(SelectableLayout, self).keyboard_on_key_up(window, keycode):\n return True\n if self.select_with_key_up(window, keycode):\n return True\n return False", "def wait_for_key_pressed():\n msg_surface = BASICFONT.render('Press a key to play', True, GRAY)\n msg_rect = msg_surface.get_rect()\n msg_rect.topleft = (WINDOWWIDTH - 200, WINDOWHEIGHT - 30)\n DISPLAYSURF.blit(msg_surface, msg_rect)\n pygame.display.update()", "def on_press(key):\n global key_pressed\n try:\n if key == keyboard.Key.enter:\n key_pressed = True\n # Stop listener\n return False\n except AttributeError:\n print('Unknown key {0} pressed'.format(key))", "def on_key(self, _win, key, _scancode, action, _mods):\n if action == glfw.PRESS or action == glfw.REPEAT:\n if key == glfw.KEY_ESCAPE or key == glfw.KEY_Q:\n glfw.set_window_should_close(self.win, True)\n if key == glfw.KEY_W:\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, next(self.fill_modes))\n if key == glfw.KEY_R:\n os.system(\"pkill aplay\")\n os.system(\"aplay T-Rex.wav &\")\n glfw.set_time(0)\n if key == glfw.KEY_N:\n self.normal_mapping = 1 - self.normal_mapping", "def _handle_key_press(self, event: pygame.event.Event) -> None:\n if event.key == K_0:\n self._update_input('0')\n elif event.key == K_1:\n self._update_input('1')\n elif event.key == K_2:\n self._update_input('2')\n elif event.key == K_3:\n self._update_input('3')\n elif event.key == K_4:\n self._update_input('4')\n elif event.key == K_5:\n self._update_input('5')\n elif event.key == K_6:\n self._update_input('6')\n elif event.key == K_7:\n self._update_input('7')\n elif event.key == K_8:\n self._update_input('8')\n elif event.key == K_9:\n self._update_input('9')\n elif event.key == K_BACKSPACE:\n self._update_input('BACKSPACE')", "def window_handler(self):\n self.open_window()\n 
cv2.setMouseCallback(self.window_name, self.click_handler)\n finish = False\n while not finish:\n\n key = cv2.waitKey(0)\n\n finish = self.manage_key(key)", "def keyPressEvent(self, event):\n self.game_engine.input_manager.keyPressEvent(event)", "def bindKeys(self):\r\n self.c.bind(\"<Button-1>\",self.seek)\r\n self.c.bind(\"<MouseWheel>\",self.app.zoom)\r\n self.c.bind(\"<Button-3>\",self.peek)", "def __window_focus(self):\n pass", "def keypress(self, size, key):\n pos = self.get_focus()[1]\n _ll = len(self.body)\n if (pos <= 0 and key == 'up') or (pos >= _ll-1 and key == 'down'):\n return\n else:\n return super(ClosedListBox, self).keypress(size, key)", "def activate(self):\n appuifw.app.exit_key_handler = self.exit_key_handler", "def keypress(self, event):\n events = {\n '1': lambda: self.slot.set(1),\n '2': lambda: self.slot.set(2),\n '6': lambda: self.digits.set(6),\n '8': lambda: self.digits.set(8),\n }\n try:\n events[event.keysym]()\n except KeyError:\n pass\n if event.keysym in ('1', '2', 'Return', 'Enter'):\n self.get_totp()\n self.root.wm_withdraw()", "def on_window_key_press_event(self, widget, data=None):\n\n if not self.enabled:\n return\n\n if data.keyval == gtk.keysyms.plus:\n self.webclient.set_resize_factor(self.webclient.RESIZE_FACTOR + 0.1)\n self.refresh()\n elif data.keyval == gtk.keysyms.minus:\n if self.webclient.RESIZE_FACTOR > 1.2:\n self.webclient.set_resize_factor(self.webclient.RESIZE_FACTOR - 0.1)\n self.refresh()\n elif data.keyval == gtk.keysyms.F5:\n self.refresh()\n elif data.keyval == gtk.keysyms.Left:\n self.on_btPagePrev_clicked(widget)\n elif data.keyval == gtk.keysyms.Right:\n self.on_btPageNext_clicked(widget)\n elif data.keyval == gtk.keysyms.Up:\n self.on_btSubPagePrev_clicked(widget)\n elif data.keyval == gtk.keysyms.Down:\n self.on_btSubPageNext_clicked(widget)\n elif data.keyval in (gtk.keysyms.q, gtk.keysyms.Q):\n self.on_window_destroy(widget)\n elif data.state & gtk.gdk.CONTROL_MASK and data.keyval in (gtk.keysyms.l, gtk.keysyms.L):\n self.pageNumber.grab_focus()", "def k_press(self, key: KKey):\n pass", "def handle_keypress(self,key):\r\n if len(key) == 0:\r\n return True\r\n \r\n # Whenever we start typing scroll to the bottom\r\n self.scroll_bottom() \r\n\r\n if self.cursor_visible:\r\n if ord(key) == BACKSPACE_KEY: # If backspace and we aren't at start point of buffer, remove one char\r\n if self.cursor_absolute_position > self.cursor_min_length:\r\n self.lines[-1] = self.lines[-1][0:-1]\r\n self.entered_text_buffer = self.entered_text_buffer[0:-1]\r\n self.cursor_absolute_position-=1\r\n elif ord(key) == RETURN_KEY:\r\n # Hit return, count it as an entered command and return\r\n return False \r\n else:\r\n self.print_text(key)\r\n self.cursor_absolute_position+=1\r\n self.entered_text_buffer += key\r\n\r\n return True", "def on_key_press(self, key: int, modifiers: int) -> None:\n if key == arcade.key.ESCAPE:\n self.window.show_view(self.game_view)", "def keypress(self, key, state=None):\n\n\t\tself._interface.keypress(key, state)", "def _onkeyrelease(self, fun, key):\n if fun is None:\n self.cv.unbind(\"<KeyRelease-%s>\" % key, None)\n else:\n def eventfun(event):\n fun()\n self.cv.bind(\"<KeyRelease-%s>\" % key, eventfun)", "def _onkeypress(self, fun, key=None):\n if fun is None:\n if key is None:\n self.cv.unbind(\"<KeyPress>\", None)\n else:\n self.cv.unbind(\"<KeyPress-%s>\" % key, None)\n else:\n def eventfun(event):\n fun()\n if key is None:\n self.cv.bind(\"<KeyPress>\", eventfun)\n else:\n self.cv.bind(\"<KeyPress-%s>\" % key, 
eventfun)", "def XPSetKeyboardFocus(inWidget):\n pass", "def onKey(self,event):\n \n ch = event.char.lower()\n \n if ch in (self.text[0].lower(),'\\n','\\r'):\n self.okButton()\n \n return \"break\"", "def onKeyDown(self, eventDict):\n key = eventDict['key']\n modifiers = eventDict['mod']\n\n if modifiers:\n # Always process control keys\n self.keyHandler.onKeyDown(self, key, modifiers)\n elif self.popupActive:\n # Escape pressed -- hide the popup\n if key == 27:\n self.mainGrid.hidePopup()\n self.popupActive = False\n else:\n # TODO: Send key to popup\n pass\n else:\n self.keyHandler.onKeyDown(self, key, modifiers)", "def on_keydown(self, keys, game) -> None:\n pass", "def handle_keydown(self, key, string):\r\n if self.get_visible():\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_keydown(key, string):\r\n return True\r\n return False", "def _on_keyboard(self, instance, key, scancode, codepoint, modifiers, *args):\r\n # print(\"Keyboard pressed! {}, {}, {}, {}\".format(key, scancode, codepoint, modifiers))\r\n if codepoint == 's' and 'ctrl' in modifiers:\r\n toast('Search by Name, Ingredient, or Tag', 3)\r\n self.search_focus = True", "def handle_keydown(self, key, string):\r\n return app.App.handle_keydown(self, key, string)", "def dialog_keypressed_cb(widget=None, event=None):\n if event.keyval == gtk.keysyms.Return:\n widget.response(gtk.RESPONSE_OK)\n return True\n elif event.keyval == gtk.keysyms.Escape:\n widget.response(gtk.RESPONSE_CANCEL)\n return True\n return False", "def keypress(self, key): # pragma: no cover\n if key == \"s\":\n self.screenshot()\n\n elif key == \"q\" or key == \"Esc\":\n self.close()\n\n elif key == \"c\":\n self._print_camera()", "def getKeyDown(self, key):\n if key in self.newlyActiveKeys:\n self.newlyActiveKeys.remove(key)\n self.activeKeys.append(key)\n return True\n else:\n return False", "def handle_keyhold(self, key, string):\r\n if self.get_visible():\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_keyhold(key, string):\r\n return True\r\n return False", "def on_key_down(self, keycode, keyvalue, event):\n if self.__click == True and (len(gtk.gdk.keyval_name(event.keyval)) < 2 or gtk.gdk.keyval_name(event.keyval) == \"space\"):\n if gtk.gdk.keyval_name(event.keyval) == \"space\":\n self.__text = self.__text + \" \";\n else:\n self.__text = self.__text + gtk.gdk.keyval_name(event.keyval);\n if gtk.gdk.keyval_name(event.keyval) == \"BackSpace\" and self.__text:\n self.__text = self.__text[:-1];\n if gtk.gdk.keyval_name(event.keyval) == \"Return\" or self.__click == False and self.__text:\n self.addNew();\n\t\t\t#screenlets.show_message(self, \"Committed\");", "def on_key_press(self, key, _modifiers): \n if key == arcade.key.ESCAPE: # resume game\n self.window.show_view(self.instruction_view)", "def ev_windowfocuslost(self, event: WindowEvent) -> None:", "def on_key_press(self, key, modifiers):\n player_controller.input_press(self, key, self.player)", "def key_event(self, key: Any, action: Any):\n pass", "def perform_keyboard_actions(self):\n self.handle_keyboard_input()\n self.grid.next_frame()", "def HandleFocusIn(self, event: tkEvent):\n pass", "def _program_key(self):\n prg_dialogue = _ProgrammingWindow(self)\n self.root.wait_window(prg_dialogue.top)", "def _callbackKeyButton(self, channel):\n if self._myKey.readKeyButton(channel) == 0:\n self.onKeyButtonDown(channel)\n return\n\n if self._myKey.readKeyButton(channel) == 1:\n self.onKeyButtonUp(channel)\n return", "def 
keypress_signal_from_behaviors_coding_map(self, event):\n self.keyPressEvent(event)", "def receiveKey(self, key):\n if key == curses.KEY_UP:\n if self.__selectedRow > 0:\n self.__printRow(self.__selectedRow)\n self.__selectedRow -= 1\n if self.__selectedRow < self.__firstShownLine:\n self.scrollUp()\n else:\n self._window.attron(curses.A_BOLD)\n self.__printRow(self.__selectedRow)\n self._window.attroff(curses.A_BOLD)\n else:\n curses.beep() \n elif key == curses.KEY_DOWN:\n if self.__selectedRow < len(self.__data) - 1:\n self.__printRow(self.__selectedRow)\n self.__selectedRow += 1\n if self.__selectedRow == \\\n self.__firstShownLine + self.height - 2:\n self.scrollDown()\n else:\n self._window.attron(curses.A_BOLD)\n self.__printRow(self.__selectedRow)\n self._window.attroff(curses.A_BOLD)\n else:\n curses.beep()", "def onkey(self, fun, key):\n if fun is None:\n if key in self._keys:\n self._keys.remove(key)\n elif key not in self._keys:\n self._keys.append(key)\n self._onkeyrelease(fun, key)", "def keyboard_on_key_down(self, window, keycode, text, modifiers):\n if 'shift' in keycode[1]:\n self.shift_down = True", "def on_key_press(self, event):\n # F2 for starting new game\n if event.key == 'f2':\n self.draw_minefield()", "def on_key_press(self, key, callback):\n self._key_press_mappings.setdefault(key, []).append(callback)", "def keypress(self):\n k = self.__screen.getch()\n ret = None\n if k == curses.KEY_ENTER or (k < 256 and chr(k) == '\\n'):\n ret = self.__textPad.gather()\n self.__textWin.clear()\n else:\n self.__textPad.do_command(k)\n\n self.__update()\n return ret", "def on_press(key):\n try:\n if key.char.upper() == (cfg_cb_key_c.upper()):\n logger.info(\"Clearing clipboard memory.\")\n global browser\n browser.clear_input(cfg_web_anchor)\n except AttributeError:\n pass", "def handle_keyhold(self, key, string):\r\n return app.App.handle_keyhold(self, key, string)", "def key_release_event(self, event):\n pass", "def on_key_press(self, symbol, modifiers):\n if symbol == key.ESCAPE:\n self.set_exclusive_mouse(False)\n else:\n self.gamestatemanager.peek().on_key_press(symbol, modifiers, self.config_data[\"controls\"])", "def keyPressed():\n global PLAY\n if (key == ' '):\n PLAY = not PLAY\n if (key == 'r'):\n init()", "def ev_keydown(self, event: KeyDown) -> None:", "def enterKey_cb(widget, dialog):\n dialog.response(gtk.RESPONSE_ACCEPT)", "def focus_event(self, widget, event, hasFocus):\n return self.make_callback('focus', hasFocus)", "def on_press(self, key):\n try:\n if 'up' == key.name:\n if self.index > 0:\n self.index -= 1\n else:\n self.index = self.count\n elif 'down' == key.name:\n if self.index < self.count:\n self.index += 1\n else:\n self.index = 0\n elif key == Key.enter:\n # Stop listener\n self.flag += 1\n return False\n except:\n return False", "def keyPressEvent(self, event):\n self.Serial.send_keystroke(event.text())", "def on_key_press(self, pressed, modifiers):\n if pressed == key.ESCAPE: self.save_world(); self.close(); log.INFO(\"MineGlet was closed!\")\n elif pressed == key.E: self.mouse_lock = not self.mouse_lock", "def cb_key_pressed(data, signal, signal_data):\n global last_signal_time\n last_signal_time = time.time()\n if signal_data == \"\\x01[\":\n # In 50ms, check if any other keys were pressed. 
If not, it's Esc!\n weechat.hook_timer(50, 0, 1, \"cb_check_esc\",\n \"{:f}\".format(last_signal_time))\n return weechat.WEECHAT_RC_OK", "def textbox_key_pressed(self, widget, event, Data=None):\n\t\tif event.keyval == gtk.gdk.keyval_from_name('Return') or \\\n\t\tevent.keyval == gtk.gdk.keyval_from_name('KP_Enter'):\n\t\t\tself.add_item_to_list(self.current_list)\n\t\t\treturn True", "def wait_keydown(self):\n while True:\n self.clock.tick(self.fps)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n self.running = False\n return\n if event.type == pygame.KEYDOWN:\n return", "def handle_keyup(self, key, string):\r\n if self.get_visible():\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_keyup(key, string):\r\n return True\r\n return False", "def _on_key_press(self, key):\n if key is self.TRIGGER_KEY and not self.do_record:\n print(\"Start Recording...\")\n self.do_record = True", "def on_press(self, keyname):\n if self.keydown:\n return\n try:\n self.keydown = True\n keyname = str(keyname).strip('\\'')\n log.info('KEY PRESS ' + keyname)\n if keyname == 'Key.esc':\n self.toggle_tracking(False)\n # self.tracking = False\n self.drone.land()\n self.drone.quit()\n\n \n cv2.destroyAllWindows() \n os._exit(0)\n \n if keyname in self.controls_keypress:\n self.controls_keypress[keyname]()\n except AttributeError:\n log.debug(f'special key {keyname} pressed')", "def parse_keypress(self, wid, event):\n\n keyname = Gdk.keyval_name(event.keyval)\n if keyname == \"Control_R\": # Key for query\n self.get_output()\n elif keyname == \"Page_Up\": # Goes to previous query\n tot = len(self.history)\n if -(self.prompt_cursor) != tot:\n self.prompt_cursor -= 1\n text = self.history[self.prompt_cursor]\n self.current_prompt.set_text(text)\n\n elif keyname == \"Page_Down\": # Drops to next query\n if (self.prompt_cursor) != -1:\n self.prompt_cursor += 1\n text = self.history[self.prompt_cursor]\n self.current_prompt.set_text(text)", "def events(self, instance, keyboard):\n if keyboard in (1001, 27):\n if self.manager_open:\n self.file_manager.back()\n return True", "def AcceptsFocus(self):\r\n\r\n # overridden base class method, allows this ctrl to\r\n # participate in the tab-order, etc. 
It's overridable because\r\n # of deriving this class from wx.PyScrolledWindow...\r\n return True", "def __keystroke(self, event):\n if event.state - self.__previous_state == 4: # means that the Control key is pressed\n pass # do nothing if Control key is pressed\n else:\n if event.char in [' ', 'f']:\n return self.parent_class.finish_polygons_key()\n self.__previous_state = event.state # remember the last keystroke state\n # Up, Down, Left, Right keystrokes\n if event.keycode in [68, 39, 102]: # scroll right: keys 'D', 'Right' or 'Numpad-6'\n self.__scroll_x('scroll', 1, 'unit', event=event)\n elif event.keycode in [65, 37, 100]: # scroll left: keys 'A', 'Left' or 'Numpad-4'\n self.__scroll_x('scroll', -1, 'unit', event=event)\n elif event.keycode in [87, 38, 104]: # scroll up: keys 'W', 'Up' or 'Numpad-8'\n self.__scroll_y('scroll', -1, 'unit', event=event)\n elif event.keycode in [83, 40, 98]: # scroll down: keys 'S', 'Down' or 'Numpad-2'\n self.__scroll_y('scroll', 1, 'unit', event=event)", "def setWindowKey(key='return'):\n wdict = {'click':'NONE','return':'RETURN','escape':'ESCAPE'}\n dislin.winkey(wdict[key])", "def on_key_down(self, keyboard, keycode, text, modifiers):\n Logger.debug('KeyDown Event: Keycode[1] is \"{}\"'.format(keycode[1]))\n self.keysPressed.add(keycode[1])", "def keyPressEvent(self, event):\r\n if event.key() == Qt.Key_Return:\r\n self.manejo_boton_2()", "def keyPressEvent(self, event):\r\n if event.key() == Qt.Key_Return:\r\n self.manejo_boton_2()", "def whait_for_keys_press(prompt, key1, key2, key3, key4):\n print(prompt)\n while True:\n Key_pressed = curses.wrapper(main)\n #if Key_pressed != (-1): print(Key_pressed) # displays number of key\n if Key_pressed == key1:\n break\n if Key_pressed == key2:\n break\n if Key_pressed == key3:\n break \n if Key_pressed == key4:\n break \n time.sleep(0.1)\n return Key_pressed", "def keyevent(self, keyname):\n self.adb.key_events(keyname)", "def _check_keydown_events(self, event):\n if event.key == pygame.K_ESCAPE:\n sys.exit()\n if event.key == pygame.K_RETURN:\n self.main.switch_gamestate(self, self.main.game_screen)", "def keyevent(keyname, **kwargs):\n G.DEVICE.keyevent(keyname, **kwargs)\n delay_after_operation()", "def press_key(self, event):\n if self.active:\n keycode = self.mapping[event.pin_num]\n while self.busy:\n sleep(0.01)\n self.busy = True\n self.send_key(keycode)\n self.busy = False", "def on_key_release(self, symbol, modifiers):\n self.gamestatemanager.peek().on_key_release(symbol, modifiers, self.config_data[\"controls\"])", "def on_key_press(self, key: int, modifiers: int):\r\n self.held_keys.add(key)\r\n\r\n if key == arcade.key.SPACE:\r\n pass" ]
[ "0.7016208", "0.6989728", "0.696532", "0.6910112", "0.6764562", "0.67168427", "0.6626978", "0.6623902", "0.6604723", "0.6576953", "0.6570364", "0.6550427", "0.65445656", "0.65329766", "0.6531511", "0.65207005", "0.6487252", "0.64807296", "0.64629245", "0.6444512", "0.64369535", "0.6420311", "0.6393491", "0.6381775", "0.63517356", "0.63401896", "0.6324497", "0.6304451", "0.62974566", "0.6273223", "0.6255711", "0.62506133", "0.62386566", "0.6222792", "0.6212474", "0.6209302", "0.6205867", "0.61938095", "0.6163551", "0.61630964", "0.61580515", "0.61567706", "0.6145868", "0.6109644", "0.6106833", "0.6103556", "0.6098672", "0.609818", "0.6082561", "0.60664123", "0.60590154", "0.6056102", "0.604055", "0.60224783", "0.60204273", "0.6014559", "0.60113037", "0.60056955", "0.59981686", "0.5993691", "0.59844047", "0.59817994", "0.5975429", "0.5971989", "0.59561676", "0.5955412", "0.59518623", "0.5950872", "0.59409404", "0.59392166", "0.59363145", "0.59249806", "0.5923488", "0.5921538", "0.59175485", "0.5913432", "0.5907882", "0.58847386", "0.5878247", "0.5867846", "0.5862891", "0.5859596", "0.58381695", "0.5835928", "0.5827969", "0.5827893", "0.5826714", "0.5825181", "0.5821081", "0.58117694", "0.58079505", "0.58078057", "0.5802213", "0.5802213", "0.5800589", "0.57943916", "0.5792131", "0.5779264", "0.57738036", "0.5773536", "0.5771585" ]
0.0
-1
Called when a key is released after being pressed. Adjust method signature as appropriate for callback.
def key_release_event(self, widget, event):
    # get keyname or keycode and translate to ginga standard
    # keyname =
    # keycode =
    keyname = ''  # self.transkey(keyname, keycode)
    self.logger.debug("key release event, key=%s" % (keyname))
    return self.make_ui_callback('key-release', keyname)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def key_release_event(self, event):\n pass", "def _on_key_release(self, event):", "def on_key_release(self, key, modifiers):\n player_controller.input_release(key, self.player)", "def keyReleaseEvent(self, event):\n self.game_engine.input_manager.keyReleaseEvent(event)", "def _onkeyrelease(self, fun, key):\n if fun is None:\n self.cv.unbind(\"<KeyRelease-%s>\" % key, None)\n else:\n def eventfun(event):\n fun()\n self.cv.bind(\"<KeyRelease-%s>\" % key, eventfun)", "def on_release(self, keyname):\n self.keydown = False\n keyname = str(keyname).strip('\\'')\n log.info('KEY RELEASE ' + keyname)\n if keyname in self.controls_keyrelease:\n key_handler = self.controls_keyrelease[keyname]()", "def on_key_release(self, symbol, modifiers):\n self.gamestatemanager.peek().on_key_release(symbol, modifiers, self.config_data[\"controls\"])", "def __handleKeyUp(self, key):\n arg = -1\n if self._keysPressed > 0:\n self._keysPressed -= 1\n if self._keyDown == key and self._keysPressed == 0:\n arg = self._keyMap[key]\n \n if self._keysPressed == 0:\n self._keyDown = None\n \n messenger.send(KeyCodes.KEY_UP_EVENT, [arg])", "def on_key_release(self, key, modifiers):\n pass # stop animation", "def on_key_up(self, keyboard, keycode):\n Logger.debug('KeyUp Event: Keycode[1] is \"{}\"'.format(keycode[1]))\n self.keysPressed.remove(keycode[1])", "def on_key_release(self, key, modifiers):\n\n keyName = pyglet.window.key.symbol_string(key)\n logging.debug('key \"%s\" released' % keyName)\n if keyName == \"SPACE\": # Pick up or drop the selected piece.\n if self.current.selector.heldPiece is None:\n self.current.selector.pickUp()\n else:\n piece = self.current.selector.heldPiece\n col = self.current.selector.currentCol\n if self.currentBoard.canAddPiece(piece, col):\n self.current.selector.dropPiece()\n self.gameManager.movePiece(piece, col)\n else: #move the piece back to where it is\n self.current.selector.dropPiece()\n\n if keyName == \"RETURN\": # Call pieces.\n self.gameManager.callPieces()\n if keyName == \"TAB\": # Use mana\n if self.gameManager.useMana(): #if the logic works\n #TODO: \n pass\n if keyName == \"BACKSPACE\": \n # Delete a piece. 
Logic check is done by gameManager\n pos = [self.current.selector.currentRow, self.current.selector.currentCol]\n self.gameManager.deletePiece(pos)\n self.current.selector.refresh()\n if keyName == \"END\": # End the turn.\n self.gameManager.endTurn()", "def on_release(self, released_key ):\n if released_key is not None:\n if isinstance(released_key, pynput.keyboard.KeyCode) and released_key.char is not None:\n released_key = released_key.char.lower()\n elif isinstance(released_key, pynput.keyboard.Key):\n released_key = released_key.name\n self.keys_set.discard(released_key)", "def key_handler(self, event):\n if event.type == pygame.KEYUP: \n self.done = True", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP or key == arcade.key.W:\n self.up_pressed = False\n self.jump_needs_reset = False\n elif key == arcade.key.DOWN or key == arcade.key.S:\n self.down_pressed = False\n elif key == arcade.key.LEFT or key == arcade.key.A:\n self.left_pressed = False\n elif key == arcade.key.RIGHT or key == arcade.key.D:\n self.right_pressed = False\n\n if key == arcade.key.Q:\n self.shoot_pressed = False\n\n self.process_keychange()", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP or key == arcade.key.W:\n self.up_pressed = False\n self.jump_needs_reset = False\n elif key == arcade.key.DOWN or key == arcade.key.S:\n self.down_pressed = False\n elif key == arcade.key.LEFT or key == arcade.key.A:\n self.left_pressed = False\n elif key == arcade.key.RIGHT or key == arcade.key.D:\n self.right_pressed = False\n elif key == arcade.key.SPACE:\n self.pc.punching = False\n\n self.process_keychange()", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP:\n self.up_pressed = False\n elif key == arcade.key.DOWN:\n self.down_pressed = False\n elif key == arcade.key.LEFT:\n self.left_pressed = False\n elif key == arcade.key.RIGHT:\n self.right_pressed = False", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP:\n self.up_pressed = False\n elif key == arcade.key.DOWN:\n self.down_pressed = False\n elif key == arcade.key.LEFT:\n self.left_pressed = False\n elif key == arcade.key.RIGHT:\n self.right_pressed = False", "def on_key_release(self, key: int, modifiers: int):\r\n if key in self.held_keys:\r\n self.held_keys.remove(key)", "def on_key_release(self, key: int, modifiers: int):\r\n if key in self.held_keys:\r\n self.held_keys.remove(key)", "def on_key_release(self, event):\n if self.active:\n key = event.key or ''\n for (state, modifier) in self._state_modifier_keys.items():\n # 'rotate' is changing _state on press and is not removed\n # from _state when releasing\n if modifier in key.split('+') and state != 'rotate':\n self._state.discard(state)\n self._on_key_release(event)", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP:\n self.up_pressed = False\n elif key == arcade.key.DOWN:\n self.down_pressed = False\n elif key == arcade.key.LEFT:\n self.left_pressed = False\n elif key == arcade.key.RIGHT:\n self.right_pressed = False\n elif key == arcade.key.W:\n self.up_pressed = False\n elif key == arcade.key.S:\n self.down_pressed = False\n elif key == arcade.key.A:\n self.left_pressed = False\n elif key == arcade.key.D:\n self.right_pressed = False", "def on_key_release(self, key_released: int, _: int) -> None:\n if key_released in (key.LEFT, key.RIGHT, key.A, key.D):\n self.change_x = 0\n self.direction = None", "def keyReleaseEvent (self, event):\n super(DiagramScene, self).keyReleaseEvent(event)", "def 
on_key_release(self, key, callback):\n self._key_release_mappings.setdefault(key, []).append(callback)", "def _on_key_release(self, key):\n if key is self.TRIGGER_KEY:\n print(\"End Recording\")\n self.do_record = False", "def k_release(self, key: KKey):\n pass", "def ev_KEYUP(self, event):", "def on_key_release(self, key, modifiers):\n\n try:\n game_key = KEY_MAP[key]\n except KeyError:\n return\n\n self.game.on_key_release(game_key)", "def on_key_event(self, key):\n pass", "def on_key_release(self, key: int, modifiers: int):\n\n if key in self.held_keys:\n self.held_keys.remove(key)", "def _on_key_press(self, event):", "def handle_keyrelease_event(event, labels):\n\tglobal current_user\n\tglobal current_mode\n\t\n\t(instruction_label, response_label, congrats_label) = labels\n\n\tif current_mode == \"number\":\n\t\tnum_char = str(event.char)\n\t\tif num_char in ['1','2','3','4','5','6','7']:\n\t\t\tpush_update(current_user, int(num_char))\n\t\t\tcongrats_label.temp_update(random.choice(messages), 1500)\n\t\t\tcurrent_mode = \"user\"\n\t\t\tinstruction_label.update(\"Please enter user character...\")", "def on_key_down(self, key, callback):\n self._key_down_mappings.setdefault(key, []).append(callback)", "def release_bound_key(self, event):\n try:\n if event.key in [key[0] for key in self.key_bindings]:\n self.unpress()\n except TypeError:\n if event.key in self.key_bindings:\n self.unpress()", "def key_handler(self):\n \n self.pressed = waitKey(1) & 255 #wait for keypress for 10 ms\n if self.pressed == 27: #exit program on 'esc'\n print \"exiting...\"\n self.camera.cam.release()\n exit()\n \n for key in self.key_controls.keys():\n if chr(self.pressed) == key:\n self.key_controls[key]()", "def keyReleaseEvent(self, event):\n # The autorepeat debounces\n if not event.isAutoRepeat():\n if event.key() == Qt.Key_Up or event.key() == Qt.Key_Down or (\n event.key() == Qt.Key_Left) or event.key() == Qt.Key_Right:\n self.notifyObservers(BehavioralStates.RC, (Qt.Key_Slash, \"0\"))\n # this is so the next time we press w we know it's a new key\n elif event.key() == Qt.Key_W:\n self.notifyObservers(BehavioralStates.RC, (Qt.Key_Q, \"0\"))", "def handle_keyrelease(self, event):\r\n if event.keysym == \"BackSpace\":\r\n self.delete(self.index(tkinter.INSERT), tkinter.END)\r\n self.position = self.index(tkinter.END)\r\n if event.keysym == \"Left\":\r\n if self.position < self.index(tkinter.END): # delete the selection\r\n self.delete(self.position, tkinter.END)\r\n else:\r\n self.position = self.position-1 # delete one character\r\n self.delete(self.position, tkinter.END)\r\n if event.keysym == \"Right\":\r\n self.position = self.index(tkinter.END) # go to end (no selection)\r\n if len(event.keysym) == 1:\r\n self.autocomplete()\r\n # No need for up/down, we'll jump to the popup\r\n # list at the position of the autocompletion\r", "def on_key_release(self, key, key_modifiers):\n if key == arcade.key.LEFT or key == arcade.key.DOWN:\n self.holding_left = False\n\n if key == arcade.key.RIGHT or key == arcade.key.UP:\n self.holding_right = False", "def debounced_key_release(event):\n # print('Debounced release', repr(event.key))\n key_indicator.set_text('')\n fig.canvas.draw()", "def on_key_release(event):\n if event.key == 'shift':\n self.shift_is_held = False", "def keyReleaseEvent(self, event):\n if event.key() not in self.inputs.keys():\n self.inputs[event.key()] = [False, 0]\n # end if not in dict, add key to dict\n self.inputs[event.key()][0] = False\n\n for game_object in 
self.game_engine.game_objects:\n game_object.key_release_event(event)\n # end for", "def handle_keyrelease(self, event):\n if event.keysym == \"BackSpace\":\n self.delete(self.index(tk.INSERT), tk.END)\n self.position = self.index(tk.END)\n if event.keysym == \"Left\":\n if self.position < self.index(tk.END): # delete the selection\n self.delete(self.position, tk.END)\n else:\n self.position = self.position-1 # delete one character\n self.delete(self.position, tk.END)\n if event.keysym == \"Right\":\n self.position = self.index(tk.END) # go to end (no selection)\n if event.keysym == \"Down\":\n self.autocomplete(1) # cycle to next hit\n if event.keysym == \"Up\":\n self.autocomplete(-1) # cycle to previous hit\n if len(event.keysym) == 1 or event.keysym in tkinter_umlauts:\n self.autocomplete()", "def key_down(key):\n vk = key\n # XXX exception if >= 256\n _key_down(vk)", "def handle_keyrelease(self, event):\r\n if event.keysym == \"BackSpace\":\r\n self.delete(self.index(tkinter.INSERT), tkinter.END)\r\n self.position = self.index(tkinter.END)\r\n if event.keysym == \"Left\":\r\n if self.position < self.index(tkinter.END): # delete the selection\r\n self.delete(self.position, tkinter.END)\r\n else:\r\n self.position = self.position-1 # delete one character\r\n self.delete(self.position, tkinter.END)\r\n if event.keysym == \"Right\":\r\n self.position = self.index(tkinter.END) # go to end (no selection)\r\n if event.keysym == \"Down\":\r\n self.autocomplete(1) # cycle to next hit\r\n if event.keysym == \"Up\":\r\n self.autocomplete(-1) # cycle to previous hit\r\n if len(event.keysym) == 1 or event.keysym in tkinter_umlauts:\r\n self.autocomplete()", "def on_key_release(self, key, modifiers):\n self.key_pressed = False\n if key == arcade.key.UP or key == arcade.key.DOWN:\n self.player.change_y = 0\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player.change_x = 0\n if self.player.get_last_side() == \"left\":\n self.player.set_action(\"left_idle\")\n else:\n self.player.set_action(\"right_idle\")", "def handle_keyhold(self, key, string):\r\n return app.App.handle_keyhold(self, key, string)", "def _callbackKeyButton(self, channel):\n if self._myKey.readKeyButton(channel) == 0:\n self.onKeyButtonDown(channel)\n return\n\n if self._myKey.readKeyButton(channel) == 1:\n self.onKeyButtonUp(channel)\n return", "def on_key_release(self, key: int, modifiers: int):\n if (key == arcade.key.UP or key == arcade.key.W) and \\\n self.direction == MoveEnum.UP:\n self.direction = MoveEnum.NONE\n if (key == arcade.key.DOWN or key == arcade.key.S) and \\\n self.direction == MoveEnum.DOWN:\n self.direction = MoveEnum.NONE\n if (key == arcade.key.LEFT or key == arcade.key.A) and \\\n self.direction == MoveEnum.LEFT:\n self.direction = MoveEnum.NONE\n if (key == arcade.key.RIGHT or key == arcade.key.D) and \\\n self.direction == MoveEnum.RIGHT:\n self.direction = MoveEnum.NONE", "def on_key_release(self, key: int, modifiers: int):\n if (key == arcade.key.UP or key == arcade.key.W) and \\\n self.direction == MoveEnum.UP:\n self.direction = MoveEnum.NONE\n if (key == arcade.key.DOWN or key == arcade.key.S) and \\\n self.direction == MoveEnum.DOWN:\n self.direction = MoveEnum.NONE\n if (key == arcade.key.LEFT or key == arcade.key.A) and \\\n self.direction == MoveEnum.LEFT:\n self.direction = MoveEnum.NONE\n if (key == arcade.key.RIGHT or key == arcade.key.D) and \\\n self.direction == MoveEnum.RIGHT:\n self.direction = MoveEnum.NONE", "def key_press_event(self, event):\n pass", "def 
key_event(self, key: Any, action: Any):\n pass", "def releaseKeyButtons(self):\n self._myKey.removeKeyButtonEvent([\n CONFIG_KEY.BUTTON_ACT_A,\n CONFIG_KEY.BUTTON_ACT_B,\n CONFIG_KEY.BUTTON_JOY_UP,\n CONFIG_KEY.BUTTON_JOY_DOWN,\n CONFIG_KEY.BUTTON_JOY_LEFT,\n CONFIG_KEY.BUTTON_JOY_RIGHT,\n CONFIG_KEY.BUTTON_JOY_OK\n ])", "def on_press(key):\n try:\n # gets pressed key char value and searches it from dict with get method.\n mapped_key = key_mappings.get(key.char) # gets value and type tuple or None\n if mapped_key:\n module.pressed_key = mapped_key\n except AttributeError:\n traceback.print_exc()\n except KeyboardInterrupt:\n print(f\"\\n{module.current_time()} Application stopped\")", "def on_key_release(self, key, modifiers):\n if self.current_state == GAME_RUNNING:\n if key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player_sprite.change_x = 0\n elif key == arcade.key.UP or key == arcade.key.DOWN:\n self.player_sprite.change_y = 0\n elif key == arcade.key.SPACE:\n self.player_sprite.speed = 0\n elif key == arcade.key.ESCAPE:\n if self.gameover:\n self.gameover = 0\n self.instruction_screen()", "def ev_KEYDOWN(self, event):", "def keyUp(self):\n if pyxel.btnp(pyxel.KEY_UP):\n self.rotater(-1)", "def on_key_release(self, key, modifiers):\n if key == arcade.key.LEFT:\n self.player_sprite.stop_left()\n elif key == arcade.key.RIGHT:\n self.player_sprite.stop_right()", "def on_key_down(self, keyboard, keycode, text, modifiers):\n Logger.debug('KeyDown Event: Keycode[1] is \"{}\"'.format(keycode[1]))\n self.keysPressed.add(keycode[1])", "def onkey(self, fun, key):\n if fun is None:\n if key in self._keys:\n self._keys.remove(key)\n elif key not in self._keys:\n self._keys.append(key)\n self._onkeyrelease(fun, key)", "def on_key_down(self, keycode, keyvalue, event):\n if self.__click == True and (len(gtk.gdk.keyval_name(event.keyval)) < 2 or gtk.gdk.keyval_name(event.keyval) == \"space\"):\n if gtk.gdk.keyval_name(event.keyval) == \"space\":\n self.__text = self.__text + \" \";\n else:\n self.__text = self.__text + gtk.gdk.keyval_name(event.keyval);\n if gtk.gdk.keyval_name(event.keyval) == \"BackSpace\" and self.__text:\n self.__text = self.__text[:-1];\n if gtk.gdk.keyval_name(event.keyval) == \"Return\" or self.__click == False and self.__text:\n self.addNew();\n\t\t\t#screenlets.show_message(self, \"Committed\");", "def on_mouse_release(self, x, y, button, key_modifiers):\r\n pass", "def on_keyboard_closed(self):\n self.keyboard.unbind(on_key_down=self.on_key_down)\n self.keyboard.unbind(on_key_up=self.on_key_up)\n self.keyboard = None", "def on_key_release(self, key, modifiers):\r\n if key == arcade.key.UP or key == arcade.key.DOWN:\r\n self.player.change_y = 0\r\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\r\n self.player.change_x = 0", "def on_key_release(self, key, modifiers):\r\n if key == arcade.key.UP or key == arcade.key.DOWN:\r\n self.player.change_y = 0\r\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\r\n self.player.change_x = 0", "def _handle_key_press(self, event: pygame.event.Event) -> None:\n if event.key == K_0:\n self._update_input('0')\n elif event.key == K_1:\n self._update_input('1')\n elif event.key == K_2:\n self._update_input('2')\n elif event.key == K_3:\n self._update_input('3')\n elif event.key == K_4:\n self._update_input('4')\n elif event.key == K_5:\n self._update_input('5')\n elif event.key == K_6:\n self._update_input('6')\n elif event.key == K_7:\n self._update_input('7')\n elif event.key == K_8:\n self._update_input('8')\n 
elif event.key == K_9:\n self._update_input('9')\n elif event.key == K_BACKSPACE:\n self._update_input('BACKSPACE')", "def on_key_press(self, event):\n\n #print(\"you pressed {}\".format(event.key))\n key_press_handler(event, self.canvas, self.toolbar)", "def keyReleaseEvent(self, ev):\n self.currentKbKey = None\n\n if (ev.key() == self.panKey):\n # disable Pan/Zoom mode\n self.panning = False\n if self.__pointerLeftWidget:\n # we've left the widget - reset the cursor to the standard arrow\n self.setCursor(Qt.ArrowCursor)\n else:\n self.setCursor(self.defaultCursor)\n elif (ev.key() == self.selectAddKey):\n # disable selection add mode\n if self.__pointerLeftWidget:\n # we've left the widget - reset the cursor to the standard arrow\n self.setCursor(Qt.ArrowCursor)\n else:\n self.setCursor(self.defaultCursor)\n elif (ev.key() == self.zoomKey):\n # disable zoom mode\n self.__zooming = False\n else:\n self.keyRelease.emit(self, ev)", "def _on_keyboard_down(self, keyboard, keycode, char, modifiers):\n\n print(f\"Keystroke: char={char}, code={keycode}, mods={modifiers}\")\n if keycode[0] == 27: # use the Escape key to toggle modes.\n self.toggle_speak_mode()\n elif self._speakmode == 'SAY_LETTERS':\n self.say_letter(keyboard, keycode, char, modifiers)\n else:\n self.say_word(keyboard, keycode, char, modifiers)\n return True", "def on_key_release(self, key, modifiers):\n if key == arcade.key.UP or key == arcade.key.DOWN:\n self.player.change_y = 0\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player.change_x = 0", "def keyevent(keyname, **kwargs):\n G.DEVICE.keyevent(keyname, **kwargs)\n delay_after_operation()", "def keyReleaseEvent(self, event: QtGui.QKeyEvent) -> None:\n if event.key() in [Qt.Key_W, Qt.Key_S, Qt.Key_A, Qt.Key_D] and self.__enable_key:\n new_direction = self.__directions.index(event.text())\n # ignore opposite direction\n if (new_direction + 2) % 4 == self.__h_direction:\n return\n self.__h_direction = new_direction\n if event.isAutoRepeat():\n self.__change_speed(self.__acc_step)\n print(f'{event.text().capitalize()}:accelerate speed')\n else:\n self.__change_speed(self.__step)\n print(f'{event.text().capitalize()}:normal speed')", "def handle_movement_keyup(self, key):\n def _opposite_dir(key):\n return {pygame.K_LEFT: pygame.K_RIGHT,\n pygame.K_RIGHT: pygame.K_LEFT,\n pygame.K_UP: pygame.K_DOWN}[key]\n try:\n log.debug(f'released: {key}')\n self.keys_down[key] = False\n if key in {pygame.K_LEFT, pygame.K_RIGHT} and \\\n not(self.keys_down[_opposite_dir(key)]):\n self.stop_movement()\n log.debug(f'keys down: {self.keys_down}')\n except AttributeError:\n log.error(\"you didn't pass a keyboard event!!\")", "def on_key(self, _window, key, _scancode, action, _mods):\n is_press = action == glfw.PRESS or action == glfw.REPEAT\n if is_press and (key == glfw.KEY_ESCAPE or key == glfw.KEY_Q):\n glfw.set_window_should_close(self.window, True)\n\n if action != glfw.REPEAT:\n self.key_handler(key, is_press)", "def _keyboard_closed(self):\n self._keyboard.unbind(on_key_down=self._on_keyboard_down)\n self._keyboard = None", "def on_key(self, _win, key, _scancode, action, _mods):\n if action == glfw.PRESS or action == glfw.REPEAT:\n if key == glfw.KEY_ESCAPE or key == glfw.KEY_Q:\n glfw.set_window_should_close(self.win, True)\n if key == glfw.KEY_W:\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, next(self.fill_modes))\n\n self.key_handler(key)", "def keyboard_on_key_up(self, window, keycode):\n if 'shift' in keycode[1]:\n self.shift_down = False", "def on_press(key):\n try:\n 
if key.char.upper() == (cfg_cb_key_c.upper()):\n logger.info(\"Clearing clipboard memory.\")\n global browser\n browser.clear_input(cfg_web_anchor)\n except AttributeError:\n pass", "def on_key_release(self, key, modifiers):\n if key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player.change_x = 0\n elif key == arcade.key.UP or key == arcade.key.DOWN:\n self.player.change_y = 0", "def k_press(self, key: KKey):\n pass", "def key_down(event, ai, var, screen, ship, shots, enemies, charges, shields, hub):\r\n\tif event.key == pygame.K_UP:\r\n\t\tship.move_up = 1\r\n\telif event.key == pygame.K_DOWN:\r\n\t\tship.move_down = 1\r\n\telif event.key == pygame.K_SPACE:\r\n\t\tshoot_bullet(ai, screen, ship, shots, enemies)\r\n\t\tbegin_charge(ai, var, screen, ship, charges)\r\n\telif event.key == pygame.K_RSHIFT or event.key == pygame.K_LSHIFT:\r\n\t\tcall_shield(ai, var, screen, ship, shields, hub)\r\n\telif event.key == pygame.K_q:\r\n\t\tsys.exit()\r\n\t#elif event.key == pygame.K_p:\r\n\t#\thub.pause = 1\r\n\telif event.key == pygame.K_z:\r\n\t\thub.za_wurado(ai)", "def on_key_release(self, key: arcade.key, modifiers):\n #stops sprite movement when key is released\n if key == arcade.key.UP or key == arcade.key.DOWN:\n self.player_sprite.change_y = 0\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player_sprite.change_x = 0", "def on_key_press(self, event):\n if self.active:\n key = event.key or ''\n key = key.replace('ctrl', 'control')\n if key == self._state_modifier_keys['clear']:\n self.clear()\n return\n for (state, modifier) in self._state_modifier_keys.items():\n if modifier in key.split('+'):\n # 'rotate' is changing _state on press and is not removed\n # from _state when releasing\n if state == 'rotate':\n if state in self._state:\n self._state.discard(state)\n else:\n self._state.add(state)\n else:\n self._state.add(state)\n self._on_key_press(event)", "def debounced_key_press(event):\n # print('Debounced press', repr(event.key))\n key_indicator.set_text(event.key)\n if event.key == ' ':\n throttle_handler()\n fig.canvas.draw()", "def HandleKeyboardInput(self):\n key = yg.getKeyPress()\n if key == \"Return\":\n self.buttons[len(self.buttons) - 1].Click()", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP or key == arcade.key.DOWN:\n self.player_sprite.change_y = 0\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player_sprite.change_x = 0", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP or key == arcade.key.DOWN:\n self.player_sprite.change_y = 0\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player_sprite.change_x = 0", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP or key == arcade.key.DOWN:\n self.player_sprite.change_y = 0\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player_sprite.change_x = 0", "def on_key_release(self, key):\n if key == LEFT:\n self.player.change_x = 0\n elif key == RIGHT:\n self.player.change_x = 0\n elif key == UP:\n self.player.change_y = 0\n elif key == DOWN:\n self.player.change_y = 0", "def handle_event(self, event, window):\n raise NotImplementedError('handle_key')", "def __handleKeyDown(self, key):\n self._keysPressed += 1\n if self._keyDown is None and self._keysPressed == 1:\n assert(self.notify.debug(\"Key Down for Pattern: \" + key))\n self.__updateElapsedTime()\n # Inform that a key has been pressed\n messenger.send(KeyCodes.KEY_DOWN_EVENT, [self._keyMap[key], self._keyCodeCount])\n \n self._keyCode += 
self._keyMap[key]\n self._keyCodeCount += 1\n self._keyDown = key\n self.__checkForPattern()\n else:\n messenger.send(KeyCodes.KEY_DOWN_EVENT, [-1, -1])", "def on_key_release(self, key, key_modifiers):\n if key == arcade.key.UP or key == arcade.key.DOWN:\n self.player_sprite.change_y = 0\n elif key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.player_sprite.change_x = 0", "def off(key):\n # print(\"{0} released\".format(key), time.perf_counter())\n\n global keys, esc_count\n\n # caps, shift, etc. aren't automatically registered as strings\n if type(key) == Key:\n keys[esc_count].append((str(key), time.perf_counter(), \"released\"))\n else:\n keys[esc_count].append((key, time.perf_counter(), \"released\"))", "def cb_key_pressed(data, signal, signal_data):\n global last_signal_time\n last_signal_time = time.time()\n if signal_data == \"\\x01[\":\n # In 50ms, check if any other keys were pressed. If not, it's Esc!\n weechat.hook_timer(50, 0, 1, \"cb_check_esc\",\n \"{:f}\".format(last_signal_time))\n return weechat.WEECHAT_RC_OK", "def on_keydown(self, keys, game) -> None:\n pass", "def on_key_release(self, key, modifiers):\n if key == arcade.key.A or key == arcade.key.D:\n self.player.change_x = 0\n\n elif key == arcade.key.W or key == arcade.key.S:\n self.player.change_y = 0", "def on_key_release(self, key, modifiers):\n if key == arcade.key.LEFT or key == arcade.key.A:\n getattr(self, f\"player{self.assigned_player}\").change_x = 0\n elif key == arcade.key.RIGHT or key == arcade.key.D:\n getattr(self, f\"player{self.assigned_player}\").change_x = 0", "def OnKeydown(self, vkey, shift):\n if vkey == 27:\n # The ESC key was pressed so close the window and leave.\n self.Close()\n else:\n # An unknown key was pressed.\n return self.on_key_down(vkey, shift)\n\n return True", "def on_key_release(self, key, modifiers):\n if key == arcade.key.UP or key == arcade.key.W or key == arcade.key.DOWN or key == arcade.key.S:\n self.player_sprite.change_y = 0\n elif key == arcade.key.LEFT or key == arcade.key.A or key == arcade.key.RIGHT or key == arcade.key.D:\n self.player_sprite.change_x = 0", "def on_key_release(self, key, modifiers):\n if not self.ship or not self._can_control:\n return False\n\n self._pressed_keys.remove(key)\n\n consumed = True\n\n if key == arcade.key.W:\n self.ship.set_thrust(self.ship.thrust - Position(0, 0.15))\n elif key == arcade.key.S:\n self.ship.set_thrust(self.ship.thrust - Position(0, -0.15))\n elif key == arcade.key.Q:\n self.ship.set_thrust(self.ship.thrust - Position(0.1, 0))\n elif key == arcade.key.E:\n self.ship.set_thrust(self.ship.thrust - Position(-0.1, 0))\n elif key == arcade.key.A:\n self.ship.set_angle_delta(self.ship.angle_delta - 3)\n elif key == arcade.key.D:\n self.ship.set_angle_delta(self.ship.angle_delta + 3)\n else:\n consumed = False\n\n if len(self._pressed_keys) == 0:\n self.ship.set_angle_delta(0.0)\n self.ship.set_thrust(Position(0.0, 0.0))\n\n return True", "def _release(self, event):" ]
[ "0.8233656", "0.7974817", "0.7656666", "0.76358575", "0.74908", "0.73535097", "0.73379964", "0.7314337", "0.7266822", "0.7175188", "0.7162018", "0.7148087", "0.70924264", "0.7065574", "0.70232534", "0.7012686", "0.7012686", "0.70101947", "0.70101947", "0.6966152", "0.69470584", "0.6941192", "0.6935012", "0.6932204", "0.69136596", "0.69123584", "0.6893143", "0.68883544", "0.68824977", "0.6858991", "0.68417597", "0.6788778", "0.6713969", "0.67127544", "0.67112064", "0.6658313", "0.66559064", "0.66521055", "0.6624984", "0.6615595", "0.6611468", "0.66067785", "0.6575267", "0.65743005", "0.6545204", "0.6502516", "0.6442815", "0.64364874", "0.64364874", "0.6422689", "0.64172226", "0.6409969", "0.6406457", "0.63867635", "0.6378577", "0.63490885", "0.6342644", "0.63410217", "0.63177466", "0.62775296", "0.6270158", "0.62517816", "0.62421554", "0.62421554", "0.6241391", "0.6226897", "0.6224081", "0.6217824", "0.6212826", "0.62107426", "0.62039185", "0.62006634", "0.6194423", "0.6178634", "0.6170446", "0.61475265", "0.6139654", "0.61074364", "0.61054224", "0.6099335", "0.60896754", "0.60802376", "0.6071794", "0.6069874", "0.6069134", "0.6069134", "0.6069134", "0.6065924", "0.60409284", "0.60397154", "0.60290164", "0.6028613", "0.6006708", "0.6005393", "0.5987848", "0.5976556", "0.59737355", "0.5968577", "0.59651685", "0.5960135" ]
0.76694167
2
Called when a mouse button is pressed in the widget. Adjust method signature as appropriate for callback.
def button_press_event(self, widget, event):
    x, y = event.x, event.y
    # x, y = coordinates where the button was pressed
    self.last_win_x, self.last_win_y = x, y
    button = 0
    # Prepare a button mask with bits set as follows:
    #   left button: 0x1
    #   middle button: 0x2
    #   right button: 0x4
    # Others can be added as appropriate
    self.logger.debug("button down event at %dx%d, button=%x" % (x, y, button))
    data_x, data_y = self.check_cursor_location()
    return self.make_ui_callback('button-press', button, data_x, data_y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_mouse_press(self, x, y, button):\n\n pass", "def ev_mousebuttondown(self, event: MouseButtonDown) -> None:", "def handle_mouse_press(self, event):", "def mouse_press_event(self, x: int, y: int, button: int):\n pass", "def on_mouse_press(self, x, y, button, key_modifiers):\r\n pass", "def ev_mousebuttondown(self, event: tcod.event.MouseButtonDown) -> T | None:", "def _press(self, event):", "def ev_mousebuttonup(self, event: MouseButtonUp) -> None:", "def on_mouse_release(self, x, y, button):\n pass", "def eventHandler(self, event: pygame.event):\n # change selected color if this button's rectangle was clicked\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n if self.rect.collidepoint(event.pos): # is mouse over button\n self.image = self._images[ButtonImages.CLICKING_IMAGE.value]\n self.beingClicked = True\n for func, *args in self.functionsToInvokeWhenClicked:\n func(*args)\n elif event.type == pygame.MOUSEBUTTONUP and self.beingClicked:\n if event.button == 1:\n self.beingClicked = False\n self.image = self._images[ButtonImages.DEFAULT_IMAGE.value]", "def press(self):\n self.clicked = True\n if self.command:\n self.command(self.name)", "def ev_mousebuttonup(self, event: tcod.event.MouseButtonUp) -> T | None:", "def bind(self):\n self.canvas.bind(\"<ButtonPress-1>\", self.click)", "def on_mouse_release(self, x, y, button, key_modifiers):\r\n pass", "def emitPressEvent(self, clickLocation, button, currentKbKey, items):\n # emit the mousePressEvent signal\n self.mousePress.emit(self, clickLocation, button, currentKbKey, items)", "def on_mouse_up(self, pos, mouse_button):\n for item in button.Button.all_buttons:\n if item.collidepoint(pos):\n self.buttons_clicked.append((item, mouse_button))\n item.on_click(mouse_button)", "def HandlePress(self, event: tkEvent):\n pass", "def HandButton(self, event):\n pass", "def input(self, event: pygame.event) -> None:\n if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:\n self.user_clicked = True", "def button_press_cb(self, source, event):\n\n if event.button == MOUSE_BUTTON_RIGHT:\n pass\n return True\n elif event.button == MOUSE_BUTTON_MIDDLE:\n self.emit('begin-move')\n return True", "def handle_mouse_click(self, button: Button) -> None:\n if button.name == 'BACK':\n self._clear_all_input()\n self.current_page -= 1\n self._focused_button = None\n if self.current_page == len(self.pages) - 2:\n self.current_page -= 1\n elif button.name == 'Show Graph':\n self._plot_graph()\n elif button.name == 'Multiple Regression':\n self._selection.handle_selection(self.current_page, button.name)\n self.current_page += 2\n self._update_ghg_coefs()\n elif button.tag == 'normal' and self.current_page < len(self.pages) - 2:\n self._selection.handle_selection(self.current_page, button.name)\n self.current_page += 1\n elif isinstance(button, InputButton):\n self._focused_button = button", "def on_press(self):\n self.pressed = True", "def on_press(self):\n self.pressed = True", "def button_release_event(self, widget, event):\n x, y = event.x, event.y\n\n # x, y = coordinates where the button was released\n self.last_win_x, self.last_win_y = x, y\n\n button = 0\n # prepare button mask as in button_press_event()\n\n data_x, data_y = self.check_cursor_location()\n\n return self.make_ui_callback('button-release', button, data_x, data_y)", "def mousePressEvent(self, mouse_event):\r\n return", "def __on_click(self, evt):\n if evt.button() == Qt.LeftButton:\n return self._on_left_click(evt)\n if evt.button() == Qt.RightButton:\n return 
self._on_right_click(evt)", "def _press(self, event):\n # make the drawn box/line visible get the click-coordinates,\n # button, ...\n if self._interactive and self._selection_artist.get_visible():\n self._set_active_handle(event)\n else:\n self._active_handle = None\n\n if ((self._active_handle is None or not self._interactive) and\n self._allow_creation):\n # Clear previous rectangle before drawing new rectangle.\n self.update()\n\n if (self._active_handle is None and not self.ignore_event_outside and\n self._allow_creation):\n x = event.xdata\n y = event.ydata\n self._visible = False\n self.extents = x, x, y, y\n self._visible = True\n else:\n self.set_visible(True)\n\n self._extents_on_press = self.extents\n self._rotation_on_press = self._rotation\n self._set_aspect_ratio_correction()\n\n return False", "def button_press_cb(self, darea, event):\n x, y = event.x, event.y\n self.draw_pointer(self.cr, x, y)\n self.queue_draw()\n self.oldx, self.oldy = x, y\n rel_x, rel_y = self.absolute_to_relative(x, y)\n self.emit('dnd-value', rel_x, rel_y)\n self.emit('start-dnd')\n return True", "def m_press(self, button: MButton):\n pass", "def ev_MOUSEDOWN(self, event):", "def on_key_press(self, event):\n\n #print(\"you pressed {}\".format(event.key))\n key_press_handler(event, self.canvas, self.toolbar)", "def ev_MOUSEUP(self, event):", "def ev_mousebuttondown(self, event):\n if self.engine.game_map.in_bounds(*event.tile):\n if event.button == 1:\n return self.on_index_selected(*event.tile)\n return super().ev_mousebuttondown(event)", "def mouseReleaseEvent(self, event):\n button = event.button()\n\n # select an item on which we clicked\n item = self.itemAt(event.x(), event.y())\n if item:\n self.setCurrentItem(item)\n if button == 1:\n print \"SIMPLE LEFT CLICK\"", "def _on_button_press_event(self, widget, event):\n if event.button == 3:\n self.menu.popup(None, None, None, None, event.button, event.time)\n self.menu.show_all()", "def _press(self, event):\n self._set_cursor(True)\n if self._interactive and self._selection_artist.get_visible():\n self._set_active_handle(event)\n else:\n self._active_handle = None\n\n if self._active_handle is None or not self._interactive:\n # Clear previous rectangle before drawing new rectangle.\n self.update()\n\n v = event.xdata if self.direction == 'horizontal' else event.ydata\n # self._pressv and self._prev are deprecated but we still need to\n # maintain them\n self._pressv = v\n self._prev = self._get_data(event)\n\n if self._active_handle is None and not self.ignore_event_outside:\n # when the press event outside the span, we initially set the\n # visibility to False and extents to (v, v)\n # update will be called when setting the extents\n self._visible = False\n self.extents = v, v\n # We need to set the visibility back, so the span selector will be\n # drawn when necessary (span width > 0)\n self._visible = True\n else:\n self.set_visible(True)\n\n return False", "def mouse_click(self,x,y,button,double_click):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")", "def mouse_release_event(self, x: int, y: int, button: int):\n pass", "def set_mouseclick_handler(self, mouse_handler):\n STmouse.Mouse(self.canvas, '<Button-1>', mouse_handler)", "def OnButton(self, event):\r\n \r\n button = event.GetInt()\r\n\r\n if button == AUI_BUTTON_LEFT or button == AUI_BUTTON_RIGHT:\r\n if button == AUI_BUTTON_LEFT:\r\n if self.GetTabOffset() > 0:\r\n \r\n self.SetTabOffset(self.GetTabOffset()-1)\r\n self.Refresh()\r\n self.Update()\r\n else:\r\n 
self.SetTabOffset(self.GetTabOffset()+1)\r\n self.Refresh()\r\n self.Update()\r\n \r\n elif button == AUI_BUTTON_WINDOWLIST:\r\n idx = self.GetArtProvider().ShowDropDown(self, self._pages, self.GetActivePage())\r\n \r\n if idx != -1:\r\n \r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_PAGE_CHANGING, self.GetId())\r\n e.SetSelection(idx)\r\n e.SetOldSelection(self.GetActivePage())\r\n e.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(e)\r\n \r\n else:\r\n event.Skip()", "def cb_something_4(self, button): \n print(\"Do Something 4\")", "def handle_press( self, x, y ):\n self.pressed_flag = True\n self.first_point = (x, y)", "def _on_key_press(self, event):", "def on_pushButton_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def on_pushButton_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def clickedAction(self, events):\n print(\"The {} button was clicked!\".format(self.imgname))", "def mousePressed(self, _evt, _id):\n _widget = None\n \n if _id == ois.MB_Left:\n _widget = self._mouseLeft\n elif _id == ois.MB_Right:\n _widget = self._mouseRight\n elif _id == ois.MB_Middle:\n _widget = self._mouseMiddle\n \n if _widget is not None:\n self._addLinearAnimation(_widget, 1.0)\n \n return False", "def _on_pyglet_mouse_click(self, x, y, button, modifiers):\n button_time = clock()\n this_button = self._button_names[button]\n self._mouse_buffer.append((this_button, x, y, button_time))", "def on_mouse_press(self, x, y, button, modifiers):\n\n self.gamestatemanager.peek().on_mouse_press(x, y, button, modifiers)\n\n if self.exclusive:\n self.gamestatemanager.peek().on_mouse_press(x, y, button, modifiers)\n else:\n self.set_exclusive_mouse(True)", "def handle_mousedown(self, button, name):\r\n x = widget.Widget.handle_mousedown(self, button, name)\r\n if not self.mouse_on_me():\r\n return False\r\n if not self.get_visible():\r\n return False\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_mousedown(button, name):\r\n return True\r\n return x", "def _left_button_press_event(self, obj, event):\n #print('area_picker - left_button_press_event')\n self.OnLeftButtonDown()\n pixel_x, pixel_y = self.parent.vtk_interactor.GetEventPosition()\n self.picker_points.append((pixel_x, pixel_y))", "def OnButton(self, event):\n button = event.GetEventObject().GetName()\n if button == \"Button1\":\n self.OnButton1()\n elif button == \"Button2\":\n self.OnButton2()\n elif button == \"Button3\":\n self.OnExit(event)", "def OnLeftUp_ClickButton(self, event):\r\n \r\n self._hover_button = None\r\n\r\n if self._action_part:\r\n self.RefreshButton(self._action_part)\r\n\r\n # make sure we're still over the item that was originally clicked\r\n if self._action_part == self.HitTest(*event.GetPosition()):\r\n \r\n # fire button-click event\r\n e = AuiManagerEvent(wxEVT_AUI_PANE_BUTTON)\r\n e.SetManager(self)\r\n e.SetPane(self._action_part.pane)\r\n e.SetButton(self._action_part.button.button_id)\r\n self.ProcessMgrEvent(e)", "def mousePressEvent(self, event):\n #sw = self.spw.windows['Sort']\n buttons = event.buttons()\n if buttons == QtCore.Qt.MiddleButton:\n #sw.on_actionSelectRandomSpikes_triggered()\n #sw.spykewindow.plotButton.click() # same as hitting ENTER in nslist\n self.selecting = True\n self.setMouseTracking(True) # while selecting\n self.selectPointsUnderCursor()\n self.lastPressPos = QtCore.QPoint(event.pos())\n self.lastPos = QtCore.QPoint(event.pos())", "def mousePressEvent(self, event): \n if event.type() == 
qtc.QEvent.MouseButtonPress:\n if event.button() == qtc.Qt.RightButton:\n self.right_click_event()\n\n elif event.button() == qtc.Qt.LeftButton:\n self.left_click_event(event)\n self.mouseStartPosY = event.pos().y()\n self.startValue = self.value()", "def mousePressEvent(self, event):\n #sw = self.spw.windows['Sort']\n buttons = event.buttons()\n if buttons == QtCore.Qt.MiddleButton:\n #sw.on_actionSelectRandomSpikes_triggered()\n #sw.spykewindow.ui.plotButton.click() # same as hitting ENTER in nslist\n self.selecting = True\n self.setMouseTracking(True) # while selecting\n self.selectPointsUnderCursor()\n self.lastPressPos = QtCore.QPoint(event.pos())\n self.lastPos = QtCore.QPoint(event.pos())", "def _left_button_release_event(self, obj, event):\n #self.OnLeftButtonUp()\n pixel_x, pixel_y = self.parent.vtk_interactor.GetEventPosition()\n #selector = vtk.vtkVisibleCellSelector()\n\n self.picker_points.append((pixel_x, pixel_y))\n\n #print(self.picker_points)\n if len(self.picker_points) == 2:\n p1x, p1y = self.picker_points[0]\n p2x, p2y = self.picker_points[1]\n self.picker_points = []\n xmin = min(p1x, p2x)\n ymin = min(p1y, p2y)\n xmax = max(p1x, p2x)\n ymax = max(p1y, p2y)\n #print(self.picker_points)\n #print('_area_pick_left_button_release', cell_id)\n\n dx = abs(p1x - p2x)\n dy = abs(p1y - p2y)\n self.picker_points = []\n if dx > 0 and dy > 0:\n if self._pick_visible:\n self._pick_visible_ids(xmin, ymin, xmax, ymax)\n else:\n self._pick_depth_ids(xmin, ymin, xmax, ymax)\n self.parent.vtk_interactor.Render()\n self.picker_points = []", "def handle_mousedown(self, button, name):\r\n if self.get_visible():\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_mousedown(button, name):\r\n return True\r\n return False", "def cb_something_1(self, button):\n print(\"Do Something 1\")", "def signal_from_widget(self, event):\n self.keyPressEvent(event)", "def OnLeftUp(self, event):\r\n\r\n self._on_button = False\r\n \r\n if self._is_dragging:\r\n\r\n if self.HasCapture():\r\n self.ReleaseMouse()\r\n \r\n self._is_dragging = False\r\n if self._drag_image:\r\n self._drag_image.EndDrag()\r\n del self._drag_image\r\n self._drag_image = None\r\n self.GetParent().Refresh()\r\n\r\n evt = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_END_DRAG, self.GetId())\r\n evt.SetSelection(self.GetIdxFromWindow(self._click_tab))\r\n evt.SetOldSelection(evt.GetSelection())\r\n evt.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(evt)\r\n\r\n return\r\n\r\n if self.HasCapture():\r\n self.ReleaseMouse()\r\n \r\n if self._pressed_button:\r\n \r\n # make sure we're still clicking the button\r\n button = self.ButtonHitTest(event.GetX(), event.GetY())\r\n \r\n if button is None:\r\n return\r\n\r\n if button != self._pressed_button:\r\n self._pressed_button = None\r\n return\r\n \r\n self.Refresh()\r\n self.Update()\r\n\r\n if self._pressed_button.cur_state & AUI_BUTTON_STATE_DISABLED == 0:\r\n \r\n evt = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_BUTTON, self.GetId())\r\n evt.SetSelection(self.GetIdxFromWindow(self._click_tab))\r\n evt.SetInt(self._pressed_button.id)\r\n evt.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(evt)\r\n \r\n self._pressed_button = None\r\n \r\n self._click_pt = wx.Point(-1, -1)\r\n self._is_dragging = False\r\n self._click_tab = None", "def click_button(self):\n self.widgets.get('button').click()", "def mousePressEvent(self, event):\n if event.buttons() == QtCore.Qt.LeftButton:\n self.view_state.mouse = np.array([event.x(), event.y()])", "def 
on_toolButton_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def button_release_cb(self, darea, event):\n self.oldx, self.oldy = event.x, event.y\n self.draw_pointer(self.cr, None, None)\n self.queue_draw()\n self.oldx, self.oldy = None, None\n self.emit('end-dnd')\n return True", "def __check_if_got_pressed(self):\n mouse_x_pos,mouse_y_pos = pg.mouse.get_pos()\n\n if utilitiez.on_object(self.rect.x, self.rect.y, self.rect.width, self.rect.height, mouse_x_pos, mouse_y_pos,\n MOUSE_WIDTH, MOUSE_HEIGHT):\n self.__on_click()", "def _onscreenclick(self, fun, num=1, add=None):\n if fun is None:\n self.cv.unbind(\"<Button-%s>\" % num)\n else:\n def eventfun(event):\n x, y = (self.cv.canvasx(event.x)/self.xscale,\n -self.cv.canvasy(event.y)/self.yscale)\n fun(x, y)\n self.cv.bind(\"<Button-%s>\" % num, eventfun, add)", "def onMouseLeftDown(self, event):\n # [NOTE] No need to call self.choice(). It is enough to call\n # event.Skip() and the machine will be called self.OnButtonClick()\n event.Skip()", "def _pressed(self, evt):\n x, y, widget = evt.x, evt.y, evt.widget\n item = widget.identify_row(y)\n column = widget.identify_column(x)\n\n if not column or not item in self._items:\n # clicked in the weekdays row or just outside the columns\n return\n\n item_values = widget.item(item)['values']\n if not len(item_values): # row is empty for this month\n return\n\n text = item_values[int(column[1]) - 1]\n if not text: # date is empty\n return\n\n bbox = widget.bbox(item, column)\n if not bbox: # calendar not visible yet\n return\n\n # update and then show selection\n text = '%02d' % text\n self._selection = (text, item, column)\n self._show_selection(text, bbox)", "def _on_key_release(self, event):", "def cb_something_3(self, button):\n print(\"Do Something 3\")", "def _onrelease(self, item, fun, num=1, add=None):\n if fun is None:\n self.cv.tag_unbind(item, \"<Button%s-ButtonRelease>\" % num)\n else:\n def eventfun(event):\n x, y = (self.cv.canvasx(event.x)/self.xscale,\n -self.cv.canvasy(event.y)/self.yscale)\n fun(x, y)\n self.cv.tag_bind(item, \"<Button%s-ButtonRelease>\" % num,\n eventfun, add)", "def get_event(self, event):\n if event.type == pg.MOUSEBUTTONDOWN and event.button == 1:\n if self.rect.collidepoint(event.pos):\n self.toggle()", "def _click(self):\n if hasattr(self.canvas[\"items\"][self.index], 'commandFunc'):\n self.canvas[\"items\"][self.index].commandFunc(None)", "def click(self):\r\n pass", "def on_mouse_press(self, x, y, button, modifiers):\n self.add_wall()", "def was_pressed(self) -> bool:", "def click(self, x, y, button, press):\n\n if self.is_in_screen(x, y) and not self.pause:\n self.get_color(x, y)\n self.record(x, y, button, press)", "def on_mouse_click(self, event):\n if not self.is_game_over:\n try:\n # i, j coordinates of the click event\n i = int(round(event.ydata))\n j = int(round(event.xdata))\n\n # Left button\n if event.button == 1 or event.button == 2:\n self.reveal(i, j)\n\n # Right button\n elif event.button == 3:\n self.flag(i, j)\n\n except (TypeError, IndexError):\n pass", "def pushButtonClicked(self, but_id, button):\n self.ui.tv_bindings.clearSelection()\n lstMatch = self.ui.tv_bindings.findItems(but_id, QtCore.Qt.MatchExactly, 0)[0]\n lstMatch.setSelected(True)\n lstMatch.setText(1, '[Press a key]')\n button.installEventFilter(self)\n self.efButton = button # Not elegant, but.... 
works", "def mouseDragged():\n if mousePressed:\n mousePressed()", "def mousePressEvent(self, event):\n self._use_zinc_mouse_event_handling = False # Track when zinc should be handling mouse events\n if self._ignore_mouse_events:\n event.ignore()\n return\n\n event.accept()\n if event.button() not in button_map:\n return\n \n self._selection_position_start = (event.x(), event.y())\n\n if button_map[event.button()] == Sceneviewerinput.BUTTON_TYPE_LEFT\\\n and self._selectionKeyPressed and (self._nodeSelectMode or self._elemSelectMode):\n self._selection_mode = SelectionMode.EXCLUSIVE\n if event.modifiers() & QtCore.Qt.SHIFT:\n self._selection_mode = SelectionMode.ADDITIVE\n else:\n scene_input = self._sceneviewer.createSceneviewerinput()\n scene_input.setPosition(event.x(), event.y())\n scene_input.setEventType(Sceneviewerinput.EVENT_TYPE_BUTTON_PRESS)\n scene_input.setButtonType(button_map[event.button()])\n scene_input.setModifierFlags(modifier_map(event.modifiers()))\n self._sceneviewer.processSceneviewerinput(scene_input)\n self._use_zinc_mouse_event_handling = True", "def on_click(self, event_callable, ret_widget_values=None, block_signal=False):\n #TODO Implementation of ret_widget_values\n #TODO Implementation of block_signal?? or removal\n self.on_click_callable = event_callable\n self._raw_toolbar.onClick(\n self.on_click_return,\n ret_widget_values=ret_widget_values,\n block_signal=block_signal\n )", "def on_mouse_press(self, x, y, button, modifiers):\n \n menu: Menu = self.get_menu_for_display()\n\n menu_click_x, menu_click_y = self.get_menu_click(menu, x, y)\n\n if button == arcade.MOUSE_BUTTON_LEFT:\n if menu:\n menu.button_list.check_mouse_press_for_buttons(\n menu_click_x,\n menu_click_y,\n )", "def LeftClick(self):\n self._PressLeftButton()\n self._ReleaseAllButtons()", "def leftButtonDown(self):\n\t\tautopy.mouse.toggle(True,autopy.mouse.LEFT_BUTTON)", "def HandleKeyboardInput(self):\n key = yg.getKeyPress()\n if key == \"Return\":\n self.buttons[len(self.buttons) - 1].Click()", "def on_click(self, x, y):\n self.menu_pointer.on_click(x, y)", "def mousePressEvent(self, event): \n if event.type() == qtc.QEvent.MouseButtonPress:\n if event.button() == qtc.Qt.LeftButton:\n self.mouseStartPosY = event.pos().y()\n self.startValue = self.value()\n\n elif event.button() == qtc.Qt.MidButton:\n self.set_value_to_default()", "def button_press_event(self, widget, event, menu):\n\t\tif event.type == gtk.gdk.BUTTON_PRESS and event.button == 3:\n\t\t\tmenu.popup(None, None, None, event.button, event.time)\n\t\treturn False", "def set_events(self):\r\n\r\n self.canvas.bind(\"<Button-1>\", self.event_click_left)\r\n self.bind(\"<Return>\", self.event_return)", "def when_pressed(self, button, func, *args):\n\n self.hardware_interfaces[self._gpio].set_pin_event(self._b_names[button],\n func,\n *args)", "def handle_button(self, event, event_type):\n # 0 for left\n # 1 for right\n # 2 for middle/center\n # 3 for side\n mouse_button_number = self._get_mouse_button_number(event)\n\n # Identify buttons 3,4,5\n if event_type in (25, 26):\n event_type = event_type + (mouse_button_number * 0.1)\n\n # Add buttons to events\n event_type_string, event_code, value, scan = self.codes[event_type]\n if event_type_string == \"Key\":\n scan_event, key_event = self.emulate_press(\n event_code, scan, value, self.timeval)\n self.events.append(scan_event)\n self.events.append(key_event)\n\n # doubleclick/n-click of button\n click_state = self._get_click_state(event)\n\n repeat = self.emulate_repeat(click_state, 
self.timeval)\n self.events.append(repeat)", "def onButtonPress(self, event):\n\n if event.xdata and event.ydata:\n self.emit(QtCore.SIGNAL(\"positionSelected(float, float)\"),\n float(event.xdata), float(event.ydata))", "def mousePressEvent(self, ev):\n super(PlotObject, self).mousePressEvent(ev)\n self._downpos = self.mousePos", "def check_event(self, event):\r\n if event.type == pygame.MOUSEBUTTONDOWN:\r\n if self.selected:\r\n for item in self.buttons:\r\n item.handleMouseDown(event.pos[0], event.pos[1])\r\n else:\r\n self.tab.handleMouseDown(event.pos[0], event.pos[1])", "def handle_mouseup(self, button, name):\r\n if self.get_visible():\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_mouseup(button, name):\r\n return True\r\n return False", "def on_click(self, x, y):\n mul_x, mul_y = self.multiplier\n off_x, off_y = self.offset\n x -= off_x\n x /= mul_x\n y -= off_y\n y /= mul_y\n for button in self.button_dict.values():\n button.check_click(x, y)", "def get_pressed(self): \n raise NotImplementedError", "def pressed(self) -> bool:\n return self.type == \"JOYBUTTONDOWN\"" ]
[ "0.8186688", "0.7803107", "0.7685904", "0.7667033", "0.7550329", "0.75264764", "0.74540734", "0.74537903", "0.7434162", "0.71306", "0.71141076", "0.7086629", "0.70717835", "0.70475805", "0.70216006", "0.70136315", "0.69730556", "0.69179136", "0.69159424", "0.69106615", "0.69092005", "0.69086593", "0.69086593", "0.6883387", "0.6881508", "0.687479", "0.68471086", "0.6836545", "0.6808178", "0.67737955", "0.67535317", "0.6742284", "0.6738339", "0.6737025", "0.6701703", "0.6678614", "0.66766125", "0.6574762", "0.65687996", "0.65682995", "0.6563857", "0.6554848", "0.65425086", "0.6528905", "0.6528905", "0.6512811", "0.65086156", "0.6495434", "0.64905447", "0.6487096", "0.64787346", "0.64631367", "0.6460059", "0.6455883", "0.6452478", "0.644119", "0.6429059", "0.6428801", "0.64114636", "0.6406296", "0.63963807", "0.6394005", "0.63830185", "0.6379297", "0.63791144", "0.63778967", "0.6376003", "0.6373461", "0.63720185", "0.6364364", "0.6363386", "0.6356046", "0.634816", "0.6344864", "0.63408995", "0.63391787", "0.63276494", "0.6326992", "0.6322749", "0.631902", "0.6310714", "0.6308334", "0.6304208", "0.62975895", "0.62935483", "0.62914467", "0.6287971", "0.6283372", "0.6277994", "0.6276328", "0.62707454", "0.6270045", "0.6268654", "0.6268415", "0.62662643", "0.62619746", "0.62611157", "0.6258642", "0.6252097", "0.62520045" ]
0.786055
1
Called when a mouse button is released after being pressed. Adjust method signature as appropriate for callback.
def button_release_event(self, widget, event):
    x, y = event.x, event.y
    # x, y = coordinates where the button was released
    self.last_win_x, self.last_win_y = x, y
    button = 0
    # prepare button mask as in button_press_event()
    data_x, data_y = self.check_cursor_location()
    return self.make_ui_callback('button-release', button, data_x, data_y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mouse_release_event(self, x: int, y: int, button: int):\n pass", "def on_mouse_release(self, x, y, button):\n pass", "def on_mouse_release(self, x, y, button, key_modifiers):\r\n pass", "def OnMouseUp(self, evt):\n self.ReleaseMouse()", "def emitReleaseEvent(self, clickLocation, button, currentKbKey, items):\n # emit the mouseReleaseEvent signal\n self.mouseRelease.emit(self, clickLocation, button, currentKbKey, items)", "def ev_mousebuttonup(self, event: MouseButtonUp) -> None:", "def on_release(self):\n self.pressed = False", "def on_release(self):\n self.pressed = False", "def button_release(self, event: Any) -> None:\n if event.button == 1:\n self.left_button_down = False\n if event.button == 2:\n self.middle_button_down = False\n if event.button == 3:\n self.right_button_down = False", "def release():\n gui.mouseUp()", "def ev_mousebuttonup(self, event: tcod.event.MouseButtonUp) -> T | None:", "def mouse_release(self):\n\n # play button press\n if self.play_button.is_active:\n # change to gameplay\n self.switch_context(game.GameContext)", "def ev_mousebuttondown(self, event: MouseButtonDown) -> None:", "def release(button='left', coords=(0, 0)):\n _perform_click_input(button=button, coords=coords, button_down=False, button_up=True)", "def _release(self, event):", "def HandleRelease(self, event: tkEvent):\n pass", "def button_release_cb(self, darea, event):\n self.oldx, self.oldy = event.x, event.y\n self.draw_pointer(self.cr, None, None)\n self.queue_draw()\n self.oldx, self.oldy = None, None\n self.emit('end-dnd')\n return True", "def ev_mousebuttondown(self, event: tcod.event.MouseButtonDown) -> T | None:", "def m_release(self, button: MButton):\n pass", "def releaseKeyButtons(self):\n self._myKey.removeKeyButtonEvent([\n CONFIG_KEY.BUTTON_ACT_A,\n CONFIG_KEY.BUTTON_ACT_B,\n CONFIG_KEY.BUTTON_JOY_UP,\n CONFIG_KEY.BUTTON_JOY_DOWN,\n CONFIG_KEY.BUTTON_JOY_LEFT,\n CONFIG_KEY.BUTTON_JOY_RIGHT,\n CONFIG_KEY.BUTTON_JOY_OK\n ])", "def key_release_event(self, event):\n pass", "def mouseReleaseEvent(self, event):\n super(QIntSpinner3DS, self).mousePressEvent(event)\n super(QIntSpinner3DS, self).mouseReleaseEvent(event)\n self.unsetCursor()", "def _on_key_release(self, event):", "def _onrelease(self, item, fun, num=1, add=None):\n if fun is None:\n self.cv.tag_unbind(item, \"<Button%s-ButtonRelease>\" % num)\n else:\n def eventfun(event):\n x, y = (self.cv.canvasx(event.x)/self.xscale,\n -self.cv.canvasy(event.y)/self.yscale)\n fun(x, y)\n self.cv.tag_bind(item, \"<Button%s-ButtonRelease>\" % num,\n eventfun, add)", "def on_mouse_release(self, x, y, button, modifiers):\n \n menu: Menu = self.get_menu_for_display()\n\n menu_click_x, menu_click_y = self.get_menu_click(menu, x, y)\n\n if button == arcade.MOUSE_BUTTON_LEFT:\n if menu:\n menu.button_list.check_mouse_release_for_buttons(\n menu_click_x,\n menu_click_y,\n )", "def unpress(self):\n if self.unclick:\n self.clicked = False", "def on_mouse_press(self, x, y, button):\n\n pass", "def mouseReleaseEventEnabled(self, ev):\n\n self._btns.remove(ev.button())", "def __mouse_release(self, event, right_click=False):\n global choose_rectangle\n if right_click:\n return\n if choose_rectangle:\n self.__finish_rectangle(event)", "def _OnMplMouseRelease( self, ev ):\n if ev.button == 3:\n ev.guiEvent.Skip()", "def mouseReleaseEvent(self, event):\n # super(PlotWidget, self).mouseReleaseEvent(event)\n event.accept()", "def _release(self, event):\n self._set_cursor(False)\n # self._pressv is deprecated but we still need to maintain it\n 
self._pressv = None\n\n if not self._interactive:\n self._selection_artist.set_visible(False)\n\n if (self._active_handle is None and self._selection_completed and\n self.ignore_event_outside):\n return\n\n vmin, vmax = self.extents\n span = vmax - vmin\n\n if span <= self.minspan:\n # Remove span and set self._selection_completed = False\n self.set_visible(False)\n if self._selection_completed:\n # Call onselect, only when the span is already existing\n self.onselect(vmin, vmax)\n self._selection_completed = False\n else:\n self.onselect(vmin, vmax)\n self._selection_completed = True\n\n self.update()\n\n self._active_handle = None\n\n return False", "def mouseReleased():\n if not game_controller.game_over:\n if game_controller.falling_disk and \\\n game_controller.falling_disk.y_vel == 0:\n game_controller.handle_mouseReleased()", "def keyReleaseEvent(self, event):\n self.game_engine.input_manager.keyReleaseEvent(event)", "def handle_mouse_press(self, event):", "def on_mouse_release(self, x: float, y: float, button: int, modifiers: int):\n if self.heldLetter is not None:\n self.active_blocks.remove(self.heldLetter)\n self.moving_blocks.append(self.heldLetter)\n if len(arcade.get_sprites_at_point((x, y), self.inactive_blocks)) == 0 and x < BOARD_WIDTH:\n letter_x, letter_y = self.nearest_cell(x, y)\n self.heldLetter.place(letter_x, letter_y)\n self.board_temp[int((letter_x-SLOT_WIDTH/2)/SLOT_WIDTH)][int((letter_y - SLOT_HEIGHT/2)/SLOT_HEIGHT)] = self.heldLetter\n else:\n self.heldLetter.return_home()\n self.heldLetter = None", "def OnTokenButtonRelease(self, event):\n self._drag_data = {\"x\": 0, \"item\": None}\n\n # Rebind the main GUI buttons because they are unbinded while dragging the beats\n self.myMainGUI.root.after(200, self.myMainGUI.bindButtons)", "def mouseReleaseEvent(self, event):\n button = event.button()\n\n # select an item on which we clicked\n item = self.itemAt(event.x(), event.y())\n if item:\n self.setCurrentItem(item)\n if button == 1:\n print \"SIMPLE LEFT CLICK\"", "def mouse_press_event(self, x: int, y: int, button: int):\n pass", "def on_mouse_up(self, pos, mouse_button):\n for item in button.Button.all_buttons:\n if item.collidepoint(pos):\n self.buttons_clicked.append((item, mouse_button))\n item.on_click(mouse_button)", "def keyReleaseEvent (self, event):\n super(DiagramScene, self).keyReleaseEvent(event)", "def isButtonReleased() -> bool:\n pass", "def ev_joybuttondown(self, event: tcod.event.JoystickButton) -> T | None:", "def on_mouse_press(self, x, y, button, key_modifiers):\r\n pass", "def check_mouse_release_for_buttons(x: float, y: float, button_list: list):\n for button in button_list:\n if button.pressed:\n #sets button pressed to false\n button.on_release()", "def on_mouse_release(self, x: float, y: float, button, modifiers):\n #dialogue buttons\n check_mouse_release_for_buttons(x, y, self.levels[self.current_level].dialogue_list)\n\n #room info prompt buttons\n check_mouse_release_for_buttons(x, y, self.levels[self.current_level].room_info_list)", "def mouseReleaseEvent(self, ev):\n\n # handle the built mouse events first\n\n # panning...\n if self.panning and (ev.button() == Qt.LeftButton):\n # we're done panning\n self.leftBtnClicked = False\n self.setCursor(Qt.OpenHandCursor)\n self.lastPanPoint = QPoint()\n\n # \"auto\" rubber banding...\n elif self.rubberBandKey and self.rubberBanding:\n\n # end the rubber band selection\n rubberBandRect = self.endRubberBand().toRect()\n\n # check if the user selected anything\n if (rubberBandRect):\n items = 
self.items(rubberBandRect)\n\n # filter the selected items\n items = self.filterSelectedItems(items)\n\n # If we're handling selections deal with the selection states of our marks\n if self.doSelections:\n\n for item in self.selectedItems:\n item.setSelected(False)\n for item in items:\n item.setSelected(True)\n self.selectedItems = items\n\n # call the emit method - we don't directly emit here in case a child class\n # wants to transform the data before emitting it.\n self.emitRubberbandSelection(rubberBandRect, items)\n\n else:\n # This event isn't handled by automatically - emit a release event\n clickLocation = self.mapToScene(ev.pos())\n\n # do a \"sloppy selection\" and return all items that intersect our\n # selection rectangle. The selection rectangle is set by calling\n # the setSelectionRadius method.\n\n # move our selection rectangle into place - depending on the size of\n # the selection area, this may not be centered on the click location\n areaLoc = ev.pos() - self.selectionRadius\n self.selectionArea.moveTo(areaLoc)\n\n # check if the user clicked on anything - this will return a list of\n # items that intersect the selection rectangle.\n items = self.items(self.selectionArea)\n\n # filter the selection so we only return marks or text not associated\n # with a mark.\n items = self.filterSelectedItems(items)\n\n # call the emit method - we don't directly emit here in case a child class\n # wants to transform the data before emitting it.\n self.emitReleaseEvent(clickLocation, ev.button(), self.currentKbKey, items)", "def _ReleaseAllButtons(self):\n self._kit.MouseReleaseAllButtons()\n time.sleep(self.send_delay)", "def ev_MOUSEUP(self, event):", "def check_mouse_release_for_buttons(_x, _y, button_list):\n for button in button_list:\n if button.pressed:\n button.on_release()", "def mouseDragged():\n if mousePressed:\n mousePressed()", "def mouseReleaseEvent(self, event):\n if event.button() is not QtCore.Qt.MouseButton.LeftButton:\n return False\n if self.mousenode is not None:\n self.remove_mousenode(event)\n return QtGui.QGraphicsScene.mouseReleaseEvent(self, event)", "def mouse_out(self):\n pass", "def button_press_event(self, widget, event):\n x, y = event.x, event.y\n\n # x, y = coordinates where the button was pressed\n self.last_win_x, self.last_win_y = x, y\n\n button = 0\n # Prepare a button mask with bits set as follows:\n # left button: 0x1\n # middle button: 0x2\n # right button: 0x4\n # Others can be added as appropriate\n self.logger.debug(\"button down event at %dx%d, button=%x\" % (x, y, button))\n\n data_x, data_y = self.check_cursor_location()\n\n return self.make_ui_callback('button-press', button, data_x, data_y)", "def on_release(self, keyname):\n self.keydown = False\n keyname = str(keyname).strip('\\'')\n log.info('KEY RELEASE ' + keyname)\n if keyname in self.controls_keyrelease:\n key_handler = self.controls_keyrelease[keyname]()", "def rightButtonUp(self):\n\t\tautopy.mouse.toggle(False,autopy.mouse.RIGHT_BUTTON)", "def mouseReleased(self, _evt, _id):\n if not self.is_enabled: return False\n \n self.mouse_icon.mouseReleased(_evt, _id)\n return False", "def exit_on_click(self):\n self.get_mouse()\n self._close()", "def on_key_release(self, key_released: int, _: int) -> None:\n if key_released in (key.LEFT, key.RIGHT, key.A, key.D):\n self.change_x = 0\n self.direction = None", "def handle_mousehold(self, button, name):\r\n if widget.Widget.handle_mousehold(self, button, name):\r\n app.App.handle_mousehold(self, button, name)\r\n return True\r\n return 
False", "def keyReleaseEvent(self, ev):\n self.currentKbKey = None\n\n if (ev.key() == self.panKey):\n # disable Pan/Zoom mode\n self.panning = False\n if self.__pointerLeftWidget:\n # we've left the widget - reset the cursor to the standard arrow\n self.setCursor(Qt.ArrowCursor)\n else:\n self.setCursor(self.defaultCursor)\n elif (ev.key() == self.selectAddKey):\n # disable selection add mode\n if self.__pointerLeftWidget:\n # we've left the widget - reset the cursor to the standard arrow\n self.setCursor(Qt.ArrowCursor)\n else:\n self.setCursor(self.defaultCursor)\n elif (ev.key() == self.zoomKey):\n # disable zoom mode\n self.__zooming = False\n else:\n self.keyRelease.emit(self, ev)", "def handle_release(self, x, y):\n # append new line to list of lines\n self.lines.append( (self.first_point, (x, y)) )\n\n # clear mouse pressed flag and rubber band line coords\n self.pressed_flag = False\n self.first_point = None\n self.last_point = None\n\n # trigger canvas to redraw itself\n self.redraw()", "def onRelease(event):\r\n global initPos\r\n initPos = None # Reset the position ready for next click\r", "def _release(self, event):\n # Release active tool handle.\n if self._active_handle_idx >= 0:\n if event.button == 3:\n self._remove_vertex(self._active_handle_idx)\n self._draw_polygon()\n self._active_handle_idx = -1\n\n # Complete the polygon.\n elif len(self._xys) > 3 and self._xys[-1] == self._xys[0]:\n self._selection_completed = True\n if self._draw_box and self._box is None:\n self._add_box()\n\n # Place new vertex.\n elif (not self._selection_completed\n and 'move_all' not in self._state\n and 'move_vertex' not in self._state):\n self._xys.insert(-1, (event.xdata, event.ydata))\n\n if self._selection_completed:\n self.onselect(self.verts)", "def mouseReleaseEvent( self, event ):\n event.setAccepted(False)\n if self._hotspotPressed:\n event.accept()\n self._hotspotPressed = False\n return\n \n # ignore events when the scene is in view mode\n scene = self.scene()\n if ( self.isLocked() or self._ignoreMouseEvents or \\\n (scene and (scene.inViewMode() or scene.isConnecting()))):\n event.ignore()\n self._ignoreMouseEvents = False\n return\n \n super(XNode, self).mouseReleaseEvent(event)\n \n # emit the geometry changed signal\n self.emitGeometryChanged()\n \n # unblock the selection signals\n if ( scene ):\n scene.blockSelectionSignals(False)\n \n delta = datetime.datetime.now() - self._pressTime\n if not scene.signalsBlocked() and delta.seconds < 1:\n scene.nodeClicked.emit(self)", "def on_canvas_mouse_release(self, event) -> None:\r\n\r\n self.edit_toggle_mode = None", "def _press(self, event):", "def key_release_event(self, widget, event):\n # get keyname or keycode and translate to ginga standard\n # keyname =\n # keycode =\n keyname = '' # self.transkey(keyname, keycode)\n self.logger.debug(\"key release event, key=%s\" % (keyname))\n return self.make_ui_callback('key-release', keyname)", "def ev_joybuttonup(self, event: tcod.event.JoystickButton) -> T | None:", "def triangleBtnHandler(val):\n if val == 1 :\n print(\"Triangle button pressed\")\n else:\n print(\"Triangle button released\")", "def on_key_release(self, symbol, modifiers):\n self.gamestatemanager.peek().on_key_release(symbol, modifiers, self.config_data[\"controls\"])", "def down(self, event):\n self.dragx = event.x\n self.dragy = event.y\n self.canvas.bind(\"<B1-Motion>\", self.motion)\n self.canvas.bind(\"<ButtonRelease-1>\", self.up)\n return True", "def on_key_release(self, key, modifiers):\n pass # stop 
animation", "def on_key_release(self, key, modifiers):\n player_controller.input_release(key, self.player)", "def handle_mouseup(self, button, name):\r\n x = widget.Widget.handle_mouseup(self, button, name)\r\n if not self.mouse_on_me():\r\n return False\r\n if not self.get_visible():\r\n return False\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_mouseup(button, name):\r\n return True\r\n return x", "def button_handler(self, channel):\n if channel != self.BUTTON_PIN:\n return\n\n state = GPIO.input(self.BUTTON_PIN)\n now = time.time()\n delta = now - self.prev_button_state[1]\n\n if self.prev_button_state[0] != state:\n self.prev_button_state = (state, now)\n\n if state == GPIO.HIGH:\n self.button_hold = None\n\n # debounce the button tap and trigger action\n if delta > self.TAP_TIME and self.button_tap is None:\n self.button_tap = True\n os.kill(os.getpid(), signal.SIGALRM)\n else:\n self.button_tap = None\n\n # schedule a hold check\n signal.alarm(int(self.HOLD_TIME))\n\n elif state == GPIO.LOW:\n if delta >= self.HOLD_TIME and self.button_hold is None:\n self.button_hold = True\n self.button_tap = False", "def on_press(self):\n self.pressed = True", "def on_press(self):\n self.pressed = True", "def delete_button_callback(self, button):\n\t\tRPIO.del_interrupt_callback(button)", "def mouseReleaseEvent(self, event):\n self.box_begin = self.begin\n self.box_end = event.pos()\n self.begin = event.pos()\n self.end = event.pos()\n if not self.permanent_show:\n self.update()", "def debounced_key_release(event):\n # print('Debounced release', repr(event.key))\n key_indicator.set_text('')\n fig.canvas.draw()", "def leftButtonUp(self):\n\t\tautopy.mouse.toggle(False,autopy.mouse.LEFT_BUTTON)", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP:\n self.up_pressed = False\n elif key == arcade.key.DOWN:\n self.down_pressed = False\n elif key == arcade.key.LEFT:\n self.left_pressed = False\n elif key == arcade.key.RIGHT:\n self.right_pressed = False", "def on_key_release(self, key, modifiers):\n\n if key == arcade.key.UP:\n self.up_pressed = False\n elif key == arcade.key.DOWN:\n self.down_pressed = False\n elif key == arcade.key.LEFT:\n self.left_pressed = False\n elif key == arcade.key.RIGHT:\n self.right_pressed = False", "def mouse_right_up(self):\n pass", "def ev_MOUSEDOWN(self, event):", "def HandButton(self, event):\n pass", "def up(self, event):\n event.widget.unbind (\"<B1-Motion>\")\n event.widget.unbind (\"<ButtonRelease-1>\")\n self.diag.update_arrows()", "def ev_controllerbuttondown(self, event: tcod.event.ControllerButton) -> T | None:", "def on_release(self, released_key ):\n if released_key is not None:\n if isinstance(released_key, pynput.keyboard.KeyCode) and released_key.char is not None:\n released_key = released_key.char.lower()\n elif isinstance(released_key, pynput.keyboard.Key):\n released_key = released_key.name\n self.keys_set.discard(released_key)", "def on_release(self, event):\n self.current_point = None", "def eventHandler(self, event: pygame.event):\n # change selected color if this button's rectangle was clicked\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n if self.rect.collidepoint(event.pos): # is mouse over button\n self.image = self._images[ButtonImages.CLICKING_IMAGE.value]\n self.beingClicked = True\n for func, *args in self.functionsToInvokeWhenClicked:\n func(*args)\n elif event.type == pygame.MOUSEBUTTONUP and self.beingClicked:\n if event.button == 1:\n self.beingClicked = False\n self.image = 
self._images[ButtonImages.DEFAULT_IMAGE.value]", "def exit_btn_callback(evt):\n print(\"Inside exit_btn_callback. Event object is: \", evt)\n mainwin.destroy()", "def _callbackKeyButton(self, channel):\n if self._myKey.readKeyButton(channel) == 0:\n self.onKeyButtonDown(channel)\n return\n\n if self._myKey.readKeyButton(channel) == 1:\n self.onKeyButtonUp(channel)\n return", "def on_key_up(self, keyboard, keycode):\n Logger.debug('KeyUp Event: Keycode[1] is \"{}\"'.format(keycode[1]))\n self.keysPressed.remove(keycode[1])", "def check_mouse_release_for_buttons(x, y, button_list):\n for button in button_list:\n if x > button.center_x + button.width / 2:\n continue\n if x < button.center_x - button.width / 2:\n continue\n if y > button.center_y + button.height / 2:\n continue\n if y < button.center_y - button.height / 2:\n continue\n button.on_release()", "def mouseReleaseEvent(self, event: QMouseEvent):\n self._moving = False\n self.rectChanged.emit(self._rect)\n super().mouseReleaseEvent(event)", "def key_handler(self, event):\n if event.type == pygame.KEYUP: \n self.done = True", "def handle_keyrelease(self, event):\r\n if event.keysym == \"BackSpace\":\r\n self.delete(self.index(tkinter.INSERT), tkinter.END)\r\n self.position = self.index(tkinter.END)\r\n if event.keysym == \"Left\":\r\n if self.position < self.index(tkinter.END): # delete the selection\r\n self.delete(self.position, tkinter.END)\r\n else:\r\n self.position = self.position-1 # delete one character\r\n self.delete(self.position, tkinter.END)\r\n if event.keysym == \"Right\":\r\n self.position = self.index(tkinter.END) # go to end (no selection)\r\n if len(event.keysym) == 1:\r\n self.autocomplete()\r\n # No need for up/down, we'll jump to the popup\r\n # list at the position of the autocompletion\r", "def button_press_cb(self, source, event):\n\n if event.button == MOUSE_BUTTON_RIGHT:\n pass\n return True\n elif event.button == MOUSE_BUTTON_MIDDLE:\n self.emit('begin-move')\n return True" ]
[ "0.80996853", "0.80075884", "0.7933358", "0.74254155", "0.7286587", "0.7269564", "0.72388613", "0.72388613", "0.72174096", "0.71852064", "0.7037113", "0.7027572", "0.7019888", "0.69756335", "0.6968941", "0.6932969", "0.69013923", "0.68389016", "0.6834489", "0.68217915", "0.68064696", "0.67922896", "0.67721957", "0.67626584", "0.6738707", "0.6725046", "0.67031455", "0.6703062", "0.667345", "0.6661664", "0.6637001", "0.6592699", "0.65886796", "0.6568915", "0.6561103", "0.6561102", "0.654002", "0.65370697", "0.6535258", "0.6524198", "0.6510034", "0.6508104", "0.648113", "0.64720184", "0.64643985", "0.6451043", "0.6433521", "0.64274275", "0.6401048", "0.6399519", "0.6398906", "0.63691485", "0.6367818", "0.6328204", "0.627359", "0.6267704", "0.62474436", "0.6225996", "0.6225327", "0.6215503", "0.6209651", "0.6205074", "0.61963767", "0.6182396", "0.6179401", "0.6173177", "0.6167398", "0.6159565", "0.6152561", "0.6145867", "0.6137639", "0.60926855", "0.60899174", "0.6082023", "0.6076744", "0.60733455", "0.60664153", "0.60664153", "0.6050228", "0.6038865", "0.60152435", "0.60091376", "0.6004603", "0.6004603", "0.5998809", "0.5990724", "0.59758055", "0.59740525", "0.59453046", "0.5943226", "0.5925004", "0.5923064", "0.5921629", "0.591867", "0.5915708", "0.590709", "0.590701", "0.5903546", "0.5902491", "0.59009856" ]
0.7786435
3
Called when a mouse cursor is moving in the widget. Adjust method signature as appropriate for callback.
def motion_notify_event(self, widget, event):
    x, y = event.x, event.y
    # x, y = coordinates of cursor
    self.last_win_x, self.last_win_y = x, y
    button = 0
    # prepare button mask as in button_press_event()
    data_x, data_y = self.check_cursor_location()
    return self.make_ui_callback('motion', button, data_x, data_y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mouse_move_callback(self, event):\n # TODO drag and drop figuriek\n print(\"moving at \", event.x + self.offset_x, event.y + self.offset_y)", "def on_mouse_motion(self, x, y, delta_x, delta_y):\r\n pass", "def __mouseMoved(self, x, y):\n # Are we on the bounding box?\n if pointOnBox(x, y, self.currentBox, thickness=self.__THICKNESS):\n position = getCursorPosition(x, y, self.currentBox, thickness=self.__THICKNESS)\n cursor = [\n wx.CURSOR_SIZENWSE,\n wx.CURSOR_SIZENS,\n wx.CURSOR_SIZENESW,\n wx.CURSOR_SIZEWE,\n wx.CURSOR_SIZENWSE,\n wx.CURSOR_SIZENS,\n wx.CURSOR_SIZENESW,\n wx.CURSOR_SIZEWE\n ] [position]\n self.__setCursor(cursor)\n elif pointInBox(x, y, self.currentBox):\n self.__setCursor(wx.CURSOR_HAND)\n else:\n self.__setCursor()", "def on_mouse_motion(x, y, dx, dy):\n if in_box(x, y):\n # Change the cursor if inside the box.\n self.window.set_mouse_cursor(self.hand_cursor)\n else:\n self.window.set_mouse_cursor(self.default_cursor)", "def moveCursor(self):\n\n\t\tself._before = self.rect.center\n\t\tself.rect.center = self._pos", "def on_mouse_motion(self, x, y, delta_x, delta_y):\n \n pass", "def handle_mouse(self, x, y):\n pass", "def on_mouse_move(self, event: PointEvent):\n self.x = event.x\n self.y = event.y\n self.handle_mouse(self.x, self.y)", "def handle_mouse(self, x, y):\n self.x = x\n self.y = y\n global _pending_handle_mouse\n if not _pending_handle_mouse:\n _pending_handle_mouse = True\n if self.fig.document is not None:\n self.fig.document.add_timeout_callback(self.handle_mouse_callback, 100)\n else:\n self.handle_mouse_callback()", "def __master_cursor_pos_callback(self, glfw_window, xpos, ypos):\n # flip glfw window space to match OGL space(like texture that has bottom left origin)\n ypos = self.window.glyph.size[1] - ypos\n\n # update values\n self.__pos_instant = Vec(xpos, ypos, 0)\n self.__accel = self.__pos_instant - self.__pos_prev\n self.__pos_prev = self.__pos_instant\n\n # call registered callbacks\n self.call_cursor_pos_callback(glfw_window, *self.__pos_instant.xy, mouse=self)", "def _motion(self, event):\n if self.current:\n # modify the current line by changing the end coordinates\n # to be the current mouse position\n coords = event.widget.coords(self.current)\n coords[2] = event.x\n coords[3] = event.y\n\n event.widget.coords(self.current, *coords)", "def ev_mousemotion(self, event: MouseMotion) -> None:", "def grab(self, event):\n self.ypos = event.y\n self.xpos = event.x\n self.config(cursor='fleur')", "def ev_MOUSEMOTION(self, event):", "def mousePosition(self):", "def mouseEvent(self, widget, event):\n # Zoom in and out with the middle and right mouse buttons\n if event.type == gtk.gdk.BUTTON_PRESS:\n if event.button == 2:\n self.ruler.zoom(2)\n elif event.button == 3:\n self.ruler.zoom(0.5)\n\n # Use the ruler widget's coordinate system to update the time cursor\n x, y, mask = self.ruler.canvas.window.get_pointer()\n scroll = self.ruler.canvas.get_scroll_offsets()[0]\n t = (x + scroll) / self.ruler.scale\n self.cursor.value = t\n\n # If the mouse button is down, try to find the canvas item under the cursor.\n # We use the row's collision detection tree for this, for the same reason\n # we use it for everything else: gnome-canvas' built-in collision detection\n # works poorly on very small items.\n if mask & gtk.gdk.BUTTON1_MASK:\n\n # Search every row in every canvas\n for obj in self.canvasList:\n y = obj.canvas.get_pointer()[1]\n for row in obj.rows:\n if y >= row.top and y <= row.bottom:\n\n # Give a few pixels of slack on either side\n 
slack = 2.0 / self.ruler.scale\n cursorInterval = (t - slack, t + slack)\n\n # The mouse is in this row. Use the row's collision detection\n # to find a nearby item.\n tag = row.intervalOccupied(*cursorInterval)\n if tag and tag[0] != self.hilightWidget:\n self.notifyHilightChanged(tag[0].dataTransaction)\n self.setHilightWidget(tag[0])\n return False\n return False", "def __handleMouseEvents(self, event):\n if not self.enabled:\n return\n\n x, y = event.GetPosition()\n\n # First make sure we have started a box.\n if self.currentBox == None and not event.LeftDown():\n # No box started yet. Set cursor to the initial kind.\n self.__setCursor(wx.CURSOR_CROSS)\n return\n\n if event.LeftDown():\n if self.currentBox == None:\n # No RB Box, so start a new one.\n self.currentBox = (x, y, 0, 0)\n self.hasLetUp = 0\n elif self.__isSizingCursor():\n # Starting a sizing operation. Change the origin.\n position = getCursorPosition(x, y, self.currentBox, thickness=self.__THICKNESS)\n self.currentBox = self.__denormalizeBox(position, self.currentBox)\n\n elif event.Dragging() and event.LeftIsDown():\n # Use the cursor type to determine operation\n if self.__isMovingCursor():\n if self.currentlyMoving or pointInBox(x, y, self.currentBox):\n if not self.currentlyMoving:\n self.currentlyMoving = (x - self.currentBox[0], y - self.currentBox[1])\n self.__moveTo(x - self.currentlyMoving[0], y - self.currentlyMoving[1])\n elif self.__isSizingCursor():\n self.__resizeBox(x, y)\n\n elif event.LeftUp():\n self.hasLetUp = 1\n self.currentlyMoving = None\n self.__normalizeBox()\n\n elif event.Moving() and not event.Dragging():\n # Simple mouse movement event\n self.__mouseMoved(x,y)", "def _update_cursor(self) -> None:\n # get the brush size (get a local reference in case another process\n # changes it between the different accesses in this method)\n brush_size = self.brush_size\n # if there is not update, return\n if not self.is_cursor_change:\n return\n # otherwise dequeue the update\n self.is_cursor_change = False\n # make a static border ring for the cursor\n ring = make_ring(brush_size - 1, brush_size)\n cursor = make_cursor(ring, self._brush_border_color)\n # make a circle with the current color\n brush_circle = make_circle(brush_size) - ring\n cursor = cursor + make_cursor(brush_circle, self._color)\n # create the pyglet cursor object and set it\n mouse = pyglet_cursor(cursor)\n self._view.set_cursor(mouse)", "def move_start(event):\n nonlocal x, y\n x = event.x \n y = event.y\n window['cursor'] = utils.CURSORS['move_item']", "def mouse_position_event(self, x: int, y: int):\n pass", "def mouseMoveEvent(self, event):\n self.end = event.pos()\n self.update()", "def handle_mouse(obj, event):\n if event:\n x = event.globalX()\n y = event.globalY()\n x_w = obj.offset.x()\n y_w = obj.offset.y()\n obj.move(x - x_w, y - y_w)", "def _onmove(self, event):", "def on_mouse_move(self, event):\n self.mouse = [event.xdata, event.ydata]\n\n # Update pan view on mouse move\n if self.panning is True:\n for a in self.pan_axes:\n a.drag_pan(1, event.key, event.x, event.y)\n\n # Async re-draw (redraws only on thread idle state, uses timer on backend)\n self.canvas.draw_idle()\n\n ##### Temporary place-holder for cached update #####\n self.update_screen_request.emit([0, 0, 0, 0, 0])", "def on_mouse_move(self, win, xpos, ypos):\n old = self.mouse\n self.mouse = (xpos, glfw.get_window_size(win)[1] - ypos)\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_LEFT):\n self.drag(old, self.mouse, glfw.get_window_size(win))\n if 
glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_RIGHT):\n self.pan(old, self.mouse)", "def on_mouse_move(self, win, xpos, ypos):\n old = self.mouse\n self.mouse = (xpos, glfw.get_window_size(win)[1] - ypos)\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_LEFT):\n self.drag(old, self.mouse, glfw.get_window_size(win))\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_RIGHT):\n self.pan(old, self.mouse)", "def _hover(self, event):\n if self.ignore(event):\n return\n\n if self._active_handle is not None or not self._selection_completed:\n # Do nothing if button is pressed and a handle is active, which may\n # occur with drag_from_anywhere=True.\n # Do nothing if selection is not completed, which occurs when\n # a selector has been cleared\n return\n\n _, e_dist = self._edge_handles.closest(event.x, event.y)\n self._set_cursor(e_dist <= self.grab_range)", "def OnMouseMotion(self, evt):\n if evt.Dragging() and evt.LeftIsDown():\n self.lastx, self.lasty = self.x, self.y\n self.x, self.y = evt.GetPosition()\n self.Refresh(False)", "def update(self):\n self.mousePos = pygame.mouse.get_pos()\n self.update_button_hover_status()", "def setMousePositionCallback(self, callback):\n\n self.mouse_position_callback = callback", "def mouseDragged(self, point, delta):\n pass", "def on_mouse_move(self, event):\n if event.is_dragging and event.buttons[0] == 1:\n x0, y0 = event.last_event.pos[0], event.last_event.pos[1]\n x1, y1 = event.pos[0], event.pos[1]\n X0, Y0, Z0 = self.pixel_to_coords(float(x0), float(y0))\n X1, Y1, Z1 = self.pixel_to_coords(float(x1), float(y1))\n self.translate_center(X1 - X0, Y1 - Y0, Z1 - Z0)", "def mouseMoveEvent(self, event):\n # super(PlotWidget, self).mouseMoveEvent(event)\n event.accept()", "def _on_canvas_mouse(self, event):\n if event.GetEventType() in [wx.wxEVT_MOTION, wx.wxEVT_LEFT_DOWN, \n wx.wxEVT_LEFT_UP, wx.wxEVT_MOTION|wx.wxEVT_LEFT_DOWN]:\n new_event = wx.MouseEvent(event.GetEventType())\n pos = self.tc.ScreenToClient(wx.GetMousePosition())\n new_event.SetPosition(pos)\n new_event.Skip()\n self.tc.GetEventHandler().ProcessEvent(new_event)", "def on_mouse_motion(self, x, y, delta_x, delta_y):\n\n print(x)\n print(y)\n print(delta_x)\n print(delta_y)\n\n\n #self.manage_crosshair()\n \n \n\n #self.crosshair_sprite.center_x += delta_x\n #self.crosshair_sprite.center_y += delta_y\n\n\n self.crosshair_relative_xoffset += delta_x\n self.crosshair_relative_yoffset += delta_y", "def mouse_move(self, pos):\n if (self.setup_type == \"position\"):\n x, y = pos\n self.canvas.move(x, y)", "def mouseReleaseEvent(self, event):\n width = self.frameGeometry().width()\n height = self.frameGeometry().height()\n cursor = QtGui.QCursor()\n new_pos = self.mapFromGlobal(cursor.pos())\n x = new_pos.x()\n y = new_pos.y()\n self.__selector_y = y/float(height) # normalized value of the y position\n \tself.__selector_x = x/float(width) #normalised value of the x position\n self.updatePixelColor()\n self.repaint()", "def handle_mouse_press(self, event):", "def update(self):\n # Get the current mouse position. 
This returns the position\n # as a list of two numbers.\n pos = pygame.mouse.get_pos()\n\n # Set the player x position to the mouse x position\n self.rect.x = pos[0]", "def motion_notify_cb(self, darea, event):\n if event.is_hint:\n x, y, state = event.window.get_pointer()\n else:\n x = event.x\n y = event.y\n state = event.state\n if state & gdk.BUTTON1_MASK or state & gdk.BUTTON3_MASK:\n self.draw_pointer(self.cr, x, y)\n self.queue_draw()\n self.oldx, self.oldy = x, y\n rel_x, rel_y = self.absolute_to_relative(x, y)\n self.emit('dnd-value', rel_x, rel_y)\n return True", "def mousePressEvent(self, event):\n self.begin = event.pos()\n self.end = event.pos()\n self.update()", "def moveCursor(self, QAbstractItemView_CursorAction, Qt_KeyboardModifiers): # real signature unknown; restored from __doc__\r\n pass", "def update(self):\n # Get the current mouse position. This returns the position\n # as a list of two numbers.\n pos = pygame.mouse.get_pos()\n \n # Set the player x position to the mouse x position\n self.rect.x = pos[0]", "def append_cursor_pos_callback(self, callbacked, *args, **kwargs):\n pass", "def append_cursor_enter_callback(self):", "def _(event):\n system_line.cursor_left()", "def emitMouseMoveEvent(self, location, currentKbKey, draggedItems, items):\n # emit the mouseMoveEvent signal\n self.mouseMove.emit(self, location, currentKbKey, draggedItems, items)", "def watchCursor(self, cursor):\n cursor.observers.append(self._cursorCallback)", "def handle_mousemotion(self, change):\r\n if widget.Widget.handle_mousemotion(self, change):\r\n app.App.handle_mousemotion(self, change)\r\n return True\r\n return False", "def handleMousePositionCallback(self, xy):\n\n if self.mouse_position_callback:\n (x, y) = xy\n posn = self.convertView2Geo(x, y)\n self.mouse_position_callback(posn)", "def cursor_cb(self, scene_pos):\n if self.is_within_image(scene_pos):\n pos = self.vb_image.mapSceneToView(scene_pos)\n\n self.cursor_v.setPos(pos)\n self.cursor_h.setPos(pos)\n self.cursor_text.setText(\n \"({:.1f}, {:.1f}) px\".format(pos.x(), pos.y()))\n if self._mark is not None:\n delta = pos - self._mark\n self.cursor_delta.setPos(pos)\n self.cursor_delta.setText(\n \"Δ = ({:.1f}, {:.1f}) μm\".format(\n self.px_to_um(delta.x()), self.px_to_um(delta.y())))\n\n self.cursor_v.show()\n self.cursor_h.show()\n self.cursor_text.show()\n self.cursor_delta.show()\n\n elif self.is_within_zoom(scene_pos):\n pos = self.vb_zoom.mapSceneToView(scene_pos)\n\n if self._up is not None:\n self.zoom_text.setPos(pos)\n self.zoom_text.setText(\"I = {:.0f}\".format(\n self.zoom.image[int(pos.x()), int(pos.y())]))\n self.zoom_text.show()\n\n elif self.is_within_residuals(scene_pos):\n pos = self.vb_residuals.mapSceneToView(scene_pos)\n\n if self._up is not None:\n self.residuals_text.setPos(pos)\n self.residuals_text.setText(\"r = {:.2f}\".format(\n self.residuals.image[int(pos.x()),int(pos.y())]))\n self.residuals_text.show()\n\n else:\n for w in [self.cursor_v, self.cursor_h,\n self.cursor_text, self.cursor_delta,\n self.zoom_text, self.residuals_text]:\n w.hide()", "def mouseMoveEvent(self, event):\n if self._ignore_mouse_events:\n event.ignore()\n return\n\n event.accept()\n\n if self._selection_mode != SelectionMode.NONE:\n x = event.x()\n y = event.y()\n xdiff = float(x - self._selection_position_start[0])\n ydiff = float(y - self._selection_position_start[1])\n if abs(xdiff) < 0.0001:\n xdiff = 1\n if abs(ydiff) < 0.0001:\n ydiff = 1\n xoff = float(self._selection_position_start[0]) / xdiff + 0.5\n yoff = 
float(self._selection_position_start[1]) / ydiff + 0.5\n self._addUpdateSelectionBox(xdiff, ydiff, xoff, yoff)\n\n elif self._use_zinc_mouse_event_handling:\n scene_input = self._sceneviewer.createSceneviewerinput()\n scene_input.setPosition(event.x(), event.y())\n scene_input.setEventType(Sceneviewerinput.EVENT_TYPE_MOTION_NOTIFY)\n if event.type() == QtCore.QEvent.Leave:\n scene_input.setPosition(-1, -1)\n self._sceneviewer.processSceneviewerinput(scene_input)", "def mouseMoveEvent (self, event):\n self.itemMoved = True\n super(DiagramItem, self).mouseMoveEvent(event)", "def mouseDragged():\n if mousePressed:\n mousePressed()", "def on_mouse_movement(self, event: wx.MouseEvent) -> None:\n if not event.Dragging():\n self._drag_start_pos = None\n return\n # self.CaptureMouse()\n if self._drag_start_pos is None:\n self._drag_start_pos = event.GetPosition()\n else:\n current_pos = event.GetPosition()\n change = self._drag_start_pos - current_pos\n self.SetPosition(self.GetPosition() - change)", "def on_mouse_movement(self, event: wx.MouseEvent) -> None:\n if not event.Dragging():\n self._drag_start_pos = None\n return\n # self.CaptureMouse()\n if self._drag_start_pos is None:\n self._drag_start_pos = event.GetPosition()\n else:\n current_pos = event.GetPosition()\n change = self._drag_start_pos - current_pos\n self.SetPosition(self.GetPosition() - change)", "def __isMovingCursor(self):\n return self.__currentCursor == wx.CURSOR_HAND", "def hoverMoveEvent(self, moveEvent):\n if self.isSelected():\n handle = None\n if self.handle.contains(moveEvent.pos()):\n handle = \"k\" # something not None\n cursor = Qt.ArrowCursor if handle is None else Qt.SizeFDiagCursor\n self.setCursor(cursor)\n super().hoverMoveEvent(moveEvent)", "def OnMouse(self, event):\n\n self.Refresh()\n event.Skip()", "def mousePositionRaw(self):", "def mousePositionRaw(self):", "def _onmove(self, event):\n\n # self._prev are deprecated but we still need to maintain it\n self._prev = self._get_data(event)\n\n v = event.xdata if self.direction == 'horizontal' else event.ydata\n if self.direction == 'horizontal':\n vpress = self._eventpress.xdata\n else:\n vpress = self._eventpress.ydata\n\n # move existing span\n # When \"dragging from anywhere\", `self._active_handle` is set to 'C'\n # (match notation used in the RectangleSelector)\n if self._active_handle == 'C' and self._extents_on_press is not None:\n vmin, vmax = self._extents_on_press\n dv = v - vpress\n vmin += dv\n vmax += dv\n\n # resize an existing shape\n elif self._active_handle and self._active_handle != 'C':\n vmin, vmax = self._extents_on_press\n if self._active_handle == 'min':\n vmin = v\n else:\n vmax = v\n # new shape\n else:\n # Don't create a new span if there is already one when\n # ignore_event_outside=True\n if self.ignore_event_outside and self._selection_completed:\n return\n vmin, vmax = vpress, v\n if vmin > vmax:\n vmin, vmax = vmax, vmin\n\n self.extents = vmin, vmax\n\n if self.onmove_callback is not None:\n self.onmove_callback(vmin, vmax)\n\n return False", "def update(self):\n\n\t\tself.x = games.mouse.x\n\t\tself.y = games.mouse.y\n\t\tself.check_collide()", "def handle_mouse(self, x, y):\n self.last_x = x\n self.last_y = y\n if self.min_x is not None:\n self.last_x = max(self.last_x, self.min_x)\n if self.max_x is not None:\n self.last_x = min(self.last_x, self.max_x)\n # we are in region mode\n if self.region_id is not None:\n start = self.last_x\n end = self.region_edge\n self.region_model.adjust_region(self.region_id, start, end)\n return 
False", "def on_mousemove(event, x, y, flags, userparam):\n global mouse_pos\n global source_img, source_msk, display_img\n global DRAW_MODE\n\n if event == cv.EVENT_MOUSEMOVE:\n mouse_pos = (x, y)\n\n if flags & cv.EVENT_FLAG_SHIFTKEY:\n current_label = LABEL_BACKGROUND\n else:\n current_label = CURRENT_LABEL\n\n if DRAW_MODE == \"point\":\n if flags & cv.EVENT_FLAG_CTRLKEY:\n cv.circle(source_msk, (x, y), SHAPE_SIZE, current_label, -1)\n elif DRAW_MODE == \"line\":\n # line drawing is done in the line-mode keypress handler (keydown())\n pass", "def ev_mousemotion(self, event: tcod.event.MouseMotion) -> T | None:", "def on_mouse_motion(self, x, y, dx, dy):\n if self.exclusive:\n self.gamestatemanager.peek().on_mouse_motion(x, y, dx, dy)", "def on_mouse_release(self, x, y, button):\n pass", "def mouseMoveEvent(self, e):\r\n \r\n self.label.setText('mouseMoveEvent')", "def handle_mouse_data(data):\n pass", "def cursorPositionChanged(self):\r\n cursor = self.text_area.textCursor()\r\n line_no = cursor.blockNumber()\r\n col_no = cursor.columnNumber()\r\n self.statusBar.showMessage(\"Line \"+str(line_no)+\", Column \"+str(col_no))", "def OnSetCursor(self, event):\r\n \r\n # determine cursor\r\n part = self.HitTest(event.GetX(), event.GetY())\r\n cursor = wx.NullCursor\r\n\r\n if part:\r\n if part.type in [AuiDockUIPart.typeDockSizer, AuiDockUIPart.typePaneSizer]:\r\n\r\n if not self.CheckMovableSizer(part):\r\n return\r\n \r\n if part.orientation == wx.VERTICAL:\r\n cursor = wx.StockCursor(wx.CURSOR_SIZEWE)\r\n else:\r\n cursor = wx.StockCursor(wx.CURSOR_SIZENS)\r\n \r\n elif part.type == AuiDockUIPart.typeGripper:\r\n cursor = wx.StockCursor(wx.CURSOR_SIZING)\r\n\r\n event.SetCursor(cursor)", "def move( self, event ):\n self.lastMotion = time()\n if self.follow == False: # If the follow flag is not set, motion within the widget will make the ToolTip dissapear\n self.withdraw()\n self.visible = 1\n self.geometry( '+%i+%i' % ( event.x_root+10, event.y_root+10 ) ) # Offset the ToolTip 10x10 pixes southwest of the pointer\n try:\n self.msgVar.set( self.msgFunc() ) # Try to call the message function. Will not change the message if the message function is None or the message function fails\n except:\n pass\n self.after( int( self.delay * 1000 ), self.show )", "def move(self, event):\r\n self.lastMotion = time()\r\n # If the follow flag is not set, motion within the\r\n # widget will make the ToolTip disappear\r\n #\r\n if self.follow is False:\r\n self.withdraw()\r\n self.visible = 1\r\n\r\n # Offset the ToolTip 10x10 pixes southwest of the pointer\r\n self.geometry('+%i+%i' % (event.x_root+20, event.y_root-10))\r\n try:\r\n # Try to call the message function. Will not change\r\n # the message if the message function is None or\r\n # the message function fails\r\n self.msgVar.set(self.msgFunc())\r\n except:\r\n pass\r\n self.after(int(self.delay * 1000), self.show)", "def move(self, event):\r\n self.lastMotion = time()\r\n # If the follow flag is not set, motion within the\r\n # widget will make the ToolTip disappear\r\n #\r\n if self.follow is False:\r\n self.withdraw()\r\n self.visible = 1\r\n\r\n # Offset the ToolTip 10x10 pixes southwest of the pointer\r\n self.geometry('+%i+%i' % (event.x_root+20, event.y_root-10))\r\n try:\r\n # Try to call the message function. 
Will not change\r\n # the message if the message function is None or\r\n # the message function fails\r\n self.msgVar.set(self.msgFunc())\r\n except:\r\n pass\r\n self.after(int(self.delay * 1000), self.show)", "def OnMoveEvent(self, event):\r\n\r\n win_rect = self.GetRect()\r\n\r\n if win_rect == self._last_rect:\r\n return\r\n\r\n # skip the first move event\r\n if self._last_rect.IsEmpty(): \r\n self._last_rect = wx.Rect(*win_rect)\r\n return\r\n \r\n # skip if moving too fast to avoid massive redraws and\r\n # jumping hint windows\r\n if abs(win_rect.x - self._last_rect.x) > 3 or abs(win_rect.y - self._last_rect.y) > 3:\r\n self._last3_rect = wx.Rect(*self._last2_rect)\r\n self._last2_rect = wx.Rect(*self._last_rect)\r\n self._last_rect = wx.Rect(*win_rect)\r\n return\r\n\r\n # prevent frame redocking during resize\r\n if self._last_rect.GetSize() != win_rect.GetSize():\r\n self._last3_rect = wx.Rect(*self._last2_rect)\r\n self._last2_rect = wx.Rect(*self._last_rect)\r\n self._last_rect = wx.Rect(*win_rect)\r\n return\r\n\r\n self._last3_rect = wx.Rect(*self._last2_rect)\r\n self._last2_rect = wx.Rect(*self._last_rect)\r\n self._last_rect = wx.Rect(*win_rect)\r\n\r\n if _VERSION_STRING < \"2.9\":\r\n leftDown = wx.GetMouseState().LeftDown()\r\n else:\r\n leftDown = wx.GetMouseState().LeftIsDown()\r\n\r\n if not leftDown:\r\n return\r\n\r\n if not self._moving: \r\n self.OnMoveStart(event)\r\n self._moving = True\r\n\r\n if self._last3_rect.IsEmpty():\r\n return\r\n\r\n self.OnMoving(event)", "def update(self):\r\n self.x = games.mouse.x\r\n self.y = games.mouse.y\r\n self.check_collide()", "def ev_MOUSEUP(self, event):", "def handle_event(self, event):\n if event.type != MOUSEMOTION:\n return\n self.model.slider.left = event.pos[0]", "def mouse_middle_down(self):\n pass", "def update(self):\n self.x = games.mouse.x\n self.y = games.mouse.y\n self.check_collide()", "def cursor_placement_thread(self):\r\n while self.editing:\r\n # pylint: disable=W0212\r\n with goxapi.Signal._lock:\r\n curses.curs_set(2)\r\n self.win.touchwin()\r\n self.win.refresh()\r\n time.sleep(0.1)\r\n curses.curs_set(0)", "def mouseMoveEvent(self, event):\n if self.view_state.tracking == TrackingMode.FREE and event.buttons() == QtCore.Qt.LeftButton:\n # Calculate the change in mouse position.\n new_mouse_pos = np.array([event.x(), event.y()])\n mouse_delta = new_mouse_pos - self.view_state.mouse\n\n # Add this to the view centre.\n self.view_state.centre = self.view_state.centre - mouse_delta * (1 / self.view_state.scale)\n self.view_state.mouse = new_mouse_pos", "def _updateOnMouseState(self, state):\n x = state.X.abs\n y = state.Y.abs\n \n mscale = self.mouse_icon.getScale() \n \n if (x + mscale[0] + self.mouse_offset) > render_engine.Window.width:\n x = x - mscale[0] - 10\n else:\n x += self.mouse_offset\n \n if (y + mscale[1] + self.mouse_offset) > render_engine.Window.height:\n y = y - mscale[1] - 10\n else:\n y += self.mouse_offset\n \n self.mouse_icon.setPosition((x, y))", "def move(self, event):\n self.lastMotion = time()\n # If the follow flag is not set, motion within the\n # widget will make the ToolTip disappear\n #\n if self.follow is False:\n self.withdraw()\n self.visible = 1\n\n # Offset the ToolTip 10x10 pixes southwest of the pointer\n self.geometry('+%i+%i' % (event.x_root+20, event.y_root-10))\n try:\n # Try to call the message function. 
Will not change\n # the message if the message function is None or\n # the message function fails\n self.msgVar.set(self.msgFunc())\n except:\n pass\n self.after(int(self.delay * 1000), self.show)", "def mouseMoveEvent(self, event):\n self.setCursor(qtc.Qt.SizeVerCursor)\n\n multiplier = self.singleStep()\n valueOffset = ((self.mouseStartPosY - event.pos().y()) * multiplier)\n value = self.startValue + valueOffset\n\n if value != self.current_value:\n self.current_value = value\n self.setValue(self.current_value)", "def change_cursor(self, cursor):\n self.setCursor(cursor)", "def on_mouse_motion(self, x: float, y: float, dx: float, dy: float):\n if self.player_enabled:\n super().on_mouse_motion(x, y, dx, dy)", "def setCursor(self, _name = None):\n\n\t\t_before = self._cursor\n\t\tself._cursor = _name\n\t\tif _before != _name:\n\t\t\tself._updated.append(tuple(self.rect))\n\t\t\tself.updateCursor()\n\t\t\tself._updated.append(tuple(self.rect))", "def ev_windowmoved(self, event: WindowMoved) -> None:", "def hoverMoveEvent(self, event):\n activeTool = self._activeTool()\n toolMethodName = str(activeTool) + \"HoverMove\"\n if hasattr(self, toolMethodName):\n getattr(self, toolMethodName)(event.pos())", "def do_motion_notify_event(self, event):\n\t\t# if this is a hint, then let's get all the necessary \n\t\t# information, if not it's all we need.\n\t\tif event.is_hint:\n\t\t\tx, y, state = event.window.get_pointer()\n\t\telse:\n\t\t\tx = event.x\n\t\t\ty = event.y\n\t\t\tstate = event.state\n\t\t\n\t\t# Update box underneath cursor, for tooltip\n\t\tix, iy = icoords = self.widget2imgcoords(x,y)\n\t\tif __debug__: \n\t\t\tsys.stdout.write(repr((x,y))+' '+repr(icoords)+'\\r')\n\t\t\tsys.stdout.flush()\n\t\t# Update the box cache\n\t\tif self._update_boxes(*icoords):\n\t\t\t# Cache changed, update tooltips\n\t\t\tself.set_tooltip_text(self.get_tooltip_text(self._boxes_under_cursor)) #XXX: Why is this needed to get the tooltip to query?\n\t\t\tself.trigger_tooltip_query()\n\t\t\n\t\tif self.mode == self.INSERT and self._insert_start_coords is not None and state & gtk.gdk.BUTTON1_MASK:\n\t\t\t# Adjust temporary box (for use in insertion)\n\t\t\tnr = pt2rect(icoords, self._insert_start_coords)\n\t\t\tredraw = nr.union(self._temporary_box.rect)\n\t\t\tself._temporary_box.rect = nr\n\t\t\t#self.queue_draw_area(*self.rect2widget(redraw))\n\t\t\tself.queue_draw() #REDRAW: If we implement partial redraw, fix this\n\t\t\t#XXX: Should we draw immediately instead of queueing one?\n\t\t\tself.emit('insert-box-changed', self._temporary_box)\n\t\telif self._box_is_resizing is not None and state & gtk.gdk.BUTTON1_MASK:\n\t\t\t# Update the size of the box we're resizing\n\t\t\td = self._box_is_resizing_dir\n\t\t\tb = self._box_is_resizing\n\t\t\tr = frect(*b.rect)\n\t\t\tobox = frect(*b.rect)\n\t\t\tif 'W' in d:\n\t\t\t\tr.x = ix\n\t\t\t\tr.width = self._box_resize_east - r.x # Use r.x because it's pre-rounded\n\t\t\telif 'E' in d:\n\t\t\t\tr.width = (ix - r.x)\n\t\t\tif 'N' in d:\n\t\t\t\tr.y = iy\n\t\t\t\tr.height = self._box_resize_south - r.y # Use r.y because it's pre-rounded\n\t\t\telif 'S' in d:\n\t\t\t\tr.height = (iy - r.y)\n\t\t\tb.rect = r\n#\t\t\tif __debug__: print \"Resizing: %r (%r,%r) (%r,%r) %r->%r\" % (d, x,y, ix,iy, list(obox), list(b.rect))\n\t\t\t#self.queue_draw_area(*self.rect2widget(obox.union(b.rect)))\n\t\t\tself.queue_draw() #REDRAW: If we implement partial redraw, fix this\n\t\t\t#XXX: Should we draw immediately instead of queueing one?\n\t\telif not state & (gtk.gdk.BUTTON1_MASK | 
gtk.gdk.BUTTON2_MASK | \n\t\t\t\tgtk.gdk.BUTTON3_MASK | gtk.gdk.BUTTON4_MASK | \n\t\t\t\tgtk.gdk.BUTTON5_MASK): # Hover\n\t\t\t# Update the current cursor icon\n\t\t\tboxes = tuple(self.find_boxes_coord_near(*icoords)) #FIXME: Use cache\n\t\t\tif len(boxes):\n\t\t\t\t#if __debug__: print \"Nearby Boxes: %r\" % (boxes,)\n\t\t\t\tbox, dir = boxes[0]\n\t\t\t\tself._box_may_resize = box\n\t\t\t\tself._box_may_resize_dir = dir\n\t\t\t\tself.window.set_cursor(gtk.gdk.Cursor(self.window.get_display(), self.RESIZE_CURSORS[dir]))\n\t\t\telse:\n\t\t\t\tself._box_may_resize = self._box_may_resize_dir = None\n\t\t\t\tself.window.set_cursor(None)", "def ev_MOUSEDOWN(self, event):", "def on_eventBox_motion_notify_event(self, widget, data=None):\n\n if self.enabled == True:\n found = False\n for m in self.map:\n x1, y1, x2, y2, xpage, xpart = m\n if x1 <= data.x <= x2 and y1 <= data.y <= y2:\n found = True\n break\n if found == True:\n widget.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.HAND1))\n else:\n widget.window.set_cursor(None)", "def mouseMoveEvent(self, e):\n if e.pos().y() == self._offset:\n return\n adder = (self._offset - e.y())\n self.deltacount += adder\n adder *= (abs(adder) * 0.01)\n f = self._max[0] - self._min[0]\n self._state[0] = min(self._max[0], max(self._min[0], self._state[0] + (adder * f / 1000.0)))\n self._param.update()\n QtGui.QCursor.setPos(self._origo)", "def update(self):\n self.x = games.mouse.x\n #self.y = games.mouse.y\n self.check_collide()", "def mouseMoveEvent(self, e):\n if e.pos().y() == self.offset:\n return\n adder = (self.offset - e.y())\n self.deltacount += adder\n #adder *= self.accelerator\n adder *= (abs(adder) * 0.01)\n #self._state[0] = max(self._min[0], min(self._max[0], self._state[0] + adder))\n QtGui.qApp.emit( QtCore.SIGNAL(\"deltaChanged\"), self, adder)\n #self._param.update()\n QtGui.QCursor.setPos(self.origo)", "def on_mouse_motion(self, x, y, dx, dy):\n # hazlo aparecer donde este mi jugador en el mouse\n self.player_sprite.center_x = x\n self.player_sprite.center_y = y", "def on_mouse_leave (self, event):\n\n\t\tif not self.clicked:\n\n\t\t\tself.cursor_position = [-1,-1]\n\t\t\tself.redraw_canvas()\n\t\t\tself.hide_tip()#self.timer1 = gobject.timeout_add(2000, self.hide_tip)", "def onMove(self, event):\n\n # get current mouse position\n (x, y) = event.GetPositionTuple()\n\n self.handleMousePositionCallback((x, y))\n\n if event.Dragging() and event.LeftIsDown():\n # are we doing box select?\n if self.is_box_select:\n # set select box point 2 at mouse position\n (self.sbox_w, self.sbox_h) = (x - self.sbox_1_x,\n y - self.sbox_1_y)\n elif not self.last_drag_x is None:\n # no, just a map drag\n self.was_dragging = True\n dx = self.last_drag_x - x\n dy = self.last_drag_y - y\n\n # move the map in the view\n self.view_offset_x += dx\n self.view_offset_y += dy\n\n # limit drag at edges of map\n if self.map_width > self.view_width:\n # if map > view, don't allow edge to show background\n if self.view_offset_x < 0:\n self.view_offset_x = 0\n elif self.view_offset_x > self.max_x_offset:\n self.view_offset_x = self.max_x_offset\n else:\n # else map < view, centre X\n self.view_offset_x = (self.map_width - self.view_width)/2\n\n if self.map_height > self.view_height:\n # if map > view, don't allow edge to show background\n if self.view_offset_y < 0:\n self.view_offset_y = 0\n elif self.view_offset_y > self.max_y_offset:\n self.view_offset_y = self.max_y_offset\n else:\n # else map < view, centre Y\n self.view_offset_y = (self.map_height - 
self.view_height)/2\n\n # adjust remembered X,Y\n self.last_drag_x = x\n self.last_drag_y = y\n\n self.recalc_view_lonlat_limits()\n\n # redraw client area\n self.drawTilesLayers()" ]
[ "0.7342505", "0.7273285", "0.7090941", "0.70634186", "0.7034477", "0.69676363", "0.6943248", "0.68984425", "0.669294", "0.659831", "0.65874934", "0.6549803", "0.6516948", "0.6500888", "0.6497365", "0.6471458", "0.6461945", "0.64583486", "0.6448528", "0.6439387", "0.63827825", "0.6339865", "0.63394064", "0.63264203", "0.63040274", "0.63040274", "0.6294191", "0.6292217", "0.62897956", "0.6275114", "0.6256434", "0.62540966", "0.62476194", "0.62473583", "0.62395257", "0.62332404", "0.62240326", "0.6189821", "0.6180184", "0.6166513", "0.6152779", "0.6139905", "0.613767", "0.6130255", "0.6123355", "0.61155677", "0.6115425", "0.6094332", "0.6083725", "0.6083417", "0.6073236", "0.6066993", "0.6023811", "0.6021578", "0.60187143", "0.60187143", "0.6018018", "0.5983891", "0.5976568", "0.5976197", "0.5976197", "0.5974011", "0.5951451", "0.5949799", "0.5949049", "0.5948798", "0.5945873", "0.59444124", "0.59394956", "0.592916", "0.59259427", "0.59179217", "0.59129095", "0.59128374", "0.59128374", "0.5910285", "0.5909145", "0.59012675", "0.5896457", "0.5895733", "0.5894689", "0.58929735", "0.5870668", "0.58639693", "0.5860507", "0.5858588", "0.58515155", "0.5846347", "0.5830622", "0.58124393", "0.58104986", "0.5801034", "0.579941", "0.57943857", "0.57862514", "0.57709384", "0.5768258", "0.5768086", "0.5767833", "0.5767529" ]
0.64577484
18
Called when a mouse is turned in the widget (and maybe for finger scrolling in the trackpad). Adjust method signature as appropriate for callback.
def scroll_event(self, widget, event): x, y = event.x, event.y num_degrees = 0 direction = 0 # x, y = coordinates of mouse self.last_win_x, self.last_win_y = x, y # calculate number of degrees of scroll and direction of scroll # both floats in the 0-359.999 range # num_degrees = # direction = self.logger.debug("scroll deg=%f direction=%f" % ( num_degrees, direction)) data_x, data_y = self.check_cursor_location() return self.make_ui_callback('scroll', direction, num_degrees, data_x, data_y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_mouse_press(self, event):", "def handle_mouse(self, x, y):\n pass", "def on_mouse_press(self, event):\n self.on_mouse_wheel(event)", "def on_mouse_press(self, x, y, button):\n\n pass", "def mouseDragged():\n if mousePressed:\n mousePressed()", "def on_mouse_motion(self, x, y, delta_x, delta_y):\r\n pass", "def ev_MOUSEUP(self, event):", "def ev_MOUSEDOWN(self, event):", "def handle_mouse(self, x, y):\n self.x = x\n self.y = y\n global _pending_handle_mouse\n if not _pending_handle_mouse:\n _pending_handle_mouse = True\n if self.fig.document is not None:\n self.fig.document.add_timeout_callback(self.handle_mouse_callback, 100)\n else:\n self.handle_mouse_callback()", "def on_mouse_release(self, x, y, button):\n pass", "def handle_mouse_data(data):\n pass", "def mouse_left_up(self):\n pass", "def handle_mousemotion(self, change):\r\n if widget.Widget.handle_mousemotion(self, change):\r\n app.App.handle_mousemotion(self, change)\r\n return True\r\n return False", "def on_mouse_motion(self, x, y, delta_x, delta_y):\n \n pass", "def mouseEvent(self, widget, event):\n # Zoom in and out with the middle and right mouse buttons\n if event.type == gtk.gdk.BUTTON_PRESS:\n if event.button == 2:\n self.ruler.zoom(2)\n elif event.button == 3:\n self.ruler.zoom(0.5)\n\n # Use the ruler widget's coordinate system to update the time cursor\n x, y, mask = self.ruler.canvas.window.get_pointer()\n scroll = self.ruler.canvas.get_scroll_offsets()[0]\n t = (x + scroll) / self.ruler.scale\n self.cursor.value = t\n\n # If the mouse button is down, try to find the canvas item under the cursor.\n # We use the row's collision detection tree for this, for the same reason\n # we use it for everything else: gnome-canvas' built-in collision detection\n # works poorly on very small items.\n if mask & gtk.gdk.BUTTON1_MASK:\n\n # Search every row in every canvas\n for obj in self.canvasList:\n y = obj.canvas.get_pointer()[1]\n for row in obj.rows:\n if y >= row.top and y <= row.bottom:\n\n # Give a few pixels of slack on either side\n slack = 2.0 / self.ruler.scale\n cursorInterval = (t - slack, t + slack)\n\n # The mouse is in this row. 
Use the row's collision detection\n # to find a nearby item.\n tag = row.intervalOccupied(*cursorInterval)\n if tag and tag[0] != self.hilightWidget:\n self.notifyHilightChanged(tag[0].dataTransaction)\n self.setHilightWidget(tag[0])\n return False\n return False", "def ev_MOUSEMOTION(self, event):", "def ev_mousemotion(self, event: MouseMotion) -> None:", "def on_mouse_press(self, x, y, button, key_modifiers):\r\n pass", "def onMouseDispatcher(self, event):\n\n if self.ui.checkEditNone.isChecked():\n self.onMouseNormal(event)\n elif self.ui.checkEditBuildPoints.isChecked():\n self.onMouseEdit(event)\n elif self.ui.checkEditHorizonMask.isChecked():\n self.onMouseEdit(event)\n elif self.ui.checkPolarAlignment.isChecked():\n self.onMouseStar(event)", "def ev_mousebuttondown(self, event: MouseButtonDown) -> None:", "def mouse_press_event(self, x: int, y: int, button: int):\n pass", "def mouse_wiggle(self, enabled):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")", "def mouse_enter(self):\n pass", "def setupEventHooks(self):\n # handle mouse clicks\n self.img.scene().sigMouseClicked.connect(self.handleClick)\n # handle mouse movement\n # Use signalproxy for ratelimiting\n sig = self.img.scene().sigMouseMoved\n self.mvProxy = pqg.SignalProxy(signal=sig, rateLimit=60, slot=self.handleMove)", "def HandButton(self, event):\n pass", "def mousePosition(self):", "def mouse_left_down(self):\n pass", "def mousePressEvent(self, mouse_event):\r\n return", "def ToggleDrawingTools(self, event):\n pass", "def mouse_in(self, event):\r\n self['background'] = '#E5F3FF'", "def _on_canvas_mouse(self, event):\n if event.GetEventType() in [wx.wxEVT_MOTION, wx.wxEVT_LEFT_DOWN, \n wx.wxEVT_LEFT_UP, wx.wxEVT_MOTION|wx.wxEVT_LEFT_DOWN]:\n new_event = wx.MouseEvent(event.GetEventType())\n pos = self.tc.ScreenToClient(wx.GetMousePosition())\n new_event.SetPosition(pos)\n new_event.Skip()\n self.tc.GetEventHandler().ProcessEvent(new_event)", "def _on_mouse(self, event):\n x, y = event.GetPosition()\n if self._drag_mode == DepthCanvas.SASH_DRAG_NONE: \n self._canvas_hit_test(x, y) \n if event.LeftDown():\n self.start_dragging(y)\n elif self._drag_mode == DepthCanvas.SASH_DRAG_DRAGGING:\n if event.LeftIsDown():\n self.drag_it(y) \n elif event.LeftUp():\n self.end_dragging()\n event.Skip()", "def on_mouse_click(self, e):\n if 'Control' in e.modifiers:\n # Get mouse position in NDC.\n box_id, _ = self.canvas.stacked.box_map(e.pos)\n channel_id = np.nonzero(self.channel_y_ranks == box_id)[0]\n # Find the spike and cluster closest to the mouse.\n db = self.data_bounds\n # Get the information about the displayed spikes.\n wt = [(t, s, c, ch) for t, s, c, ch in self._waveform_times if channel_id in ch]\n if not wt:\n return\n # Get the time coordinate of the mouse position.\n mouse_pos = self.canvas.panzoom.window_to_ndc(e.pos)\n mouse_time = Range(NDC, db).apply(mouse_pos)[0][0]\n # Get the closest spike id.\n times, spike_ids, spike_clusters, channel_ids = zip(*wt)\n i = np.argmin(np.abs(np.array(times) - mouse_time))\n # Raise the select_spike event.\n spike_id = spike_ids[i]\n cluster_id = spike_clusters[i]\n emit('select_spike', self, channel_id=channel_id,\n spike_id=spike_id, cluster_id=cluster_id)\n\n if 'Shift' in e.modifiers:\n # Get mouse position in NDC.\n box_id, _ = self.canvas.stacked.box_map(e.pos)\n channel_id = int(np.nonzero(self.channel_y_ranks == box_id)[0][0])\n emit('select_channel', self, channel_id=channel_id, button=e.button)", "def mouse_middle_down(self):\n pass", "def 
on_mouse_motion(self, x: float, y: float, dx: float, dy: float):\n if self.player_enabled:\n super().on_mouse_motion(x, y, dx, dy)", "def emit_mouse(self, report):\n for name, attr in self.layout.mouse.items():\n # If the attr is a tuple like (left_analog_y, \"-\")\n # then set the attr to just be the first item\n attr, modifier = attr\n\n if attr.startswith(\"trackpad_touch\"):\n active_attr = attr[:16] + \"active\"\n if not getattr(report, active_attr):\n self.mouse_pos.pop(name, None)\n continue\n\n pos = getattr(report, attr)\n if name not in self.mouse_pos:\n self.mouse_pos[name] = pos\n\n sensitivity = 0.5\n self.mouse_rel[name] += (pos - self.mouse_pos[name]) * sensitivity\n self.mouse_pos[name] = pos\n\n elif \"analog\" in attr:\n pos = getattr(report, attr)\n if (pos > (128 + self.mouse_analog_deadzone)\n or pos < (128 - self.mouse_analog_deadzone)):\n accel = (pos - 128) / 10\n else:\n continue\n\n # If a minus modifier has been given then minus the acceleration\n # to invert the direction.\n if (modifier and modifier == \"-\"):\n accel = -accel\n\n sensitivity = self.mouse_analog_sensitivity\n self.mouse_rel[name] += accel * sensitivity\n\n # Emulate mouse wheel (needs special handling)\n if name in (ecodes.REL_WHEELUP, ecodes.REL_WHEELDOWN):\n ecode = ecodes.REL_WHEEL # The real event we need to emit\n write = False\n if getattr(report, attr):\n self._scroll_details['direction'] = name\n now = time.time()\n last_write = self._scroll_details.get('last_write')\n if not last_write:\n # No delay for the first button press for fast feedback\n write = True\n self._scroll_details['count'] = 0\n if name == ecodes.REL_WHEELUP:\n value = 1\n elif name == ecodes.REL_WHEELDOWN:\n value = -1\n if last_write:\n # Delay at least one cycle before continual scrolling\n if self._scroll_details['count'] > 1:\n if now - last_write > self.scroll_delay:\n write = True\n elif now - last_write > self.scroll_repeat_delay:\n write = True\n if write:\n self.device.write(ecodes.EV_REL, ecode, value)\n self._scroll_details['last_write'] = now\n self._scroll_details['count'] += 1\n continue # No need to proceed further\n else:\n # Reset so you can quickly tap the button to scroll\n if self._scroll_details.get('direction') == name:\n self._scroll_details['last_write'] = 0\n self._scroll_details['count'] = 0\n\n rel = int(self.mouse_rel[name])\n self.mouse_rel[name] = self.mouse_rel[name] - rel\n self.device.write(ecodes.EV_REL, name, rel)\n\n self.device.syn()", "def update(self):\n self.mousePos = pygame.mouse.get_pos()\n self.update_button_hover_status()", "def mouse_in(event):\r\n\r\n if str(event.type) == 'Enter':\r\n about_content.config(cursor=\"hand2\")\r\n else:\r\n about_content.config(cursor=\"arrow\")", "def ev_mousebuttonup(self, event: MouseButtonUp) -> None:", "def on_mouse_press(self, x, y, button, modifiers):\n\n self.gamestatemanager.peek().on_mouse_press(x, y, button, modifiers)\n\n if self.exclusive:\n self.gamestatemanager.peek().on_mouse_press(x, y, button, modifiers)\n else:\n self.set_exclusive_mouse(True)", "def on_mouse_move(self, win, xpos, ypos):\n old = self.mouse\n self.mouse = (xpos, glfw.get_window_size(win)[1] - ypos)\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_LEFT):\n self.drag(old, self.mouse, glfw.get_window_size(win))\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_RIGHT):\n self.pan(old, self.mouse)", "def on_mouse_move(self, win, xpos, ypos):\n old = self.mouse\n self.mouse = (xpos, glfw.get_window_size(win)[1] - ypos)\n if glfw.get_mouse_button(win, 
glfw.MOUSE_BUTTON_LEFT):\n self.drag(old, self.mouse, glfw.get_window_size(win))\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_RIGHT):\n self.pan(old, self.mouse)", "def OnMouse(self, event):\n\n self.Refresh()\n event.Skip()", "def on_mouse_press(self, x, y, button, modifiers):\n\n # Change states as needed.\n if self.current_state == GAME_RUNNING_PAGE:\n pass\n else:\n # Restart the game.\n self.setup()\n self.score=0\n self.current_state = GAME_RUNNING_PAGE", "def handle_mousemotion(self, change):\r\n if self.get_visible():\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_mousemotion(change):\r\n return True", "def _press(self, event):", "def callback_handle_left_mouse_release(self, event):\n\n if self.variables.active_tool == TOOLS.PAN_TOOL:\n self._pan(event)\n if self.variables.active_tool == TOOLS.ZOOM_IN_TOOL:\n rect_coords = self.coords(self.variables.zoom_rect_id)\n self.zoom_to_selection(rect_coords, self.variables.animate_zoom)\n self.hide_shape(self.variables.zoom_rect_id)\n if self.variables.active_tool == TOOLS.ZOOM_OUT_TOOL:\n rect_coords = self.coords(self.variables.zoom_rect_id)\n x1 = -rect_coords[0]\n x2 = self.variables.canvas_width + rect_coords[2]\n y1 = -rect_coords[1]\n y2 = self.variables.canvas_height + rect_coords[3]\n zoom_rect = (x1, y1, x2, y2)\n self.zoom_to_selection(zoom_rect, self.variables.animate_zoom)\n self.hide_shape(self.variables.zoom_rect_id)", "def handle_input(self, ncode, wparam, lparam):\n x_pos = lparam.contents.x_pos\n y_pos = lparam.contents.y_pos\n data = lparam.contents.mousedata\n\n # This is how we can distinguish mouse 1 from mouse 2\n # extrainfo = lparam.contents.extrainfo\n # The way windows seems to do it is there is primary mouse\n # and all other mouses report as mouse 2\n\n # Also useful later will be to support the flags field\n # flags = lparam.contents.flags\n # This shows if the event was from a real device or whether it\n # was injected somehow via software\n\n self.emulate_mouse(wparam, x_pos, y_pos, data)\n\n # Give back control to Windows to wait for and process the\n # next event\n return ctypes.windll.user32.CallNextHookEx(\n self.hooked, ncode, wparam, lparam)", "def left_callback(self):\n self.rokucontrol.left_callback()", "def mouse(*args, enableScrollWheel: bool=True, mouseButtonTracking: int=0,\n mouseButtonTrackingStatus: bool=True, scrollWheelStatus: bool=True, **kwargs)->int:\n pass", "def mousePressEvent(self, ev):\n super(PlotObject, self).mousePressEvent(ev)\n self._downpos = self.mousePos", "def on_mouse_motion(self, x, y, dx, dy):\n if self.exclusive:\n self.gamestatemanager.peek().on_mouse_motion(x, y, dx, dy)", "def handle_press( self, x, y ):\n self.pressed_flag = True\n self.first_point = (x, y)", "def click(self, event):\n try:\n x_loc, y_loc = self.appWindow.spec_cv.mouse(event)\n trackNo, updated_track = self.model.updateTrackClick(x_loc, y_loc,\\\n self.x_high)\n self.appWindow.spec_cv.updateTrack(trackNo, updated_track)\n self.appWindow.spec_cv.redrawTracks()\n self.locked_track = trackNo\n except TypeError:\n pass", "def on_mouse_motion(x, y, dx, dy):\n if in_box(x, y):\n # Change the cursor if inside the box.\n self.window.set_mouse_cursor(self.hand_cursor)\n else:\n self.window.set_mouse_cursor(self.default_cursor)", "def mousePressEvent(self, event):\n if event.buttons() == QtCore.Qt.LeftButton:\n self.view_state.mouse = np.array([event.x(), event.y()])", "def mouse_right_up(self):\n pass", "def OnMouseIn( self, event ):\n self.whichChoice = 1\n 
event.context.triggerRedraw(1)", "def ev_mousebuttondown(self, event: tcod.event.MouseButtonDown) -> T | None:", "def set_mouseclick_handler(self, mouse_handler):\n STmouse.Mouse(self.canvas, '<Button-1>', mouse_handler)", "def setEnabled(self, boo):\n if boo:\n self.mousePressEvent = self.mousePressEventEnabled\n self.mouseMoveEvent = self.mouseMoveEventEnabled\n self.mouseReleaseEvent = self.mouseReleaseEventEnabled\n else:\n self.mousePressEvent = self.notEnabledDummy\n self.mouseMoveEvent = self.notEnabledDummy\n self.mouseReleaseEvent = self.notEnabledDummy", "def mouse_click(self,x,y,button,double_click):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")", "def _press(self, event):\n self._set_cursor(True)\n if self._interactive and self._selection_artist.get_visible():\n self._set_active_handle(event)\n else:\n self._active_handle = None\n\n if self._active_handle is None or not self._interactive:\n # Clear previous rectangle before drawing new rectangle.\n self.update()\n\n v = event.xdata if self.direction == 'horizontal' else event.ydata\n # self._pressv and self._prev are deprecated but we still need to\n # maintain them\n self._pressv = v\n self._prev = self._get_data(event)\n\n if self._active_handle is None and not self.ignore_event_outside:\n # when the press event outside the span, we initially set the\n # visibility to False and extents to (v, v)\n # update will be called when setting the extents\n self._visible = False\n self.extents = v, v\n # We need to set the visibility back, so the span selector will be\n # drawn when necessary (span width > 0)\n self._visible = True\n else:\n self.set_visible(True)\n\n return False", "def mouse_middle_up(self):\n pass", "def mousePressEvent(self, QMouseEvent):\n if QMouseEvent.button() == Qt.RightButton:\n if self.playBtn.isEnabled():\n self.play_video()\n\n if QMouseEvent.button() == Qt.MiddleButton:\n if self.checkbox.isChecked() and self.checkbox.isEnabled():\n self.checkbox.setChecked(False)\n elif not self.checkbox.isChecked() and self.checkbox.isEnabled():\n self.checkbox.setChecked(True)", "def calibrateMousePress(self, mouse_event):\n\n \"\"\" Get mouse posiiton \"\"\"\n pt = mouse_event.pos()\n\n if mouse_event.button() == Qt.LeftButton:\n self.kinect.last_click[0] = pt.x()\n self.kinect.last_click[1] = pt.y()\n self.kinect.new_click = True\n elif mouse_event.button() == Qt.RightButton:\n self.kinect.last_rclick[0] = pt.x()\n self.kinect.last_rclick[1] = pt.y()\n self.kinect.new_rclick = True", "def on_canvas_mouse_release(self, event) -> None:\r\n\r\n self.edit_toggle_mode = None", "def play(self):\n self.accept(\"wheel_up\", self.scrollindex, [-1] )\n self.accept(\"wheel_down\", self.scrollindex, [1] )\n self.accept(\"arrow_up\", self.scrollindex, [-1] )\n self.accept(\"arrow_down\", self.scrollindex, [1] )\n self.accept(\"enter\", self._click)\n if callable(self.data['exit']): self.accept(\"escape\", self.data['exit'])\n for item in self.canvas[\"items\"]: item['state']=DGG.NORMAL", "def mouseDragged(self, point, delta):\n pass", "def handle_mousehold(self, button, name):\r\n if widget.Widget.handle_mousehold(self, button, name):\r\n app.App.handle_mousehold(self, button, name)\r\n return True\r\n return False", "def handle_mouse(self, x, y):\n self.last_x = x\n self.last_y = y\n if self.min_x is not None:\n self.last_x = max(self.last_x, self.min_x)\n if self.max_x is not None:\n self.last_x = min(self.last_x, self.max_x)\n # we are in region mode\n if self.region_id is not None:\n start = self.last_x\n 
end = self.region_edge\n self.region_model.adjust_region(self.region_id, start, end)\n return False", "def on_mouse_release(self, x, y, button, key_modifiers):\r\n pass", "def mouseMoveEvent(self, event):\n if self.view_state.tracking == TrackingMode.FREE and event.buttons() == QtCore.Qt.LeftButton:\n # Calculate the change in mouse position.\n new_mouse_pos = np.array([event.x(), event.y()])\n mouse_delta = new_mouse_pos - self.view_state.mouse\n\n # Add this to the view centre.\n self.view_state.centre = self.view_state.centre - mouse_delta * (1 / self.view_state.scale)\n self.view_state.mouse = new_mouse_pos", "def on_mouse_click(self, event):\n if not self.is_game_over:\n try:\n # i, j coordinates of the click event\n i = int(round(event.ydata))\n j = int(round(event.xdata))\n\n # Left button\n if event.button == 1 or event.button == 2:\n self.reveal(i, j)\n\n # Right button\n elif event.button == 3:\n self.flag(i, j)\n\n except (TypeError, IndexError):\n pass", "def _updateOnMouseState(self, state):\n x = state.X.abs\n y = state.Y.abs\n \n mscale = self.mouse_icon.getScale() \n \n if (x + mscale[0] + self.mouse_offset) > render_engine.Window.width:\n x = x - mscale[0] - 10\n else:\n x += self.mouse_offset\n \n if (y + mscale[1] + self.mouse_offset) > render_engine.Window.height:\n y = y - mscale[1] - 10\n else:\n y += self.mouse_offset\n \n self.mouse_icon.setPosition((x, y))", "def motion_notify_cb(self, darea, event):\n if event.is_hint:\n x, y, state = event.window.get_pointer()\n else:\n x = event.x\n y = event.y\n state = event.state\n if state & gdk.BUTTON1_MASK or state & gdk.BUTTON3_MASK:\n self.draw_pointer(self.cr, x, y)\n self.queue_draw()\n self.oldx, self.oldy = x, y\n rel_x, rel_y = self.absolute_to_relative(x, y)\n self.emit('dnd-value', rel_x, rel_y)\n return True", "def mouse_move_callback(self, event):\n # TODO drag and drop figuriek\n print(\"moving at \", event.x + self.offset_x, event.y + self.offset_y)", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n self.window.show_view(GameView())", "def on_left_mouse_click(self, event: Event) -> None:\n\t\tself.mouse_state.set_click(event.x, event.y)", "def leftButtonDown(self):\n\t\tautopy.mouse.toggle(True,autopy.mouse.LEFT_BUTTON)", "def handle_events(self) -> None:\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n elif event.type == MOUSEMOTION:\n self.mouse_pos = event.pos\n elif event.type == MOUSEBUTTONDOWN:\n self.mouse_pos = event.pos\n self.mouse_clicked = True\n elif self._focused_button is not None and event.type == KEYDOWN:\n self._handle_key_press(event)", "def mousePressed(self, _evt, _id):\n _widget = None\n \n if _id == ois.MB_Left:\n _widget = self._mouseLeft\n elif _id == ois.MB_Right:\n _widget = self._mouseRight\n elif _id == ois.MB_Middle:\n _widget = self._mouseMiddle\n \n if _widget is not None:\n self._addLinearAnimation(_widget, 1.0)\n \n return False", "def on_checkBox_kongtouping_stateChanged(self, p0):\n # TODO: not implemented yet\n raise NotImplementedError\n \n #@pyqtSlot(QPoint)\n #def on_tablewidget_tableWidget_Trade_Args_customContextMenuRequested(self, pos):\n \"\"\"\n Slot documentation goes here.\n \n @param pos DESCRIPTION\n @type QPoint\n \"\"\"\n # TODO: not implemented yet\n #raise NotImplementedError", "def ev_mousewheel(self, event: MouseWheel) -> None:", "def update(self):\n\n\t\tself.x = games.mouse.x\n\t\tself.y = games.mouse.y\n\t\tself.check_collide()", "def on_mouse_press(self, x, y, button, modifiers):\n\n # Change states as 
needed.\n if self.current_state == INSTRUCTIONS_PAGE:\n # Next page of instructions.\n self.current_state = GAME_RUNNING\n # Start the game\n self.setup()\n self.current_state = GAME_RUNNING\n elif self.current_state == GAME_OVER:\n # Restart the game.\n self.setup()\n self.current_state = GAME_RUNNING", "def on_press(self):\n self.pressed = True", "def on_press(self):\n self.pressed = True", "def _onMotionNotify(self, widget, event):\n\t\tif self.fullscreenToggle:\n\t\t\tmove = [event.x - self.mouseStart[0], event.y - self.mouseStart[1]]\n\t\t\tnewPos = [self.imgPosStart[0] - move[0], self.imgPosStart[1] - move[1]]\n\t\t\tself.moveImage(newPos[0], newPos[1])", "def __mouseMoved(self, x, y):\n # Are we on the bounding box?\n if pointOnBox(x, y, self.currentBox, thickness=self.__THICKNESS):\n position = getCursorPosition(x, y, self.currentBox, thickness=self.__THICKNESS)\n cursor = [\n wx.CURSOR_SIZENWSE,\n wx.CURSOR_SIZENS,\n wx.CURSOR_SIZENESW,\n wx.CURSOR_SIZEWE,\n wx.CURSOR_SIZENWSE,\n wx.CURSOR_SIZENS,\n wx.CURSOR_SIZENESW,\n wx.CURSOR_SIZEWE\n ] [position]\n self.__setCursor(cursor)\n elif pointInBox(x, y, self.currentBox):\n self.__setCursor(wx.CURSOR_HAND)\n else:\n self.__setCursor()", "def __handleMouseEvents(self, event):\n if not self.enabled:\n return\n\n x, y = event.GetPosition()\n\n # First make sure we have started a box.\n if self.currentBox == None and not event.LeftDown():\n # No box started yet. Set cursor to the initial kind.\n self.__setCursor(wx.CURSOR_CROSS)\n return\n\n if event.LeftDown():\n if self.currentBox == None:\n # No RB Box, so start a new one.\n self.currentBox = (x, y, 0, 0)\n self.hasLetUp = 0\n elif self.__isSizingCursor():\n # Starting a sizing operation. Change the origin.\n position = getCursorPosition(x, y, self.currentBox, thickness=self.__THICKNESS)\n self.currentBox = self.__denormalizeBox(position, self.currentBox)\n\n elif event.Dragging() and event.LeftIsDown():\n # Use the cursor type to determine operation\n if self.__isMovingCursor():\n if self.currentlyMoving or pointInBox(x, y, self.currentBox):\n if not self.currentlyMoving:\n self.currentlyMoving = (x - self.currentBox[0], y - self.currentBox[1])\n self.__moveTo(x - self.currentlyMoving[0], y - self.currentlyMoving[1])\n elif self.__isSizingCursor():\n self.__resizeBox(x, y)\n\n elif event.LeftUp():\n self.hasLetUp = 1\n self.currentlyMoving = None\n self.__normalizeBox()\n\n elif event.Moving() and not event.Dragging():\n # Simple mouse movement event\n self.__mouseMoved(x,y)", "def handle_mousehold(self, button, name):\r\n if self.get_visible():\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_mousehold(button, name):\r\n return True\r\n return False", "def mouse_right_down(self):\n pass", "def grab(self, event):\n self.ypos = event.y\n self.xpos = event.x\n self.config(cursor='fleur')", "def _on_pyglet_mouse_click(self, x, y, button, modifiers):\n button_time = clock()\n this_button = self._button_names[button]\n self._mouse_buffer.append((this_button, x, y, button_time))", "def mouse_out(self):\n pass", "def on_tag_hover(self, _tag, _textview, event, _textiter, halfmove):\n if event.type == Gdk.EventType.BUTTON_RELEASE:\n self.emit(\"step\", halfmove)", "def mousePressEvent(self, event):\n self.begin = event.pos()\n self.end = event.pos()\n self.update()", "def mousePositionRaw(self):", "def mousePositionRaw(self):", "def touch_began(self, touch):\n\t\tpass" ]
[ "0.71367556", "0.68652886", "0.67238146", "0.6668152", "0.6439022", "0.6427705", "0.64046293", "0.6377476", "0.6342399", "0.61890846", "0.6186807", "0.608219", "0.6079052", "0.60771847", "0.60688984", "0.60027486", "0.59981054", "0.5972442", "0.59615374", "0.59591645", "0.59557724", "0.5938648", "0.59274673", "0.5920973", "0.59106565", "0.5904462", "0.588333", "0.58822674", "0.58765364", "0.58631635", "0.58463097", "0.58462137", "0.5831465", "0.5829826", "0.5822365", "0.5807782", "0.57900476", "0.5786874", "0.57826805", "0.57804936", "0.5764805", "0.5764805", "0.5761675", "0.5743644", "0.5741454", "0.5724589", "0.57183135", "0.57093996", "0.5701784", "0.569652", "0.56558377", "0.5651177", "0.5651082", "0.5649441", "0.56435275", "0.5641381", "0.5638101", "0.56321764", "0.5627839", "0.5627217", "0.56230897", "0.561976", "0.56068975", "0.55975854", "0.5591556", "0.5572347", "0.5571961", "0.55447274", "0.55446535", "0.5543165", "0.5527039", "0.55210775", "0.55161244", "0.550905", "0.55047107", "0.5494381", "0.5486426", "0.54854685", "0.54845077", "0.5472396", "0.54719245", "0.54712", "0.5468271", "0.54620504", "0.54549646", "0.5451956", "0.5448993", "0.5448993", "0.5439998", "0.5426513", "0.54264873", "0.5416541", "0.54101783", "0.5407018", "0.54055107", "0.5393295", "0.53900117", "0.53896785", "0.5389211", "0.5389211", "0.5384342" ]
0.0
-1
Called when a drop (drag/drop) event happens in the widget. Adjust method signature as appropriate for callback.
def drop_event(self, widget, event): # make a call back with a list of URLs that were dropped #self.logger.debug("dropped filename(s): %s" % (str(paths))) #self.make_ui_callback('drag-drop', paths) raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dropEvent(self, de):\n # dragging a track\n if hasattr(Globals.dragObject, \"trackFrame\"):\n de.accept()\n trackFrame = Globals.dragObject.trackFrame\n oldParent = trackFrame.parentWidget()\n if oldParent:\n args = (trackFrame, self, oldParent.parentWidget())\n else:\n args = (trackFrame, self, None)\n self.emit(PYSIGNAL('dropped'), (args))\n # not yet used\n #Animation.animate(trackFrame, self, doneFunc=self.slotAnimationDone)", "def on_drop(self):\n print(\"You have dropped\", self.name)", "def dropEvent(self, event):\n\n # Get the id color to drop the items into\n drop_id_color = self.itemAt(event.pos())\n drop_id_color = self.invisibleRootItem() \\\n if drop_id_color is None else drop_id_color\n\n # If the drop position is not valid we pass\n if drop_id_color is None:\n event.ignore()\n return\n\n # If the drop position is not an id color item we pass\n if drop_id_color.data(0, QtCore.Qt.UserRole) != \"color\":\n event.ignore()\n return\n\n # Get the drop items - the selected tree items\n drop_items = [x for x in self.selectedItems()\n if x.data(0, QtCore.Qt.UserRole) == \"object\"] or None\n\n # If not items selected we pass\n if drop_items is None:\n event.ignore()\n return\n\n # Drop the items into the new tree parent\n self._drop_tree_items(drop_items, drop_id_color)\n\n event.accept()\n\n return None", "def dropEvent(self, event):\n if event.mimeData().hasImage:\n event.setDropAction(Qt.CopyAction)\n file_path = event.mimeData().urls()[0].toLocalFile()\n self.set_image(file_path)\n self.folderLocation.setText(file_path)\n \n event.accept()\n else:\n event.ignore()", "def dropEvent(self, event):\r\n source = event.mimeData()\r\n if source.hasUrls():\r\n files = mimedata2url(source)\r\n if files:\r\n files = [\"r'%s'\" % path for path in files]\r\n if len(files) == 1:\r\n text = files[0]\r\n else:\r\n text = \"[\" + \", \".join(files) + \"]\"\r\n self.shell.insert_text(text)\r\n elif source.hasText():\r\n lines = unicode(source.text())\r\n self.shell.set_cursor_position('eof')\r\n self.shell.execute_lines(lines)\r\n event.acceptProposedAction()", "def dropEvent(self, event):\r\n source = event.mimeData()\r\n if source.hasUrls():\r\n files = mimedata2url(source)\r\n if files:\r\n self.plugin.load(files)\r\n elif source.hasText():\r\n editor = self.currentWidget()\r\n if editor is not None:\r\n editor.insert_text( source.text() )\r\n event.acceptProposedAction()", "def _on_drop(self, event):\n data = event.mimeData().data(constants.QGRAPH_DD_MIME_TYPE)\n if not data.isNull():\n data_stream = QDataStream(data, QIODevice.ReadOnly)\n parsed = json.loads(data_stream.readString().decode('utf8'))\n\n # Refer to `mime.py` for docs about format\n version = parsed['version']\n if version not in (1, 2):\n raise ValueError(\"Unsupported version of QmxGraph MIME data: {}\".format(version))\n\n x = event.pos().x()\n y = event.pos().y()\n\n if version in (1, 2):\n vertices = parsed.get('vertices', [])\n scale = self.api.get_zoom_scale()\n for v in vertices:\n # place vertices with an offset so their center falls\n # in the event point.\n vertex_x = x + (v['dx'] - v['width'] * 0.5) * scale\n vertex_y = y + (v['dy'] - v['height'] * 0.5) * scale\n self.api.insert_vertex(\n x=vertex_x,\n y=vertex_y,\n width=v['width'],\n height=v['height'],\n label=v['label'],\n style=v.get('style', None),\n tags=v.get('tags', {}),\n )\n\n if version in (2,):\n decorations = parsed.get('decorations', [])\n for v in decorations:\n self.api.insert_decoration(\n x=x,\n y=y,\n width=v['width'],\n 
height=v['height'],\n label=v['label'],\n style=v.get('style', None),\n tags=v.get('tags', {}),\n )\n\n event.acceptProposedAction()\n else:\n event.ignore()", "def dropEvent(self, event: QtGui.QDropEvent) -> None:\n if event.mimeData().hasImage:\n event.setDropAction(Qt.CopyAction)\n self.image = event.mimeData().urls()[0].toLocalFile()\n x = self.width()\n y = self.height()\n im = QPixmap(self.image).scaled(x, y) # , aspectRatioMode=Qt.KeepAspectRatio)\n im.save(os.getcwd() + \"/tmp.jpg\")\n self.image = (os.getcwd() + \"/tmp.jpg\")\n self.setPixmap(im)\n # self.setPixmap(QPixmap(self.image))\n self.setStyleSheet(\"\")\n event.accept()\n else:\n event.ignore()", "def addDropListener(self, callback: 'callable'):\n self.getView().addDropListener(callback)", "def dropEvent(self, QDropEvent):\n srcItems = self.selectedItems()\n dstInd = (self.indexAt(QDropEvent.pos()).row() + 1)\n kbMod = QDropEvent.keyboardModifiers()\n #-- Create New Items --#\n for n, srcItem in enumerate(srcItems):\n itemDict = self.treeParent.getItemDict(srcItem)\n newItem = self.treeParent.on_addVar(index=(dstInd + n))\n self.treeParent.setItem(newItem, **itemDict)\n #-- Remove Items --#\n if not kbMod == QtCore.Qt.ControlModifier:\n for srcItem in srcItems:\n self.takeTopLevelItem(self.indexOfTopLevelItem(srcItem))\n self.treeParent.reindexVar()", "def dropMimeData(self, p_int, QMimeData, Qt_DropAction): # real signature unknown; restored from __doc__\r\n return False", "def on_item_dropped(self, url):\n print 'Weld.on_item_dropped:', url\n #make sure all struct are present\n if not(self.project and self.project.level):\n print >> sys.stderr, 'it\\'s too early to drop stuff: '\\\n 'create a project and a level first !'\n return\n\n #retrieve data if it comes from weld\n if url in self.resMan:\n props = self.resMan.file_props(url)\n if props is None:\n print >> sys.stderr, curr_f(), ': url(\\'%s\\') in self.resMan '\\\n 'but can\\'t retrieve props.' 
% (url)\n return\n props = self.project.level.resMan.add_resource(self.resMan.base_path,\n props)\n url = props['url']\n if props == {} or url not in self.project.level.resMan:\n print >> sys.stderr, curr_f(), 'could not retrieve file and/or '\\\n 'dependencies for props:', pp(props)\n return\n\n #instanciate it\n if url in self.project.level.resMan:\n props = self.project.level.resMan.file_props(url)\n dtp = self.project.level.qsteelwidget.dropTargetPosition(Config.instance().drop_target_vec)\n props['position'] = dtp\n props['rotation'] = self.project.level.qsteelwidget.dropTargetRotation()\n if props['resource_type'] == 'meshes':\n props['meshName'] = props['name']\n self.project.level.instanciate(props)\n s = 'dropped agent \\'%s\\' with id %i' % (props['name'], props['agentId'])\n print s\n Ui.instance().show_status(s)\n else:\n Ui.instance().show_status('can only drop meshes so far')", "def DoDrop(self, docks, panes, target, pt, offset=wx.Point(0, 0)):\r\n\r\n if target.IsToolbar():\r\n return self.DoDropToolbar(docks, panes, target, pt, offset)\r\n elif target.IsFloating():\r\n return self.DoDropFloatingPane(docks, panes, target, pt)\r\n else:\r\n return self.DoDropNonFloatingPane(docks, panes, target, pt)", "def drag_and_drop_attempt():\n\n class InitialState(BaseState):\n \"\"\"\n Initial state for the SimpleGUI.\n \"\"\"\n\n def _on_enter(self, gui):\n \"\"\"\n Construct the buttons upon entering the state.\n\n :return:\n \"\"\"\n print(\"In initial state.\")\n\n '''Create drag and drop window'''\n gui.entry_sv = tk.StringVar()\n gui.drop_box_list = []\n gui.drop_box_items = tk.Listbox(master=gui.root, listvariable=gui.drop_box_list)\n gui.drop_box_text = tk.StringVar()\n gui.drop_box_text.set(\"Drop images here\")\n gui.entry = tk.Entry(gui.root, textvar=gui.drop_box_text, justify='center')\n gui.entry.config(font=(\"Courier\", 44))\n gui.entry.place(x = 200, y=200, width=800, height=800)\n #gui.entry.pack()\n gui.entry.drop_target_register(DND_FILES)\n gui.entry.dnd_bind('<<Drop>>', self.drop(gui))\n gui.update()\n\n def _on_exit(self, gui):\n \"\"\"\n Return the next state.\n\n :param gui:\n :return:\n \"\"\"\n gui.update()\n return WaitForDrop()\n\n def drop(self, gui):\n def _drop(event):\n files = root.tk.splitlist(event.data)\n gui.entry_sv.set(files)\n return _drop\n\n class WaitForDrop(BaseState):\n \"\"\"\n State for having buttons on.\n \"\"\"\n\n def _on_enter(self, gui):\n \"\"\"\n\n :param gui:\n :return:\n \"\"\"\n print(\"In wait for drop state.\")\n\n def _state_main(self, gui):\n \"\"\"\n The main code for the ButtonsOn state.\n\n :param gui:\n :return:\n \"\"\"\n gui.entry.wait_variable(gui.entry_sv)\n\n '''Clean string'''\n files = literal_eval(gui.entry_sv.get())\n\n '''Remove previous images'''\n if hasattr(gui, \"panel\"):\n gui.panel.destroy()\n\n '''Load each image'''\n for file_name in files:\n file_name = file_name.replace(\"{\", \"\").replace(\"}\", \"\")\n # image = tk.PhotoImage(file=file_name)\n if \".CR2\" in file_name:\n '''Rawpy implementation'''\n file_image = rawpy.imread(file_name)\n file_image = file_image.postprocess()\n '''Rawkit implementation'''\n '''file_image = Raw(file_name)\n file_image = np.array(file_image.to_buffer())'''\n '''OpenCV implementation'''\n '''file_image = cv2.imread(file_name)'''\n else:\n file_image = Image.open(file_name)\n '''image = file_image.resize((500, 500), Image.ANTIALIAS)\n image = ImageTk.PhotoImage(image)\n gui.panel = tk.Label(gui.root, image=image)\n gui.panel.image = image\n gui.panel.pack()'''\n # 
panel.grid(row=2)\n\n image_data = np.array(file_image)\n image_data = cv2.cvtColor(image_data, cv2.COLOR_RGB2GRAY)\n '''print(image_data.shape)\n print(image_data)\n print(len(image_data))\n print(len(image_data[0]))'''\n returned_image = Image.fromarray(image_data)\n '''cv2.imshow(\"Gray\", image_data)\n cv2.waitKey()\n cv2.destroyWindow(\"Gray\")'''\n\n '''enhanced_contrast = ImageEnhance.Contrast(Image.fromarray(file_image))\n enhanced_image = enhanced_contrast.enhance(255)\n enhanced_data = np.array(enhanced_image)\n plot_functions.imshow(enhanced_image)\n plot_functions.show()'''\n\n # color_space = cv2.cvtColor(image_data, cv2.COLOR_RGB2HSV)\n # print(color_space)\n \n '''Create mask for white-ish pixels'''\n '''lower_background = np.array([150, 150, 150])\n upper_background = np.array([255, 255, 255])\n print(image_data)\n white_mask = cv2.inRange(image_data, lower_background, upper_background)\n white_mask = cv2.morphologyEx(white_mask, cv2.MORPH_OPEN, np.ones((3,3),np.uint8))\n white_mask = cv2.morphologyEx(white_mask, cv2.MORPH_DILATE, np.ones((3, 3), np.uint8))\n white_mask = white_mask / 255'''\n\n '''Create mask for black-ish pixels'''\n '''lower_background = np.array([0, 0, 0])\n upper_background = np.array([25, 25, 25])\n black_mask = cv2.inRange(image_data, lower_background, upper_background)\n black_mask = cv2.morphologyEx(black_mask, cv2.MORPH_OPEN, np.ones((3, 3), np.uint8))\n black_mask = cv2.morphologyEx(black_mask, cv2.MORPH_DILATE, np.ones((3, 3), np.uint8))\n black_mask = black_mask / 255'''\n\n '''Add masks together'''\n '''background_mask = white_mask\n # Ensure no value is above 1\n background_mask = np.clip(background_mask, 0, 1)'''\n \n copied_image_data = np.asarray(returned_image).copy()\n # background_mask = np.logical_not(background_mask)\n '''for row_index, [mask_row, image_row] in enumerate(zip(background_mask, copied_image_data)):\n # place black pixel on corresponding masked pixels\n # copied_image_data[row_index] = np.array([image_row[pixel] * int(mask_row[pixel]) for pixel in range(len(mask_row))])\n # make pixel fully white on corresponding masked pixels\n copied_image_data[row_index] = np.array([np.array([255, 255, 255]) if int(mask_row[pixel]) else image_row[pixel] for pixel in range(len(mask_row))])'''\n\n '''Turn removed pixels red'''\n '''mask_image = Image.fromarray(copied_image_data)\n plot_functions.imshow(mask_image)\n plot_functions.show()'''\n trapezoid_data = copied_image_data.copy()\n\n enhanced_contrast = ImageEnhance.Contrast(Image.fromarray(trapezoid_data))\n enhanced_image = enhanced_contrast.enhance(255)\n trapezoid_data = np.array(enhanced_image)\n\n '''Detect lines'''\n edges = cv2.Canny(trapezoid_data, 75, 150)\n lines = cv2.HoughLinesP(edges, 1, np.pi / 180, 100, maxLineGap=1000)\n # print(lines)\n for line in lines:\n x1, y1, x2, y2 = line[0]\n if y1 == y2:\n cv2.line(copied_image_data, (x1, y1), (x2, y2), (255, 255, 255), 1)\n\n '''Trapezoid attempt'''\n\n # filters image bilaterally and displays it\n bilatImg = cv2.bilateralFilter(trapezoid_data, 5, 175, 175)\n\n # finds edges of bilaterally filtered image and displays it\n edgeImg = cv2.Canny(bilatImg, 75, 200)\n\n # gets contours (outlines) for shapes and sorts from largest area to smallest area\n contours, hierarchy = cv2.findContours(edgeImg, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n contours = sorted(contours, key=cv2.contourArea, reverse=True)\n\n # drawing red contours on the image\n for con in contours:\n cv2.drawContours(trapezoid_data, con, -1, (255, 255, 255), 
3)\n\n '''Detect corners'''\n dst = cv2.cornerHarris(edges, 30, 31, 0.001)\n dst = cv2.dilate(dst, None)\n ret, dst = cv2.threshold(dst, 0.01 * dst.max(), 255, 0)\n dst = np.uint8(dst)\n\n # find centroids\n ret, labels, stats, centroids = cv2.connectedComponentsWithStats(dst)\n # define the criteria to stop and refine the corners\n criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 100,\n 0.001)\n corners = cv2.cornerSubPix(edges, np.float32(centroids), (5, 5),\n (-1, -1), criteria)\n\n good_corners = []\n for corner in corners:\n if (corner[1] < 1000) & (corner[1] > 650) & (corner[0] > 250) & (corner[0] < 2250):\n good_corners.append(corner)\n cv2.circle(edges, (corner[0], corner[1]), 10, (255, 255, 255))\n\n print(good_corners)\n if len(good_corners) >= 3:\n corner_combos = itertools.combinations(good_corners, 3)\n elif len(good_corners) > 1:\n corner_combos = itertools.combinations(good_corners, 2)\n\n best_corner_combo = None\n best_coef = np.inf\n for corner_combo in corner_combos:\n regression = LinearRegression().fit(np.array([corner[0] for corner in corner_combo]).reshape(-1, 1),\n np.array([corner[1] for corner in corner_combo]))\n if np.abs(regression.coef_) < best_coef:\n best_coef = np.abs(regression.coef_)\n best_corner_combo = np.array([corner[1] for corner in corner_combo])\n\n y_edge = int(round(np.mean(best_corner_combo)))\n edges = edges[y_edge:3000, 200:2200]\n copied_image_data = copied_image_data[y_edge:2500, 200:2200]\n trapezoid_data = trapezoid_data[y_edge:2500, 200:2200]\n\n # and double-checking the outcome\n cv2.imshow(\"linesEdges\", edges)\n cv2.imshow(\"linesDetected\", copied_image_data)\n cv2.imshow(\"Contours check\", trapezoid_data)\n cv2.waitKey()\n cv2.destroyWindow(\"Contours check\")\n\n # find the perimeter of the first closed contour\n perim = cv2.arcLength(contours[0], True)\n # setting the precision\n epsilon = 0.02 * perim\n # approximating the contour with a polygon\n approxCorners = cv2.approxPolyDP(contours[0], epsilon, True)\n # check how many vertices has the approximate polygon\n approxCornersNumber = len(approxCorners)\n\n for corners in approxCorners:\n cv2.circle(trapezoid_data, (corners[0], corners[1]), radius=10, color=(255, 255, 255), thickness=-1)\n cv2.imshow(\"Vertex position\", trapezoid_data)\n cv2.waitKey()\n cv2.destroyWindow(\"Vertex position\")\n cv2.imshow(\"linesEdges\", edges)\n cv2.imshow(\"linesDetected\", copied_image_data)\n cv2.waitKey(0)\n cv2.destroyAllWindows()\n\n def _on_exit(self, gui):\n if gui.program_running:\n gui.update()\n return WaitForDrop()\n else:\n return None\n\n class DragAndDropGUI:\n \"\"\"\n Object for a simple gui.\n \"\"\"\n\n def __init__(self, root):\n \"\"\"\n Initializing the SimpleGUI object.\n \"\"\"\n self.root = root\n w, h = root.winfo_screenwidth(), self.root.winfo_screenheight()\n self.root.geometry(\"%dx%d+0+0\" % (w, h))\n self.root.protocol(\"WM_DELETE_WINDOW\", self.end_program)\n self.program_running = True\n\n def update(self):\n \"\"\"\n Update the GUI.\n\n :return:\n \"\"\"\n self.root.update_idletasks()\n self.root.update()\n return self.root\n\n def end_program(self):\n \"\"\"\n Ends the program.\n\n :return:\n \"\"\"\n if self.entry_sv.get() != \" \":\n self.entry_sv.set(\" \")\n else:\n self.entry_sv.set(\"!\")\n self.root.destroy()\n self.program_running = False\n\n '''Initialize and run GUI object'''\n root = tkinterdnd2.Tk()\n # Maximize window while maintaining title bar\n gui = DragAndDropGUI(root)\n state_machine = 
StateMachine(initial_state=InitialState())\n state_machine.run(gui)", "def dropEvent(self, e: QtGui.QDropEvent):\n src = e.source()\n if src is not self:\n for item in src.selectedItems():\n clone = item.clone()\n clone.setFlags(clone.flags() | Qt.ItemIsEditable)\n self.addTopLevelItem(clone)\n super().dropEvent(e) # Call the original function", "def drop(self, event):\n self.config(cursor='arrow')", "def player_drop(self, item):\n dropped = self.drop(item)\n if dropped:\n self.handler.message_box.add_msg(\"You drop the {}!\".format(dropped),\n data.COLOURS['player_item_text'])", "def mouseReleaseEvent (self, event):\n if self.itemMoved:\n self.parentWidget.DataChanged.emit()\n self.itemMoved = False; \n super(DiagramItem, self).mouseReleaseEvent(event)", "def mouseMoveEvent(self, e):\n if e.buttons() != Qt.LeftButton:\n return\n\n mimeData = QtCore.QMimeData()\n mimeData.setData(\n app.NODE_MIMETYPE,\n QtCore.QByteArray(bytes('data string', 'utf-8')),\n )\n\n drag = QtGui.QDrag(self)\n drag.setMimeData(mimeData)\n drag.setHotSpot(e.pos() - self.rect().topLeft())\n \n dropAction = drag.exec_(Qt.MoveAction)", "def drop(self):\n init_x = self.x\n init_y = self.y\n init_z = self.z\n drop_z = self.drop_height\n \n #drop to given height\n self.move_to(init_x, init_y, drop_z)\n \n #open gripper\n self.gripper.command_position(100)\n \n #return to initial position\n self.move_to(init_x, init_y, init_z)", "def dragEnterEvent(self, event):", "def drag_and_drop(self, droppable):\n self.scroll_to()\n ActionChains(self.driver).drag_and_drop(\n self._element,\n droppable._element,\n ).perform()", "def slider_dragged(self):\n pass", "def SetCallbackFunc( self, dropCallbacFunc=None ) :\n \n # Create a dropFiles event association for this control.\n # [ SetDropTarget ] is a built-in method for (all ?) controls.\n self.folderDropTarget.SetDropTarget( ddt.FilesDropTarget( self.folderDropTarget ) )\n \n # Install the callback-function for this class's parent-widget dropFiles-event.\n self.folderDropTarget.dropFunc = dropCallbacFunc", "def drag_data_received(self, widget, context, x, y, sel_data, info, time):\n if not sel_data:\n return\n #modern file managers provide URI_LIST. 
For Windows split sel_data.data\n files = sel_data.get_uris()\n for file in files:\n if win():\n clean_string = conv_to_unicode(\n file.replace('\\0',' ').replace(\"\\r\", \" \").strip(),\n None)\n else:\n clean_string = file\n protocol, site, mfile, j, k, l = urlparse(clean_string)\n if protocol == \"file\":\n name = url2pathname(mfile)\n mime = get_type(name)\n if not is_valid_type(mime):\n return\n photo = MediaObject()\n self.uistate.set_busy_cursor(True)\n photo.set_checksum(create_checksum(name))\n self.uistate.set_busy_cursor(False)\n base_dir = cuni(media_path(self.dbstate.db))\n if os.path.exists(base_dir):\n name = relative_path(name, base_dir)\n photo.set_path(name)\n photo.set_mime_type(mime)\n basename = os.path.basename(name)\n (root, ext) = os.path.splitext(basename)\n photo.set_description(root)\n with DbTxn(_(\"Drag Media Object\"), self.dbstate.db) as trans:\n self.dbstate.db.add_object(photo, trans)\n widget.emit_stop_by_name('drag_data_received')", "def dragMoveEvent(self, event):\n if event.mimeData().hasImage:\n event.accept()\n else:\n event.ignore()", "def drag_and_drop(self,param,ignore_error_handle = False):\n message = {}\n origin_element = param.get('origin',None);\n destination_element = param.get('destination',None);\n step = 'drag a element to another element'\n try:\n self.driver.drag_and_drop(origin_element,destination_element);\n message = self.feedback.feedback_action_ok(step);\n except BaseException,e:\n message = self.feedback.feedback_action_fail(step,str(e),ignore_error_handle);\n finally:\n return message;", "def button_release_cb(self, darea, event):\n self.oldx, self.oldy = event.x, event.y\n self.draw_pointer(self.cr, None, None)\n self.queue_draw()\n self.oldx, self.oldy = None, None\n self.emit('end-dnd')\n return True", "def _fire_dropping(self):\n\t\tif len(self.droppings) < self.settings.droppings_allowed:\n\t\t\tnew_dropping = Dropping(self)\n\t\t\tself.droppings.add(new_dropping)", "def button_press_cb(self, darea, event):\n x, y = event.x, event.y\n self.draw_pointer(self.cr, x, y)\n self.queue_draw()\n self.oldx, self.oldy = x, y\n rel_x, rel_y = self.absolute_to_relative(x, y)\n self.emit('dnd-value', rel_x, rel_y)\n self.emit('start-dnd')\n return True", "def mouseMoveEvent(self, e):\n if self.mousePressed:\n Globals.dragObject = QTextDrag('PKSampler: dragging a track', self)\n Globals.dragObject.trackFrame = self\n Globals.dragObject.dragCopy()", "def mouseDragged(self, point, delta):\n pass", "def _dropMove(self, point: QPoint, selectedFiles: List[QModelIndex]) -> None:\n selectedFiles = [self._currPath.joinpath(x.data()) for i, x in enumerate(selectedFiles)\n if i % len(self._modelHeaders) == 0]\n try:\n filename = self._mainFileView.indexAt(point).siblingAtColumn(0).data()\n dest = self._currPath.joinpath(filename)\n if dest.is_file():\n return\n duplicates = []\n for src in selectedFiles:\n dest = self._currPath.joinpath(filename).joinpath(src.name)\n if str(src) in str(dest):\n return\n if dest.exists():\n duplicates.append(dest)\n if duplicates:\n if self._overwriteFileMsgBox(duplicates) == QMessageBox.Cancel:\n return\n for src in selectedFiles:\n dest = self._currPath.joinpath(filename).joinpath(src.name)\n if not src.exists():\n raise FileNotFoundError\n if src.is_file():\n shutil.move(str(src), str(dest))\n elif src.is_dir():\n dir_util.copy_tree(str(src), str(dest))\n shutil.rmtree(src)\n except FileNotFoundError:\n self._statusBar.showMessage('File not found!', 3000)\n except TypeError: # when the files are dropped on empty 
area\n pass\n finally:\n self._listDirectories()", "def do_drop(self, arg):\r\n\r\n # put this value in a more suitably named variable\r\n itemToDrop = arg.lower()\r\n\r\n # get a list of all \"description words\" for each item in the inventory\r\n invDescWords = getAllDescWords(inventory)\r\n\r\n # find out if the player doesn't have that item\r\n if itemToDrop not in invDescWords:\r\n print('You do not have \"%s\" in your inventory.' % (itemToDrop))\r\n return\r\n\r\n # get the item name that the player's command describes\r\n item = getFirstItemMatchingDesc(itemToDrop, inventory)\r\n if item != None:\r\n print('You drop %s.' % (worldItems[item][SHORTDESC]))\r\n inventory.remove(item) # remove from inventory\r\n worldRooms[location][GROUND].append(item) # add to the ground\r", "def OnTabEndDrag(self, event):\r\n\r\n tabs = event.GetEventObject()\r\n if not tabs.GetEnabled(event.GetSelection()):\r\n return\r\n\r\n self._mgr.HideHint()\r\n\r\n src_tabs = event.GetEventObject()\r\n if not src_tabs:\r\n raise Exception(\"no source object?\")\r\n\r\n src_tabs.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))\r\n\r\n # get the mouse position, which will be used to determine the drop point\r\n mouse_screen_pt = wx.GetMousePosition()\r\n mouse_client_pt = self.ScreenToClient(mouse_screen_pt)\r\n\r\n # check for an external move\r\n if self._agwFlags & AUI_NB_TAB_EXTERNAL_MOVE:\r\n tab_ctrl = wx.FindWindowAtPoint(mouse_screen_pt)\r\n\r\n while tab_ctrl:\r\n \r\n if isinstance(tab_ctrl, AuiTabCtrl):\r\n break\r\n \r\n tab_ctrl = tab_ctrl.GetParent()\r\n \r\n if tab_ctrl:\r\n \r\n nb = tab_ctrl.GetParent()\r\n\r\n if nb != self:\r\n \r\n # find out from the destination control\r\n # if it's ok to drop this tab here\r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_ALLOW_DND, self.GetId())\r\n e.SetSelection(event.GetSelection())\r\n e.SetOldSelection(event.GetSelection())\r\n e.SetEventObject(self)\r\n e.SetDragSource(self)\r\n e.Veto() # dropping must be explicitly approved by control owner\r\n\r\n nb.GetEventHandler().ProcessEvent(e)\r\n\r\n if not e.IsAllowed():\r\n \r\n # no answer or negative answer\r\n self._mgr.HideHint()\r\n return\r\n \r\n # drop was allowed\r\n src_idx = event.GetSelection()\r\n src_page = src_tabs.GetWindowFromIdx(src_idx)\r\n\r\n # Check that it's not an impossible parent relationship\r\n p = nb\r\n while p and not p.IsTopLevel():\r\n if p == src_page:\r\n return\r\n \r\n p = p.GetParent()\r\n\r\n # get main index of the page\r\n main_idx = self._tabs.GetIdxFromWindow(src_page)\r\n if main_idx == wx.NOT_FOUND:\r\n raise Exception(\"no source page?\")\r\n\r\n # make a copy of the page info\r\n page_info = self._tabs.GetPage(main_idx)\r\n\r\n # remove the page from the source notebook\r\n self.RemovePage(main_idx)\r\n\r\n # reparent the page\r\n src_page.Reparent(nb)\r\n\r\n # Reparent the control in a tab (if any)\r\n if page_info.control:\r\n self.ReparentControl(page_info.control, tab_ctrl)\r\n\r\n # find out the insert idx\r\n dest_tabs = tab_ctrl\r\n pt = dest_tabs.ScreenToClient(mouse_screen_pt)\r\n\r\n target = dest_tabs.TabHitTest(pt.x, pt.y)\r\n insert_idx = -1\r\n if target:\r\n insert_idx = dest_tabs.GetIdxFromWindow(target)\r\n\r\n # add the page to the new notebook\r\n if insert_idx == -1:\r\n insert_idx = dest_tabs.GetPageCount()\r\n \r\n dest_tabs.InsertPage(page_info.window, page_info, insert_idx)\r\n nb._tabs.AddPage(page_info.window, page_info)\r\n\r\n nb.DoSizing()\r\n dest_tabs.DoShowHide()\r\n dest_tabs.Refresh()\r\n\r\n # set the selection in the 
destination tab control\r\n nb.SetSelectionToPage(page_info)\r\n\r\n # notify owner that the tab has been dragged\r\n e2 = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_DRAG_DONE, self.GetId())\r\n e2.SetSelection(event.GetSelection())\r\n e2.SetOldSelection(event.GetSelection())\r\n e2.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(e2)\r\n\r\n return\r\n\r\n if self._agwFlags & AUI_NB_TAB_FLOAT:\r\n self._mgr.HideHint() \r\n if self.IsMouseWellOutsideWindow(): \r\n # Use CallAfter so we our superclass can deal with the event first\r\n wx.CallAfter(self.FloatPage, self.GetSelection())\r\n event.Skip()\r\n return\r\n \r\n # only perform a tab split if it's allowed\r\n dest_tabs = None\r\n\r\n if self._agwFlags & AUI_NB_TAB_SPLIT and self._tabs.GetPageCount() >= 2:\r\n \r\n # If the pointer is in an existing tab frame, do a tab insert\r\n hit_wnd = wx.FindWindowAtPoint(mouse_screen_pt)\r\n tab_frame = self.GetTabFrameFromTabCtrl(hit_wnd)\r\n insert_idx = -1\r\n \r\n if tab_frame:\r\n \r\n dest_tabs = tab_frame._tabs\r\n\r\n if dest_tabs == src_tabs:\r\n return\r\n\r\n pt = dest_tabs.ScreenToClient(mouse_screen_pt)\r\n target = dest_tabs.TabHitTest(pt.x, pt.y)\r\n \r\n if target: \r\n insert_idx = dest_tabs.GetIdxFromWindow(target)\r\n \r\n else:\r\n \r\n zero = wx.Point(0, 0)\r\n rect = self._mgr.CalculateHintRect(self._dummy_wnd, mouse_client_pt, zero)\r\n \r\n if rect.IsEmpty():\r\n # there is no suitable drop location here, exit out\r\n return\r\n \r\n # If there is no tabframe at all, create one\r\n new_tabs = TabFrame(self)\r\n new_tabs._rect = wx.RectPS(wx.Point(0, 0), self.CalculateNewSplitSize())\r\n new_tabs.SetTabCtrlHeight(self._tab_ctrl_height)\r\n self._tab_id_counter += 1\r\n new_tabs._tabs = AuiTabCtrl(self, self._tab_id_counter)\r\n new_tabs._tabs.SetArtProvider(self._tabs.GetArtProvider().Clone())\r\n new_tabs._tabs.SetAGWFlags(self._agwFlags)\r\n\r\n self._mgr.AddPane(new_tabs, framemanager.AuiPaneInfo().Bottom().CaptionVisible(False), mouse_client_pt)\r\n self._mgr.Update()\r\n dest_tabs = new_tabs._tabs\r\n \r\n # remove the page from the source tabs\r\n page_info = src_tabs.GetPage(event.GetSelection())\r\n\r\n if page_info.control:\r\n self.ReparentControl(page_info.control, dest_tabs)\r\n\r\n page_info.active = False\r\n src_tabs.RemovePage(page_info.window)\r\n\r\n if src_tabs.GetPageCount() > 0: \r\n src_tabs.SetActivePage(0)\r\n src_tabs.DoShowHide()\r\n src_tabs.Refresh()\r\n\r\n # add the page to the destination tabs\r\n if insert_idx == -1:\r\n insert_idx = dest_tabs.GetPageCount()\r\n\r\n dest_tabs.InsertPage(page_info.window, page_info, insert_idx)\r\n \r\n if src_tabs.GetPageCount() == 0:\r\n self.RemoveEmptyTabFrames()\r\n\r\n self.DoSizing()\r\n dest_tabs.DoShowHide()\r\n dest_tabs.Refresh()\r\n\r\n # force the set selection function reset the selection\r\n self._curpage = -1\r\n\r\n # set the active page to the one we just split off\r\n self.SetSelectionToPage(page_info)\r\n\r\n self.UpdateHintWindowSize()\r\n \r\n # notify owner that the tab has been dragged\r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_DRAG_DONE, self.GetId())\r\n e.SetSelection(event.GetSelection())\r\n e.SetOldSelection(event.GetSelection())\r\n e.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(e)", "def _on_enter(self, gui):\n print(\"In initial state.\")\n\n '''Create drag and drop window'''\n gui.entry_sv = tk.StringVar()\n gui.drop_box_list = []\n gui.drop_box_items = tk.Listbox(master=gui.root, listvariable=gui.drop_box_list)\n gui.drop_box_text = 
tk.StringVar()\n gui.drop_box_text.set(\"Drop images here\")\n gui.entry = tk.Entry(gui.root, textvar=gui.drop_box_text, justify='center')\n gui.entry.config(font=(\"Courier\", 44))\n gui.entry.place(x = 200, y=200, width=800, height=800)\n #gui.entry.pack()\n gui.entry.drop_target_register(DND_FILES)\n gui.entry.dnd_bind('<<Drop>>', self.drop(gui))\n gui.update()", "def DoDropPane(self, panes, target, dock_direction, dock_layer, dock_row, dock_pos):\r\n \r\n drop = self.CopyTarget(target)\r\n panes = DoInsertPane(panes, dock_direction, dock_layer, dock_row, dock_pos)\r\n\r\n drop.Dock().Direction(dock_direction).Layer(dock_layer).Row(dock_row).Position(dock_pos)\r\n return self.ProcessDockResult(target, drop)", "def drag_and_drop(self, source_selector, target_selector):\n self._wait_element_localed(self.driver, source_selector)\n source = self.get_element(source_selector)\n self._wait_element_localed(self.driver, target_selector)\n target = self.get_element(target_selector)\n ActionChains(self.driver).drag_and_drop(source, target)", "def _ondrag(self, item, fun, num=1, add=None):\n if fun is None:\n self.cv.tag_unbind(item, \"<Button%s-Motion>\" % num)\n else:\n def eventfun(event):\n try:\n x, y = (self.cv.canvasx(event.x)/self.xscale,\n -self.cv.canvasy(event.y)/self.yscale)\n fun(x, y)\n except Exception:\n pass\n self.cv.tag_bind(item, \"<Button%s-Motion>\" % num, eventfun, add)", "def process_dropped_data(self, item, origin: str, path: Path):\n\n layer_type_origin_map = {\n PRESENTATION: LayerType.PRESENTATION,\n PRIMARY: LayerType.PRIMARY,\n SECONDARY: LayerType.SECONDARY\n }\n\n bounding_rect = item.boundingRect()\n\n pos = Position(item.x() * SVG_SCALE_FACTOR, item.y() * SVG_SCALE_FACTOR, item.zValue())\n\n size = Size(bounding_rect.width() * SVG_SCALE_FACTOR, bounding_rect.height() * SVG_SCALE_FACTOR)\n\n if origin == BACKGROUND:\n self.__template.set_background(str(path), size=size)\n else:\n try:\n layer = self.__template.add_layer(pos=pos, size=size, _type=layer_type_origin_map[origin])\n except NoBaseSvgError as err:\n self.removeItem(item)\n error_dialog = QErrorMessage(self.parent())\n error_dialog.showMessage(str(err))\n else:\n self.__template.map_layer_with_item(layer, graphic_item=item)", "def dragMoveEvent(self, event: QtGui.QDragMoveEvent) -> None:\n if event.mimeData().hasImage:\n event.accept()\n else:\n event.ignore()", "def OnTokenButtonRelease(self, event):\n self._drag_data = {\"x\": 0, \"item\": None}\n\n # Rebind the main GUI buttons because they are unbinded while dragging the beats\n self.myMainGUI.root.after(200, self.myMainGUI.bindButtons)", "def drop_inventory(self):\n header = \"Choose item to drop:\\n\"\n def drop(get_gameworld_cell, x, y, item):\n item_entity = ItemPickup([item], x, y, get_gameworld_cell)\n events.trigger_event(\"world_add_entity\", item_entity)\n self.inventory.remove(item)\n action_list = [(item, functools.partial(drop, get_gameworld_cell=self.get_gameworld_cell, x=self.x, y=self.y, item=item)) for item in self.inventory]\n if len(action_list) == 0:\n header += \"You hold nothing!\"\n events.trigger_event(\"print_list\", action_list, header=header)", "def OnLeftUp_DragFloatingPane(self, eventOrPt):\r\n\r\n if isinstance(eventOrPt, wx.Point):\r\n clientPt = self._frame.ScreenToClient(eventOrPt)\r\n screenPt = wx.Point(*eventOrPt)\r\n else:\r\n clientPt = eventOrPt.GetPosition()\r\n screenPt = self._frame.ClientToScreen(clientPt)\r\n\r\n # try to find the pane\r\n paneInfo = self.GetPane(self._action_window)\r\n if not 
paneInfo.IsOk():\r\n raise Exception(\"Pane window not found\")\r\n\r\n ret = False\r\n \r\n if paneInfo.frame:\r\n \r\n # calculate the offset from the upper left-hand corner\r\n # of the frame to the mouse pointer\r\n framePos = paneInfo.frame.GetPosition()\r\n action_offset = screenPt - framePos\r\n\r\n # is the pane dockable?\r\n if self.CanDockPanel(paneInfo):\r\n # do the drop calculation\r\n indx = self._panes.index(paneInfo)\r\n ret, paneInfo = self.DoDrop(self._docks, self._panes, paneInfo, clientPt, action_offset)\r\n\r\n if ret:\r\n e = self.FireEvent(wxEVT_AUI_PANE_DOCKING, paneInfo, canVeto=True)\r\n if e.GetVeto():\r\n self.HideHint()\r\n ShowDockingGuides(self._guides, False)\r\n return\r\n\r\n e = self.FireEvent(wxEVT_AUI_PANE_DOCKED, paneInfo, canVeto=False)\r\n\r\n if self._agwFlags & AUI_MGR_SMOOTH_DOCKING:\r\n self.SmoothDock(paneInfo)\r\n\r\n self._panes[indx] = paneInfo\r\n \r\n # if the pane is still floating, update it's floating\r\n # position (that we store)\r\n if paneInfo.IsFloating():\r\n paneInfo.floating_pos = paneInfo.frame.GetPosition()\r\n if paneInfo.frame._transparent != paneInfo.transparent or self._agwFlags & AUI_MGR_TRANSPARENT_DRAG:\r\n paneInfo.frame.SetTransparent(paneInfo.transparent)\r\n paneInfo.frame._transparent = paneInfo.transparent\r\n \r\n elif self._has_maximized:\r\n self.RestoreMaximizedPane()\r\n \r\n # reorder for dropping to a new notebook\r\n # (caution: this code breaks the reference!)\r\n tempPaneInfo = self.CopyTarget(paneInfo)\r\n self._panes.remove(paneInfo)\r\n self._panes.append(tempPaneInfo)\r\n\r\n if ret:\r\n self.Update()\r\n\r\n self.HideHint()\r\n ShowDockingGuides(self._guides, False)", "def _on_exit(self, gui):\n gui.update()\n return WaitForDrop()", "def _drop_tree_items(self, drop_items, drop_id_color):\n\n # Block the selection signals while we process the drop\n self.selectSignalBlocked = True\n\n # Get the drop id color parent - the aov id tree widget item\n drop_id_set = drop_id_color.parent()\n\n # Drop the items into the new parent\n for item in drop_items:\n if item.parent().parent().text(0) != drop_id_color.parent().text(0):\n drop_items.remove(item)\n else:\n item.parent().removeChild(item)\n\n drop_id_color.insertChildren(0, drop_items)\n\n # Set the items as selected\n for item in drop_items:\n item.setSelected(True)\n\n # Set new idColor - need to optimize!\n utils.set_attribute_id(item.data(1, QtCore.Qt.UserRole),\n drop_id_set.data(1, QtCore.Qt.UserRole),\n drop_id_color.data(1, QtCore.Qt.UserRole))\n\n # Set the new parent as expanded so we can see the dropped items\n drop_id_color.setExpanded(True)\n\n # Unblock the selection change signals\n self.selectSignalBlocked = False\n\n return None", "def handle_actor_drop(data: bytes) -> Tuple[bytes, str]:\n # TODO: reverse first 9 bytes\n item_id = struct.unpack('I', data[:4])[0]\n unknown = struct.unpack('I', data[4:8])[0] # noqa: F841\n unknown2 = data[9] # noqa: F841\n item_name_length = struct.unpack('H', data[9:11])[0]\n item_name = data[11:11+item_name_length].decode(helpers.ENCODING)\n x, y, z = struct.unpack('fff',\n data[11+item_name_length:11+item_name_length+3*4])\n\n message = f'[{item_id}] {item_name} dropped at: {x} {y} {z}'\n\n # Pick up drops automatically\n if \"Drop\" in item_name:\n message += f'\\n\\t;) Auto-looting {item_id}'\n packet = struct.pack('=HI', 0x6565, item_id)\n helpers.PACKET_QUEUE.put(packet)\n # TODO: not sure about last few bytes\n return data[11+item_name_length+3*4:], message", "def drop(self):\n pass", "def 
drop(self):\n pass", "def drop(self, item: Item):\n self.items.remove(item)\n item.place(self.parent.x, self.parent.y, self.gamemap)\n\n self.engine.message_log.add_message(f'You yeeted the {item.name}.')", "def toolDropped(*args, **kwargs)->None:\n pass", "def DoDropLayer(self, docks, target, dock_direction):\r\n\r\n drop = self.CopyTarget(target)\r\n \r\n if dock_direction == AUI_DOCK_LEFT:\r\n drop.Dock().Left()\r\n drop_new_layer = max(max(GetMaxLayer(docks, AUI_DOCK_LEFT),\r\n GetMaxLayer(docks, AUI_DOCK_BOTTOM)),\r\n GetMaxLayer(docks, AUI_DOCK_TOP)) + 1\r\n\r\n elif dock_direction == AUI_DOCK_TOP:\r\n drop.Dock().Top()\r\n drop_new_layer = max(max(GetMaxLayer(docks, AUI_DOCK_TOP),\r\n GetMaxLayer(docks, AUI_DOCK_LEFT)),\r\n GetMaxLayer(docks, AUI_DOCK_RIGHT)) + 1\r\n\r\n elif dock_direction == AUI_DOCK_RIGHT:\r\n drop.Dock().Right()\r\n drop_new_layer = max(max(GetMaxLayer(docks, AUI_DOCK_RIGHT),\r\n GetMaxLayer(docks, AUI_DOCK_TOP)),\r\n GetMaxLayer(docks, AUI_DOCK_BOTTOM)) + 1\r\n\r\n elif dock_direction == AUI_DOCK_BOTTOM:\r\n drop.Dock().Bottom()\r\n drop_new_layer = max(max(GetMaxLayer(docks, AUI_DOCK_BOTTOM),\r\n GetMaxLayer(docks, AUI_DOCK_LEFT)),\r\n GetMaxLayer(docks, AUI_DOCK_RIGHT)) + 1\r\n\r\n else:\r\n return False, target\r\n \r\n\r\n drop.Dock().Layer(drop_new_layer)\r\n return self.ProcessDockResult(target, drop)", "def OnCaptureLost(self, event):\r\n\r\n if self._is_dragging:\r\n self._is_dragging = False\r\n self._on_button = False\r\n\r\n if self._drag_image:\r\n self._drag_image.EndDrag()\r\n del self._drag_image\r\n self._drag_image = None\r\n \r\n event = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_CANCEL_DRAG, self.GetId())\r\n event.SetSelection(self.GetIdxFromWindow(self._click_tab))\r\n event.SetOldSelection(event.GetSelection())\r\n event.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(event)", "def process_IN_UNMOUNT(self, event):", "def _on_articles_dropped_to_trash(self, evt):\n \n # get articles\n articles = [core.Article(dbid=i) for i in evt.articles_dbids]\n \n # update library\n self._library.trash(articles, True)\n \n # refresh collections view\n self._collections_view.UpdateCounts()\n \n # refresh articles view\n self._articles_view.ShowArticles()", "def drop(self, oldpos, fieldgroup, currentplayer):\n\t\ttry:\n\t\t\tdropped_on = pygame.sprite.spritecollide(self,\n\t\t\t\t\t\t\t\t\t fieldgroup,\n\t\t\t\t\t\t\t\t\t False)[0]\n\t\t\tif dropped_on.type == \"home\":\n\t\t\t\tcurrentplayer[\"meeples_home\"] += 1\n\t\t\telif dropped_on.id == currentplayer.name*10:\n\t\t\t\tcurrentplayer.meeples_out -= 1\n\t\t\tself.grabbed = False\n\n\t\t\treturn True\n\t\texcept:\n\t\t\tself.rect = oldpos\n\t\t\tself.grabbed = False\n\t\t\treturn False", "def dnd_end(self, target, event):\n\n self._dnd_started = False\n\n if self._dnd_source and self.rgb:\n # Re-bind events that are dropped by dnd.py\n self._canvas.bind('<B1-Motion>', self._start_dnd)\n self._canvas.bind('<B1-ButtonRelease-1>', self._update_color)", "def OnMoveFinished(self):\r\n\r\n # notify the owner manager that the pane has finished moving\r\n if self._owner_mgr:\r\n self._owner_mgr._action_window = self._pane_window\r\n point = wx.GetMousePosition()\r\n if self._is_toolbar:\r\n self._owner_mgr.OnLeftUp_DragToolbarPane(point)\r\n else:\r\n self._owner_mgr.OnLeftUp_DragFloatingPane(point)\r\n\r\n self._owner_mgr.OnFloatingPaneMoved(self._pane_window, point)", "def __init__(self, obj):\n \n wx.TextDropTarget.__init__(self) # Initialize the wx.TextDropTarget Object\n self.obj = obj# Store 
the Object Reference for dropped text", "def mouseMoveEvent (self, event):\n self.itemMoved = True\n super(DiagramItem, self).mouseMoveEvent(event)", "def drop(self):\n Game.instance.area_map.entities.append(self.owner)\n Game.instance.inventory.remove(self.owner)\n self.owner.x = Game.instance.player.x\n self.owner.y = Game.instance.player.y\n message('You dropped a ' + self.owner.name + '.', palette.yellow)", "def drop_field(self, field):\r\n self.dropped_fields.add(field)", "def mounted(self):\n self.vue.dragAndDropCapable = self.determineDragAndDropCapable();\n \"\"\"\n If drag and drop capable, then we continue to bind events to our elements.\n \"\"\"\n if( self.vue.dragAndDropCapable ):\n \"\"\"\n Listen to all of the drag events and bind an event listener to each\n for the fileform.\n \"\"\"\n \n def for_events(evt):\n def add_event(e):\n e.preventDefault();\n e.stopPropagation();\n add_event.bind(self.vue)\n \"\"\"\n For each event add an event listener that prevents the default action\n (opening the file in the browser) and stop the propagation of the event (so\n no other elements open the file in the browser)\n \"\"\"\n self.vue[\"$refs\"].fileform.addEventListener(evt,add_event,False)\n for_events.bind(self.vue)\n ['drag', 'dragstart', 'dragend', 'dragover', 'dragenter', 'dragleave', 'drop'].forEach(for_events);\n \"\"\"\n Add an event listener for drop to the form\n \"\"\"\n def capture_files(e):\n \"\"\"\n Capture the files from the drop event and add them to our local files\n array.\n \"\"\"\n i = 0\n \n while i < e.dataTransfer.files.length:\n self.vue.files.push( e.dataTransfer.files[i] );\n i+=1\n self.getImagePreviews()\n self.submitFiles()\n capture_files.bind(this)\n self.vue[\"$refs\"].fileform.addEventListener('drop', capture_files);\n __pragma__ ('jsiter') \n fetch('/json/media/',\n {\n \"method\":\"GET\",\n })\\\n .then(lambda res:res.json())\\\n .then(self.drawImages)\\\n .catch(lambda e:console.log('FAILURE!!',e));\n __pragma__ ('nojsiter')", "def _on_motion(self, event):\n if not self._visual_drag.winfo_ismapped():\n return\n\n if self._drag_cols and self._dragged_col is not None:\n self._drag_col(event)\n elif self._drag_rows and self._dragged_row is not None:\n self._drag_row(event)", "def _handle_player_collide_item(self, player: Player, dropped_item: DroppedItem,\n data, arbiter: pymunk.Arbiter) -> bool:\n\n dropped_item.collect(self._player)\n self._world.remove_item(dropped_item)\n return False", "def dragEnterEvent(self, e):\n # TODO: Do it properly.\n # TODO: Redraw widget while dragging.\n e.accept()", "def OnMotion(self, event):\r\n \r\n # start a drag event\r\n if not self._dragging and self._action_item != None and self._action_pos != wx.Point(-1, -1) and \\\r\n abs(event.m_x - self._action_pos.x) + abs(event.m_y - self._action_pos.y) > 5:\r\n \r\n self.SetToolTipString(\"\")\r\n self._dragging = True\r\n\r\n e = AuiToolBarEvent(wxEVT_COMMAND_AUITOOLBAR_BEGIN_DRAG, self.GetId())\r\n e.SetEventObject(self)\r\n e.SetToolId(self._action_item.id)\r\n self.ProcessEvent(e)\r\n self.DoIdleUpdate()\r\n return\r\n \r\n hit_item = self.FindToolForPosition(*event.GetPosition())\r\n \r\n if hit_item: \r\n if not hit_item.state & AUI_BUTTON_STATE_DISABLED:\r\n self.SetHoverItem(hit_item)\r\n else:\r\n self.SetHoverItem(None)\r\n \r\n else: \r\n # no hit item, remove any hit item\r\n self.SetHoverItem(hit_item)\r\n \r\n # figure out tooltips\r\n packing_hit_item = self.FindToolForPositionWithPacking(*event.GetPosition())\r\n \r\n if packing_hit_item:\r\n \r\n if 
packing_hit_item != self._tip_item:\r\n self._tip_item = packing_hit_item\r\n\r\n if packing_hit_item.short_help != \"\":\r\n self.StartPreviewTimer()\r\n self.SetToolTipString(packing_hit_item.short_help)\r\n else:\r\n self.SetToolTipString(\"\")\r\n self.StopPreviewTimer()\r\n \r\n else:\r\n \r\n self.SetToolTipString(\"\")\r\n self._tip_item = None\r\n self.StopPreviewTimer()\r\n \r\n # if we've pressed down an item and we're hovering\r\n # over it, make sure it's state is set to pressed\r\n if self._action_item:\r\n \r\n if self._action_item == hit_item:\r\n self.SetPressedItem(self._action_item)\r\n else:\r\n self.SetPressedItem(None)\r\n \r\n # figure out the dropdown button state (are we hovering or pressing it?)\r\n self.RefreshOverflowState()", "def panelDropLoad( dragControl, dropControl, messages, x, y, dragType ):\n # Get name of the asset that was dragged\n loadedObject = cmds.iconTextCheckBox(dragControl, query=True, label=True)\n # Get mode from standin or assembly\n selectedRadio = cmds.radioCollection(loadMethodRadio, query=True, select=True)\n # Instantiate AssetIcon\n asset = AssetIcon(loadedObject)\n \n loadedAssetNode = None\n\n # Load asset using correct function\n if \"standin\" in selectedRadio:\n loadedAssetNode = asset.loadArnoldAsset()\n else: \n loadedAssetNode = asset.loadAsset()\n \n # Get a position in the world using the mouse pointer as reference\n loadedLocation = cmds.autoPlace(useMouse=True)\n # Move the asset to that position\n cmds.move(loadedLocation[0], loadedLocation[1], loadedLocation[2], loadedAssetNode, absolute=True)", "def is_droppable ( self, object, row, value ):\n return self.droppable", "def execute_drop(item_id):\r\n if (item_id in inventory):\r\n current_room[\"items\"][item_id] = inventory[item_id]\r\n del inventory[item_id]\r\n wrap_print(\"You dropped \" + items[item_id][\"name\"] + \".\")\r\n global valid_move\r\n valid_move = True\r\n else:\r\n wrap_print(\"You cannot drop that.\")", "def dropObject(player):\n for treasure in Treasure.List:\n if player.treasureCaptured:\n player.treasureCaptured = False\n treasure.x = player.x\n treasure.y = player.y\n treasure.img = pygame.image.load(Treasure.treasure_img[0])", "def on_moved(self, event):\n print(\"Moved\")\n time.sleep(5)\n self.moveFile(event.dest_path)", "def OnLeftUp(self, event): # ANDY PAN\n if event.ShiftDown():\n event.Skip()\n return\n self.last_drag_x = self.last_drag_y = None\n self.SetCursor(wx.Cursor(wx.CURSOR_DEFAULT))\n # turn off drag\n self.was_dragging = False\n # force PAINT event to remove selection box (if required)\n # self.Update()\n event.Skip()", "def quantity_dropped(self, quantity_dropped):\n\n self._quantity_dropped = quantity_dropped", "def drag_and_drop_by_offset(self, elem, x, y):\n ActionChains(self.driver).drag_and_drop_by_offset(elem, xoffset=x, yoffset=y).perform()", "def OnTokenButtonPress(self, event):\n\n # Unbind main GUI buttons\n self.myMainGUI.unbindButtons()\n\n # Store drag data\n self._drag_data = {\"x\": event.x, \"item\": self.canvas_SG[0].find_closest(self.canvas_SG[0].canvasx(event.x),\n self.canvas_SG[0].canvasy(event.y))[0]}", "def drag(self, source_index, target_index):\r\n draggables = self.q(css='.drag-handle')\r\n source = draggables[source_index]\r\n target = draggables[target_index]\r\n action = ActionChains(self.browser)\r\n # When dragging before the target element, must take into account that the placeholder\r\n # will appear in the place where the target used to be.\r\n placeholder_height = 40\r\n 
action.click_and_hold(source).move_to_element_with_offset(\r\n target, 0, placeholder_height\r\n ).release().perform()\r\n wait_for_notification(self)", "def _onmove(self, event):", "def DoDropRow(self, panes, target, dock_direction, dock_layer, dock_row):\r\n \r\n drop = self.CopyTarget(target)\r\n panes = DoInsertDockRow(panes, dock_direction, dock_layer, dock_row)\r\n\r\n drop.Dock().Direction(dock_direction).Layer(dock_layer).Row(dock_row).Position(0)\r\n return self.ProcessDockResult(target, drop)", "def _drag_col(self, event):\n x = self._dx + event.x # get dragged column new left x coordinate\n self._visual_drag.place_configure(x=x) # update column preview position\n # if one border of the dragged column is beyon the middle of the\n # neighboring column, swap them\n if (self._dragged_col_neighbor_widths[0] is not None and\n x < self._dragged_col_x - self._dragged_col_neighbor_widths[0] / 2):\n self._swap_columns('left')\n elif (self._dragged_col_neighbor_widths[1] is not None and\n x > self._dragged_col_x + self._dragged_col_neighbor_widths[1] / 2):\n self._swap_columns('right')\n # horizontal scrolling if the cursor reaches the side of the table\n if x < 0 and self.xview()[0] > 0:\n # scroll left and update dragged column x coordinate\n self.xview_scroll(-10, 'units')\n self._dragged_col_x += 10\n elif x + self._dragged_col_width / 2 > self.winfo_width() and self.xview()[1] < 1:\n # scroll right and update dragged column x coordinate\n self.xview_scroll(10, 'units')\n self._dragged_col_x -= 10", "def acceptDrops(self) -> bool:\n ...", "def touch_moved(self, touch):\n\t\tpass", "def onLeftUp(self, event):\n\n self.last_drag_x = self.last_drag_y = None\n\n if self.ignore_next_up:\n self.ignore_next_up = False\n return\n\n self.SetCursor(wx.StockCursor(wx.CURSOR_DEFAULT))\n\n # we need a repaint to remove any selection box, but NOT YET!\n delayed_paint = self.sbox_1_x\n\n # if any layers interested, inform of possible select\n if not self.was_dragging:\n if self.is_box_select:\n self.is_box_select = False\n\n # box selection\n (lon_1, lat_1) = self.convertView2Geo(self.sbox_1_x,\n self.sbox_1_y)\n (lon_2, lat_2) = self.convertView2Geo(self.sbox_1_x+self.sbox_w,\n self.sbox_1_y+self.sbox_h)\n\n # check each layer for a box select callback\n copy_layers = copy.copy(self.layer_z_order)\n handled_layers = []\n for id in copy_layers:\n # if layer still exists and not already handled\n if id in self.layer_mapping and id not in handled_layers:\n l = self.layer_mapping[id]\n if l.visible and l.callback_box_select:\n # get all points selected (if any)\n points = self.getBoxSelectPoints(l.data,\n (lon_1,lat_1),\n (lon_2,lat_2))\n if points:\n # pass points to callback\n handled_layers.append(id)\n if l.callback_box_select(id, points):\n delayed_paint = True\n else:\n # possible point selection\n (cx, cy) = event.GetPositionTuple()\n clickpt = self.convertView2Geo(cx, cy)\n # check each layer for a point select callback\n copy_layers = copy.copy(self.layer_z_order)\n handled_layers = []\n for id in copy_layers:\n # if layer still exists and not already handled\n if id in self.layer_mapping and id not in handled_layers:\n l = self.layer_mapping[id]\n if l.visible and l.callback_point_select:\n pt = self.getNearestPointInLayer(l.data,\n l.delta, clickpt)\n if pt:\n handled_layers.append(id)\n if l.callback_point_select(id, pt):\n delayed_paint = True\n\n # turn off drag\n self.was_dragging = False\n\n # turn off box selection mechanism\n self.is_box_select = False\n self.sbox_1_x = self.sbox_1_y = 
None\n\n # force PAINT event to remove selection box (if required)\n if delayed_paint:\n self.Refresh()", "def __init__(self):\n \n __gsignals__ = { 'expose-event' : 'override'}\n\n self.filename = \"\"\n self.source_id = 0\n self.dy = 0\n\n # Create a top level window\n self.window = gtk.Window()\n \n #Get y position of mouse at start of drag \n self.mouse_click_point = 0\n \n #Create a TextArea class instance\n self.drawing = TextArea()\n\n self.drawing.show()\n \n self.window.connect('drag-begin', self.start_refresh)\n self.window.connect('drag-motion', self.drag_motion)\n self.window.connect('drag-end', self.stop_drag_motion)\n \n self.window.drag_source_set(gtk.gdk.BUTTON1_MASK,\n [(\"\", gtk.TARGET_SAME_APP, 1)],\n gtk.gdk.ACTION_PRIVATE)\n\n self.window.drag_dest_set(gtk.DEST_DEFAULT_MOTION,\n [(\"\", gtk.TARGET_SAME_APP, 1)],\n gtk.gdk.ACTION_PRIVATE)\n \n self.window.connect('destroy', lambda w: gtk.main_quit())\n \n\n self.window.set_default_size(600,900)\n self.window.move(300,100)\n\n #Create a TextArea class instance\n self.drawing = TextArea()\n self.drawing.show()\n \n \n vbox = gtk.VBox()\n self.window.add(vbox)\n\n #Create a UIManager instance\n uimanager = gtk.UIManager()\n self.current_scale = 16\n\n #Add the accelerator group to the toplevel window\n accelgroup = uimanager.get_accel_group()\n self.window.add_accel_group(accelgroup)\n\n #Create an ActionGroup\n actiongroup = gtk.ActionGroup('PyViewer')\n self.actiongroup = actiongroup\n \n #Create actions\n actiongroup.add_actions([('Open', gtk.STOCK_OPEN, '_Open', None, None, self.open_file),\n ('Quit', gtk.STOCK_QUIT, '_Quit', None, None, self.quit_viewer),\n ('File', None, '_File')])\n \n #Add the actiongroup to the uimanager\n uimanager.insert_action_group(actiongroup, 0)\n\n #Add a UI description\n uimanager.add_ui_from_string(self.ui)\n\n #Create a MenuBar\n menubar = uimanager.get_widget('/MenuBar')\n \n #Pack the menubar and the drawing area into a vbox\n vbox.pack_start(menubar, False)\n vbox.pack_start(self.drawing) \n\n self.window.show_all()\n\n return", "def _on_press(self, event):\n if tk.DISABLED in self.state():\n return\n\n region = self.identify_region(event.x, event.y)\n\n if self._drag_cols and region == 'heading':\n self._start_drag_col(event)\n elif self._drag_rows and region == 'cell':\n self._start_drag_row(event)", "def _on_mouse(self, event):\n x, y = event.GetPosition()\n if self._drag_mode == DepthCanvas.SASH_DRAG_NONE: \n self._canvas_hit_test(x, y) \n if event.LeftDown():\n self.start_dragging(y)\n elif self._drag_mode == DepthCanvas.SASH_DRAG_DRAGGING:\n if event.LeftIsDown():\n self.drag_it(y) \n elif event.LeftUp():\n self.end_dragging()\n event.Skip()", "def detach(self, *items):\n self._visual_drag.detach(*items)\n ttk.Treeview.detach(self, *items)", "def btn_upload_callback(self):\n # Create File Select Dialog\n dialog = QFileDialog(parent=self, caption='Images')\n dialog.setMimeTypeFilters(\n [\"image/jpeg\", \"image/png\", \"image/tiff\", 'application/zip'])\n dialog.setFileMode(QFileDialog.ExistingFile)\n\n if dialog.exec_() == QDialog.Accepted:\n\n filename = dialog.selectedFiles()[0]\n\n with open(filename, 'rb') as f:\n file_b64s = fio_to_b64s(f)\n\n if ext_from_path(filename) == '.zip':\n ret = api.upload_zip(\n file_b64s,\n nameext_from_path(filename),\n self.user_hash\n )\n else:\n ret = api.upload_image(\n file_b64s,\n nameext_from_path(filename),\n self.user_hash\n )\n if ret.get('success') is False:\n self.show_error(ret['error_msg'])\n self.update_table()", "def 
_update_droppings(self):\n\t\t# Update dropping positions.\n\t\tself.droppings.update()\n\n\t\t# Get rid of the droppings that have disappeared.\n\t\tfor dropping in self.droppings.copy():\n\t\t\tif dropping.rect.top >= 1050:\n\t\t\t\tself.droppings.remove(dropping)\n\n\t\tself._check_dropping_auto_collisions()", "def OnLeftUp(self, event):\r\n\r\n if self._action == actionResize:\r\n## self._frame.Freeze()\r\n self.OnLeftUp_Resize(event)\r\n## self._frame.Thaw()\r\n \r\n elif self._action == actionClickButton:\r\n self.OnLeftUp_ClickButton(event)\r\n \r\n elif self._action == actionDragFloatingPane:\r\n self.OnLeftUp_DragFloatingPane(event)\r\n \r\n elif self._action == actionDragToolbarPane:\r\n self.OnLeftUp_DragToolbarPane(event)\r\n \r\n else:\r\n event.Skip() \r\n\r\n if self._frame.HasCapture():\r\n self._frame.ReleaseMouse()\r\n \r\n self._action = actionNone", "def callback(self, event):\n # IN_CLOSE_WRITE -> 0x00000008\n if event.mask == 0x00000008:\n if event.name.endswith('.json'):\n print_success(\"Ldapdomaindump file found\")\n if event.name in ['domain_groups.json', 'domain_users.json']:\n if event.name == 'domain_groups.json':\n self.domain_groups_file = event.pathname\n if event.name == 'domain_users.json':\n self.domain_users_file = event.pathname\n if self.domain_groups_file and self.domain_users_file:\n print_success(\"Importing users\")\n subprocess.Popen(['jk-import-domaindump', self.domain_groups_file, self.domain_users_file])\n elif event.name == 'domain_computers.json':\n print_success(\"Importing computers\")\n subprocess.Popen(['jk-import-domaindump', event.pathname])\n\n # Ldap has been dumped, so remove the ldap targets.\n self.ldap_strings = []\n self.write_targets()\n\n if event.name.endswith('_samhashes.sam'):\n host = event.name.replace('_samhashes.sam', '')\n # TODO import file.\n print_success(\"Secretsdump file, host ip: {}\".format(host))\n subprocess.Popen(['jk-import-secretsdump', event.pathname])\n\n # Remove this system from this ip list.\n self.ips.remove(host)\n self.write_targets()", "def help_drop(self):\n print(DROP)", "def dropped(self, dropped):\n if self.local_vars_configuration.client_side_validation and dropped is None: # noqa: E501\n raise ValueError(\"Invalid value for `dropped`, must not be `None`\") # noqa: E501\n\n self._dropped = dropped", "def getTransferData(self, f: java.awt.datatransfer.DataFlavor) -> object:\n ...", "def drag(self, event):\n self.yview('scroll', self.ypos - event.y, 'units')\n self.xview('scroll', self.xpos - event.x, 'units')\n self.ypos = event.y\n self.xpos = event.x", "def OnMouse(self, event):\n if not event.Dragging():\n self._dragPos = None\n if self.HasCapture():\n self.ReleaseMouse()\n return\n else:\n if not self.HasCapture():\n self.CaptureMouse()\n\n if not self._dragPos:\n self._dragPos = event.GetPosition()\n else:\n pos = event.GetPosition()\n displacement = self._dragPos - pos\n self.SetPosition(self.GetPosition() - displacement)", "def DoDropNonFloatingPane(self, docks, panes, target, pt):\r\n \r\n screenPt = self._frame.ClientToScreen(pt)\r\n clientSize = self._frame.GetClientSize()\r\n frameRect = GetInternalFrameRect(self._frame, self._docks)\r\n\r\n drop = self.CopyTarget(target)\r\n\r\n # The result should always be shown\r\n drop.Show()\r\n\r\n part = self.HitTest(pt.x, pt.y)\r\n\r\n if not part:\r\n return False, target\r\n\r\n if part.type == AuiDockUIPart.typeDockSizer:\r\n \r\n if len(part.dock.panes) != 1:\r\n return False, target\r\n \r\n part = 
self.GetPanePart(part.dock.panes[0].window)\r\n if not part:\r\n return False, target\r\n \r\n if not part.pane:\r\n return False, target\r\n\r\n part = self.GetPanePart(part.pane.window)\r\n if not part:\r\n return False, target\r\n\r\n insert_dock_row = False\r\n insert_row = part.pane.dock_row\r\n insert_dir = part.pane.dock_direction\r\n insert_layer = part.pane.dock_layer\r\n\r\n direction = part.pane.dock_direction\r\n \r\n if direction == AUI_DOCK_TOP:\r\n if pt.y >= part.rect.y and pt.y < part.rect.y+auiInsertRowPixels:\r\n insert_dock_row = True\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n if pt.y > part.rect.y+part.rect.height-auiInsertRowPixels and \\\r\n pt.y <= part.rect.y + part.rect.height:\r\n insert_dock_row = True\r\n\r\n elif direction == AUI_DOCK_LEFT:\r\n if pt.x >= part.rect.x and pt.x < part.rect.x+auiInsertRowPixels:\r\n insert_dock_row = True\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n if pt.x > part.rect.x+part.rect.width-auiInsertRowPixels and \\\r\n pt.x <= part.rect.x+part.rect.width:\r\n insert_dock_row = True\r\n\r\n elif direction == AUI_DOCK_CENTER:\r\n \r\n # \"new row pixels\" will be set to the default, but\r\n # must never exceed 20% of the window size\r\n new_row_pixels_x = auiNewRowPixels\r\n new_row_pixels_y = auiNewRowPixels\r\n\r\n if new_row_pixels_x > (part.rect.width*20)/100:\r\n new_row_pixels_x = (part.rect.width*20)/100\r\n\r\n if new_row_pixels_y > (part.rect.height*20)/100:\r\n new_row_pixels_y = (part.rect.height*20)/100\r\n\r\n # determine if the mouse pointer is in a location that\r\n # will cause a new row to be inserted. The hot spot positions\r\n # are along the borders of the center pane\r\n\r\n insert_layer = 0\r\n insert_dock_row = True\r\n pr = part.rect\r\n \r\n if pt.x >= pr.x and pt.x < pr.x + new_row_pixels_x:\r\n insert_dir = AUI_DOCK_LEFT\r\n elif pt.y >= pr.y and pt.y < pr.y + new_row_pixels_y:\r\n insert_dir = AUI_DOCK_TOP\r\n elif pt.x >= pr.x + pr.width - new_row_pixels_x and pt.x < pr.x + pr.width:\r\n insert_dir = AUI_DOCK_RIGHT\r\n elif pt.y >= pr.y+ pr.height - new_row_pixels_y and pt.y < pr.y + pr.height:\r\n insert_dir = AUI_DOCK_BOTTOM\r\n else:\r\n return False, target\r\n\r\n insert_row = GetMaxRow(panes, insert_dir, insert_layer) + 1\r\n \r\n if insert_dock_row:\r\n \r\n panes = DoInsertDockRow(panes, insert_dir, insert_layer, insert_row)\r\n drop.Dock().Direction(insert_dir).Layer(insert_layer). \\\r\n Row(insert_row).Position(0)\r\n \r\n return self.ProcessDockResult(target, drop)\r\n\r\n # determine the mouse offset and the pane size, both in the\r\n # direction of the dock itself, and perpendicular to the dock\r\n\r\n if part.orientation == wx.VERTICAL:\r\n \r\n offset = pt.y - part.rect.y\r\n size = part.rect.GetHeight()\r\n \r\n else:\r\n \r\n offset = pt.x - part.rect.x\r\n size = part.rect.GetWidth()\r\n \r\n drop_position = part.pane.dock_pos\r\n\r\n # if we are in the top/left part of the pane,\r\n # insert the pane before the pane being hovered over\r\n if offset <= size/2:\r\n \r\n drop_position = part.pane.dock_pos\r\n panes = DoInsertPane(panes,\r\n part.pane.dock_direction,\r\n part.pane.dock_layer,\r\n part.pane.dock_row,\r\n part.pane.dock_pos)\r\n\r\n # if we are in the bottom/right part of the pane,\r\n # insert the pane before the pane being hovered over\r\n if offset > size/2:\r\n \r\n drop_position = part.pane.dock_pos+1\r\n panes = DoInsertPane(panes,\r\n part.pane.dock_direction,\r\n part.pane.dock_layer,\r\n part.pane.dock_row,\r\n part.pane.dock_pos+1)\r\n \r\n\r\n drop.Dock(). 
\\\r\n Direction(part.dock.dock_direction). \\\r\n Layer(part.dock.dock_layer).Row(part.dock.dock_row). \\\r\n Position(drop_position)\r\n \r\n return self.ProcessDockResult(target, drop)", "def handle_event(self, event):\n if event.key == BattleActions.SELECT.value:\n prev_item = self.pokemon.held_item\n self.pokemon.held_item = self.item\n self.bag.subtract_item(self.item)\n self.bag.add_item(prev_item)\n self.is_dead = True", "def handle(self, event):\n\n if event == FL_PUSH:\n if Fl.event_button1():\n if self.gamewin.placing >= 0:\n self.gamewin.place_boat()\n \n else:\n self.gamewin.tile_clicked(self)\n return 1\n \n if event == FL_DRAG:\n return 0\n \n return super().handle(event)", "def on_moved(self, event):\n super(myEventHandler,self).on_moved(event)\n #moveto events from external folders have no src_path\n source = event.src_path\n dest = event.dest_path\n if event.is_directory:\n splitpath = split(source)\n splitdest = split(dest)\n if splitpath[1] == splitdest[1]:\n try:\n #where are we moving from\n pass\n #file = splitpath[1]\n #pathtoonedir = self.onedir.getonedirrectory()\n #oldpath = splitpath[0].replace(pathtoonedir ,\"\")\n #calculate new path\n #newpath = splitdest[0].replace(pathtoonedir ,\"\")\n #if oldpath is \"\":\n # oldpath = os.path.sep\n #self.onedir.movefile(file,newpath,oldpath)\n except OSError as e:\n print \"Error copying file! \" + e\n exit(1)\n else:\n #rename!!!!!!!!\n oldname = source\n newname = dest\n pathtoonedir = self.onedir.getonedirrectory()\n oldname = oldname.replace(pathtoonedir ,\"\")\n newname = newname.replace(pathtoonedir ,\"\")\n self.onedir.renamedirectory(oldname,newname)\n else:\n #if it comes from outside the folder structure\n if source is None:\n try:\n #use os.path.split to get file name and path\n splitpath = split(dest)\n file = splitpath[1]\n pathtoonedir = self.onedir.getonedirrectory()\n relpath = splitpath[0].replace(pathtoonedir ,\"\")\n self.onedir.sendfile(file, relpath)\n except OSError as e:\n print \"Error copying file! \" + e.strerror\n exit(1)\n except IOError as e:\n print \"IOerror creating file \" + e.strerror\n else:\n #file was moved!\n #check if name stays the same i.e. it's a move not a rename!\n splitpath = split(source)\n splitdest = split(dest)\n if splitpath[1] == splitdest[1]:\n try:\n #where are we moving from\n file = splitpath[1]\n pathtoonedir = self.onedir.getonedirrectory()\n oldpath = splitpath[0].replace(pathtoonedir ,\"\")\n #calculate new path\n newpath = splitdest[0].replace(pathtoonedir ,\"\")\n if oldpath is \"\":\n oldpath = os.path.sep\n self.onedir.movefile(file,newpath,oldpath)\n except OSError as e:\n print \"Error copying file! \" + e\n exit(1)\n else:\n #rename!!!!!!!!\n file = splitpath[1]\n newname = splitdest[1]\n pathtoonedir = self.onedir.getonedirrectory()\n path = splitpath[0].replace(pathtoonedir ,\"\")\n if path is \"\":\n path = os.path.sep\n else:\n path = path[1:]\n self.onedir.rename(file,path,newname)" ]
[ "0.72224736", "0.7178574", "0.7033632", "0.69308865", "0.69137365", "0.68517953", "0.66538286", "0.660371", "0.6481881", "0.6399347", "0.6314507", "0.63019335", "0.6220606", "0.6072313", "0.60482043", "0.6036351", "0.59223694", "0.59200025", "0.58687717", "0.58277893", "0.5734516", "0.56892866", "0.56366456", "0.56327146", "0.5616902", "0.55873185", "0.5570762", "0.5556108", "0.5525009", "0.55130804", "0.5495399", "0.5482396", "0.5450135", "0.54215217", "0.5369904", "0.53605336", "0.53423023", "0.53269815", "0.531934", "0.53087324", "0.5283762", "0.5241791", "0.52326345", "0.5222645", "0.52175194", "0.5214214", "0.51947457", "0.51713085", "0.51713085", "0.5170968", "0.51621383", "0.5153949", "0.51286787", "0.5117842", "0.51163244", "0.51041967", "0.5077833", "0.5052566", "0.5039383", "0.50068283", "0.4988413", "0.49841556", "0.49797967", "0.49678066", "0.49637955", "0.48908144", "0.48854396", "0.4880978", "0.4865574", "0.48463678", "0.48407745", "0.4836152", "0.48303056", "0.4824467", "0.4820127", "0.48152485", "0.48148572", "0.48119757", "0.48106006", "0.48065785", "0.4794525", "0.47882456", "0.4778417", "0.477379", "0.47688243", "0.47669476", "0.476431", "0.4757513", "0.47523054", "0.47477016", "0.47464442", "0.47421065", "0.47363034", "0.47358024", "0.4735249", "0.47333997", "0.4733353", "0.47230282", "0.47169688", "0.4713779" ]
0.81672764
0
Update every cell in the grid. This is a single step in the evolution. Compute the number of cells in each state and the number of state changes.
def update_grid(self, x):
    # Append boundary rows and columns to matrix
    x = self.append_boundary(x)  # the boundary is recomputed at each step
    y = np.copy(x)
    # For each cell within boundary, compute state according to rules.
    chg_0_1 = 0   # the number of cells that changed from state 0 to state 1
    chg_1_0 = 0   # the number of cells that changes from state 1 to state 0
    chg_none = 0  # the number of cells that did not change
    index = np.arange(1, x.shape[0] - 1)
    for i in index:
        for j in index:
            neighborhood = x[i - 1:i + 2:1, j - 1:j + 2:1]  # 3x3 sub matrix centered at i, j
            y[i, j] = self.update_cell(neighborhood)
            change = int(y[i, j] - x[i, j])
            if change == -1:
                chg_1_0 += 1
            if change == 0:
                chg_none += 1
            if change == 1:
                chg_0_1 += 1
    # Compute statistics excluding boundary
    total = np.power(x[1:-1:1, 1:-1:1].shape[0] - 1, 2)
    start_1 = np.sum(x[1:-1:1, 1:-1:1])
    end_1 = np.sum(y[1:-1:1, 1:-1:1])
    stats = [total, start_1, end_1, chg_1_0, chg_none, chg_0_1]
    return y[1:-1:1, 1:-1:1], stats  # remove the boundary
{
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
}
[ "def _update_cells(self):\n for row_number in range(self.number_cells_y):\n for col_number in range(self.number_cells_x):\n if self.to_be_updated[row_number][col_number]:\n self.cells[row_number][col_number].update()", "def update_cells(self, state):\n width = WIDTH / CELL_SIZE\n height = HEIGHT / CELL_SIZE\n\n for index in range(0, width * height):\n if state[index] != self.get_state(index):\n self.toggle_color(index)", "def update_state(self):\n self.reset_state()\n for piece in self.pieces:\n coordinates = piece.get_block_positions()\n for coor in coordinates:\n x, y = coor\n self.state[y][x] = piece", "def run(self):\n\t\tfrom loc import loc as Loc\n\t\tfor r in range(1,self.size):\n\t\t\tfor c in range(self.size): \n\t\t\t\tthis = Loc(r,c)\n\t\t\t\tself.state.set_cell(this, self.rule(self.neighbor_vals(this), self.__prob))\n\t\tself.__ran = True", "def run_iterations(self, n, verbose = False):\n for i in range(n):\n # Calculate total number of neighbors for each cell\n all_neighbors = self.get_all_neighbors()\n all_num_neighbors = np.sum(all_neighbors, axis = (-2,-1)) - self.board\n # Determine new state for each cell using lookup table and number of neighbors\n self.board[:] = np.where(self.board, \n self.lookup[1][all_num_neighbors], \n self.lookup[0][all_num_neighbors])\n # Verbosity check\n if verbose:\n print(self.board)", "def update_positions(self, grid):\r\n self.grid = grid", "def update_E(self):\n self.grid.E[self.loc] += (\n self.grid.courant_number\n * self.grid.inverse_permittivity[self.loc]\n * self.phi_E\n )", "def increment(grid):\n height = len(grid)\n width = len(grid[0])\n for r in range(height):\n for c in range(width):\n grid[r][c] += 1\n if grid[r][c] == 10:\n grid [r][c] = 0", "def update(self):\n self.grid.update()\n sleep(self.update_rate)", "def _check_cells(self):\n for row_number in range(self.number_cells_y):\n for col_number in range(self.number_cells_x):\n alive_neighbours = self._get_neighbours(row_number,col_number)\n \n self.to_be_updated[row_number][col_number] = False\n if self.cells[row_number][col_number].get_status():\n if alive_neighbours < 2:\n self.to_be_updated[row_number][col_number] = True\n elif alive_neighbours > 3:\n self.to_be_updated[row_number][col_number] = True\n else:\n if alive_neighbours == 3:\n self.to_be_updated[row_number][col_number] = True", "def update_grid(comp, grid):\n while not comp.waiting and not comp.halt_status:\n time.sleep(0.001)\n\n while not comp.output.empty():\n # Get 3 results\n x = comp.output.get()\n y = comp.output.get()\n b = comp.output.get()\n\n if x == -1 and y == 0:\n print(f\"Score = {b}\")\n\n grid[x][y] = b", "def update(self):\n changes = {}\n for coord in INDICES: # the need for two for loops is necessary\n if self.chart[coord] == ALIVE and (\n self.number_of_neighbors(coord) < 2 or self.number_of_neighbors(coord) > 3):\n changes[coord] = KILL\n elif self.number_of_neighbors(coord) == 3:\n changes[coord] = REVIVE\n for coord in changes.keys(): # because the evolution is discrete\n if changes[coord] == KILL:\n self.kill(coord)\n elif changes[coord] == REVIVE:\n self.givebirth(coord)", "def life_step(state):\n\t# For every cell each live cell in any of the 8 neighbouring cells contributes 1 to the sum\n\t# Rolling matricies is periodic so this implements periodic boundary conditions\n\tnumberOfNeigbours = sum(np.roll(np.roll(state, i, axis=0), j, axis=1)\n\t\t\t\t\t\t for i in (-1,0,1) for j in (-1,0,1) if (i != 0 or j != 0))\n\n\t# Any live cell with fewer than two live neighbours dies, as if 
caused by under-population\n\tstate = np.where(numberOfNeigbours < 2, 0, state)\n\t# Any live cell with more than three live neighbours dies, as if by over-population\n\tstate = np.where(numberOfNeigbours > 3, 0, state)\n\t# Any dead cell with exactly three live neighbours becomes a live cell, as if by reproduction.\n\tstate = np.where(numberOfNeigbours == 3, 1, state)\n\n\treturn state", "def update_E(self):\n self.grid.E[0, :, :, :] = self.grid.E[-1, :, :, :]", "def play_round_Fredkin_Cell(self):\n for x in self.board:\n for f in x:\n f.live_neighbors = 0\n\n for i in range(1, self.cols - 1):\n for j in range(1, self.rows - 1):\n status = self.board[i][j].status\n assert type(status)==int \n for m in range(i-1 , i +2):\n self.board[m][j].live_neighbors += status\n for n in range(j-1 , j +2):\n self.board[i][n].live_neighbors += status\n\n self.board[i][j].live_neighbors -= status", "def step(self):\n\t\tnewBoard = CellArray(self.size)\n\t\tfor i in range(0, self.size, 1):\n\t\t\tfor j in range(0, self.size, 1):\n\t\t\t\tnewBoard.board[i][j] = self.changeCell(i, j)\n\t\tself.board = newBoard.board", "def __iterate(self):\n\t\tnext_board = []\n\n\t\tfor y, row in enumerate(self.__board):\n\t\t\tnext_board.append([])\n\n\t\t\tfor x, cell in enumerate(row):\n\t\t\t\tneighbors = [\n\t\t\t\t\tself.__get_cell_state(y - 1, x - 1),\n\t\t\t\t\tself.__get_cell_state(y - 1, x),\n\t\t\t\t\tself.__get_cell_state(y - 1, x + 1),\n\t\t\t\t\tself.__get_cell_state(y, x - 1),\n\t\t\t\t\tself.__get_cell_state(y, x + 1),\n\t\t\t\t\tself.__get_cell_state(y + 1, x - 1),\n\t\t\t\t\tself.__get_cell_state(y + 1, x),\n\t\t\t\t\tself.__get_cell_state(y + 1, x + 1)\n\t\t\t\t]\n\t\t\t\tnum_neighbors = sum(neighbors)\n\t\t\t\tstate = get_new_state(cell, num_neighbors)\n\t\t\t\tnext_board[y].append(state)\n\n\t\tself.__board = next_board\n\t\tself.__display(self.__board)", "def play_round_Conway_Cell(self):\n for x in self.board:\n for f in x:\n f.live_neighbors = 0\n\n for i in range(1, self.cols - 1):\n for j in range(1, self.rows - 1):\n status = self.board[i][j].status\n assert type(status)==int \n\n for m in range(i - 1, i + 2):\n for n in range(j - 1, j + 2):\n self.board[m][n].live_neighbors += status\n self.board[i][j].live_neighbors -= status", "def _updateTransitionMatrix(self):\n N = self.N\n K = self.K\n T= self.T\n\n for i in range(1,self.K+1):\n den = 0\n for t in range(1,self.T):\n for n in range(1,N+1):\n den = den + self.posterior_state_trellis[n][(t,i)]\n \n for j in range(1,self.K+1): \n # For some state i,j\n s = 0\n for n in range(1,N+1): \n for t in range(1,self.T): \n cur_prob = self.posterior_transition_trellis[n][(t,t+1,j,i)]\n s = s+cur_prob\n\n # Compute total \n self.state_transition_mat[(j,i)] = (s/den)", "def updateCells(cell_positions):\n # Build a set of canditates for live cells at the next generation, instead of looking through the whole grid\n # These will be dead neighbours of living cells\n possible_future_cells = set()\n # Make sets of cells to add and remove at the end of the check\n cells_remove = set()\n cells_add = set()\n for cell in cell_positions:\n # Get adjacent squares\n neighbours_dict = cellNeighbours(cell)\n number_live_neighbours = 0\n # Check which of these corresponds to another living cell\n for square in neighbours_dict.values():\n if square in cell_positions:\n number_live_neighbours+=1\n else:\n possible_future_cells.add(square)\n\n # Any live cell with fewer than two live neighbours dies, as if caused by under-population\n if number_live_neighbours<2:\n 
cells_remove.add(cell)\n # Any live cell with two or three live neighbours lives on to the next generation\n # do nothing\n # Any live cell with more than three live neighbours dies, as if by overcrowding\n elif number_live_neighbours>3:\n cells_remove.add(cell)\n # Any dead cell with exactly three live neighbours becomes a live cell, as if by reproduction\n for cell_candidate in possible_future_cells:\n cell_candidate_neighbours = cellNeighbours(cell_candidate).values()\n # Count number of live neighbours\n count = 0\n for square in cell_candidate_neighbours:\n if square in cell_positions:\n count+=1\n if count == 3:\n cells_add.add(cell_candidate)\n # Update cell_positions by removing dead cells and adding new-born cells\n for cell in cells_add:\n cell_positions.add(cell)\n for cell in cells_remove:\n cell_positions.remove(cell)\n # Return the update live cell list\n return cell_positions", "def update(self, iteration):\n pass", "def update_board(self):\n for x in self.board:\n for f in x:\n if f.status == 0:\n if f.name == \"conway\":\n assert type(self.population)==int\n if f.live_neighbors == 3:\n f.symbol =\"*\"\n f.status = 1\n self.population += 1\n elif f.name == \"fredkin\":\n if f.live_neighbors == 1 or f.live_neighbors == 3 :\n f.status = 1\n f.symbol = str(f.age)\n self.population += 1\n else:\n f.status = 0\n\n elif f.status == 1:\n if f.name == \"conway\":\n assert type(self.population)==int\n #assert type(f.status)== 1\n if not((f.live_neighbors == 2 or f.live_neighbors == 3)):\n f.symbol = \".\"\n f.status = 0\n else:\n self.population += 1\n elif f.name == \"fredkin\":\n if f.live_neighbors == 1 or f.live_neighbors == 3:\n f.status = 1\n f.age += 1\n if f.age <= 2:\n f.symbol = str(f.age)\n self.population += 1\n else:\n self.board.replace(f, Conway_Cell(\"*\"))\n else:\n f.status = 0\n f.symbol = \"-\"", "def _compute_world_params(self) -> None:\n\n self.states = []\n for row in range(self.grid_height):\n for col in range(self.grid_width):\n cell = row * self.grid_width + col\n cell_type = self.grid[cell]\n\n possible_actions = {\n Action.up: self._get_action(max(row - 1, 0) * self.grid_width + col),\n Action.down: self._get_action(min(row + 1, self.grid_height - 1) * self.grid_width + col),\n Action.right: self._get_action(row * self.grid_width + min(col + 1, self.grid_width - 1)),\n Action.left: self._get_action(row * self.grid_width + max(col - 1, 0))\n }\n\n self.states.append(State(cell, possible_actions, cell_type))", "def calculate_next_board_state(self):\n new_board_state = np.zeros_like(self.board_state)\n\n for x in range(self.board_size[0]):\n for y in range(self.board_size[0]):\n new_board_state[x][y] = self.next_state_of_cell(x,y)\n \n self.set_state(new_board_state)", "def update_grid(self, player: str, letter: str) -> None:\n self.cells = [cell.update_cell(player, letter) for cell in self.cells]\n i = 0\n while i <= self.size:\n self.leylines[i].update_line(player, letter)\n self.rights[i].update_line(player, letter)\n self.lefts[i].update_line(player, letter)\n i += 1", "def compute_updates(self, xs, gs, state=None):\n raise NotImplementedError()", "def calc(self):\n np = 0\n for cell in self.cells:\n n = self.cell_np[cell]\n np += n\n self.dnp = np - self.np\n self.np = np", "def cycle(self):\n\n coordinates = self.get_random_coordinates()\n\n for coord in coordinates:\n if isinstance(self.cells[coord], (Jungle, Savannah, Desert)):\n self.cells[coord].feeding()\n\n for coord in coordinates:\n if isinstance(self.cells[coord], (Jungle, Savannah, 
Desert)):\n self.cells[coord].procreation()\n\n self.migration()\n\n for coord in coordinates:\n if isinstance(self.cells[coord], (Jungle, Savannah, Desert)):\n self.cells[coord].aging()\n\n for coord in coordinates:\n if isinstance(self.cells[coord], (Jungle, Savannah, Desert)):\n self.cells[coord].loss_of_weight()\n\n for coord in coordinates:\n if isinstance(self.cells[coord], (Jungle, Savannah, Desert)):\n self.cells[coord].death()\n\n self.animals_on_island()", "def updateBoard():\n #Drawing the initial board positions;\n for y in range(1, n+1): #1,2,3\n for x in range(1, n+1):\n val = positions[y][x];\n colorNode((x,y), numColors[val])\n label = Text(Point((x-0.5)*grid_side, (y-0.5)*grid_side),val);\n label.setSize(30)\n label.draw(win)", "def change_cell(self):\n # TODO: assess whether this may partly moved into the base class\n\n x, mu = self.update_position_direction(self.l_edge)\n mu_mean = self.calculate_mean_mu(self.x, x, self.l_edge)\n self.update_estimators(self.l_edge, mu_mean)\n\n if self.next_cell_index == self.grid.Ncells:\n # packet escapes\n self.is_escaped = True\n self.is_active = False\n self.x = self.cell_xr\n\n elif self.next_cell_index == -1:\n # packets gets reflected\n\n self.x = self.cell_xl\n self.mu = -self.mu\n\n self.calculate_and_set_propagation_distances()\n\n else:\n # packet is transported into target cell\n if self.next_cell_index > self.cell_index:\n # packet is moved one cell to the right\n\n self.x = self.grid.xl[self.next_cell_index]\n\n else:\n # packet is moved one cell to the left\n\n self.x = self.grid.xr[self.next_cell_index]\n\n # reset cell-based properties for easy access\n self.cell_index = self.next_cell_index\n self.cell_chi = self.grid.chi[self.cell_index]\n self.cell_xl = self.grid.xl[self.cell_index]\n self.cell_xr = self.grid.xr[self.cell_index]\n self.cell_dx = self.grid.dx[self.cell_index]\n\n # recalculate distances\n self.calculate_and_set_propagation_distances()", "def build_grains(self):\n\t\ttime = datetime.datetime.now()\n\t\tif self.probability == 0:\n\t\t\tfor cell in self.space.flat:\n\t\t\t\tif cell.state != 0 :\n\t\t\t\t\tcontinue\n\t\t\t\telif self.check_empty_neighbours(cell):\n\t\t\t\t\tcontinue\n\t\t\t\telse:\t\n\t\t\t\t\tneighbours = self.get_neighbours(cell)\n\t\t\t\t\tgrains = [0 for i in range(self.grains)]\n\t\t\t\t\tfor i in range(1,self.grains+1):\n\t\t\t\t\t\tfor neighbour in neighbours:\n\t\t\t\t\t\t\tif neighbour.state == i and neighbour.timestamp < time:\n\t\t\t\t\t\t\t\tgrains[i] = grains[i] + 1\n\t\t\t\t\tif grains == [0 for i in range(self.grains)]:\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tnew_grain = 0\n\t\t\t\t\tfor i in range(self.grains):\n\t\t\t\t\t\tif grains[i] >= new_grain:\n\t\t\t\t\t\t\tnew_grain = i\n\t\t\t\t\tcell.change_state(time, new_grain)\n\t\t\t\t\tself.empty_cells = self.empty_cells - 1\n\t\telse:\n\t\t\tfor cell in self.space.flat:\n\t\t\t\tif cell.state != 0 :\n\t\t\t\t\tcontinue\n\t\t\t\telif self.check_empty_neighbours(cell):\n\t\t\t\t\tcontinue\n\t\t\t\telse:\n\t\t\t\t\tneighbours = self.get_neighbours(cell)\n\t\t\t\t\tif self.decide_changing(cell,neighbours,5, time):\n\t\t\t\t\t\tneighbours = self.get_nearest_neighbours(cell)\n\t\t\t\t\t\tif self.decide_changing(cell,neighbours,3, time):\n\t\t\t\t\t\t\tneighbours = self.get_further_neighbours(cell)\n\t\t\t\t\t\t\tif self.decide_changing(cell,neighbours,3, time):\n\t\t\t\t\t\t\t\tneighbours = self.get_neighbours(cell)\n\t\t\t\t\t\t\t\tgrains = [0 for i in range(self.grains)]\n\t\t\t\t\t\t\t\tfor i in 
range(1,self.grains+1):\n\t\t\t\t\t\t\t\t\tfor neighbour in neighbours:\n\t\t\t\t\t\t\t\t\t\tif neighbour.state == i and neighbour.timestamp < time:\n\t\t\t\t\t\t\t\t\t\t\tgrains[i] = grains[i] + 1\n\t\t\t\t\t\t\t\tif grains == [0 for i in range(self.grains)]:\n\t\t\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t\t\tnew_grain = 0\n\t\t\t\t\t\t\t\tfor i in range(self.grains):\n\t\t\t\t\t\t\t\t\tif grains[i] >= new_grain:\n\t\t\t\t\t\t\t\t\t\tnew_grain = i\n\t\t\t\t\t\t\t\trandom_number = random.random() * 100\n\t\t\t\t\t\t\t\tif random_number <= self.probability:\n\t\t\t\t\t\t\t\t\tcell.change_state(time, new_grain)\n\t\t\t\t\t\t\t\t\tself.empty_cells = self.empty_cells - 1\n\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\tcontinue", "def next_generation(self):\n new_board = self.array.copy()\n for cell in self.cells:\n cell.update(new_board)\n \n if np.array_equal(self.prev_array, new_board):\n self.game.stable = True\n else:\n self.prev_array = self.array\n self.array = new_board", "def current_update():\n # Compute the multiplier coefficient:\n ci = dt / (L * dx)\n for k in range(0, nx-1):\n I[k] = I[k] - (ci * (V[k + 1] - V[k]))", "def state_generator(self):\n\n kernel = np.array([\n [1, 1, 1],\n [1, 0, 1],\n [1, 1, 1]])\n iteration = 0\n\n while True: # (Game of Life does not end)\n # Run 2D convolution with the given kernel to find out how many neighbors each cell has.\n # Boundary option determines whether to run with hard boundaries on the game board or\n # using a toroid board which wraps circularly. These are the two strategies for handling\n # a finite game board. scipy.signal.convolve2d handles these two modes gracefully, which\n # is why it is used here. There is also a performance gain when using numpy/scipy matrix\n # operations as opposed to iterating element-wise over the whole matrix.\n # See https://docs.scipy.org/doc/scipy-0.19.1/reference/generated/scipy.signal.convolve2d.html\n\n # There is a more sophisticated and efficient algorithm for determining next game state\n # (see http://dotat.at/prog/life/life.html) but for clarity and a lack of time, the standard\n # implementation was chosen.\n\n num_neighbors_board = convolve2d(self.board, kernel, mode='same', boundary=self.boundary.value)\n\n # Find empty cells that have three neighbors\n birth_coordinates = np.where(np.logical_and(self.board == 0, num_neighbors_board == 3))\n\n # Find live cells with too few or too many neighbors\n death_coordinates = np.where(\n np.logical_and(\n self.board == 1,\n np.logical_or(num_neighbors_board < 2, num_neighbors_board > 3)\n )\n )\n\n births = np.array(birth_coordinates).transpose().tolist()\n deaths = np.array(death_coordinates).transpose().tolist()\n self.board[birth_coordinates] = 1\n self.board[death_coordinates] = 0\n\n iteration += 1\n yield self.board, births, deaths, iteration", "def _ignite_cells(self, istep, ip):\n particle = self.particles[ip] # get particle\n state, x, y = particle.get_from_keys([\"state\", \"x\", \"y\"])\n if state > STTHR:\n for i in range(self.grid.NX-1):\n if abs(x - self.grid.XCELL[i, 0]) < self.grid.DX/2:\n INDX = i\n for j in range(self.grid.NY-1):\n if abs(y - self.grid.YCELL[0, j]) < self.grid.DY/2:\n INDY = j\n cell = self.grid.CELLS[INDX, INDY]\n cell.BURNPROG += 1\n if (cell.QMAXTR > 0 or cell.QMAXBLD > 0) and cell.BURNSTAT == 0:\n cell.BURNSTAT = 1\n cell.CLOCK = self.TIME[istep]\n # elif cell.QMAXTR == 0 or cell.QMAXBLD == 0:\n # particle.update(state=0.0, factor=0.0)\n # if pType == 2:\n # particle.update(state=0.0)", "def increment_time_step(self):\n for 
grid in self.get_grid_list():\n try:\n self[grid].increment_time_step()\n except AttributeError:\n pass", "def update_grid(self):\n # Check to see if we have moved squares\n _new_grid = self.calc_grid()\n if _new_grid == self._grid:\n return\n # Remove from old square and add to new square\n self.target._grid[self._grid][self._type].discard(self)\n self.target._grid[_new_grid][self._type].add(self)\n # Update coordinates\n self._grid = _new_grid", "def update_state(self, context: GANContext) -> None:\n updater = lambda value: lambda: self._metric.update_state(value)\n for real_xy, noise in context.dataset:\n real_x, real_y = real_xy\n\n g_inputs = noise\n if len(context.generator_model.inputs) == 2:\n g_inputs = [noise, real_y]\n\n fake = context.generator_model(\n g_inputs, training=context.log_eval_mode == LogEvalMode.TRAIN\n )\n\n # check the resolution is the same as the one passed as input\n resolution = real_x.shape[1]\n if resolution != self.resolution:\n raise ValueError(\n \"Image resolution is not the same as the input resolution.\"\n )\n\n scores = sliced_wasserstein_distance(\n real_x,\n fake,\n resolution_min=self.resolution_min,\n patches_per_image=self.patches_per_image,\n use_svd=self.use_svd,\n patch_size=self.patch_size,\n random_projection_dim=self.random_projection_dim,\n random_sampling_count=self.random_sampling_count,\n )\n\n fake_scores = []\n\n for i, couple in enumerate(scores):\n self.children_real_fake[i][0].update_state(context, couple[0])\n self.children_real_fake[i][1].update_state(context, couple[1])\n fake_scores.append(tf.expand_dims(couple[1], axis=0))\n\n fake_scores = tf.concat(fake_scores, axis=0)\n\n self._distribute_strategy.experimental_run_v2(updater(fake_scores))", "def update(self,dt):\n self.rebuild_lists()\n self.step(self.gather_state,self.derivatives, \\\n self.gather_derivatives,self.scatter_state,dt)\n self.box.apply(self)\n self.steps += 1", "def update_E(self):\n self.grid.E[:, 0, :, :] = self.grid.E[:, -1, :, :]", "def update_pop_matrix(self):\n for row in self.unique_rows[1:-1]: # First and last cell is water\n for col in self.unique_cols[1:-1]: # First and last cell is water\n cell = self.landscape[(row, col)]\n if cell.is_mainland:\n # print(cell)\n self.herb_pop_matrix[row - 1][col - 1] = cell.herb_count\n self.carn_pop_matrix[row - 1][col - 1] = cell.carn_count", "def update_neighbours(self, iteration, iterations, input_vector, bmu):\n\n t = iteration / iterations\n learning_rate = self.learning_rate(t)\n for node in self.codebook:\n influence = self.codebook.neighbourhood(node, bmu, t)\n node.update(learning_rate, influence, input_vector, bmu)", "def update_E(self):\n self.grid.E[:, :, 0, :] = self.grid.E[:, :, -1, :]", "def update_cnt_map(self,s):\r\n cnts = []\r\n num_grid = self.cnt_map.shape[0]*self.cnt_map.shape[1]\r\n old_coverage =num_grid- self.cnt_map.flatten().tolist().count(0)\r\n for sj in s:\r\n grid_s = self.get_gridState(sj)\r\n self.cnt_map[grid_s[0], grid_s[1]] += 1\r\n cnts.append(self.cnt_map[grid_s[0], grid_s[1]])\r\n\r\n self.map_coverage = num_grid - self.cnt_map.flatten().tolist().count(0)\r\n print(\"Coverage:\",self.map_coverage)\r\n print(\"Change of coverage:\",self.map_coverage-old_coverage)\r\n\r\n return cnts", "def update(frame_num, mat, grid, N):\n\n new_grid = np.copy(grid)\n #print(\"grid size:\", grid.shape)\n for i in range(1, grid.shape[0]-1):\n for j in range(1, grid.shape[1]-1):\n neighbors = int(grid[i-1, j] + grid[i+1, j] + \\\n grid[i, j+1] + grid[i, j-1] + \\\n grid[i-1,j-1] + 
grid[i+1,j+1] + \\\n grid[i+1,j-1] + grid[i-1,j+1])\n if grid[i, j] == ON:\n if not (2 <= neighbors <= 3):\n new_grid[i, j] = OFF\n elif grid[i, j] == OFF and neighbors == 3:\n # Grow a cell\n new_grid[i, j] = ON\n else:\n new_grid[i, j] = OFF\n\n ### Update new grid\n mat.set_data(new_grid)\n grid[:] = new_grid[:] # Brackets are important\n return mat", "def update_all(self,delta_t):\n self.update_thrust()\n self.update_climb_rate()\n self.update_height(delta_t)", "def rnn_cell_loop(self):\n\n\t\t# Set up initial state\n\t\tself.h_out = [tf.zeros([par['batch_size'],par['n_hidden']])]\t\t\t# Spike\n\t\tself.h = tf.ones([par['batch_size'],par['n_hidden']])\t\t\t\t\t# State\n\t\tself.h *= 0.1 if par['cell_type'] == 'rate' else par[par['cell_type']]['V_r']\n\t\tself.h = [self.h]\n\t\tadapt = par['w_init']*tf.ones([par['batch_size'],par['n_hidden']])\n\n\t\tsyn_x = par['syn_x_init']*tf.ones([par['batch_size'], par['n_hidden']]) if par['use_stp'] else None\n\t\tsyn_u = par['syn_u_init']*tf.ones([par['batch_size'], par['n_hidden']]) if par['use_stp'] else None\n\n\t\t# Apply the EI mask to the recurrent weights\n\t\tself.W_rnn_effective = par['EI_matrix'] @ tf.nn.relu(self.var_dict['W_rnn'])\n\n\t\t# Set up latency buffer if being used\n\t\tif par['use_latency']:\n\t\t\tself.state_buffer = [tf.zeros([par['batch_size'], par['n_hidden']]) for t in range(par['latency_max'])]\n\t\t\tself.state_buffer = deque(self.state_buffer)\n\t\t\tself.W_rnn_latency = self.W_rnn_effective[tf.newaxis,...] * par['latency_mask']\n\t\t\tself.lat_spike_shape = tf.ones([par['latency_max'], 1, 1])\n\n\t\t# Set up output record\n\t\tself.output = []\n\t\tself.syn_x = []\n\t\tself.syn_u = []\n\n\t\ty = 0.\n\t\tfor t in range(par['num_time_steps']):\n\t\t\tself.t = t \t\t# For latency calculations\n\n\t\t\tif par['cell_type'] == 'rate':\n\t\t\t\traise Exception('Rate cell not yet implemented.')\n\t\t\telif par['cell_type'] == 'adex':\n\t\t\t\tif t < 10:\n\t\t\t\t\tspike, state, adapt, syn_x, syn_u = self.AdEx_cell(tf.zeros_like(self.h_out[-1]), self.h[-1], \\\n\t\t\t\t\t\tadapt, self.input_data[t], syn_x, syn_u)\n\t\t\t\telse:\n\t\t\t\t\tspike, state, adapt, syn_x, syn_u = self.AdEx_cell(self.h_out[-10], self.h[-1], \\\n\t\t\t\t\t\tadapt, self.input_data[t], syn_x, syn_u)\n\t\t\t\ty = 0.95*y + 0.05*(spike @ self.var_dict['W_out'] + self.var_dict['b_out'])\n\n\t\t\t\tself.h_out.append(spike)\n\t\t\t\tself.h.append(state)\n\t\t\t\tself.output.append(y)\n\t\t\t\tself.syn_x.append(syn_x)\n\t\t\t\tself.syn_u.append(syn_u)\n\n\t\t\telif par['cell_type'] == 'lif':\n\t\t\t\tspike, state, adapt, syn_x, syn_u = self.LIF_cell(self.h_out[-1], self.h[-1], adapt, self.input_data[t], syn_x, syn_u)\n\t\t\t\ty = 0.95*y + 0.05*spike @ self.var_dict['W_out'] + 0.*self.var_dict['b_out']\n\n\t\t\t\tself.h_out.append(spike)\n\t\t\t\tself.h.append(state)\n\t\t\t\tself.output.append(y)\n\n\t\t# Stack records\n\t\tself.output = tf.stack(self.output, axis=0)\n\t\tself.h = tf.stack(self.h, axis=0)\n\t\tself.h_out = tf.stack(self.h_out, axis=0)\n\t\tself.syn_x = tf.stack(self.syn_x, axis=0)\n\t\tself.syn_u = tf.stack(self.syn_u, axis=0)", "def set_cells(self, val=None):\t\r\n self._cells = \\\r\n (self.nx-1 if self.nx>1 else 1)* \\\r\n (self.ny-1 if self.ny>1 else 1)* \\\r\n (self.nz-1 if self.nz>1 else 1)", "def change_cell(self):\n\n x, mu = self.update_position_direction(self.l_edge)\n mu_mean = self.calculate_mean_mu(self.x, x, self.l_edge)\n self.update_estimators(self.l_edge, mu_mean)\n\n if self.next_cell_index == self.grid.Ncells:\n # packet 
escapes\n self.is_escaped = True\n self.is_active = False\n self.mu = mu\n self.x = self.cell_xr\n\n elif self.next_cell_index == -1:\n\n raise GeometryException(\"No inner boundary in homogeneous sphere\")\n\n else:\n # packet is transported into target cell\n\n self.mu = mu\n\n if self.next_cell_index > self.cell_index:\n # packet is moved one cell to the right\n\n self.x = self.grid.xl[self.next_cell_index]\n\n else:\n # packet is moved one cell to the left\n\n self.x = self.grid.xr[self.next_cell_index]\n\n # reset cell-based properties for easy access\n self.cell_index = self.next_cell_index\n self.cell_chi = self.grid.chi[self.cell_index]\n self.cell_xl = self.grid.xl[self.cell_index]\n self.cell_xr = self.grid.xr[self.cell_index]\n self.cell_dx = self.grid.dx[self.cell_index]\n self.cell_dV = self.grid.dV[self.cell_index]\n\n # recalculate distances\n self.calculate_and_set_propagation_distances()", "def update_state_game_variables(self):\n self.model.numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8]\n self.model.player_mark = \"\"\n self.model.player_move = 0\n self.model.boards = [\"board\"] * 9\n self.update_score_board()", "def main_loop(self):\n for iteration in xrange(1, self.num_iterations + 1):\n print \"At iteration %d\" % iteration\n self.it_num = iteration\n \n ### Select cells randomly without replacement\n x, y = np.meshgrid(np.arange(self.x_len), np.arange(self.y_len))\n \n x = x.flat\n y = y.flat\n \n shuffled_indices = np.random.permutation(np.arange(self.x_len * self.y_len))\n \n for index in shuffled_indices:\n # Get the current y and x indices\n cur_y, cur_x = y[index], x[index]\n \n \n if self.altered == False:\n # Use the standard version\n if self.grid[cur_y, cur_x] == 0:\n # If there's no slab there then we can't erode it!\n continue\n else:\n # Use the altered version of checking if we can erde\n if self.grid[cur_y, cur_x] == self.depth[cur_y, cur_x]:\n # We can't erode it, so continue\n continue\n \n # Check to see if the cell is in shadow.\n if self.cell_in_shadow(cur_y, cur_x):\n # If it's in shadow then we can't erode it, so go to the next random cell \n continue\n \n if True:\n # Move a slab\n self.grid[cur_y, cur_x] -= 1\n \n orig_y, orig_x = cur_y, cur_x\n \n # Loop forever - until we break out of it\n while True:\n new_y, new_x = cur_y, self.add_x(cur_x, self.jump_length)\n \n if self.grid[new_y, new_x] == 0:\n prob = self.pd_ns\n else:\n prob = self.pd_s\n \n if np.random.random_sample() <= prob:\n # Drop cell\n break\n else:\n cur_y, cur_x = new_y, new_x\n \n #print \"Dropping on cell\"\n #print new_y, new_x\n # Drop the slab on the cell we've got to\n self.grid[new_y, new_x] += 1\n \n self.do_repose(orig_y, orig_x)\n \n self.do_repose(new_y, new_x)\n \n self.write_file()", "def update_(self, k):\n for z in range(self.sweeps_per_update):\n new_u_grid = self.u_grid.copy()\n new_v_grid = self.v_grid.copy()\n for i in range(self.N):\n for j in range(self.N):\n\n deltaU = (self.D1*self.dt) * (self.laplacian_(self.u_grid, i, j))\\\n - self.dt * self.u_grid[i][j]*self.v_grid[i][j]**2 \\\n + self.dt * self.F*(1-self.u_grid[i][j])\n new_u_grid[i][j] += deltaU\n deltaV = (self.D2*self.dt) * (self.laplacian_(self.v_grid, i, j))\\\n + self.dt*self.u_grid[i][j]*self.v_grid[i][j]**2 \\\n - self.dt*(self.F+self.k)*self.v_grid[i][j]\n new_v_grid += deltaV\n self.u_grid = new_u_grid.copy()\n self.v_grid = new_v_grid.copy()\n if self.animation:\n self.fig.clear()\n plt.imshow(self.u_grid, interpolation='nearest',\n cmap='coolwarm', origin='lower')\n plt.colorbar()", "def 
update(self, board):\n for row in range(8):\n for col in range(8):\n if board[row, col] == -1:\n self.circles[row][col].undraw()\n self.circles[row][col].draw(self.win)\n self.circles[row][col].setFill(self.piece_colors[0])\n elif board[row, col] == -2:\n self.circles[row][col].undraw()\n self.circles[row][col].draw(self.win)\n self.circles[row][col].setFill(self.piece_colors[2])\n elif board[row, col] == 0:\n self.circles[row][col].undraw()\n self.pieces[row][col].setFill(self.frame_colors[(row+col)%2])\n elif board[row, col] == 1:\n self.circles[row][col].undraw()\n self.circles[row][col].draw(self.win)\n self.circles[row][col].setFill(self.piece_colors[1])\n elif board[row, col] == 2:\n self.circles[row][col].undraw()\n self.circles[row][col].draw(self.win)\n self.circles[row][col].setFill(self.piece_colors[3])", "def update(self, i, losses):\n pass", "def update_cell_nodes(self):\n self.cells['nodes'] = -1\n\n for c in range(self.Ncells()):\n # consider two edges at a time, and find the common node\n for i,(ja,jb) in enumerate(circular_pairs(self.cell_to_edges(c))):\n for n in self.edges['nodes'][ja,:]: \n if n in self.edges['nodes'][jb]:\n self.cells['nodes'][c,i] = n\n break", "def step(self):\n for p, grad, v, square_grad_avg, delta_x_acc in self.params:\n # Compute the running average of the squared gradients \n square_grad_avg.mul_(self.rho)\n square_grad_avg.addcmul_(grad, grad, value = 1 - self.rho)\n # Compute the RMS of the previous squared gradients (eps to avoid numerical issues later for division)\n std = (square_grad_avg.add_(self.eps)).sqrt_()\n # Compute the accumulated update\n delta_x = ((delta_x_acc.add_(self.eps)).sqrt_()) * grad / std\n # Accumulate the updates\n delta_x_acc.mul_(self.rho)\n delta_x_acc.addcmul_(delta_x, delta_x, value = 1 - self.rho) \n # Update the parameters\n p.add_(delta_x, alpha = - self.lr)", "def updateState(self):\n self.state = self.microgridPolicy.computeState();", "def update_cell_edges(self):\n self.cells['edges'] = -1\n for c in range(self.Ncells()):\n for i,(a,b) in enumerate(circular_pairs(self.cell_to_nodes(c))):\n self.cells['edges'][c,i] = self.nodes_to_edge(a,b)", "def update(self, grid, colRamp = ['white', 'blue']):\n \n # update the cell colors\n for y in range(len(grid)):\n yl = y + 1\n for x in range(len(grid[y])):\n xl = x + 1\n color = colRamp[int(grid[y][x])]\n self.displayWindow.update((xl, yl), color)\n\n # refresh the window\n self.displayWindow.tkupdate()", "def update_grid(self):\n if self.game_over:\n return\n if self.active_piece is None:\n self.place_new_piece()\n if self.piece_collision_exists(self.active_piece):\n self.handle_active_piece_collision()\n self.place_new_piece()\n self.shift_cells(self.active_piece, self.current_direction)\n self.active_piece = TransformPiece.shift_coordinates(self.active_piece, self.current_direction)\n self.merge_with_completed_rows()\n if self.is_game_won():\n self.game_over = True", "def update(self):\n self.board.update()", "def updateGrid(self) -> None:\n emu = self.emulator\n arch = self.root.arch\n registers = arch.registers\n self.__values.setRowCount(len(registers))\n for i, reg in enumerate(registers):\n self.__values.setRowHeight(i, self.__row_size)\n name = QTableWidgetItem(reg)\n name.setFlags(Qt.NoItemFlags)\n val = emu.get_register_value(reg) if emu.vm else 0\n old_val = self.__old_register_values.get(reg, 0)\n if type(val) in (int, int):\n value = format_address(val, arch)\n else:\n value = str(val)\n value = QTableWidgetItem( value )\n if old_val != val:\n 
self.__old_register_values[reg] = val\n value.setForeground(QColor(Qt.red))\n value.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsEditable)\n self.__values.setItem(i, 0, name)\n self.__values.setItem(i, 1, value)\n return", "def update1(self):\r\n tmp = [row.copy() for row in self.grid]\r\n changed = False\r\n for y in range(self.height):\r\n for x in range(self.width):\r\n if self.grid[y][x] == '#' and 5 <= sum(\r\n self.is_occupied((x + i, y + j)) for i in [-1, 0, 1] for j in [-1, 0, 1]):\r\n # >= 5, because we also count (x,y)\r\n tmp[y][x] = 'L'\r\n changed = True\r\n elif self.grid[y][x] == 'L' and self.is_available(x, y):\r\n tmp[y][x] = '#'\r\n changed = True\r\n else:\r\n tmp[y][x] = self.grid[y][x]\r\n self.grid = tmp\r\n return changed", "def update_cells(self):\n mineboard = self.mineboard\n gameboard = mineboard.gameboard\n for change in mineboard.changes:\n i, j = change[0], change[1]\n text_val = gameboard[i][j]\n\n if text_val == 'M':\n self.canvas.delete(self.cells[i][j])\n self.cells[i][j] = self.canvas.create_image(\n 2+j*CELLWIDTH, 2+i*CELLWIDTH, image=EXPLODED, anchor='nw')\n self.reveal_mines(i, j)\n\n elif text_val == 'F':\n self.canvas.delete(self.cells[i][j])\n self.cells[i][j] = self.canvas.create_image(\n 2+j*CELLWIDTH, 2+i*CELLWIDTH, image=FLAG, anchor='nw')\n\n elif text_val == ' ':\n self.canvas.delete(self.cells[i][j])\n self.cells[i][j] = self.canvas.create_rectangle(\n 2+j*CELLWIDTH, 2+i*CELLWIDTH, (j+1)*CELLWIDTH, (i+1)*CELLWIDTH, fill=DEFAULT_COLOR, outline=\"\")\n\n elif text_val in ['0', '1', '2', '3', '4', '5', '6', '7', '8']:\n self.canvas.itemconfig(\n self.cells[i][j], fill=COLORS[int(text_val)])\n if text_val != '0':\n # offset here is by 12 pixels\n self.canvas.create_text(\n 2+j*CELLWIDTH+(CELLWIDTH-1)//2, 2+i*CELLWIDTH+(CELLWIDTH-1)//2, anchor='center', text=f\"{text_val}\")\n\n mineboard.changes = [] # removes previous changes\n if mineboard.gamestate is not None:\n # if the game has ended displays game end message and buttons\n self.win_lose_lbl.grid(row=3, column=0, columnspan=4)\n self.win_lose_msg.set(\n f\"You {self.mineboard.gamestate}! 
Play again?\")\n self.same_again_bttn.grid(row=4, column=0, columnspan=2)\n self.play_again_bttn.grid(row=4, column=2, columnspan=2)", "def update(self, k):\n for z in range(self.sweeps_per_update):\n\n\n u_update = self.dt* (np.multiply(self.D1,(self.lap2D(self.u_grid))))\\\n - self.dt * np.multiply(self.u_grid, np.square(self.v_grid))\\\n + self.dt * self.F * (1 - self.u_grid)\n\n\n v_update = self.dt*(np.multiply(self.D2,(self.lap2D(self.v_grid))))\\\n + self.dt * np.multiply(self.u_grid, np.square(self.v_grid))\\\n - self.dt * (self.F + self.k)*self.v_grid\n\n\n self.u_grid = np.add(self.u_grid, u_update)\n self.v_grid = np.add(self.u_grid, v_update)\n\n if self.animation:\n self.fig.clear()\n plt.xlabel(\"F: %.3f, dt:%.3f\" % (self.F, self.dt))\n plt.imshow(self.u_grid, interpolation='nearest',\n cmap='coolwarm', origin='lower')\n plt.colorbar()", "def next_period_step(self):\n self.update_forces()\n element: Cell\n for element in self.cells:\n self.update_coordinates(element)\n self.update_volosity(element)", "def test_live_cell(self, alive_cells, alive):\n for positions in alive_cells:\n world = gol.World(3, 3)\n world.set_cell((0, 0))\n for x, y in positions:\n world.set_cell((x, y))\n world.update()\n assert world[(0, 0)] == alive", "def get_transition(self, row, col, action, tot_row, tot_col):\n\n '''\n Expand the grid of the environment to handle when the \n agent decides to move in the direction of a wall \n '''\n state_probabilities = np.zeros((int(np.sqrt(self.env.observation_space.n)) + 2, int(np.sqrt(self.env.observation_space.n)) + 2), dtype=float)\n\n if action == 'UP':\n row += 1\n col += 1\n state_probabilities[row - 1, col] = 0.33 #UP\n state_probabilities[row, col - 1 ] = 0.33 #LEFT\n state_probabilities[row, col + 1] = 0.33 # RIGHT\n state_probabilities[row + 1, col] = 0.0 #DOWN\n elif action == 'LEFT':\n row += 1\n col += 1\n state_probabilities[row - 1, col] = 0.33 #UP\n state_probabilities[row, col - 1 ] = 0.33 #LEFT\n state_probabilities[row, col + 1] = 0.0 # RIGHT\n state_probabilities[row + 1, col] = 0.33 #DOWN\n elif action == 'RIGHT':\n row += 1\n col += 1\n state_probabilities[row - 1, col] = 0.33 #UP\n state_probabilities[row, col - 1 ] = 0.0 #LEFT\n state_probabilities[row, col + 1] = 0.33 # RIGHT\n state_probabilities[row + 1, col] = 0.33 #DOWN\n elif action == 'DOWN':\n row += 1\n col += 1\n state_probabilities[row - 1, col] = 0.0 # UP\n state_probabilities[row, col - 1] = 0.33 # LEFT\n state_probabilities[row, col + 1] = 0.33 # RIGHT\n state_probabilities[row + 1, col] = 0.33 # DOWN\n\n for row in range (0, tot_row+1):\n if state_probabilities[row, 0] != 0:\n state_probabilities[row, 1] += state_probabilities[row, 0]\n elif state_probabilities[row, -1] != 0:\n state_probabilities[row, -2] += state_probabilities[row, -1]\n\n for col in range (0, tot_col+1):\n if state_probabilities[0, col] != 0:\n state_probabilities[1, col] += state_probabilities[0, col]\n elif state_probabilities[-1, col] != 0:\n state_probabilities[-2, col] += state_probabilities[-1, col]\n\n return state_probabilities[1: 1+tot_row, 1:1+tot_col]", "def get_transition(self, row, col, action, tot_row, tot_col):\n\n '''\n Expand the grid of the environment to handle when the \n agent decides to move in the direction of a wall \n '''\n state_probabilities = np.zeros((int(np.sqrt(self.env.observation_space.n)) + 2, int(np.sqrt(self.env.observation_space.n)) + 2), dtype=float)\n\n if action == 'UP':\n row += 1\n col += 1\n state_probabilities[row - 1, col] = 0.33 #UP\n 
state_probabilities[row, col - 1 ] = 0.33 #LEFT\n state_probabilities[row, col + 1] = 0.33 # RIGHT\n state_probabilities[row + 1, col] = 0.0 #DOWN\n elif action == 'LEFT':\n row += 1\n col += 1\n state_probabilities[row - 1, col] = 0.33 #UP\n state_probabilities[row, col - 1 ] = 0.33 #LEFT\n state_probabilities[row, col + 1] = 0.0 # RIGHT\n state_probabilities[row + 1, col] = 0.33 #DOWN\n elif action == 'RIGHT':\n row += 1\n col += 1\n state_probabilities[row - 1, col] = 0.33 #UP\n state_probabilities[row, col - 1 ] = 0.0 #LEFT\n state_probabilities[row, col + 1] = 0.33 # RIGHT\n state_probabilities[row + 1, col] = 0.33 #DOWN\n elif action == 'DOWN':\n row += 1\n col += 1\n state_probabilities[row - 1, col] = 0.0 # UP\n state_probabilities[row, col - 1] = 0.33 # LEFT\n state_probabilities[row, col + 1] = 0.33 # RIGHT\n state_probabilities[row + 1, col] = 0.33 # DOWN\n\n for row in range (0, tot_row+1):\n if state_probabilities[row, 0] != 0:\n state_probabilities[row, 1] += state_probabilities[row, 0]\n elif state_probabilities[row, -1] != 0:\n state_probabilities[row, -2] += state_probabilities[row, -1]\n\n for col in range (0, tot_col+1):\n if state_probabilities[0, col] != 0:\n state_probabilities[1, col] += state_probabilities[0, col]\n elif state_probabilities[-1, col] != 0:\n state_probabilities[-2, col] += state_probabilities[-1, col]\n\n return state_probabilities[1: 1+tot_row, 1:1+tot_col]", "def gameOfLife(self, board) -> None:\n changelist = []\n for i in range(len(board)):\n for j in range(len(board[0])):\n if self.ischange(i, j, board):\n changelist.append([i, j])\n\n for x, y in changelist:\n board[x][y] = ~board[x][y] + 2", "def update(self):\n self._state = 23", "def update(self,dt):\n t1 = time()\n\n if SPLIT:\n self.check_refine()\n if AMALGAMATE:\n self.check_amalg(self.nl_default)\n\n t = time()\n self.rebuild_lists()\n self.timing['nlist rebuild time'] = time() - t\n\n # Is this derivative step required?\n t = time()\n self.derivatives()\n self.timing['deriv time'] = time() - t\n \n t = time()\n self.step(self.gather_state,self.derivatives, \\\n self.gather_derivatives,self.scatter_state,dt)\n self.timing['integrate time'] = time() - t\n \n self.box.apply(self)\n\n if self.thermostat:\n self.apply_thermostat(self.thermostat_temp)\n \n self.timing['update time'] = time() - t1\n self.steps += 1", "def __init__(self):\n pygame.init()\n self.settings = Settings()\n self.number_cells_x = int(input(\"Enter number of cells in a row: \"))\n self.cell_width = float(self.settings.screen_width // self.number_cells_x)\n #print(self.cell_width)\n self.number_cells_y = int(self.settings.screen_height // self.cell_width)\n\n self.screen = pygame.display.set_mode((self.settings.screen_width,self.settings.screen_height))\n pygame.display.set_caption(\"Game of Life\")\n\n self.cells = []\n self.to_be_updated = []\n self._create_cells()\n\n self.bg_colour = (self.settings.bg_colour)\n self.waiting = True", "def advance_generation(self):\n self.generation += 1\n next_cells = [[self.cell_state['dead']] * self.cols for x in range(self.lines)]\n for i in range(self.lines):\n for j in range(self.cols):\n neighbors = self.get_neighbors(i, j)\n if self[i][j] == self.cell_state['alive']:\n if neighbors == 2 or neighbors == 3:\n next_cells[i][j] = self.cell_state['alive']\n elif self[i][j] == self.cell_state['dead']:\n if neighbors == 3:\n next_cells[i][j] = self.cell_state['alive']\n super().__init__(next_cells)", "def _simulate_all_cells(self):\n for ID in tqdm(self.condition_dict, 
desc='Simulating cells'):\n for n in range(len(self.condition_dict[ID])):\n cond_dict = self.condition_dict[ID][n]\n g, tc, rsh_mult, rs_mult, Io_mult, Il_mult, nnsvth_mult = cond_dict['E'], cond_dict['Tc'], cond_dict[\n 'Rsh_mult'], cond_dict['Rs_mult'], cond_dict['Io_mult'], cond_dict['Il_mult'], cond_dict['nnsvth_mult']\n # calculate the 5 parameters for each set of cell conditions\n\n # Eventually, replace this with derived 5-parameters\n iph, io, rs, rsh, nnsvth = pvlib.pvsystem.calcparams_cec(effective_irradiance=g, temp_cell=tc,\n alpha_sc=self.cell_parameters['alpha_sc'],\n a_ref=self.cell_parameters['a_ref'],\n I_L_ref=self.cell_parameters['I_L_ref'],\n I_o_ref=self.cell_parameters['I_o_ref'],\n R_sh_ref=self.cell_parameters['R_sh_ref'],\n R_s=self.cell_parameters['R_s'],\n Adjust=self.cell_parameters['Adjust'])\n rs, rsh, io, iph, nnsvth = rs * rs_mult, rsh * \\\n rsh_mult, io * Io_mult, iph * Il_mult, nnsvth * nnsvth_mult\n\n # calculate cell IV curves by condition, rather than by cell index\n voc_est = pvlib.singlediode.estimate_voc(iph, io, nnsvth)\n v = voltage_pts(self.num_points_in_IV, voc_est,\n self.module_parameters['breakdown_voltage'])\n i = pvlib.singlediode.bishop88_i_from_v(v, iph, io, rs, rsh, nnsvth,\n breakdown_factor=self.module_parameters['breakdown_factor'],\n breakdown_voltage=self.module_parameters[\n 'breakdown_voltage'],\n breakdown_exp=self.module_parameters['breakdown_exp'])\n\n # @dev: Uncomment if debugging pvlib bishop88 simulation results\n # plt.plot(v,i)\n # plt.xlim(-5,v[-1])\n # plt.ylim(0,iph+1)\n # plt.title(f\"{ID}: {n} :: {rs},\"\n # f\"{rsh}, {io}, {iph}, {nnsvth}\")\n # plt.show()\n\n self.condition_dict[ID][n]['V'] = v\n self.condition_dict[ID][n]['I'] = i\n self.condition_dict[ID][n]['E'] = g\n self.condition_dict[ID][n]['Tc'] = tc\n return", "def update_E(self):", "def solve(self):\n dim = self.puzzle.dimension\n\n # initial loop\n for value, (row, col) in self.puzzle:\n if value:\n self.clear_row(row, value)\n self.clear_col(col, value)\n self.clear_subgrid(row, col, value)\n self.updates.add((value, (row, col)))\n for ps in self.possibilities:\n ps.discard((row, col))\n\n while self.updates:\n while self.updates:\n # while self.updates:\n value, (row, col) = self.updates.pop()\n for i in range(1, dim + 1):\n self.check_row(i, value)\n self.check_col(i, value)\n for i in range(2, 8, 3):\n self.check_subgrid(row, i, value)\n self.check_subgrid(i, col, value)\n\n for value, (row, col) in self.puzzle:\n if not value:\n self.check_cell(row, col)\n\n # for value in range(1, dim + 1):\n # for row in [2, 5, 8]:\n # for col in [2, 5, 8]:\n # self.check_subgrid(row, col, value)", "def process_cell(self, neighbourhood: List[Cell], old_cell: Cell) -> Cell:", "def update(self):\n for player in self.players:\n player.update()\n player.last_seen = player.current_cell\n\n self.in_cell()\n for player in self.players:\n if player.current_cell != player.last_seen:\n self.player_paths.add(player.last_seen)\n #If the player has left a cell and moved into another, the vacated cell is\n #added to the list of cells that have been hit\n if player.current_cell in self.player_paths:\n self.players.remove(player)\n player.alive = False\n if len(self.players) == 1:\n self.end_game(self.players[0].name,self.players[0].color)", "def advance(self):\n count = [[0 for col in range(self.width+2)] for row in range(self.height+2)]\n for y in range(1, self.height+1):\n for x in range(1, self.width+1):\n if self.array[y][x]:\n count[y][x-1] += 1\n count[y][x+1] += 1\n 
count[y-1][x-1] += 1\n count[y-1][x] += 1\n count[y-1][x+1] += 1\n count[y+1][x-1] += 1\n count[y+1][x] += 1\n count[y+1][x+1] += 1\n for y in range(1, self.height+1):\n for x in range(1, self.width+1):\n if count[y][x] == 3:\n self.array[y][x] = 1\n elif count[y][x] == 2 and self.array[y][x]:\n self.array[y][x] = 1\n else:\n self.array[y][x] = 0\n self.array[1][1] = 1\n self.array[1][self.width] = 1\n self.array[self.height][self.width] = 1\n self.array[self.height][1] = 1", "def apply(self, simulation):\n t = simulation.time\n dt = simulation.timeStep\n if main_rank == 0:\n simulation.printState()\n # OpenCL update\n self.numMethod(self.gpu_field.gpu_data[self.component],\n self.color)\n self.window.widget.updateGL()\n if simulation.currentIteration > 1:\n self.window.label.setText(\n self.labelText + \"t={0:6.2f}, fps={1:6.2f}\".format(\n t + dt,\n 1. / (self.timer.f_timers.values()[0].t - self.ctime)))\n self.ctime = self.timer.f_timers.values()[0].t", "def update_eligs(self, *args):\n self.splitGD.update_eligs()", "def update_eligs(self, *args):\n self.splitGD.update_eligs()", "def generate_nodes(self):\n \n # For all state nodes\n node = 0\n \n for i in range(self.x0_n):\n for j in range(self.x1_n):\n \n # State\n x = np.array([ self.xd[0][i] , self.xd[1][j] ])\n \n # State and grid index based on node #\n self.nodes_state[node,:] = x\n self.nodes_index[node,:] = np.array([i,j])\n \n # Node # based on index ij\n self.x_grid2node[i,j] = node\n\n # Increment node number\n node = node + 1", "def update2(self):\r\n tmp = [row.copy() for row in self.grid]\r\n changed = False\r\n for y in range(self.height):\r\n for x in range(self.width):\r\n count = sum(self.see_occupant(x, y, i, j) for i in [-1, 0, 1] for j in [-1, 0, 1])\r\n if self.grid[y][x] == '#' and count >= 5:\r\n tmp[y][x] = 'L'\r\n changed = True\r\n elif self.grid[y][x] == 'L' and count == 0:\r\n tmp[y][x] = '#'\r\n changed = True\r\n else:\r\n tmp[y][x] = self.grid[y][x]\r\n self.grid = tmp\r\n return changed", "def updateGameState(self):\n boardArray = self._board.get_board()\n \n if self.state is self._BEGIN:\n\n for i in [1, self._board.get_board_size() - 2]:\n for j in range(1, self._board.get_board_size() - 1):\n\n if boardArray[i][j] != self._board._EMPTY:\n self.state = self._MIDDLE\n return\n\n if boardArray[j][i] != self._board._EMPTY:\n self.state = self._MIDDLE\n return\n\n\n elif self.state is self._MIDDLE:\n nbPieces = self._board.get_total_pieces()\n\n if nbPieces >= self._board.get_board_size()**2 - ENDGAME:\n self.state = self._END\n return", "def evolve(tiles, epochs):\n for _ in range(epochs):\n tiles = update_floor(tiles)\n return tiles", "def game_updated(self):\n\n # replace with your game updated logic\n self.update_board()", "def postSI(self):\n # for cell in self.cells:\n # cell.resetTotOrdFlux()\n self.depth = 0", "def calcUpdateByRows(self, rows):\n\n delta_w, delta_hb, delta_vb = \\\n zeros((self.rbm.visibleDim, self.rbm.hiddenDim)), \\\n zeros(self.rbm.hiddenDim), zeros(self.rbm.visibleDim)\n\n for row in rows:\n dw, dhb, dvb = self.calcUpdateByRow(row)\n delta_w += dw\n delta_hb += dhb\n delta_vb += dvb\n\n delta_w /= len(rows)\n delta_hb /= len(rows)\n delta_vb /= len(rows)\n\n # !!! 
note that this delta is only the 'theoretical' delta\n return delta_w, delta_hb, delta_vb", "def updateNodeStates (self,listAtoms):\r\n \r\n for i in range(len(listAtoms)):\r\n for j in range(len(listAtoms[i].nodeArray)):\r\n self.mol[i].nodeArray[j].state = listAtoms[i].nodeArray[j].state", "def run_all(self):\n # print(\"running all nodes\")\n executed = set()\n node_update_states = {node: node.block_updates for node in self.flow_view.node_items}\n\n def traverse_upwards(node):\n # Traverse upwards to the top of data flow graph\n if node in executed:\n return\n for port in node.inputs:\n for connection in port.connections:\n traverse_upwards(connection.out.node)\n # print(\"executing\", node)\n node.update_event()\n executed.add(node)\n\n for node in self.flow_view.node_items:\n node.block_updates = True\n\n for node in self.flow_view.node_items:\n traverse_upwards(node)\n\n for node in self.flow_view.node_items:\n node.block_updates = node_update_states[node]\n # print(\"All nodes executed\")", "def restart(self):\n self.grid = np.zeros((3, 3), dtype=int)\n self.state = 0", "def next_state_of_cell(self, x_cell, y_cell):\n neighbours = self.get_number_neighbours_of_cell(x_cell, y_cell)\n if(self.board_state[x_cell][y_cell] == 1):\n # Any live cell with more than three live neighbours dies, \n # as if by overpopulation.\n if(neighbours > 3):\n return 0\n # Any live cell with fewer than two live neighbours dies,\n # as if by underpopulation.\n elif(neighbours < 2):\n return 0\n # Any live cell with two or three live neighbours lives\n # on to the next generation.\n else:\n return 1\n if(self.board_state[x_cell][y_cell] == 0):\n # Any dead cell with exactly three live neighbours becomes a live cell, \n # as if by reproduction.\n if(neighbours == 3):\n return 1\n else:\n return 0", "def _update_loc(self) -> None:\n self.state[:, :, Boids.Attr.LOC] += self.state[:, :, Boids.Attr.VEL]\n # wrap-around the simulated environment\n self.state[:, :, Boids.Attr.LOC] %= np.expand_dims(self.env_bounds, axis=1)", "def update_totals(self):\n # Reset counts to 0\n self.total_f = self.total_s = self.total_intra = self.total_mac_regular = self.total_mac_infected = \\\n self.total_mac_activated = self.total_regular_fast = self.total_regular_slow = self.total_infected_fast = \\\n self.total_infected_slow = self.total_activated_fast = self.total_activated_slow = self.total_f_degree = \\\n self.total_s_degree = self.total_activation = 0\n self.total_f_o2 = self.total_s_o2 = 0.0\n\n for node in self.node_list.values():\n # Get values from node\n fast_in_node = node.subpopulations[BACTERIA_FAST]\n slow_in_node = node.subpopulations[BACTERIA_SLOW]\n intra_in_node = node.subpopulations[BACTERIA_INTRACELLULAR]\n reg_mac_in_node = node.subpopulations[MACROPHAGE_REGULAR]\n inf_mac_in_node = node.subpopulations[MACROPHAGE_INFECTED]\n act_mac_in_node = node.subpopulations[MACROPHAGE_ACTIVATED]\n degree = node.degree\n o2_tens = node.oxygen_tension\n # Update relevant totals\n self.total_f += fast_in_node\n self.total_s += slow_in_node\n self.total_intra += intra_in_node\n self.total_mac_regular += reg_mac_in_node\n self.total_mac_infected += inf_mac_in_node\n self.total_mac_activated += act_mac_in_node\n self.total_regular_fast += fast_in_node * reg_mac_in_node\n self.total_regular_slow += slow_in_node * reg_mac_in_node\n self.total_infected_fast += fast_in_node * inf_mac_in_node\n self.total_infected_slow += slow_in_node * inf_mac_in_node\n self.total_activated_fast += fast_in_node * act_mac_in_node\n 
self.total_activated_slow += slow_in_node * act_mac_in_node\n # TODO - check usage of degree\n self.total_f_degree += fast_in_node * degree\n self.total_s_degree += slow_in_node * degree\n self.total_f_o2 += fast_in_node * (1/o2_tens)\n self.total_s_o2 += slow_in_node * o2_tens\n self.total_activation += reg_mac_in_node * inf_mac_in_node", "def calculateState (self):\r\n newState = 0\r\n # print (\"Inside state function the states DNs are: \\n\")\r\n # print (\"Before starting \\n\")\r\n self.stateDanglingNodes()\r\n #for i in range(len(self.metaSpikes)):\r\n # if self.metaSpikes[i].typeSpike == 1:\r\n # print (\"Meta atom number is: \" + str(self.atomNumber) + \"\\n\")\r\n \r\n insideMetState = []\r\n # To calculate the state we need to update every atom the metaatom consistrs off then see\r\n # the states of every dangling node in the metaspikes\r\n for i in range(len(self.metaSpikes)):\r\n if self.metaSpikes[i].typeSpike == 1:\r\n #print (\"Inside type 1 \\n\")\r\n #print (\"Number of type 1 nodes: \" + str(len(self.metaSpikes[i].danglingNodeList)) + \"\\n\")\r\n for j in range(len(self.metaSpikes[i].danglingNodeList)):\r\n insideMetState.append(self.metaSpikes[i].danglingNodeList[j].state)\r\n if self.metaSpikes[i].danglingNodeList[j].state == 1:\r\n # print (\"Adding one \\n\" )\r\n newState += 1\r\n else:\r\n # print (\"Subracting one \\n\")\r\n newState -= 1\r\n else:\r\n \r\n # print (\"Inside type 2 \\n\")\r\n # print (\"Number od type 1 tales: \" + str(len(self.metaSpikes[i].danglingTailList)) + \"\\n\")\r\n for j in range(len(self.metaSpikes[i].danglingTailList)):\r\n #print (\"Size of tail: \" + str(len(self.metaSpikes[i].danglingTailList[j].nodeList)) + \"\\n\")\r\n for k in range(len(self.metaSpikes[i].danglingTailList[j].nodeList)):\r\n insideMetState.append(self.metaSpikes[i].danglingTailList[j].nodeList[k].state)\r\n if self.metaSpikes[i].danglingTailList[j].nodeList[k].state == 1:\r\n newState += 1\r\n else:\r\n newState -= 1 \r\n \r\n # print (\"The state of analysed nodes: \\n\" + str(insideMetState) + \"\\n\")\r\n # print (\"The length of analysed nodes: \\n\" + str(len(insideMetState)) + \"\\n\")\r\n # print (\"The new state is: \" + str(newState) + \"\\n\") \r\n self.state = newState", "def update_gol(arr):\n nxt = np.zeros(arr.shape)\n rows,cols = nxt.shape\n for i in range(rows):\n for j in range(cols):\n nn = sum_vonneuman_nn(arr,i,j)\n if arr[i][j]==1:\n if nn==2 or nn==3:\n nxt[i][j]=1\n else:\n if nn==3:\n nxt[i][j]=1\n return nxt", "def update_nodes(self):\n raise NotImplementedError('ERROR: sweeper has to implement update_nodes(self)')", "def gameOfLife(self, board: List[List[int]]) -> None:\r\n self.board = board\r\n self.l = len(board)\r\n self.w = len(board[0])\r\n status = [[0] * self.w for _ in range(self.l)]\r\n for i in range(self.l):\r\n for j in range(self.w):\r\n status[i][j] = self.statusUpdate(board[i][j], self.countLivingNeighbor([i, j]))\r\n #print(\"prev: \", i, j ,board[i][j], \" count: \", self.countLivingNeighbor([i, j]), \" after:\", status[i][j])\r\n for i in range(self.l):\r\n for j in range(self.w):\r\n board[i][j] = status[i][j]" ]
[ "0.7286067", "0.70985454", "0.65855974", "0.64528495", "0.6422918", "0.62100416", "0.62099385", "0.61870646", "0.6159121", "0.6139433", "0.6139193", "0.60986966", "0.6086777", "0.60648525", "0.6041667", "0.6039886", "0.6037677", "0.60304636", "0.60248035", "0.6021954", "0.60099727", "0.60075694", "0.5957885", "0.59571236", "0.5940554", "0.5927069", "0.59141165", "0.5886752", "0.58788395", "0.58729833", "0.5853038", "0.5844393", "0.5840988", "0.58341783", "0.58258003", "0.58023655", "0.5801479", "0.5801434", "0.5797384", "0.57925636", "0.57830393", "0.57801676", "0.57595706", "0.5752709", "0.57503414", "0.5743127", "0.5735555", "0.5732149", "0.57297033", "0.57183707", "0.5688037", "0.56840676", "0.56813234", "0.5674297", "0.5672256", "0.56708246", "0.5663088", "0.56508625", "0.5648941", "0.56333303", "0.56256133", "0.5617856", "0.56135595", "0.5610449", "0.5593113", "0.55861515", "0.5581377", "0.5580037", "0.5580037", "0.55787176", "0.5573189", "0.55639035", "0.5561671", "0.55570734", "0.5541345", "0.5527852", "0.55260354", "0.55246854", "0.5510451", "0.5505619", "0.55040735", "0.5500872", "0.5500872", "0.5496566", "0.54943955", "0.54940236", "0.5491125", "0.5490923", "0.54796857", "0.54510534", "0.5448135", "0.5447642", "0.54331785", "0.54301775", "0.5428466", "0.5427429", "0.54269207", "0.5426111", "0.54239196", "0.5422144" ]
0.65298665
3
Compute the final grid at given number of steps
def grid_frame(self, steps, figure_size=(12, 12)): x = self.seed counts = [] for n in np.arange(0, steps): x, stats = self.update_grid(x) counts.append(stats) counts = np.array(counts) fig, ax = plt.subplots(figsize=figure_size) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) color_map = matplotlib.colors.ListedColormap(['white', 'black']) img = plt.imshow(x, interpolation='nearest', cmap=color_map) img.axes.grid(False) plt.title(self.title + ' | Step ' + str(steps)) plt.show() return x, counts
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def grid_traveller(m: int, n: int):\n\n if m == 1 and n == 1:\n return 1\n if m == 0 or n == 0:\n return 0\n return grid_traveller(m - 1, n) + grid_traveller(m, n - 1)", "def recursion_loop(pulls, discount, grid_n):\n\n r_grid = np.linspace(0, 1, grid_n)\n gittins, values = initial_approximation(pulls, discount, grid_n)\n n = pulls - 2 # Note that the 2 comes from (1) the initial approximation and (2) python indexing\n while n >= 1:\n g, v = recursion_step(values[:n + 1, n, :], r_grid, discount)\n values[:n, n - 1] = v\n gittins[:n, n - 1] = g\n n -= 1\n return gittins, values", "def final_coord(steps):\n return reduce(add, steps, (0, 0))", "def get_next_steps(self, steps):\n for step in range(steps):\n # Actual calulation: Runge-Kutta 2\n\n # Step 1\n k1 = [\n self.vel * self.dt,\n self.get_next_acc() * self.dt\n ]\n\n # Step 2\n next_pos = self.pos + k1[0] * 0.5\n next_vel = self.vel + k1[1] * 0.5\n self.disps, self.dists = self.get_relative_distances(positions=next_pos)\n k2 = [\n next_vel * self.dt,\n self.get_next_acc(save=False) * self.dt\n ]\n\n # Step 3\n next_pos = self.pos + k2[0] * 0.5\n next_vel = self.vel + k2[1] * 0.5\n self.disps, self.dists = self.get_relative_distances(positions=next_pos)\n k3 = [\n next_vel * self.dt,\n self.get_next_acc(save=False) * self.dt\n ]\n\n # Step 4\n next_pos = self.pos + k3[0]\n next_vel = self.vel + k3[1]\n self.disps, self.dists = self.get_relative_distances(positions=next_pos)\n k4 = [\n next_vel * self.dt,\n self.get_next_acc(save=False) * self.dt\n ]\n\n # Move forward\n self.pos = self.pos + 1/6 * (k1[0] + 2*k2[0] + 2*k3[0] + k4[0])\n self.vel = self.vel + 1/6 * (k1[1] + 2*k2[1] + 2*k3[1] + k4[1])\n\n # Saving of statistics\n self.save_system_information(self.pos, self.vel)", "def grid_step(self, axis):\n if self.sizes[axis] != 1:\n return (self.bounds[axis][1] - self.bounds[axis][0]) / (self.sizes[axis] - 1)", "def dynamic_programming_path_counter(grid_size):\n G = [1] * grid_size\n for i in range(grid_size):\n for j in range(i):\n G[j] = G[j] + G[j-1]\n G[i] = 2 * G[i - 1]\n return G[grid_size - 1]", "def total_steps(self) -> global___Expression:", "def echelon_with_steps(self):\n new_value = [self.matrix[i] + [self.result[i]] for i in range(len(self.matrix))]\n\n # Loop through each row\n print(\"Step begin here\")\n for i, current in enumerate(new_value):\n\n # Check if it's a potential pivot\n if i < len(new_value) - 1:\n\n # Elimate all in the same column beside the pivot\n for j in range(i + 1, len(new_value)):\n coef = new_value[j][i] / current[i]\n new_value[j] = minus(new_value[j], times(coef, current))\n\n # printing the grid in 2D\n for row in new_value:\n print(row)\n print(\"vvvvvvv\")\n return new_value", "def figure_out_grid(tot):\n guess = np.round(np.sqrt(tot))\n\n dims = [guess,guess]\n\n flag = True\n\n while(flag):\n if dims[0]*dims[1] < tot:\n dims[0] += 1\n flag = True\n elif dims[0]*dims[1] >tot and dims[0]*dims[1] <(tot-dims[1]):\n dims[0] -= 1\n flag = True\n else:\n flag = False\n return tuple(dims)", "def gen_grids(self):\n self.dx = self.grid_width / self.grid_resol\n self.dk = 2 * np.pi/self.grid_width\n self.grid_x_shifted = -self.grid_width/2 + self.dx * np.arange(0, self.grid_resol)\n self.grid_x = self.grid_x_shifted + self.grid_center\n self.grid_k = - (np.pi * self.grid_resol)/self.grid_width + self.dk * np.arange(0, self.grid_resol)\n self.grid_k = np.roll(self.grid_k, int((self.grid_resol)/2))\n self.grid_kin = np.square(self.h)/ (2*self.m) * np.square(self.grid_k)", "def create_grids(self):\n \n 
par = self.par\n\n # a. retirement\n \n # pre-decision states\n par.grid_m_ret = nonlinspace(par.eps,par.m_max_ret,par.Nm_ret,par.phi_m)\n par.Nmcon_ret = par.Nm_ret - par.Na_ret\n \n # post-decision states\n par.grid_a_ret = nonlinspace(0,par.a_max_ret,par.Na_ret,par.phi_m)\n \n # b. working: state space (m,n,k) \n par.grid_m = nonlinspace(par.eps,par.m_max,par.Nm,par.phi_m)\n\n par.Nn = par.Nm\n par.n_max = par.m_max + par.n_add\n par.grid_n = nonlinspace(0,par.n_max,par.Nn,par.phi_n)\n\n par.grid_n_nd, par.grid_m_nd = np.meshgrid(par.grid_n,par.grid_m,indexing='ij')\n\n # c. working: w interpolant (and wa and wb and wq)\n par.Na_pd = np.int_(np.floor(par.pd_fac*par.Nm))\n par.a_max = par.m_max + par.a_add\n par.grid_a_pd = nonlinspace(0,par.a_max,par.Na_pd,par.phi_m)\n \n par.Nb_pd = np.int_(np.floor(par.pd_fac*par.Nn))\n par.b_max = par.n_max + par.b_add\n par.grid_b_pd = nonlinspace(0,par.b_max,par.Nb_pd,par.phi_n)\n \n par.grid_b_pd_nd, par.grid_a_pd_nd = np.meshgrid(par.grid_b_pd,par.grid_a_pd,indexing='ij')\n \n # d. working: egm (seperate grids for each segment)\n \n if par.solmethod == 'G2EGM':\n\n # i. dcon\n par.d_dcon = np.zeros((par.Na_pd,par.Nb_pd),dtype=np.float_,order='C')\n \n # ii. acon\n par.Nc_acon = np.int_(np.floor(par.Na_pd*par.acon_fac))\n par.Nb_acon = np.int_(np.floor(par.Nb_pd*par.acon_fac))\n par.grid_b_acon = nonlinspace(0,par.b_max,par.Nb_acon,par.phi_n)\n par.a_acon = np.zeros(par.grid_b_acon.shape)\n par.b_acon = par.grid_b_acon\n\n # iii. con\n par.Nc_con = np.int_(np.floor(par.Na_pd*par.con_fac))\n par.Nb_con = np.int_(np.floor(par.Nb_pd*par.con_fac))\n \n par.grid_c_con = nonlinspace(par.eps,par.m_max,par.Nc_con,par.phi_m)\n par.grid_b_con = nonlinspace(0,par.b_max,par.Nb_con,par.phi_n)\n\n par.b_con,par.c_con = np.meshgrid(par.grid_b_con,par.grid_c_con,indexing='ij')\n par.a_con = np.zeros(par.c_con.shape)\n par.d_con = np.zeros(par.c_con.shape)\n \n elif par.solmethod == 'NEGM':\n\n par.grid_l = par.grid_m\n\n # e. shocks\n assert (par.Neta == 1 and par.var_eta == 0) or (par.Neta > 1 and par.var_eta > 0)\n\n if par.Neta > 1:\n par.eta,par.w_eta = log_normal_gauss_hermite(np.sqrt(par.var_eta), par.Neta)\n else:\n par.eta = np.ones(1)\n par.w_eta = np.ones(1)\n\n # f. 
timings\n par.time_work = np.zeros(par.T)\n par.time_w = np.zeros(par.T)\n par.time_egm = np.zeros(par.T)\n par.time_vfi = np.zeros(par.T)", "def gridgen4(num_points, diameter, min_dist, n_miss_max=10000):\n\n # Grid size and scaling onto the grid\n grid_size = min(100, int(floor(float(diameter) / min_dist)))\n grid_cell = float(diameter) / grid_size # Grid sector cell size\n scale = 1.0 / grid_cell # Scaling onto the sector grid.\n print('- Grid size: %i' % grid_size)\n print('- Grid cell: %f' % grid_cell)\n\n r = diameter / 2.0 # Radius\n r_sq = r**2 # Radius, squared\n min_dist_sq = min_dist**2 # minimum distance, squared\n\n # Pre-allocate coordinate arrays\n x = numpy.zeros(num_points)\n y = numpy.zeros(num_points)\n\n # Grid meta-data\n next = numpy.zeros(num_points, dtype='i8') # Next coordinate index.\n h1 = -numpy.ones((grid_size, grid_size), dtype='i8') # First index in the grid\n h2 = -numpy.ones((grid_size, grid_size), dtype='i8') # Last index in the grid\n grid_count = numpy.zeros((grid_size, grid_size), dtype='i8') # Points in grid cell.\n\n n = num_points\n n_req = num_points\n num_miss = 0\n for j in range(n_req):\n\n # First time no need to check the minimum distance req, just needs\n # to be inside the diameter.\n if j == 0:\n done = False\n while not done:\n x[j], y[j] = get_trail_position(r)\n done = (x[j]**2 + y[j]**2) <= r_sq\n jx, jy = grid_position(x[j], y[j], scale, r)\n grid_count[jx, jy] += 1\n h1[jx, jy] = 0\n h2[jx, jy] = 0\n\n # All other points have to be inside the diameter and match the\n # minimum separation requirements.\n else:\n done = False\n while not done:\n xt, yt = get_trail_position(r)\n\n # Check if the point is inside the diameter\n if (xt**2 + yt**2) > r_sq:\n num_miss += 1\n else:\n # Scale onto grid.\n jx, jy = grid_position(xt, yt, scale, r)\n # Find minimum distance to other points\n y0 = max(0, jy - 1)\n y1 = min(grid_size - 1, jy + 1)\n x0 = max(0, jx - 1)\n x1 = min(grid_size - 1, jx + 1)\n dmin_sq = diameter\n for ky in range(y0, y1 + 1):\n for kx in range(x0, x1 + 1):\n if grid_count[kx, ky] > 0:\n kh1 = h1[kx, ky]\n for kh in range(grid_count[kx, ky]):\n dx = xt - x[kh1]\n dy = yt - y[kh1]\n dist_sq = dx**2 + dy**2\n dmin_sq = min(dist_sq, dmin_sq)\n kh1 = next[kh1]\n\n # Check if the minimum distance requirement is met.\n if dmin_sq >= min_dist_sq:\n x[j] = xt\n y[j] = yt\n if h1[jx, jy] == -1:\n h1[jx, jy] = j\n else:\n next[h2[jx, jy]] = j\n h2[jx, jy] = j\n grid_count[jx, jy] += 1\n num_miss = 0\n done = True\n else:\n num_miss += 1\n\n if num_miss >= n_miss_max:\n n = j - 1\n done = True\n\n if num_miss >= n_miss_max:\n break\n\n if n < n_req:\n x = x[0:n]\n y = y[0:n]\n\n return x, y", "def makeGrid(self):\n self.h = self.step_x\n self.k = self.step_t\n self.t, self.x = np.meshgrid(np.arange(self.min_t, self.max_t, self.step_t), np.arange(self.min_x, self.max_x\n , self.step_x))", "def next_step(self):\n\n c = 1\n dt = 0.001\n dx = 1 / 20**2\n\n # copy current state first\n next_state = np.copy(self.state)\n\n # iterate over matrix\n for i in range(self.width - 1):\n for j in range(self.height - 1):\n\n if not self.shape == \"circle\" or self.circle[i, j] == 1:\n\n # left bottom corner\n if i == 0 and j == 0:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + 0\\\n + 0 + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i, j]\n # right top corner\n elif i == 0 and j == self.height - 1:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (0 + self.state[i + 1, j]\\\n + 
self.state[i, j - 1] + 0\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n # right bottom corner\n elif i == self.width - 1 and j == 0:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i, j - 1] + 0\\\n + 0 + self.state[i - 1, j]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n # left bottom corner\n elif i == self.width - 1 and j == self.height - 1:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i, j - 1] + self.state[i - 1, j]\\\n + 0 + 0\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n elif i == 0: # j is not 0\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + 0\\\n + self.state[i, j - 1] + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n elif j == 0:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + self.state[i - 1, j]\\\n + 0 + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n elif i == self.width - 1:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (0 + self.state[i - 1, j]\\\n + self.state[i, j - 1] + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n elif j == self.height - 1:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + self.state[i - 1, j]\\\n + self.state[i, j - 1] + 0\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n else:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + self.state[i - 1, j]\\\n + self.state[i, j - 1] + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n\n self.prev_state = np.copy(self.state)\n self.state = np.copy(next_state)\n\n self.timestep += 1", "def filled_grid(n):\n\n i = 0\n r, c = 1, 1\n while r * c < n:\n if i % 2:\n r += 1\n else:\n c += 1\n\n i += 1\n\n return r, c", "def DrawGrid(self, count):\n for i in range(0, self.width, self.incr):\n self.canvas.create_line(i, 100, i, 700, fill = \"#696969\", width = 1)\n for i in range(100, 800, 100):\n self.canvas.create_line(0, i, self.width, i, fill = \"#696969\", width = 1)\n self.canvas.create_rectangle(self.incr * 4, self.height - self.incr * 3.5,\n self.width - self.incr * 4, self.height, fill = \"black\", width = 3)\n for i in range(int(self.height - self.incr * 3.5), self.height, int(self.incr / 4)):\n self.canvas.create_line(self.incr * 4, i, self.width - self.incr * 4,\n i, fill = \"#696969\", width = 1)\n for i in range(self.incr * 4, self.width - self.incr * 4 + 1, int(self.incr / 4)):\n self.canvas.create_line(i, self.height - self.incr * 3.5, i, self.height,\n fill = \"#696969\", width = 1)", "def Green_func(self):\n if self.bc == True:\n size = self.grid_size\n else:\n size = 2*self.grid_size\n self.Green = np.zeros([size, size])\n for x in range(len(self.Green[0])):\n for y in range(len(self.Green[1])):\n radius = np.sqrt(x**2 + y**2) \n if radius < self.soften: \n radius = self.soften\n self.Green[x, y]=1/(4 * np.pi * radius)\n if self.grid_size%2 == 0: \n self.Green[: size//2, size//2 : ] = np.flip(self.Green[: size//2, : size//2], axis = 1) # an intermittent step - the original grid has only been flipped once (2 x the original size)\n self.Green[ size//2 : , :] = np.flip(self.Green[: size//2, :], axis = 0)\n else: \n print(\"Exiting - Grid size is currently odd. 
Pleaset set to an even value.\")", "def _incremental_steps(start, end, steps, stepsize=None):\n if stepsize is None: step_size = (end - start) / np.maximum((steps - 1), 1)\n gradient = []\n for i in range(steps):\n value = start + step_size * i\n gradient.append(value)\n\n return gradient[0:steps]", "def grid(gmin, gmax, gstep):\n n_vals = int((gmax - gmin)/gstep + 1)\n my_grid = linspace(gmin, gmax, n_vals)\n return my_grid", "def swipeBase (self) :\n grid = self.grid\n\n #we start by putting every tile up\n for columnNbr in range(4) :\n nbrZeros = 4 - np.count_nonzero(grid[:,columnNbr])\n\n for lineNbr in range(4) :\n counter = 0\n while (grid[lineNbr, columnNbr] == 0) and (counter < 4):\n counter += 1\n if np.count_nonzero(grid[lineNbr:4, columnNbr]) != 0 :\n for remainingLine in range (lineNbr, 3) :\n grid[remainingLine, columnNbr] = grid[remainingLine+1, columnNbr]\n grid[3, columnNbr] = 0\n\n #now we do the additions\n for lineNbr in range(3) :\n if grid[lineNbr, columnNbr] == grid[lineNbr+1, columnNbr] :\n grid[lineNbr, columnNbr] *= 2\n for remainingLine in range (lineNbr+1, 3) :\n grid[remainingLine, columnNbr] = grid[remainingLine+1, columnNbr]\n grid[3, columnNbr] = 0\n\n return (grid)", "def main():\n\n rules = parse_input(get_input())\n for part in [5, 18]:\n image = np.array(START_PATTERN).astype(bool)\n for i in range(part):\n image = enlarge(image, rules)\n count = sum(sum(ch for ch in row) for row in image)\n\n print(\"Number of # in the final matrix after {} iterations is {}.\".format(part, count))\n return", "def get_cross_size_grid(n, m, grid):\n grid = [[int(c == '#') for c in row] for row in grid]\n acc = [[[0] * 4 for _ in range(m)] for _ in range(n)]\n for i in range(n):\n acc[i][0][L] = grid[i][0]\n acc[i][-1][R] = grid[i][-1]\n for j in range(1, m):\n val = grid[i][j]\n acc[i][j][L] = acc[i][j-1][L] + val if val else 0\n val = grid[i][-j-1]\n acc[i][-j-1][R] = acc[i][-j][R] + val if val else 0\n for j in range(m):\n acc[0][j][T] = grid[0][j]\n acc[-1][j][B] = grid[-1][j]\n for i in range(1, n):\n val = grid[i][j]\n acc[i][j][T] = acc[i-1][j][T] + val if val else 0\n val = grid[-i-1][j]\n acc[-i-1][j][B] = acc[-i][j][B] + val if val else 0\n\n for i in range(n):\n for j in range(m):\n grid[i][j] = min(acc[i][j])\n return grid", "def grid(self):\r\n dimA = self.dimA ; dimC = self.dimA ; W_grid = self.W_grid\r\n \r\n self.tol = 10e-5\r\n self.Niter = 10000\r\n \r\n a0 = 100 / self.dimA\r\n c0 = 100 / self.dimA\r\n a_grid = np.mgrid[0:(dimA):1] ; a_grid = a0 * a_grid ; self.a_grid = a_grid\r\n c_grid = np.mgrid[0:(dimC):1] ; c_grid = c0 * c_grid ; self.c_grid = c_grid\r\n self.W_grid = W_grid", "def grid_animation(self, steps, figure_size=(12, 12), speed=100):\r\n\r\n steps -= 1\r\n x = self.seed\r\n\r\n fig, ax = plt.subplots(figsize=figure_size)\r\n ax.grid(False)\r\n ax.get_xaxis().set_visible(False)\r\n ax.get_yaxis().set_visible(False)\r\n color_map = matplotlib.colors.ListedColormap(['white', 'black'])\r\n im = plt.imshow(x[1:-1:1, 1:-1:1], interpolation='nearest', cmap=color_map, animated=True)\r\n counter = 0\r\n\r\n def update_figure(*args):\r\n nonlocal x, counter, fig\r\n\r\n counter += 1\r\n x, stats = self.update_grid(x)\r\n plt.title(self.title + ' | Step ' + str(counter), fontsize=14)\r\n im.set_array(x[1:-1:1, 1:-1:1])\r\n\r\n return im, # why is this comma necessary?\r\n\r\n ani = animation.FuncAnimation(fig, update_figure, frames=steps,\r\n interval=speed, blit=False, repeat=False)\r\n\r\n return ani", "def reduce(self, steps=1):\n self.h -= steps", 
"def life_step(state):\n\t# For every cell each live cell in any of the 8 neighbouring cells contributes 1 to the sum\n\t# Rolling matricies is periodic so this implements periodic boundary conditions\n\tnumberOfNeigbours = sum(np.roll(np.roll(state, i, axis=0), j, axis=1)\n\t\t\t\t\t\t for i in (-1,0,1) for j in (-1,0,1) if (i != 0 or j != 0))\n\n\t# Any live cell with fewer than two live neighbours dies, as if caused by under-population\n\tstate = np.where(numberOfNeigbours < 2, 0, state)\n\t# Any live cell with more than three live neighbours dies, as if by over-population\n\tstate = np.where(numberOfNeigbours > 3, 0, state)\n\t# Any dead cell with exactly three live neighbours becomes a live cell, as if by reproduction.\n\tstate = np.where(numberOfNeigbours == 3, 1, state)\n\n\treturn state", "def recursion_step(value_n, r_grid, discount):\n\n n = value_n.shape[0]\n r_len = r_grid.shape[0]\n value_n_minus_1 = np.zeros([n - 1, r_len]) # Value function length reduced by 1\n gittins_n_minus_1 = np.zeros(n - 1) # Value function length reduced by 1\n for k in range(0, n - 1):\n a = k + 1 # a in range [1,n-1]\n b = n - k - 1 # b in range [1,n-1]\n value_n_minus_1[k, :] = np.maximum((r_grid / float(1 - discount)),\n (a / float(n)) * (1 + discount * value_n[k + 1, :]) +\n (b / float(n)) * discount * value_n[k, :]\n )\n try:\n # Find first index where Value = (Value of Safe Arm)\n idx_git = np.argwhere((r_grid / float(1 - discount)) == value_n_minus_1[k, :]).flatten()\n gittins_n_minus_1[k] = 0.5 * (r_grid[idx_git[0]] + r_grid[idx_git[0] - 1]) # Take average\n except:\n print(\"Error in finding Gittins index\")\n\n return gittins_n_minus_1, value_n_minus_1", "def tab_grid_traveller(m: int, n: int):\n\n if m == 1 and n == 1:\n return 1\n\n if m == 0 or n == 0:\n return 0\n\n table = [[0 for j in range(n + 1)] for i in range(m + 1)] # Initialize table with m + 1 rows and n + 1 columns.\n table[1][1] = 1\n\n for i in range(m + 1):\n for j in range(n + 1):\n if j + 1 <= n:\n table[i][1 + j] = table[i][1 + j] + table[i][j]\n if i + 1 <= m:\n table[i + 1][j] = table[i + 1][j] + table[i][j]\n\n return table[m][n]", "def make_move(grid, n_columns, n_rows):\r\n # Generate the game grid to be manipulated\r\n new_grid = [[0] * (n_columns + 1) for i in range(n_rows + 1)]\r\n\r\n\r\n for i in range(n_rows):\r\n for j in range(n_columns):\r\n upper_left = grid[i-1][j-1] # neighbor to upper left of cell of interest\r\n upper = grid[i-1][j] # neighbor above cell of interest\r\n upper_right = grid[i-1][j+1] # neighbor to upper right of cell of interest\r\n left = grid[i][j-1] # neighbor to left of cell of interest\r\n right = grid[i][j+1] # neighbor to right of cell of interest\r\n bot_left = grid[i+1][j-1] # neighbor to bottom left cell of interest\r\n bot = grid[i+1][j] # neighbor below cell of interest\r\n bot_right = grid[i+1][j+1] # neighbor to bottom right of cell of interest\r\n\r\n # sum of the state of all neighbors\r\n on_neighbors = upper_left + upper + upper_right + left + right + bot_left + bot + bot_right\r\n\r\n # Any ON cell with fewer than two ON neighbors turns OFF\r\n if grid[i][j] == 1 and on_neighbors < 2:\r\n new_grid[i][j] = 0\r\n\r\n # Any ON cell with two or three ON neighbours stays ON\r\n elif grid[i][j] == 1 and (on_neighbors == 2 or on_neighbors == 3):\r\n new_grid[i][j] = 1\r\n\r\n # Any ON cell with more than three ON neighbors turns OFF\r\n elif grid[i][j] == 1 and on_neighbors > 3:\r\n new_grid[i][j] = 0\r\n\r\n # Any OFF cell with three ON neighbors turns ON\r\n elif grid[i][j] 
== 0 and on_neighbors == 3:\r\n new_grid[i][j] = 1\r\n\r\n return new_grid #manipulated game grid\r", "def gd(a, step_size=0.1, steps=42):\n out = []\n ### YOUR CODE HERE\n out.append(np.array([256,1]))\n for i in range(steps):\n point = out[i]\n gradient = np.array([0.5*2*a[i],0.5*2*a[i+1]])\n npoint = point - step_size*gradient\n out.append(npoint)\n ### END CODE\n return out", "def calc_stepsize(self):\n # Calculate step size\n step = 1.0/((self.n+self.d)*np.max(np.sum(self.p, axis=0)))\n return step", "def main():\r\n\r\n n_rows = 30\r\n n_columns = 80\r\n grid = [[0] * (n_columns + 1) for i in range(n_rows + 1)]\r\n initialize_grid(argv, grid)\r\n print_grid(n_rows, n_columns, grid)\r\n\r\n max_iterations = int(argv[1]) #argv imports as string, must change to int for count\r\n loop_count = 0\r\n\r\n while loop_count < max_iterations:\r\n # print the first grid with user inputs, then print all of the new grids\r\n if loop_count == 0:\r\n new_grid = make_move(grid, n_columns, n_rows)\r\n else:\r\n new_grid = make_move(new_grid, n_columns, n_rows)\r\n print_grid(n_rows, n_columns, new_grid)\r\n loop_count += 1", "def grid(min_val, max_val, step=1, *, num_dimensions=2):\n axis = itertools.takewhile(lambda x: x <= max_val,\n itertools.count(min_val, step))\n axes = itertools.tee(axis, num_dimensions)\n return itertools.product(*axes)", "def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n vals = [1, 2, 3]\n if n <= 3:\n return vals[n-1]\n for i in range(n - 3):\n new_val = 3 * vals[0] + 2 * vals[1] + 1 * vals[2]\n vals = vals[1:] + [new_val]\n return vals[-1]", "def step(self):\n lmax = 32\n\n star = self.star\n\n K, N = star.mesh_size\n mu = star.mu_coords\n r = star.r_coords\n\n def D1(k, n):\n return 1 / 6 * np.sum((mu[2::2] - mu[:-2:2]) *\n (eval_legendre(2 * n, mu[:-2:2]) * star.rho[:-2:2, k] +\n 4 * eval_legendre(2 * n, mu[1:-1:2]) * star.rho[1:-1:2, k] +\n eval_legendre(2 * n, mu[2::2]) * star.rho[2::2, k]))\n\n def D2(n, j):\n sum = 0\n\n def fl(r_dash, r, l=2 * n):\n if r_dash < r:\n return r_dash**(l + 2) / r**(l + 1)\n else:\n return r**l / r_dash**(l - 1)\n\n for k in range(0, N - 2, 2):\n sum += (r[k + 2] - r[k]) * (fl(r[k], r[j]) * D1(k, n) +\n 4 * fl(r[k + 1], r[j]) * D1(k + 1, n) +\n fl(r[k + 2], r[j]) * D1(k + 2, n))\n\n return sum / 6\n\n def calc_Phi(i, j):\n Phi = 0\n\n for n in range(lmax + 1):\n Phi -= 4 * np.pi * D2(n, j) * eval_legendre(2 * n, mu[i])\n\n return Phi\n\n # calculate Phi across grid\n for n in range(N):\n for k in range(K):\n star.Phi[k, n] = calc_Phi(k, n)\n\n # print(f'Phi = {star.Phi[0,:]}')\n\n # update the enthalpy\n\n Omega2 = star.eos.Omega2(star.Phi, star.Psi)\n C = star.eos.C(star.Phi, star.Psi)\n\n H = C - star.Phi - Omega2 * star.Psi\n\n # use new enthalpy and Phi to calculate the density\n\n star.rho = star.eos.rho_from_h(H)\n star.rho /= np.max(star.rho)\n\n # print(f\"rho = {np.average(star.rho, axis=0)}\")\n\n # calculate the errors\n\n H_err = np.max(np.abs(H - star.H)) / np.max(np.abs(H))\n\n if np.max(Omega2) == 0:\n if np.abs(Omega2 - star.Omega2) == 0:\n Omega2_err = 0\n else:\n Omega2_err = 1\n else:\n Omega2_err = np.abs(Omega2 - star.Omega2) / np.abs(Omega2)\n\n if np.max(star.C) == 0:\n if np.abs(C - star.C) == 0:\n C_err = 0\n else:\n C_err = 1\n else:\n C_err = np.abs(C - star.C) / np.abs(star.C)\n\n # set variables to new values\n\n star.H = H\n star.Omega2 = Omega2\n star.C = C\n print(\n f\"Errors: H_err = {H_err}, Omega2_err = {Omega2_err}, C_err = {C_err}\")\n\n return H_err, Omega2_err, C_err", "def hailstone_steps(num):\n 
steps = 0\n\n while num > 1:\n steps = steps + 1\n if num % 2 == 1:\n num = int(num * 3 + 1)\n else:\n num = int(num / 2)\n\n return steps", "def step(self, closure):\r\n self.state['step'] += 1\r\n for y_ind in range(self.dim_out):\r\n err = closure(y_ind)\r\n for group_ind in range(len(self.param_groups)):\r\n group = self.param_groups[group_ind]\r\n iekf_mat = self.state['iekf_groups'][group_ind]\r\n for ii, w in enumerate(group['params']):\r\n if w.grad is None:\r\n continue\r\n H_n = iekf_mat[ii]['H']\r\n grad = w.grad.data.detach()\r\n if len(w.size()) > 1:\r\n grad = grad.transpose(1, 0)\r\n grad = grad.contiguous().view((1, -1))\r\n if y_ind == 0:\r\n H_n = grad\r\n else:\r\n H_n = torch.cat([H_n, grad], dim=0)\r\n self.state['iekf_groups'][group_ind][ii]['H'] = H_n\r\n\r\n err_T = err.transpose(0, 1)\r\n\r\n for group_ind in range(len(self.param_groups)):\r\n group = self.param_groups[group_ind]\r\n iekf_mat = self.state['iekf_groups'][group_ind]\r\n for ii, w in enumerate(group['params']):\r\n if w.grad is None:\r\n continue\r\n\r\n lbd_n = iekf_mat[ii]['lbd']\r\n P_n = iekf_mat[ii]['P']\r\n EPS = iekf_mat[ii]['EPS']\r\n H_n = iekf_mat[ii]['H']\r\n H_n_T = H_n.transpose(0, 1)\r\n if group['lbd_decay']:\r\n miu = 1.0 / min(self.state['step'], group['lbd_max_step'])\r\n lbd_n = lbd_n + miu * (err_T.mm(err).flatten()[0] / self.dim_out - lbd_n)\r\n self.state['iekf_groups'][group_ind][ii]['lbd'] = lbd_n\r\n R_n = lbd_n * torch.eye(self.dim_out, dtype=torch.float, device=iekf_mat[ii]['device'],\r\n requires_grad=False)\r\n\r\n g_n = R_n + H_n.mm(P_n).mm(H_n_T)\r\n g_n = g_n.inverse()\r\n\r\n K_n = P_n.mm(H_n_T).mm(g_n)\r\n delta_w = K_n.mm(err)\r\n if len(w.size()) > 1:\r\n delta_w = delta_w.view((w.size(1), w.size(0))).transpose(1, 0)\r\n else:\r\n delta_w = delta_w.view(w.size())\r\n\r\n new_P = (group['alpha'] + 1) * (P_n - K_n.mm(H_n).mm(P_n) + EPS)\r\n self.state['iekf_groups'][group_ind][ii]['P'] = new_P\r\n\r\n w.data.add_(delta_w)\r\n\r\n return err", "def traverse_grid(self, start_cell, direction, num_steps):\n elements = []\n\n for step in range(num_steps):\n row = start_cell[0] + step * direction[0]\n col = start_cell[1] + step * direction[1]\n elements.append(self._grid[row][col])\n\n return elements", "def make_grid(self):\n for k in range(0, NUM + 1):\n self.create_line(k * UNIT, 0, k * UNIT, SIZE, width=THICKNESS)\n self.create_line(0, k * UNIT, SIZE, k * UNIT, width=THICKNESS)", "def backtrack_steps():\n\n # Initialize position and number of steps\n x = 0\n n_steps = 0\n\n # Walk until we get to positive 1\n while x < 1:\n x += 2 * np.random.randint(0, 2) - 1\n n_steps += 1\n\n return n_steps", "def grid_image(output):\n grid = []\n for data in output:\n grid += [make_grid(data, nrow=5, normalize=True)]\n return grid", "def task2_extra():\n N = 0\n lam = 0\n Ls = numpy.array([2*L for L in range(1,23)])\n h = 0.01\n tau = 0.000099\n\n iterss = []\n\n for L in Ls:\n a = L // 2\n print(L)\n x = numpy.linspace(-L, L, int(2*L/h) + 1)\n # eps = int(0.1 * len(x))\n\n Vm = V1D(lam, x)\n state = phi(N, x-a)\n\n iters = 0\n while True:\n prob = numpy.abs(state)**2\n mid = int(2*L/h) // 2\n # if max(prob) in prob[mid-eps:mid+eps]:\n if numpy.argmax(prob) <= mid:\n print(iters)\n iterss.append(iters)\n break\n\n state[0] = 0\n state[-1] = 0\n state = implicit_scheme_step(state, tau, h, Vm)\n iters += 1\n\n fig = plt.figure()\n plt.title(\"Iterations of Gaussian travel to center\")\n plt.xlabel(\"$L$\")\n plt.ylabel(\"Time\")\n plt.plot(Ls, tau*numpy.array(iterss))\n plt.show()\n 
fig.savefig(\"naloga2_iters_of_gaussian_travel.pdf\", bbox_inches=\"tight\")", "def monteCarloRun(startingPoints, qms, vs, directions, BR, BZ, r, z, rLim, fluxGridCoarseness, steppingMethod):\n totalGrid = np.zeros((BR.shape[0]//fluxGridCoarseness, BR.shape[1]//fluxGridCoarseness))\n trappedGrid = np.zeros((BR.shape[0]//fluxGridCoarseness, BR.shape[1]//fluxGridCoarseness))\n rReduced = np.linspace(np.min(r), np.max(r), len(r)//fluxGridCoarseness)\n rDelta = rReduced[1]-rReduced[0]\n rReduced += rDelta/2. # Use distance to cell centers to count particles\n zReduced = np.linspace(np.min(z), np.max(z), len(z)//fluxGridCoarseness)\n zDelta = zReduced[1]-zReduced[0]\n zReduced += zDelta/2. # Use distance to cell centers to count particles\n \n habitatCrossings = 0\n GDTcrossings = 0\n detectorCounts = np.zeros(14)\n \n gridStep = r[1]-r[0]\n \n numParticles = len(qms)\n for particleNumber in prange(numParticles):\n if particleNumber % (numParticles/10) == 0:\n print(particleNumber)\n \n qm = qms[particleNumber]\n v0 = vs[particleNumber]\n dt = (r[1]-r[0])/v0/2\n maxTime = rLim * 3 / v0\n maxSteps = int(maxTime / dt)\n particleGrid = np.zeros((BR.shape[0]//fluxGridCoarseness, BR.shape[1]//fluxGridCoarseness))\n crossedHabitat = 0\n crossedGDT = 0\n particleDetectorCounts = np.zeros(14)\n \n # Generate random point and direction\n point1 = startingPoints[particleNumber]\n direction = directions[particleNumber]\n noAccelStep = 0.99*gridStep*direction\n trapped = True\n \n x = point1.copy() # copy is important... \n v = direction*v0\n E = np.zeros(3)\n \n if steppingMethod == 2:\n x, _ = RKnext(x, v, qm, BR, BZ, r, z, dt/2)\n\n for i in range(maxSteps):\n # Count crossings\n particleR = (x[0]**2 + x[1]**2)**.5\n nearestR = nearestIndex(rReduced, particleR)\n nearestZ = nearestIndex(zReduced, x[2])\n particleGrid[nearestZ, nearestR] = 1\n if 9.7 < particleR < 12.3 and -1.3 < x[2] < 1.3:\n crossedHabitat = 1\n if -14 < x[2] < 14 and particleR < 5:\n crossedGDT = 1\n # Will's detectors\n # for det in range(14):\n # vd = (x[0] - det*1.4, x[1], x[2])\n # if (vd[0]**2+vd[1]**2+vd[2]**2)**.5 < 0.5:\n # particleDetectorCounts[det] = 1\n \n # Step\n if steppingMethod == 0:\n x += noAccelStep\n elif steppingMethod == 1:\n x, v = RKnext(x, v, qm, BR, BZ, r, z, dt)\n elif steppingMethod == 2:\n B = BxyzInterpolated(x, BR, BZ, r, z)\n x, v = BBnext(x, v, qm, B, E, dt)\n \n # Stop stepping if out of bounds\n if (particleR**2+x[2]**2)**.5 > rLim + .001: \n trapped = False\n break\n detectorCounts += particleDetectorCounts\n totalGrid += particleGrid\n if trapped:\n trappedGrid += particleGrid\n habitatCrossings += crossedHabitat\n GDTcrossings += crossedGDT\n \n print(\"Will's detectors:\", detectorCounts)\n \n # Divide cell counts by volume of cell\n totalGridUnscaled = totalGrid.copy()\n trappedGridUnscaled = trappedGrid.copy()\n for i in range(len(rReduced)):\n for j in range(len(zReduced)):\n volume = np.pi*((rReduced[i]+rDelta/2.)**2-(rReduced[i]-rDelta/2.)**2)*zDelta\n totalGrid[j, i] /= volume\n trappedGrid[j, i] /= volume\n \n return rReduced, zReduced, totalGrid, trappedGrid, habitatCrossings, GDTcrossings, totalGridUnscaled, trappedGridUnscaled", "def Greens_function_approxRW(n=10, nwalks=200, start_position=[5,5]):\n \n G_func = np.zeros((n+1, n+1)) # Matrix to indicate how often the walk end att each boundary position\n \n for i in range(nwalks):\n position = start_position[:]\n # Perform one random walk and store the boundary position it reaches\n while (position[0] != 0 and position[0] != n and 
position[1] != 0 and position[1] != n):\n new_move = next_move()\n position[0] = position[0] + new_move[0]\n position[1] = position[1] + new_move[1]\n G_func[position[1], position[0]] += 1\n \n return G_func", "def genGrid(nTot,gDict):\n \n # Generate nTot-by-8 array, and dump to disk.\n grid = np.empty([nTot,8])\n \n # Initialize Simulation ID (SID) to keep track of the number of propagations.\n SID = 1\n\n # The grid array is filled in the order: MA, AOP, RAAN, INC, ECC, SMA, MJD.\n \n # Get deltas\n for key in gDict:\n if gDict[key]['points'] > 1:\n gDict[key]['delta'] = (gDict[key]['end'] - gDict[key]['start']) / (gDict[key]['points'] - 1)\n else:\n gDict[key]['delta'] = 0.\n \n # Here's the Big Nested Loop.\n for i0 in range(0, gDict['MJD']['points']):\n MJD = gDict['MJD']['start'] + i0 * gDict['MJD']['delta']\n\n for i1 in range(0, gDict['SMA']['points']):\n SMA = gDict['SMA']['start'] + i1 * gDict['SMA']['delta']\n\n for i2 in range(0, gDict['ECC']['points']):\n ECC = gDict['ECC']['start'] + i2 * gDict['ECC']['delta']\n\n for i3 in range(0, gDict['INC']['points']):\n INC = gDict['INC']['start'] + i3 * gDict['INC']['delta']\n\n for i4 in range(0, gDict['RAAN']['points']):\n RAAN = gDict['RAAN']['start'] + i4 * gDict['RAAN']['delta']\n\n for i5 in range(0, gDict['AOP']['points']):\n AOP = gDict['AOP']['start'] + i5 * gDict['AOP']['delta']\n\n for i6 in range(0, gDict['MA']['points']):\n MA = gDict['MA']['start'] + i6 * gDict['MA']['delta']\n \n grid[SID - 1,:] = [SID,MJD,SMA,ECC,INC,RAAN,AOP,MA]\n SID = SID + 1\n\n return grid", "def collatz_step(n):\n if n<1:\n raise ValueError('Input must be greater than 0')\n elif n==1:\n return 1\n elif n%2==0:\n return (n/2)\n elif n%2==1:\n return(3*n+1)", "def next_step():\n global fenetre, grid\n if len(l)>2:\n board_display(l[0])\n l.pop(0)\n steps.set(str(len(l))+\" steps remaining\")\n elif len(l)==2:\n board_display(l[0])\n l.pop(0)\n steps.set(str(len(l))+\" step remaining\")\n else:\n board_display(l[0])\n steps.set(\"No more steps remaining\")\n btn.set(\"Finished!\")", "def grid(iant,xgrid=[0],ygrid=[0],sleep=4):\n d=Carma(iant).drive()\n d.setOffset(xgrid[0],ygrid[0])\n time.sleep(sleep)\n time.sleep(sleep)\n for y in ygrid:\n for x in xgrid:\n print x,y\n d.setOffset(x,y)\n time.sleep(sleep)", "def initial_approximation(pulls, discount, grid_n):\n\n values = np.zeros([pulls - 1, pulls - 1, grid_n]) # Store V(a=k, b=n-k, r) in values[k,n-1,:] as k varies\n gittins = np.zeros([pulls - 1, pulls - 1]) # Store Gittins(a=k, b=n-k) in gittins[k,n-1] as k varies\n\n a_grid = np.arange(1, pulls)\n r_grid = np.linspace(0, 1, grid_n)\n\n initial_gittins = a_grid / float(pulls) # Initial Gittins Approximation to start Backward Induction\n gittins[0:pulls, pulls - 2] = initial_gittins # Record initial Gittins approximation\n\n for idx_a, a in enumerate(a_grid):\n values[idx_a, pulls - 2, :] = (1.0 / (1 - discount)) * \\\n np.maximum(r_grid, a / float(pulls)) # Record initial Value approximation\n\n return gittins, values", "def makeGrid(self, width, height, rewardLocs, exit, nPick=1, nAux=1, walls=[]):\n # Make mapping from coordinate (x, y, (takenreward1, takenreward2, ...))\n # to state number, and vice-versa.\n rTaken = iter([(),])\n for nPicked in range(1, nPick+1):\n rTaken = itertools.chain(rTaken, \n myCombinations(rewardLocs, r=nPicked)\n )\n # Iterators are hard to reset, so we list it.\n rTaken = list(rTaken)\n\n # Mappings from state to coordinates, vice-versa\n coordToState = {}\n stateToCoord = {}\n stateIdx = 0\n for x in 
range(width):\n for y in range(height):\n for stuff in rTaken:\n for holding in self.holdingPossibilities:\n coordToState[(x, y, stuff, holding)] = stateIdx\n stateToCoord[stateIdx] = (x, y, stuff, holding)\n stateIdx += 1\n self.deadEndState = stateIdx\n\n # Actually make the transition function\n def trans(f, p): \n aux = p\n (x, y, stuff, holding) = stateToCoord[f]\n actionMap = {}\n default = {(f, aux): 1}\n # Make the transition dictionary if the dead-end state (state width*height)\n if f == self.F-1:\n for action in range(5):\n actionMap[action] = default\n return actionMap\n\n # Otherwise, determine directions of motion, etc. \n for i in range(4):\n actionMap[i] = default\n if x != 0 and ((x-1, y) not in walls):\n actionMap[0] = {(coordToState[(x-1,y,stuff, holding)], aux): 1}\n if x < width-1 and ((x+1, y) not in walls):\n actionMap[1] = {(coordToState[(x+1,y,stuff, holding)], aux): 1}\n if y != 0 and ((x, y-1) not in walls):\n actionMap[2] = {(coordToState[(x,y-1,stuff, holding)], aux): 1}\n if y < height-1 and ((x, y+1) not in walls):\n actionMap[3] = {(coordToState[(x,y+1,stuff, holding)], aux): 1}\n # What happens when the agent uses action 4?\n if (x, y) == exit:\n # Some cases, depending on self.oneAtATime\n if not self.oneAtATime:\n # The agent is leaving.\n actionMap[4] = {(self.deadEndState, aux): 1}\n else:\n # The agent is dropping off a reward. holeFiller will\n # take care of the reward value.\n if len(stuff) >= nPick:\n # The agent is not allowed to pick up more stuff\n actionMap[4] = {(self.deadEndState, aux): 1}\n else:\n # The agent drops off the object.\n actionMap[4] = {(coordToState[(x,y,stuff, -1)], aux): 1}\n elif (x, y) not in rewardLocs:\n # No reward to pick up. Do nothing.\n actionMap[4] = default\n elif (x, y) in stuff:\n # This reward has already been used. 
Do nothing.\n actionMap[4] = default\n elif len(stuff) >= nPick or (holding != -1 and holding < len(stuff)\n and self.oneAtATime):\n # The agent has its hands full.\n actionMap[4] = default\n else:\n # The agent is allowed to pick up an object.\n newStuff = tuple(sorted(list(stuff) + [(x, y)]))\n if self.oneAtATime:\n newHoldingIdx = newStuff.index((x, y))\n else:\n newHoldingIdx = -1\n actionMap[4] = {(coordToState[(x, y, newStuff, newHoldingIdx)], aux): 1}\n return actionMap\n\n # Man, I'm outputting a lot of stuff.\n # coordToState[(x, y, rewardsLeft, holding)] -> index of this state\n # stateToCoord[index] -> (x, y, rewardsLeft, holding)\n # rTaken is a list of all possible combinations of leftover rewards.\n return (trans, coordToState, stateToCoord, rTaken)", "def grid_numbering(n, x_0, y_0, x_1, y_1):\n \n if n == 0:\n return \"\"\n\n arg = complex_number(x_0 + 0.5 - x_1, y_0 + 0.5 - y_1).argument()\n\n if arg >= 0 and arg < np.pi / 2: \n x = \"1\"\n x_1 += 2 ** (n - 2)\n y_1 += 2 ** (n - 2)\n elif arg >= np.pi / 2 and arg <= np.pi:\n x = \"2\"\n x_1 -= 2 ** (n - 2)\n y_1 += 2 ** (n - 2)\n elif arg < 0 and arg >= -np.pi / 2:\n x = \"4\"\n x_1 += 2 ** (n - 2)\n y_1 -= 2 ** (n - 2)\n else:\n x = \"3\"\n x_1 -= 2 ** (n - 2)\n y_1 -= 2 ** (n - 2)\n\n return str(x) + grid_numbering(n - 1, x_0, y_0, x_1, y_1)", "def grid_distortion(\n img: np.ndarray,\n num_steps: int = 10,\n xsteps: Tuple = (),\n ysteps: Tuple = (),\n interpolation: int = cv2.INTER_LINEAR,\n border_mode: int = cv2.BORDER_REFLECT_101,\n value: Optional[ImageColorType] = None,\n) -> np.ndarray:\n height, width = img.shape[:2]\n\n x_step = width // num_steps\n xx = np.zeros(width, np.float32)\n prev = 0\n for idx in range(num_steps + 1):\n x = idx * x_step\n start = int(x)\n end = int(x) + x_step\n if end > width:\n end = width\n cur = width\n else:\n cur = prev + x_step * xsteps[idx]\n\n xx[start:end] = np.linspace(prev, cur, end - start)\n prev = cur\n\n y_step = height // num_steps\n yy = np.zeros(height, np.float32)\n prev = 0\n for idx in range(num_steps + 1):\n y = idx * y_step\n start = int(y)\n end = int(y) + y_step\n if end > height:\n end = height\n cur = height\n else:\n cur = prev + y_step * ysteps[idx]\n\n yy[start:end] = np.linspace(prev, cur, end - start)\n prev = cur\n\n map_x, map_y = np.meshgrid(xx, yy)\n map_x = map_x.astype(np.float32)\n map_y = map_y.astype(np.float32)\n\n remap_fn = _maybe_process_in_chunks(\n cv2.remap,\n map1=map_x,\n map2=map_y,\n interpolation=interpolation,\n borderMode=border_mode,\n borderValue=value,\n )\n return remap_fn(img)", "def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n if n <= 3:\n return n\n else:\n i = 3\n x, y, z = 1, 2, 3\n new = 1\n while i < n:\n new = z + (2*y) + (3*x)\n x, y, z = y, z, new \n i += 1\n return new", "def make_step(self):\n self.step_vals = np.cumsum(self.vals)", "def draw_grid(self):\n for i in range(N * N + 1):\n color = \"blue\" if i % N == 0 else \"gray\"\n x0 = MARGIN + i * SIDE\n y0 = MARGIN\n x1 = MARGIN + i * SIDE\n y1 = HEIGHT - MARGIN\n self.canvas.create_line(x0, y0, x1, y1, fill=color)\n\n x0 = MARGIN\n y0 = MARGIN + i * SIDE\n x1 = WIDTH - MARGIN\n y1 = MARGIN + i * SIDE\n self.canvas.create_line(x0, y0, x1, y1, fill=color)", "def grid_inflation(self):\n for obs in self.obstacle_list:\n\n inflation_x1 = round((obs[0][0]-self._inflation_radius)/self.step_size)\n\n inflation_y2 = round((obs[0][1] + obs[2] +self._inflation_radius)/self.step_size)\n\n inflation_x2 = round((obs[0][0] + obs[1] +self._inflation_radius)/self.step_size)\n\n 
inflation_y1 = round((obs[0][1] -self._inflation_radius)/self.step_size)\n\n self.grid[1, inflation_x1:inflation_x2+1,\n inflation_y1:inflation_y2+1] = INFLATION_COST\n\n # border inflation\n self.grid[1, 0:self.gridwidth+1, 0:round(self._inflation_radius/self.step_size)+1] = INFLATION_COST\n self.grid[1, 0:self.gridwidth+1, self.gridheight-round(self._inflation_radius / self.step_size):self.gridheight+1] = INFLATION_COST\n self.grid[1, 0:round(self._inflation_radius/self.step_size)+1, 0:self.gridheight+1] = INFLATION_COST\n self.grid[1, self.gridwidth-round(self._inflation_radius/self.step_size):self.gridwidth+1, 0:self.gridheight+1] = INFLATION_COST\n\n # if NEED_DRAW_INFLATED_GRID:\n # for i in range(self.gridwidth):\n # plt.scatter(i,0)\n # plt.scatter(i,self.gridheight)\n # for j in range(self.gridheight):\n # plt.scatter(0,j)\n # plt.scatter(self.gridwidth,j)\n # if self.grid[i, j] != 0:\n # plt.scatter(i,j)\n # plt.show()\n\n return self.grid", "def generate_mandelbrot(self, iterations):\n if self.grid is None:\n raise RuntimeError(\"Grid hasn't been setup - call set_grid first.\")\n # Define the tensorflow variables\n c = tf.constant(self.grid.astype(np.complex64))\n z = tf.Variable(c)\n n = tf.Variable(tf.zeros_like(c, tf.float32))\n # Start the tensorflow session\n with tf.Session():\n tf.global_variables_initializer().run()\n # Define the main mandelbrot algorithm - either take the square plus x, or keep z\n z_out = tf.where(tf.abs(z) < self.threshold, z ** 2 + c, z)\n not_diverged = tf.abs(z_out) < self.threshold\n # Create a group of tensorflow operations\n step = tf.group(z.assign(z_out), n.assign_add(tf.cast(not_diverged, tf.float32)))\n # Run the operations for a set number of steps\n for i in range(iterations):\n step.run()\n self.end_step = n.eval()\n self.end_z = z_out.eval()", "def mem_grid_traveller(m: int, n: int, memo: dict={}):\n\n value = memo.get((m, n))\n if not value:\n if m == 1 and n == 1:\n value = 1\n elif m == 0 or n == 0:\n value = 0\n else:\n value = mem_grid_traveller(m - 1, n, memo) + mem_grid_traveller(m, n - 1, memo)\n memo[(m, n)] = value\n\n return value", "def define_grid():\n grid_left = np.array([[-13.1000000000000, -35.5000000000000, -48.3000000000000, -60, -16.9000000000000,\n -34.8000000000000, -67.5000000000000, -46.1000000000000, -59.8000000000000,\n -14.2000000000000, -28.3000000000000, -42.3000000000000, -67.6000000000000,\n -50.5000000000000, -14.6000000000000, -60.9000000000000, -31.6000000000000,\n -5.10000000000000, -65.6000000000000, -41.8000000000000, -55.1000000000000,\n -22.7000000000000, -5.80000000000000, -49.2000000000000, -34.5000000000000,\n -61.5500000000000, -63.6000000000000, -40.4000000000000, -48.7000000000000,\n -21.8000000000000, -58.2000000000000, -7, -36.3000000000000, -48.1000000000000,\n -56.8000000000000, -7.30000000000000, -22.2000000000000, -36.8000000000000,\n -46.8000000000000],\n [-67.7000000000000, -60, -55.1000000000000, -51.8000000000000, -51.6000000000000,\n -49.3000000000000, -47.1000000000000, -43.7000000000000, -39.6000000000000,\n -39.1000000000000, -31.2000000000000, -30.7000000000000, -30.1000000000000,\n -24.4000000000000, -22.7000000000000, -18.7000000000000, -16.9000000000000,\n -12.6000000000000, -10.8000000000000, -10.2000000000000, -4.01000000000000, 1.20000000000000,\n 2.80000000000000, 3.70000000000000, 3.90000000000000, 6.20000000000000, 8.30000000000000,\n 11.8000000000000, 14.5000000000000, 16, 18.2000000000000, 18.4000000000000, 19.9000000000000,\n 24.6000000000000, 28.5200000000000, 
33.8000000000000, 35, 35.4000000000000,\n 35.6000000000000],\n [69.1000000000000, 66, 58.2000000000000, 48, 78, 71.7000000000000, 31, 61.1000000000000,\n 53.3000000000000, 81.1000000000000, 76, 70.2000000000000, 41.2000000000000, 64.4000000000000,\n 80.2000000000000, 50.9000000000000, 75.2000000000000, 77.3000000000000, 37.8000000000000, 67,\n 53.2000000000000, 72, 74.8000000000000, 54.7000000000000, 66.5000000000000, 35.9000000000000,\n 25.7000000000000, 60.7000000000000, 50.5000000000000, 68.9000000000000, 27.3000000000000,\n 70.3000000000000, 59.6000000000000, 44, 20.8000000000000, 61.7000000000000, 57.2000000000000,\n 47, 36]])\n stn_left = np.array([[-14.6, -13.2, -11.7, -9.10, -11.7, -13.2, -7.90, -10],\n [-15.1, -15.1, -15.1, -12.6, -12.6, -12.6, -9.40, -10.1],\n [-5.40, -7.20, -8.70, -8.70, -7.50, -5.10, -10.3, -7.80]])\n grid_right = np.copy(grid_left)\n grid_right[0, :] = grid_right[0, :] * -1\n stn_right = np.copy(stn_left)\n stn_right[0, :] = stn_right[0, :] * -1\n\n return grid_left, grid_right, stn_left, stn_right", "def n_cols_generated(n_features: int, max_steps: int, n_transformations: int = 7, n_combinations: int = 4) -> int:\n # n_transformations is 1-len(func_transform) because either abs() or sqrt and log will be applied\n n_transformations -= 1\n original_cols = n_features\n new_cols = 0\n new_new_cols = 0\n # count additions at the highest level\n n_additions = 0\n steps = 1\n if steps <= max_steps:\n # Step 1: apply transformations to original features\n original_cols += n_features * n_transformations\n # n_additions += n_features * 2 # only if 1+ or 1- is in transformations!\n steps += 1\n if steps <= max_steps:\n # Step 2: first combination of features\n new_cols = n_combinations * (ncr(original_cols, 2))\n n_additions += 3 * new_cols // 4\n steps += 1\n while steps <= max_steps:\n # apply transformations on these new features\n # n_additions += new_cols * 2\n new_cols += new_cols * n_transformations\n steps += 1\n # get combinations of old and new features\n if steps <= max_steps:\n new_new_cols = n_combinations * (original_cols * new_cols)\n n_additions += 3 * new_new_cols // 4\n steps += 1\n # and combinations of new features within themselves\n if steps <= max_steps:\n n = n_combinations * (ncr(new_cols, 2))\n new_new_cols += n\n n_additions += 3 * n // 4\n steps += 1\n # update old and new features and repeat\n original_cols += new_cols\n new_cols = new_new_cols\n new_new_cols = 0\n # finally, apply transformation on the last new features\n if steps <= max_steps:\n # n_additions += new_cols * 2\n new_cols += new_cols * n_transformations\n return original_cols + new_cols + new_new_cols - n_additions", "def drawGrid(self):\n for div in range(NBCELL):\n sec = SSIZE*div\n self.can.create_line(0, sec, GSIZE, sec, width=3, fill=GFILL)\n self.can.create_line(sec, 0, sec, GSIZE, width=3, fill=GFILL)", "def estimate_grids(self, **kwargs):\n npts = kwargs.get('npts', 500)\n return np.linspace(self.zvals.min(), self.zvals.max(), npts)", "def part2():\r\n my_input = 368078\r\n coords = [(1, 0), (1, -1), (0, -1), (-1, -1), (-1, 0), (-1, 1), (0, 1), (1, 1)]\r\n x = y = dx = 0\r\n dy = -1\r\n grid = {}\r\n\r\n while True:\r\n total = 0\r\n for offset in coords:\r\n ox, oy = offset\r\n if (x+ox, y+oy) in grid:\r\n total += grid[(x+ox, y+oy)]\r\n if total > int(my_input):\r\n return total\r\n if (x, y) == (0, 0):\r\n grid[(0, 0)] = 1\r\n else:\r\n grid[(x, y)] = total\r\n if (x == y) or (x < 0 and x == -y) or (x > 0 and x == 1-y):\r\n dx, dy = -dy, dx\r\n x, y = x+dx, y+dy", 
"def reducer(sudoku_grid):\n for i in range(9):\n sudoku_grid = reduce_row(i,sudoku_grid)\n sudoku_grid = reduce_col(i,sudoku_grid)\n sudoku_grid = reduce_sub(i,sudoku_grid)\n return sudoku_grid", "def mbieLoop (self) :\n self.iterCnt = 0\n while self.iterCnt < 5000:\n s = self.mdp.s0\n for h in range(self.H) :\n self.QUpper = QBoundsSolver(self.mdp, self.PHat, self.QUpper, self.Ntotal, 0.1, True, self.stop)\n a = np.argmax(self.QUpper[s])\n s_, self.R[s,a] = self.mdp.step(s, a)\n self.updateVisitStatistics(s, a, s_)\n s = s_\n\n if self.iterCnt % 10 == 0: \n print(self.iterCnt)\n print(self.QUpper)\n\n self.iterCnt += 1", "def sum_grid(self, grid):\n new_grid = []\n for i in range(self.grid_size):\n new_grid.append(self.sum_row(grid[i]))\n return new_grid", "def idx_to_grid(n):\n\n x = n % MAX_Y\n y = int(n / MAX_X)\n return(x, y)", "def run_iterations(self, n, verbose = False):\n for i in range(n):\n # Calculate total number of neighbors for each cell\n all_neighbors = self.get_all_neighbors()\n all_num_neighbors = np.sum(all_neighbors, axis = (-2,-1)) - self.board\n # Determine new state for each cell using lookup table and number of neighbors\n self.board[:] = np.where(self.board, \n self.lookup[1][all_num_neighbors], \n self.lookup[0][all_num_neighbors])\n # Verbosity check\n if verbose:\n print(self.board)", "def make_evaluation_grids(W, M, N):\n nu = (np.arange(W * M, dtype=float) + 0.5) / (2 * M)\n x = np.arange(N + 1, dtype=float) / (2 * N)\n return nu, x", "def get_grid_size(self, ui, res_dir):\r\n print_it('determining grid size', PrintOpts.lvl1.value)\r\n self.sun.simple_clone()\r\n self.sun.clone.make_profile(PreSol.res_x.value, PreSol.res_y.value,\r\n self.init_force)\r\n self.planet.simple_clone()\r\n self.planet.clone.make_slave_to(self.sun.clone)\r\n\r\n init_displ = hertz_displ(self.sun.clone.e, self.planet.e,\r\n self.sun.clone.ny, self.planet.ny,\r\n self.sun.clone.r_hertz_x,\r\n self.sun.clone.r_hertz_y,\r\n self.planet.clone.r_hertz_x,\r\n self.planet.clone.r_hertz_y,\r\n self.sun.norm_forces[0])\r\n too_many_els_in_y = 1\r\n too_many_els_in_x = 1\r\n contact_width_y = 0.05\r\n contact_width_x = 0.05\r\n while too_many_els_in_y != 0 or \\\r\n too_many_els_in_x != 0:\r\n self.sun.clone.make_profile(self.sun.clone.res_x,\r\n self.sun.clone.res_y, self.init_force,\r\n contact_width=contact_width_y,\r\n contact_length=contact_width_x)\r\n self.planet.clone.make_slave_to(self.sun.clone)\r\n\r\n pressure, init_displ = \\\r\n pre_solve_half_space(self.sun.clone.profile,\r\n self.planet.clone.profile,\r\n self.sun.clone.x_axis,\r\n self.sun.clone.y_axis,\r\n self.sun.clone.res_x, self.sun.clone.res_y,\r\n self.sun.clone.delta_x,\r\n self.sun.clone.delta_y, self.sun.clone.e,\r\n self.planet.clone.e, self.sun.clone.ny,\r\n self.planet.clone.ny,\r\n self.sun.norm_forces[0],\r\n init_displ=init_displ, print_prog=False)\r\n\r\n pressure_els_y = sum(\r\n pressure[math.floor(self.sun.clone.res_y / 2), :] > 0)\r\n too_many_els_in_y = self.sun.clone.res_y - pressure_els_y - 2\r\n if too_many_els_in_y:\r\n contact_width_y += -np.sign(\r\n too_many_els_in_y) * contact_width_y / 25\r\n\r\n pressure_els_x = sum(\r\n pressure[:, math.floor(self.sun.clone.res_x / 2)] > 0)\r\n too_many_els_in_x = self.sun.clone.res_x - pressure_els_x - 2\r\n if too_many_els_in_x:\r\n contact_width_x += -np.sign(\r\n too_many_els_in_x) * contact_width_x / 25\r\n\r\n self.sun.make_profile(self.sun.res_x, self.sun.res_y, self.init_force,\r\n contact_width=contact_width_y,\r\n contact_length=contact_width_x)\r\n 
self.planet.make_slave_to(self.sun)\r\n return init_displ", "def define_grid(self):\n self.h_shape = int(\n np.round((self.h_stop - self.h_start) / self.h_step, 2)) + 1\n self.k_shape = int(\n np.round((self.k_stop - self.k_start) / self.k_step, 2)) + 1\n self.l_shape = int(\n np.round((self.l_stop - self.l_start) / self.l_step, 2)) + 1\n self.grid_origin = [self.h_start, self.k_start, self.l_start]\n self.grid_step = [int(np.rint(1.0/self.h_step)),\n int(np.rint(1.0/self.k_step)),\n int(np.rint(1.0/self.l_step))]\n self.grid_shape = [self.h_shape, self.k_shape, self.l_shape]\n self.grid_basis = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]", "def calculate_pyramid_rows_loop(number_of_rows):\n total_blocks = 0\n for x in range(1, number_of_rows + 1):\n total_blocks += x\n return total_blocks", "def montage(images, w_sub, h_sub, step):\n target = Image.new('RGB', (w_sub*step, h_sub*step))\n left = 0\n right = w_sub\n for i in range(len(images)):\n top=(i//step)*h_sub\n target.paste(images[i], (left, top, right, top+h_sub))\n if(i//step < (i+1)//step):#Check if this row is done\n left = 0#Reset the position in a row\n right = w_sub\n else: #Next picture\n left += w_sub\n right += w_sub\n quality_value = 100\n return target", "def n_steps(self) -> int:\n return len(self) - 1 # subtract the base metric", "def count_tilings(n: int) -> int:\n if n < 5:\n # handle recursive base case\n return 2**(n - 1)\n else:\n # place each tile at end of row and recurse on remainder\n return (count_tilings(n - 1) +\n count_tilings(n - 2) +\n count_tilings(n - 3) +\n count_tilings(n - 4))", "def total_steps(self):\n return self.turns + (self.rounds*self.game_length)", "def solve_puzzle(self):\r\n \r\n counter = 0\r\n rows = self._height-1\r\n cols = self._width-1\r\n # print rows, cols\r\n # print 'The greed has %s rows and %s coloumn indexes' %(rows, cols) \r\n solution_move = ''\r\n if self.get_number(0,0) == 0 and \\\r\n self.get_number(0,1) == 1:\r\n # print 'Congrads Puxxle is Aolved at start!!!!!'\r\n return ''\r\n #appropriate_number = (self._height * self._width) - 1\r\n appropriate_number = (rows+1) * (cols+1) -1\r\n # print 'First appropriate_number=',appropriate_number\r\n # print \"Grid first tile that we will solwing has value =\", self._grid[rows][cols]\r\n \r\n while counter < 300:\r\n counter +=1\r\n # print self\r\n #appropriate_number = (rows+1) * (cols+1) -1\r\n # print 'Appropriate number in loop=',appropriate_number\r\n # print 'We are solving %s index_row and %s index_col' %(rows, cols) \r\n ####Case when we use solve_interior_tile\r\n if rows > 1 and cols > 0:\r\n if self._grid[rows][cols] == appropriate_number:\r\n # print 'This tile is already solved!!!'\r\n cols -= 1\r\n appropriate_number -=1\r\n else:\r\n # print 'We are solving interior tile', (rows, cols)\r\n solution_move += self.solve_interior_tile(rows, cols)\r\n # print 'Solution move=', solution_move\r\n cols -= 1\r\n #### Case when we use solve_col0_tile\r\n elif rows > 1 and cols == 0:\r\n if self._grid[rows][cols] == appropriate_number:\r\n # print 'This tile is already solved!!!'\r\n rows -= 1\r\n cols = self._width-1\r\n appropriate_number -=1\r\n else:\r\n # print 'We are solwing tile 0 in row', rows\r\n # print 'Appropriate number here ='\r\n solution_move += self.solve_col0_tile(rows)\r\n # print 'Solution move=', solution_move\r\n rows -=1\r\n cols = self._width-1\r\n\r\n\r\n #### Cases when we use solve_row0_tile\r\n elif rows == 1 and cols > 1:\r\n if self._grid[rows][cols] == appropriate_number:\r\n # print 'This tile is 
already solved!!!'\r\n rows -= 1\r\n #cols = self._width-1\r\n appropriate_number -= self._width\r\n\r\n else:\r\n # print 'Solving upper 2 rows right side'\r\n solution_move += self.solve_row1_tile(cols)\r\n rows -=1\r\n appropriate_number -= self._width\r\n #### Cases when we use solve_row1_tile \r\n if rows < 1 and cols > 1:\r\n if self._grid[rows][cols] == appropriate_number:\r\n # print 'This tile is already solved!!!'\r\n rows += 1\r\n cols -= 1\r\n appropriate_number +=self._width-1\r\n else:\r\n # print '(1,J) tile solved, lets solwe tile (0,j) in tile',(rows,cols)\r\n # print 'Greed after move solve_row1_tile'\r\n # print self\r\n solution_move += self.solve_row0_tile(cols)\r\n rows +=1\r\n cols -=1\r\n appropriate_number +=self._width-1\r\n\r\n\r\n #### Case when we use solve_2x2\r\n elif rows <= 1 and cols <= 1:\r\n # print 'We are solving 2x2 puzzle'\r\n solution_move += self.solve_2x2()\r\n if self._grid[0][0] == 0 and \\\r\n self._grid[0][1] == 1:\r\n # print 'Congrads Puxxle is SOLVED!!!!!'\r\n break\r\n\r\n\r\n\r\n\r\n if counter > 100:\r\n # print 'COUNTER BREAK'\r\n break\r\n # print solution_move, len(solution_move)\r\n return solution_move\r\n\r\n\r\n\r\n\r\n\r\n\r\n # for row in solution_greed._grid[::-1]:\r\n # print solution_greed._grid\r\n # print 'Row =',row\r\n \r\n # if solution_greed._grid.index(row) > 1:\r\n # print \"Case when we solwing Interior and Tile0 part\"\r\n \r\n\r\n # for col in solution_greed._grid[solution_greed._grid.index(row)][::-1]:\r\n # print 'Coloumn value=', col\r\n #print row[0]\r\n # if col !=row[0]:\r\n # print 'Case when we use just Interior tile solution'\r\n # print solution_greed._grid.index(row)\r\n # print row.index(col)\r\n \r\n # solution += solution_greed.solve_interior_tile(solution_greed._grid.index(row) , row.index(col))\r\n # print 'Solution =', solution\r\n # print self \r\n # print solution_greed._grid\r\n # elif col ==row[0]:\r\n # print 'Case when we use just Col0 solution'\r\n\r\n # else:\r\n # print 'Case when we solwing first two rows'\r\n\r\n #return \"\"\r", "def make_grid(n, gl=False):\n\n if gl:\n low = 0.0 # Lower Range\n high = 1.0\n p = 0.5\n\n # The method here uses 2*n points so halve it\n n, r, wt = GridGenerator.gaussp(low, high, n//2)\n r = np.concatenate((r, np.zeros((n))))\n wt = np.concatenate((wt, np.zeros((n))))\n for i in range(n):\n r[2*n-(i+1)] = (1.0/r[i])**2\n wt[2*n-(i+1)] = (wt[i]/p)*r[2*n - (i+1)]**1.5\n else:\n n, r, wt = GridGenerator.radial_chebyshev(n)\n\n return n, r, wt", "def generate_julia(self, iterations, c):\n if self.grid is None:\n raise RuntimeError(\"Grid hasn't been setup - call set_grid first.\")\n # Define the tensorflow variables\n c_val = tf.constant(np.full(shape=self.grid.shape, fill_value=c, dtype=self.grid.dtype))\n z = tf.Variable(self.grid)\n n = tf.Variable(tf.zeros_like(c_val, tf.float32))\n # Start the tensorflow session\n with tf.Session():\n tf.global_variables_initializer().run()\n # Define the main julia algorithm - either take the square plus x, or keep z\n\n z_out = tf.where(tf.abs(z) < self.threshold, z ** 2 + c_val, z)\n not_diverged = tf.abs(z_out) < self.threshold\n step = tf.group(z.assign(z_out), n.assign_add(tf.cast(not_diverged, tf.float32)))\n\n for i in range(iterations):\n step.run()\n self.end_step = n.eval()\n self.end_z = z_out.eval()", "def step_through_generations(self, num_steps):\n for island in self._islands:\n for _ in range(num_steps):\n island.execute_generational_step()\n self.archipelago_age += num_steps", "def greedy(self):\n n_step_t 
= self.filter['n_step_t']\n n_traj = self.filter['n_traj']\n traj = self.filter['traj']\n steps = [0 for i in xrange(n_step_t)]\n for i in xrange(n_traj):\n n_step = traj[i]['n_step']\n for j in xrange(n_step):\n steps[j] += 1\n self.filter['steps'] = steps\n \n return", "def push_up (grid):\r\n for a in range (4): \r\n for i in range(3,0,-1): \r\n for j in range(4): \r\n if grid[i-1][j]==0: \r\n grid[i-1][j]=grid[i][j] \r\n grid[i][j]=0\r\n #joining like numbers \r\n for i in range(3): \r\n for j in range(4): \r\n if grid[i][j]==grid[i+1][j]: \r\n grid[i][j]=(grid[i][j])*2\r\n grid[i+1][j]=0\r\n #pafter adding the numbers continue to move them \r\n for a in range (4): \r\n for i in range(3,0,-1): \r\n for j in range(4): \r\n if grid[i-1][j]==0: \r\n grid[i-1][j]=grid[i][j] \r\n grid[i][j]=0", "def part2():\n grid[(0, 0)] = 1\n coordinates_value = 0\n layer = 1\n x = 0; y = 0\n done = False\n while not done:\n # print(\"Layer: \", layer)\n # go right one step\n layer += 1; x += 1\n grid[(x,y)] = check_neighbours((x,y))\n\n # go up to the boundary of layer\n for y_up in range(y+1, layer):\n coord = (x, y_up)\n coordinates_value = check_neighbours(coord)\n if coordinates_value > puzzle_input:\n return coordinates_value\n y = y_up\n\n # go left till the boundary of layer\n for x_left in range(x-1, -layer, -1):\n coord = (x_left, y)\n coordinates_value = check_neighbours(coord)\n if coordinates_value > puzzle_input:\n return coordinates_value\n x = x_left\n\n # go down till the boundary of layer\n for y_down in range(y-1, -layer, -1):\n coord = (x, y_down)\n coordinates_value = check_neighbours(coord)\n if coordinates_value > puzzle_input:\n return coordinates_value\n y = y_down\n\n # go right till the boundary of layer\n for x_right in range(x+1, layer):\n coord = (x_right, y)\n coordinates_value = check_neighbours(coord)\n if coordinates_value > puzzle_input:\n return coordinates_value\n x = x_right", "def generate_grid():\n y_offset = -10\n for a in range(20):\n # Line 1\n # Adds offset to the x position of the squares\n x_offset = 10\n for b in range(1):\n # Adds offset to the y position of the squares\n y_offset += 20\n for c in range(20):\n # Prints a row of squares(5 squares along the x)\n square(x_offset, y_offset, 20, 20, dark_green)\n for d in range(1):\n # Adds x offset for the next line of squares on the y axis\n x_offset += 40\n # Line 2 (needs 2 lines because the offset of each line)\n # Adds offset to the x position of the squares\n x_offset = 30\n for e in range(1):\n # Adds offset to the y position of the squares\n y_offset += 20\n for f in range(20):\n # Prints a row of squares(5 squares along the x)\n square(x_offset, y_offset, 20, 20, dark_green)\n for g in range(1):\n # Adds x offset for the next line of squares on the y axis\n x_offset += 40", "def start_points(n, world):\n world[0, 0] = 1\n world[n-1, n-1] = 1\n world[0, n-1] = 1\n world[n-1, 0] = 1\n world[np.round(n/2).astype(int)][np.round(n/2).astype(int)] = 1\n return world", "def step(self):\n lmax = 32\n\n star = self.star\n\n M, K, N = star.mesh_size\n ph = star.phi_coords\n mu = star.mu_coords\n r = star.r_coords\n\n def D1(t, s, m):\n return 1 / 3 * np.sum((ph[2::2] - ph[:-2:2]) *\n (np.cos(m * ph[:-2:2] * star.rho[:-2:2, t, s])\n + 4 * np.cos(m * ph[1:-1:2] * star.rho[1:-1:2, t, s]) +\n np.cos(m * ph[2::2] * star.rho[2::2, t, s])))\n\n def D2(s, l, m):\n sum = 0\n for t in range(0, K - 2, 2):\n sum += (mu[t + 2] - mu[t]) * (lpmv(m, l, mu[t]) * D1(t, s, m) +\n 4 * lpmv(m, l, mu[t + 1]) * D1(t + 1, s, m) +\n 
lpmv(m, l, mu[t + 2]) * D1(t + 2, s, m))\n\n return sum / 3\n\n def D3(l, m, k):\n sum = 0\n\n def fl(r_dash, r):\n if r_dash < r:\n return r_dash**(l + 2) / r**(l + 1)\n else:\n return r**l / r_dash**(l - 1)\n\n for s in range(0, N - 2, 2):\n sum += (r[s + 2] - r[s]) * (fl(r[s], r[k]) * D2(s, l, m) +\n 4 * fl(r[s + 1], r[k]) * D2(s + 1, l, m) +\n fl(r[s + 2], r[k]) * D2(s + 2, l, m))\n\n return sum / 6\n\n def calc_Phi(i, j, k):\n Phi = 0\n\n for l in range(lmax + 1):\n for m in range(min(l + 1, 2)):\n if (m + l % 2 == 1):\n continue\n if m == 0:\n eps = 1\n else:\n eps = 2\n Phi -= eps / factorial(1 + m) * \\\n D3(l, m, k) * lpmv(m, l, mu[j]) * np.cos(m * ph[i])\n\n return Phi\n\n # calculate Phi across grid\n for n in range(N):\n for k in range(K):\n for m in range(M):\n star.Phi[m, k, n] = calc_Phi(m, k, n)\n\n print(f'Phi = {star.Phi[0,0,:]}')\n\n # update the enthalpy\n\n Omega2 = star.eos.Omega2(star.Phi, star.Psi)\n C = star.eos.C(star.Phi, star.Psi)\n\n H = C - star.Phi - Omega2 * star.Psi\n\n # use new enthalpy and Phi to calculate the density\n\n star.rho = star.eos.rho_from_h(H)\n star.rho /= np.max(star.rho)\n\n print(f\"rho = {np.average(star.rho[:,0,:], axis=0)}\")\n\n # make sure density is always non-negative\n # star.rho[star.rho < 0] = 0\n\n print(f\"rho = {np.average(star.rho[:,0,:], axis=0)}\")\n\n # calculate the errors\n\n H_err = np.max(np.abs(H - star.H)) / np.max(np.abs(H))\n\n if np.max(Omega2) == 0:\n if np.abs(Omega2 - star.Omega2) == 0:\n Omega2_err = 0\n else:\n Omega2_err = 1\n else:\n Omega2_err = np.abs(Omega2 - star.Omega2) / np.abs(Omega2)\n\n if np.max(star.C) == 0:\n if np.abs(C - star.C) == 0:\n C_err = 0\n else:\n C_err = 1\n else:\n C_err = np.abs(C - star.C) / np.abs(star.C)\n\n # set variables to new values\n\n star.H = H\n star.Omega2 = Omega2\n star.C = C\n print(\n f\"Errors: H_err = {H_err}, Omega2_err = {Omega2_err}, C_err = {C_err}\")\n\n return H_err, Omega2_err, C_err", "def batch_steps(num_examples, batch_size):\n steps = num_examples // batch_size\n if num_examples % batch_size > 0:\n steps += 1\n return steps", "def __generate_goal_board(self):\n element = 1\n array = []\n\n for row in range(self._n):\n row_to_append = []\n for col in range(self._n):\n row_to_append.append(element)\n element += 1\n array.append(row_to_append)\n\n array[self._n - 1][self._n - 1] = 0\n self._solved_board = Board(array=array, space=[self._n - 1, self._n - 1])", "def WarpStep(iters=5):\n MSG(\"WarpStep\")\n for j in range(iters):\n warp.step()\n return", "def dynamic_iteration(k, n):\n # If only one egg remains, n attempts must be made to find the correct floor.\n if k == 1:\n return n\n # Lookup table for previous solutions.\n W = [[0 for y in range(n + 1)] for x in range(k)]\n # Initialize the first row.\n for i in range(n + 1):\n W[0][i] = i\n # Start on second row, working downward.\n for i in range(1, k):\n # Calculate values for each cell.\n for j in range(1, n + 1):\n W[i][j] = min((max(W[i][j - x], W[i - 1][x - 1]) for x in range(1, j + 1))) + 1\n # Return the result.\n return W[k - 1][n]", "def _compute_step_sizes(self, number_knots, knots):\n # expected bounds on the knots sequence\n x_lower = self.model.workers.lower\n x_upper = self.model.workers.upper\n\n if (number_knots is not None) and (knots is None):\n step_size = (x_upper - x_lower) / (number_knots - 1)\n step_sizes = np.repeat(step_size, number_knots - 1)\n elif (number_knots is None) and (knots is not None):\n #assert knots[0] == x_lower\n #assert knots[-1] == x_upper\n step_sizes = 
np.diff(knots, 1)\n else:\n raise ValueError(\"Either 'number_knots' or 'knots' must be specified!\")\n \n if self.model.assortativity == 'positive':\n step_sizes = step_sizes[::-1]\n\n return step_sizes", "def calculateAllCosts(self, goalCol, goalRow, maxIters):\n\n if (goalCol == self.lastGoalDistCol and\n goalRow == self.lastGoalDistRow):\n return\n\n startTime = time.time()\n print \"Calculating all costs given the current goal.\"\n self.distanceToGoal=[[self.infinity for col in range(self.cols)]\n for row in range(self.rows)]\n if not self.inRange(goalRow, goalCol):\n raise Exception('goalOutOfMapRange')\n self.distanceToGoal[goalRow][goalCol] = 0.0\n\n for iter in range (maxIters):\n\t valuesChanged = 0\n\t if (self.showIterations):\n print \"Displaying result after iteration:\",iter\n self.redraw(self.distanceToGoal, stepByStep=1)\n\t self.frame.update()\n time.sleep(1)\n for row in range(self.rows):\n for col in range(self.cols):\n for i in [-1,0,1]:\n for j in [-1,0,1]:\n if self.inRange(row+i, col+j):\n if self.grid[row][col] > self.threshhold:\n self.distanceToGoal[row][col] = self.infinity\n else:\n if abs(i) == 0 and abs(j) == 0:\n d = 0.00\n elif abs(i) == 1 and abs(j) == 1:\n if self.tooTight(row, col, i, j):\n d = self.infinity\n\t\t\t else:\n d = 1.41\n else:\n d = 1.00\n adj = self.distanceToGoal[row+i][col+j] + self.grid[row+i][col\n+j] + d\n\t\t\t if (adj < self.distanceToGoal[row][col]):\n\t\t\t valuesChanged += 1\n self.distanceToGoal[row][col] = min(self.distanceToGoal[row][col], adj)\n\n if valuesChanged == 0:\n\t endTime = time.time()\n\t print \"Value iteration converged after %d iterations.\" % iter\n\t print \"Time elapsed %0.3f ms.\" % ((endTime-startTime)*1000.0)\n\t break", "def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n g1, g2, g3, cur, ind = 1, 2, 3, 0, 3\n if n < 3:\n return n\n else:\n while ind < n:\n cur = g3 + 2 * g2 + 3 * g1\n ind += 1\n g1, g2, g3 = g2, g3, cur\n return g3", "def push_down (grid):\r\n \r\n #moves values down\r\n for row in range (3,0,-1):\r\n for column in range (4):\r\n if grid[row][column]==0 and grid[row-1][column]!=0:\r\n grid[row][column]=grid[row-1][column]\r\n grid[row-1][column]=0\r\n \r\n #moves values down\r\n for row in range (3,0,-1):\r\n for column in range (4):\r\n if grid[row][column]==0 and grid[row-1][column]!=0:\r\n grid[row][column]=grid[row-1][column]\r\n grid[row-1][column]=0 \r\n \r\n #checks for similar values and combines whilst ensuring values dont get added twice\r\n check_row=-1\r\n check_column=-1\r\n for row in range (0,3):\r\n for column in range (4):\r\n #check if values have been added already\r\n if row==check_row and column==check_column:\r\n continue\r\n \r\n elif grid[row][column]==grid[row+1][column]:\r\n grid[row+1][column]= 2*grid[row][column]\r\n grid[row][column]=0\r\n check_row=row+1\r\n check_column=column\r\n \r\n elif grid[row+1][column] == 0:\r\n grid[row+1][column]=grid[row][column]\r\n grid[row][column]=0\r\n \r\n \r\n for row in range (3,0,-1):\r\n for column in range (4):\r\n if grid[row][column]==0 and grid[row-1][column]!=0:\r\n grid[row][column]=grid[row-1][column]\r\n grid[row-1][column]=0 \r\n \r\n for row in range (3,0,-1):\r\n for column in range (4):\r\n if grid[row][column]==0 and grid[row-1][column]!=0:\r\n grid[row][column]=grid[row-1][column]\r\n grid[row-1][column]=0", "def Compute_Grid(Idx, Coeff, q_max, rules, growth, LevelMax, sc, p, tol ):\n\n seed = 123456789\n #Coeff= Sandia.calculate_coefficients(Idx, q_max)\n new_np = Sandia.max_next_points(Idx, Coeff, rules, 
growth)\n points = Sandia.weights_and_points(new_np, LevelMax, Idx, Coeff, growth, rules, sc, p)\n N_Unique, sparse_index = Sandia.unique_points(seed, tol, points)\n return Sandia.reduce_points_and_weights(N_Unique, points, Idx, sparse_index, Coeff, growth, rules, sc, p)", "def regex_grid(n):\n cx = 2 ** (n - 1)\n cy = 2 ** (n - 1)\n grid = [[grid_numbering(n, i , j, cx, cy) for i in range(2 ** n)] for j in range(2 ** n)]\n \n return grid", "def infinite_dmrg_step(system, current_size, number_of_states_kept):\n set_hamiltonian_to_AF_Heisenberg(system)\n ground_state_energy, ground_state_wf = system.calculate_ground_state()\n entropy, truncation_error = grow_block_by_one_site('left', ground_state_wf, \n\t\t system, \n\t\t\t\t\t\t number_of_states_kept)\n system.right_block = system.left_block\n return ground_state_energy / current_size, entropy, truncation_error", "def xSteps(self,start,ziel,steps=10):\n erg=[]\n wert=(ziel-start)/(steps)\n for i in range(1, steps+1):\n erg.append(round(start+wert*i,2))\n return erg", "def echelon_with_steps(aug_matrix: list) -> list:\n new_value = aug_matrix[:]\n\n # Loop through each row\n print(\"Steps begins here\")\n for i, current in enumerate(new_value):\n\n # Check if it's a potential pivot\n if i < len(new_value) - 1:\n\n # Elimate all in the same column beside the pivot\n for j in range(i + 1, len(new_value)):\n coef = new_value[j][i] / current[i]\n new_value[j] = minus(new_value[j], times(coef, current))\n\n # printing the grid in 2D\n for row in new_value:\n print(row)\n print(\"vvvvvvv\")\n return new_value", "def steps(self, step_count):\n self.dir.value(0 if step_count > 0 else 1)\n for i in range(abs(step_count)):\n self.stp.value(1)\n sleep_us(self.step_time)\n self.stp.value(0)\n sleep_us(self.step_time)\n self.current_position += step_count" ]
[ "0.62839276", "0.6177618", "0.6006914", "0.5955859", "0.5950042", "0.5941752", "0.58852285", "0.58458596", "0.5777134", "0.57740855", "0.5760194", "0.57317764", "0.572567", "0.5684606", "0.5644186", "0.5635062", "0.563393", "0.5599859", "0.5587036", "0.55789906", "0.55467486", "0.55253345", "0.55242723", "0.5519299", "0.5510146", "0.5509966", "0.55013883", "0.54930526", "0.54923177", "0.54829895", "0.54795486", "0.5462071", "0.5452944", "0.5447374", "0.54381293", "0.54339683", "0.54278785", "0.5420682", "0.54195625", "0.54183114", "0.5400571", "0.5391548", "0.53882205", "0.5387278", "0.53836334", "0.53753334", "0.53737336", "0.53693825", "0.5369066", "0.5355846", "0.5346508", "0.53410375", "0.5340784", "0.533977", "0.5333583", "0.5325432", "0.5324272", "0.5322385", "0.5308004", "0.5305585", "0.5301809", "0.52966636", "0.5289803", "0.5285444", "0.5285231", "0.5281761", "0.5276532", "0.5270764", "0.52677333", "0.5259147", "0.52518153", "0.525088", "0.52453595", "0.52363634", "0.52348673", "0.5232572", "0.5231499", "0.5230786", "0.5227626", "0.5225448", "0.5222063", "0.52203727", "0.5213028", "0.52112305", "0.5210196", "0.5208985", "0.51986957", "0.51984763", "0.51981825", "0.51915437", "0.518818", "0.5183294", "0.5182715", "0.51821613", "0.51734", "0.5172577", "0.51695275", "0.5153791", "0.5149459", "0.51486605" ]
0.61110514
2
Display a step-by-step animation of the cellular automaton rule.
def grid_animation(self, steps, figure_size=(12, 12), speed=100):
        steps -= 1
        x = self.seed

        fig, ax = plt.subplots(figsize=figure_size)
        ax.grid(False)
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)

        color_map = matplotlib.colors.ListedColormap(['white', 'black'])

        im = plt.imshow(x[1:-1:1, 1:-1:1], interpolation='nearest', cmap=color_map, animated=True)

        counter = 0

        # Advance the automaton by one step and redraw the grid for each animation frame.
        def update_figure(*args):
            nonlocal x, counter, fig
            counter += 1
            x, stats = self.update_grid(x)
            plt.title(self.title + ' | Step ' + str(counter), fontsize=14)
            im.set_array(x[1:-1:1, 1:-1:1])
            return im,  # trailing comma: FuncAnimation expects an iterable of artists from the update function

        ani = animation.FuncAnimation(fig, update_figure, frames=steps, interval=speed, blit=False, repeat=False)

        return ani
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def animate(self):\n if self.board.automaton.get() == \"life\":\n self.state = self.life.game_of_life(self.state)\n #self.life.random_activations(self.state)\n elif self.board.automaton.get() == \"seeds\":\n self.state = self.life.seeds(self.state)\n else:\n pass\n\n self.board.update_cells(self.state)\n self.parent.after(DELAY, self.animate)", "def render(self):\n step = 1\n while step < self.number_steps and self.update():\n step += 1", "def animate(agent, steps, initialize=None):\n grid, r, c = random_world()\n image = plt.imshow(grid, cmap=cmap, norm=norm)\n if initialize:\n state = initialize()\n for t in range(steps):\n draw_world(grid, r, c, image)\n percept = get_percept(grid, r, c)\n if initialize:\n action, *state = agent(percept, *state)\n else:\n action = agent(percept)\n\n r, c = apply(grid, r, c, action)\n plt.pause(0.0001)\n plt.show()", "def step(self):\n #nx.draw_networkx(self.G, nx.get_node_attributes(self.G, 'pos'))\n self.schedule.step()", "def grid_animation_quick(self, frames, iterations=10, fps=0.02, figsize=(6, 6)):\r\n color_map = matplotlib.colors.ListedColormap(['white', 'black'])\r\n\r\n fig, ax = plt.subplots(figsize=figsize)\r\n\r\n for r in np.arange(0, iterations):\r\n ax.cla()\r\n ax.axes.grid(False)\r\n ax.set_axis_off()\r\n im = ax.imshow(frames[0], cmap=color_map, animated=True)\r\n for image, step in zip(frames[1:], np.arange(1, len(frames[1:])+1)):\r\n time.sleep(fps)\r\n ax.title.set_text('Rule 942 | Step ' + str(step) + ' | Active ' + str(int(np.sum(image))))\r\n im.set_data(image)\r\n fig.canvas.draw()", "def start_sim(self):\n self.anim = animation.FuncAnimation(self.fig, self.anim_func, frames = self.timesteps, interval = 1, blit=True)\n plt.show()", "def visualize_trial(self, n_steps=100):\n\n # prepare for the visualization\n plb.ion()\n mv = mountain_car.MountainCarViewer(self.mountain_car)\n mv.create_figure(n_steps, n_steps)\n plb.draw()\n\n # make sure the mountain-car is reset\n self.mountain_car.reset()\n\n for n in range(n_steps):\n print('\\rt =', self.mountain_car.t)\n print(\"Enter to continue...\")\n input()\n\n sys.stdout.flush()\n\n reward = self.mountain_car.act(self.agent.act())\n self.agent.state = [self.mountain_car.x, self.mountain_car.vx]\n\n # update the visualization\n mv.update_figure()\n plb.draw()\n\n # check for rewards\n if reward > 0.0:\n print(\"\\rreward obtained at t = \", self.mountain_car.t)\n break", "def do_animations(self):\n self.animate_bloop(700, 160, 50)", "def step_animation(self):\n self._light.toggle()", "def start_animation(self):\n\t\ttime.sleep(1)\n\t\tself.fishbowl.animate_balls()", "def run():\r\n \r\n match = a4_acc.Game() # Instantiate a Game object \r\n setup(match)\r\n\r\n if constants.SHOW_GRAPHICS:\r\n axes= startGraphics(match.board) #step 0\r\n \r\n \r\n for k in range(constants.STEPS):\r\n update(match)\r\n updateGraphics(board, k, caxes)\r\n \r\n ########\r\n # TO DO: \r\n # Simulate game given the intial state for constants.STEPS iterations\r\n \r\n # Example code to call the updateGraphics function; the second argument\r\n # needs to be replaced:\r\n # if constants.SHOW_GRAPHICS:\r\n # updateGraphics(match.board, None, axes) \r\n \r\n # Do not change or add code below here for function run\r\n endNow= raw_input('Press ENTER to continue.')", "def _animation_step(self, par_dict):\n\n t0 = time.time()\n dt = par_dict[\"dt\"]\n controller = par_dict[\"controller\"]\n integrator = par_dict[\"integrator\"]\n if controller is not None:\n _, _, tau = controller.get_control_output(\n 
meas_pos=self.x[:self.plant.dof],\n meas_vel=self.x[self.plant.dof:],\n meas_tau=np.zeros(self.plant.dof),\n meas_time=self.t)\n else:\n tau = np.zeros(self.plant.n_actuators)\n self.step(tau, dt, integrator=integrator)\n ee_pos = self.plant.forward_kinematics(self.x[:self.plant.dof])\n ee_pos.insert(0, self.plant.base)\n ani_plot_counter = 0\n for link in range(self.plant.n_links):\n self.animation_plots[ani_plot_counter].set_data(\n [ee_pos[link][0], ee_pos[link+1][0]],\n [ee_pos[link][1], ee_pos[link+1][1]])\n ani_plot_counter += 1\n self.animation_plots[ani_plot_counter].set_data(ee_pos[link+1][0],\n ee_pos[link+1][1])\n ani_plot_counter += 1\n\n set_arrow_properties(self.tau_arrowarcs[link],\n self.tau_arrowheads[link],\n float(np.squeeze(tau)),\n ee_pos[link][0],\n ee_pos[link][1])\n t = float(self.animation_plots[ani_plot_counter].get_text()[4:])\n t = round(t+dt, 3)\n self.animation_plots[ani_plot_counter].set_text(f\"t = {t}\")\n\n # if the animation runs slower than real time\n # the time display will be red\n if time.time() - t0 > dt:\n self.animation_plots[ani_plot_counter].set_color(\"red\")\n else:\n self.animation_plots[ani_plot_counter].set_color(\"black\")\n return self.animation_plots + self.tau_arrowarcs + self.tau_arrowheads", "def animate(frames):\n plt.grid('on')\n ax = plt.gca()\n ax.set_xticks(np.arange(0.5, 10, 1))\n ax.set_yticks(np.arange(0.5, 10, 1))\n ax.set_xticklabels([])\n ax.set_yticklabels([])\n\n for i in range(len(env_list)):\n ax.imshow(env_list[i],cmap='binary')\n plt.pause(0.05)", "def display(self):\n for _jumpline in range(self.y):\n print(end=\"\\n\")\n for _height in range(self.height):\n for _space in range(self.x):\n print(\" \", end=\"\")\n for _width in range(self.width):\n print(\"#\", end=\"\")\n print(end=\"\\n\")", "def animate(i, graph, node_positions, parameters, test=False):\n #print(i)\n if test:\n node = graph.node[i]\n node['adopter'] = 1\n else:\n evolution_step(graph, parameters)\n draw_graph(graph, node_positions)", "def updateGraphics(board, step, caxes):\r\n boardArray= numpy.transpose(numpy.asarray(board))\r\n caxes.set_data(boardArray)\r\n plt.title('Step ' + str(step))\r\n plt.pause(constants.BLINK)\r\n plt.show()", "def display(self):\n ob = self._convert_state(self._env.reset())\n done = False\n while not done:\n ac, _ = self._act(ob, stochastic=False)\n ob, rew, done, _ = self._env.step(ac)\n ob = self._convert_state(ob)\n self._env.render()\n self._env.close()", "def show(self, fig=None):\n i = 0\n # for t = 0:obj.step_size:obj.duration\n # TODO: make a generator?\n iterator = np.linspace(0, self.duration(), num=math.ceil(self.duration() / self.step_precision) + 1)\n tfInterp_l = np.zeros((4, 4, len(iterator)))\n tfInterp_r = np.zeros((4, 4, len(iterator)))\n for t in iterator:\n [lfp, rfp] = self.footPosition(t)\n tfInterp_l[:, :, i] = lfp\n tfInterp_r[:, :, i] = rfp\n i = i + 1\n\n self.show_tf(fig, tfInterp_l, len(iterator))\n self.show_tf(fig, tfInterp_r, len(iterator))", "def step(self):\n\n #make a dictionary of rules and which are active\n binary = self.int_to_8_bit_binary(self.rule_nbr)\n binary_str = ''.join(binary)\n\n active_rules = dict(zip(CA_World.bin_0_to_7, list(binary_str)))\n\n\n #variable to store the new computed line\n #first one will always be zero as there is no rule\n #find better explanation\n new_line = [0]\n\n #compute the new line\n\n #for each ca triplet to check\n for i in range(len(self.ca_lines[-1]) - 2):\n #make the array of three cells together ex [1,0,0] starting at index one and ending 
at index 3 before the end\n #then check to see if it is active from the dictionary\n #add the new cell as a 1 if active else set it to 0\n new_line.append(1 if active_rules[\"\".join(str(x) for x in self.ca_lines[-1][i:i + 3])] == '1' else 0)\n\n # variable to store the new computed line\n # first one will always be zero as there is no rule\n # find better explanation\n new_line.append(0)\n\n\n self.ca_lines.append(new_line)\n\n\n #add leading and trailing zeroes to all entries in the history\n for row in self.ca_lines:\n row.insert(0,0)\n row.append(0)\n\n self.set_display_from_lines()", "def run_next(self, action):\r\n self.screen.fill((0, 0, 0))\r\n\r\n # Run the simulation loop\r\n self.SimulationLoop(action)\r\n if GUIEnabled and self.settings.drawMenu:\r\n self.gui_app.paint(self.screen)\r\n\r\n pygame.display.flip()\r\n self.clock.tick(self.settings.hz)\r\n self.fps = self.clock.get_fps()", "def simulate(self, n: int, show_neighbourhood: bool = False):\n self.draw_current_state(show_neighbourhood)\n for _ in range(n):\n self.move()\n self.draw_current_state(show_neighbourhood)\n self.plotting_engine.animate()", "def anime(df,filename,save=False):\n walls = gc[\"walls\"]\n obstacles = gc[\"obstacles\"]\n destinations = gc[\"destinations\"]\n\n step_index = {}\n for step in df.STEP:\n step_index[step] = df.index[df.STEP == step]\n\n fig = plt.figure(figsize=matplotlib.figure.figaspect(1))\n plt.gca().set_aspect('equal', adjustable='box')\n ax = fig.add_subplot(111)\n # contour\n for wall in walls:\n wall = plt.Polygon(wall.vertices,color=\"red\",alpha=0.2)\n ax.add_patch(wall)\n # obstacles\n for obstacle in obstacles:\n obst = plt.Circle(obstacle.center,obstacle.r,color=\"green\",alpha = 0.5)\n ax.add_patch(obst)\n # destinations\n for dest in destinations:\n dest_range = plt.Polygon(dest.dest_range,color=\"black\",alpha=0.5)\n dest = plt.Polygon(dest.vertices,color=\"black\",alpha=0.2)\n ax.add_patch(dest_range)\n ax.add_patch(dest)\n\n ims = []\n colors = [\"red\",\"blue\"]\n for step in range(len(step_index)):\n im = plt.scatter(df.y[step_index[step + 1]],\n df.x[step_index[step + 1]],\n color=[colors[i] for i in df.KIND[step_index[step+1]]],\n s=80,\n alpha=0.5)\n ims.append([im])\n\n ani = animation.ArtistAnimation(fig,ims,interval=10)\n plt.xlim(gc[\"min_xy\"][0],gc[\"max_xy\"][0])\n plt.ylim(gc[\"min_xy\"][1],gc[\"max_xy\"][1])\n # if save:\n # print(\"start saving animation\")\n # ani.save(filename)\n # print(\"finish saving animation\")\n plt.show()", "def animate(self,i): # Animate function is called iteratively\n\n for x in self.list_patient: # Traversing through each patient\n x.Update()\n # updating each\n self.Computational_part()\n # invoking obj's function\n self.boot_induvidual_plot()\n # invoking obj's function", "def anim():\n i = 0\n while 1:\n\n for r in Reprs:\n r.draw(i)\n i = i+ 1\n i = i % len(t)\n yield", "def _animate(self):\n steps = (1, 7, 14)\n if self.rect.x < self.start_x - 100:\n self.change_dir = False\n elif self.rect.x > self.start_x + 100:\n self.change_dir = True\n self.direction = -1 if self.change_dir else 1\n self.rect.x += self.direction * choice(steps)", "def time_history_animation(self, frame_step=1, magnification=1):\n import matplotlib.pyplot as plt\n import matplotlib.animation as ani\n\n \"\"\"Retrieve maximum displacement for axis limits\"\"\"\n max_list = [max(map(abs, item)) * magnification for item in self.displacement]\n\n \"\"\"Start figure for animation\"\"\"\n fig = plt.figure()\n ax = fig.add_subplot(111)\n\n \"\"\"Define the 
rectangles that represent the DOFs\"\"\"\n rectangle = []\n for i in range(len(self.coordinates)):\n rectangle.append(plt.Rectangle((self.coordinates[i][0],\n self.coordinates[i][1]),\n self.size[i][0], self.size[i][1], alpha=0.5))\n\n \"\"\"Init function for animation draws the frame, so that blip can be used and the animation runs faster\"\"\"\n\n def init():\n for i in range(len(self.coordinates)):\n ax.add_patch(rectangle[i])\n plt.axis('auto')\n plt.xlim([-max(max_list) + min(self.coordinates[:][0]),\n max(max_list) + max([item[0] for item in self.coordinates]) + max(self.size[:][0])])\n return rectangle\n\n \"\"\"Animation function: only the coordinates of the rectangles are updated here\"\"\"\n\n def motion(t_step):\n for i in range(len(self.coordinates)):\n rectangle[i].set_xy((float(self.coordinates[i][0]\n + self.displacement[i][t_step * frame_step] * magnification),\n float(self.coordinates[i][1])))\n return rectangle\n\n \"\"\"Animation function: inter gives the time delay between frames in milli seconds\"\"\"\n inter = int(1000 * self.dt * frame_step)\n self.anim = ani.FuncAnimation(fig,\n motion,\n init_func=init,\n interval=inter,\n blit=True)\n\n motion(int(len(self.displacement) / frame_step))\n plt.show()", "def run_anime(self, inval=10, rep=True, blitit=False):\n ani = animation.FuncAnimation(self.fig, self.animate,\n len(self.artists[0][0]), repeat=rep,\n interval=inval, blit=blitit,\n init_func=self.init_anime)\n plt.show()", "def animate(x,y,title=None,step=None,pause=0.001):\n plt.clf()\n\n if step is not None and title is not None:\n plt.title(\"%s (Step %d)\" %(title,step))\n\n plt.plot(x,y)\n plt.draw()\n plt.pause(pause)\n plt.show(block=False)", "def run(self) -> None:\n for episode in range(1, self.episodes + 1):\n print('Episode:', episode)\n steps, state_action_history = self.run_one_episode()\n self.steps_per_episode.append(steps)\n if episode % parameters.CACHING_INTERVAL == 0 or steps < 1000:\n visualize.animate_track(state_action_history, f'agent-{episode}')\n\n print('Training completed.')\n visualize.plot_steps_per_episode(self.steps_per_episode)\n visualize.plot_epsilon(self.agent.epsilon_history)\n\n if parameters.VISUALIZE_FINAL_GAME:\n print('Showing one episode with the greedy strategy.')\n self.agent.epsilon = 0\n steps, state_action_history = self.run_one_episode()\n print(f'Episode completed in {steps} steps.')\n visualize.animate_track(state_action_history)", "def step(self, state):", "def display(self):\n for r in range(1, self.size+1):\n print(\"+\" + (\"-+\"*self.size))\n print(\"|\", end=\"\")\n for c in range(1, self.size+1):\n print(self.gameState[r,c], end=\"\")\n print(\"|\",end=\"\")\n print()\n print(\"+\" + (\"-+\"*self.size))", "def display(self):\r\n os.system('cls')\r\n index = 0\r\n for i in self.list:\r\n print(str(index) + \" \" + i.showRule())\r\n index += 1", "def step(self):\n # update the score\n isScoreUpdated = self.update_score()\n \n # the player hit an obstacle\n isFail = self.fail()\n if isFail:\n self.inGame = False\n # display an explosion instead of the bird image\n self.bird.img = jpg2numpy(self.args.explosion_sprite, self.args.explosion_dims)\n\n # if the AI is playing: take an action\n if not self.isHuman:\n self.agent.choose_action() \n \n # compute the new bird position\n self.bird.move()\n \n # scroll 1 frame and generate the new environment\n new_state = self.env.scroll()\n \n # if the AI is playing: set the new state\n if not self.isHuman:\n # get the new_state\n new_state = self.env.get_state()\n # feed 
the transition information to the agent\n self.agent.set_transition(new_state, isScoreUpdated, isFail) \n \n # only display 1 frame every 'n_frames' frames for fluidity\n if not self.muteDisplay and (self.fail() or (self.isHuman and (self.t % self.args.n_frames_human == 0)) or (not self.isHuman and (self.t % self.args.n_frames_ai == 0))): \n # change the displayed image to account for changes\n self.im.set_data(self.env.map)\n # update the image without pausing\n plt.draw()\n \n # new time step\n self.t += 1\n self.hasJumped = False", "def animate(directory,gifname,n_t,step=2,duration=0.2):\n\t# create list of filenames\n\tfnames = dir_fname(directory,\"*\")\n\t# create list of plots\n\timages=[] \n\tfor k in range(0,n_t):\n\t\tk = k*step\n\t\tprint('Mounting Im '+ str(k))\n\t\tFIG_NAME=fnames[k]\n\t\timages.append(imageio.imread(FIG_NAME)) # read\n\t# Now we can assemble the video\n\timageio.mimsave(gifname, images,duration=duration) # create gif\n\tprint('Animation'+gifname+'Ready')\n\treturn True", "def Advance():\n warp.step()", "def example(self):\n while self.check_end() == False:\n plt.pause(0.25)\n end = self.update_board(random.choice(self.get_actions()), True)", "def cont(a=0, show_arrows=True):\n global simulator, recorder\n if simulator is None:\n print \"Program is not started\"\n return\n __record(pc(), cont, a)\n try:\n simulator.step(a)\n while True:\n simulator.step()\n except:\n simulation_error()\n exec_hooks(cont)\n if show_arrows:\n arrows()", "def drawJumper(self):\n for x in self.jumper:\n print(x)", "def step(self):\n #1. Time progresses\n self.time_operator.step()\n \n #2. Form and dissolve relationships\"\n self.relationship_operator.step()\n\n #3. HIV transmission\n self.infection_operator.step()", "def animate(self, save_path=None):\n interval = 100 # 1 frame per interval ms\n frames = int(20 * len(self.experienced_states_kancks_paths) / self.frame_skip) # times to call updatefig\n blit = True # acceralate computation\n ani = animation.FuncAnimation(self.fig, self.updateifig, frames=frames,\n interval=interval, blit=blit)\n if save_path is not None:\n ani.save(os.path.join(save_path, 'anim.mp4'), writer='ffmpeg')\n else:\n plt.show()", "def display_loop(self):\n from time import sleep\n self.displaying = True\n while self.displaying:\n print self\n sleep(.083)\n print loc(self.y, self.x) + ' '*self.size", "def SetAnimationStep(self, step):\r\n\r\n self._animation_step = float(step)", "def start(self, total_iterations):\n sys.stdout.write(\"[%s]\" % (\" \" * self.width))\n sys.stdout.flush()\n # return to start of line, after '['\n sys.stdout.write(\"\\b\" * (self.width+1)) \n\n self.state, self.total = 0, total_iterations", "def generate_animated_gif(env, case_dir, save_dir, writer='imagemagick'):\n # initialize actor\n actor = Actor(env.num_states, env.num_actions)\n # Load trained actor\n trained_actor = torch.load(os.path.join(os.getcwd(), case_dir, 'actor_trained.pt'))\n actor.load_state_dict(trained_actor)\n\n s = env.reset()\n s_traj = [s]\n done = False\n while not done:\n (mu, std) = actor(torch.from_numpy(s))\n dist = torch.distributions.normal.Normal(mu, std)\n a = dist.sample().numpy()\n (s, r, done) = env.step(a)\n s_traj.append(s)\n\n fig = plt.figure(figsize=(5, 4))\n ax = fig.add_subplot(111, autoscale_on=False, xlim=(-1.2, 1.2), ylim=(-1.2, 1.2))\n ax.set_aspect('equal')\n ax.grid()\n line, = ax.plot([], [], 'o-', lw=2)\n text = ax.set_title('')\n\n def animate(i):\n theta = s_traj[i][0]\n line.set_data([0, -np.sin(theta)], [0, np.cos(theta)])\n 
text.set_text(f'time = {i * env.dt:3.1f}')\n return line, text\n\n anim = animation.FuncAnimation(fig, animate, len(s_traj), interval=(1000 * env.dt), blit=True, repeat=False)\n anim.save(os.path.join(save_dir, 'animated_trajectory.gif'), writer=writer, fps=10)\n\n plt.close()", "def on_step(self) -> None:\r\n\r\n if self.board == None:\r\n return\r\n\r\n TkState.disable(self.edit_menu.winfo_children())\r\n TkState.enable([self.reset_button])\r\n self.anim_board.next_gen()\r\n self.on_new_generation()\r\n self.painter.draw_board()", "def animate(self, interval=100, title='test', medium_from=None, medium_to=None):\n # data input\n N = self.get_N()\n plotting_data = self.data[:,N:2*N]\n time = self.get_time()\n y_min = np.min(plotting_data)\n y_max = np.max(plotting_data)\n n_frames = plotting_data.shape[0]\n \n # initialization of plots\n fig = plt.figure()\n ax = plt.axes(xlim=(0, N), ylim=(y_min, y_max))\n time_text = ax.text(0.02, 0.95, '', transform=ax.transAxes)\n time_text.set_text('')\n l, = plt.plot([], [], '.-')\n plt.xlabel('bead number')\n plt.ylabel('velocity (computer units)')\n plt.title(title)\n \n # visualize interfaces\n if medium_from is not None:\n if medium_to is None:\n medium_to = N\n \n plt.fill_between(range(medium_from, medium_to+1), y_min, y_max, facecolor='red', alpha=0.5)\n \n # updat4e function for animation\n def update_line(num, plotting_data, time, line):\n dat = plotting_data[num,:]\n line.set_data([range(len(dat)), dat])\n time_text.set_text('time = %.1f' % time[num])\n line.set_label('t= 10')\n return line,\n \n line_ani = animation.FuncAnimation(fig, update_line, n_frames, fargs=(plotting_data, time, l), interval=interval, blit=False)\n return line_ani", "def example_2(): \n maze = MazeWorld(8,8)\n for i in range(maze.width):\n if i is not 6 : maze.add_wall([2, i], \"N\") \n for i in range(maze.width):\n if i is not 2 : maze.add_wall([5, i], \"N\")\n n_step = 4\n E = maze.compute_empowerment(n_step=n_step, n_samples=8000)\n maze.plot(colorMap=E)\n plt.title('%i-step empowerment' % n_step)\n plt.show()", "def show_progress(self, game):\n if self.verbose:\n if self.params.eval_interval is not None and (self.episode % self.params.eval_interval == 0):\n self._print_progress()\n # evaluate one run\n state = game.reset()\n self.tracker.reset()\n done = False\n while not done:\n controls = self.sess.run(self.graph.target_actor_outputs,\n feed_dict={self.graph.states: np.array([state])}).reshape(-1)\n state, reward, done, _ = game.step(controls)\n self.tracker.step(reward)\n print('****** ', self.tracker.total_reward, self.tracker.discounted_rewards, ' ******')", "def step(self):\n self.latent.step()", "def horde_step(self, observation):", "def display_hangman(self):\n print(Fore.CYAN + HANGMAN_PICS[self.stage])\n print('\\n')\n print(self.progress + Style.RESET_ALL)\n print('\\n')", "def animate():\n for c in itertools.cycle(['|', '/', '-', '\\\\']):\n if done:\n break\n sys.stdout.write('\\rloading ' + c)\n sys.stdout.flush()\n time.sleep(0.1)\n sys.stdout.write('\\rDone! 
')", "def printStep(self):\n\n\t\tprint '\\nConfiguracao da fita: ',\n\n\t\tcount = 0\n\t\twhile count < len(self.tape):\n\t\t\tif count == self.currentPos:\n\t\t\t\tprint '_',\n\n\t\t\tprint self.tape[count],\n\t\t\tcount += 1\n\n\t\tprint '\\nEstado atual: ', self.currentState", "def startPresentation(ntrials):\n\n for trialIdx in range(ntrials):\n\n # draw the fixcross followed by the star\n drawFixCross(1.0)\n drawStar(4.0)", "def step(self, screen):\n max_y, _ = screen.getmaxyx()\n allowed_y = range(0, max_y)\n def is_on_screen(y):\n return y in allowed_y\n still_visible = False\n y, x = -1, -1\n for char in self.characters:\n y, x = int(char.y), char.x\n char.y += self.min_falling_speed\n if not is_on_screen(y):\n continue\n still_visible = True\n screen.addstr(y, x, char.symbol, char.color)\n # Clear trail of characters after column\n y -= 1\n while is_on_screen(y):\n screen.addstr(y, x, ' '*self.width)\n y -= 1\n return still_visible", "def animate(results, draw_func, *args, interval=None):\n plt.figure()\n try:\n for t, state in results.iterrows():\n draw_func(t, state, *args)\n plt.show()\n if interval:\n sleep(interval)\n clear_output(wait=True)\n draw_func(t, state, *args)\n plt.show()\n except KeyboardInterrupt:\n pass", "def next_step(self):\n\n c = 1\n dt = 0.001\n dx = 1 / 20**2\n\n # copy current state first\n next_state = np.copy(self.state)\n\n # iterate over matrix\n for i in range(self.width - 1):\n for j in range(self.height - 1):\n\n if not self.shape == \"circle\" or self.circle[i, j] == 1:\n\n # left bottom corner\n if i == 0 and j == 0:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + 0\\\n + 0 + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i, j]\n # right top corner\n elif i == 0 and j == self.height - 1:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (0 + self.state[i + 1, j]\\\n + self.state[i, j - 1] + 0\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n # right bottom corner\n elif i == self.width - 1 and j == 0:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i, j - 1] + 0\\\n + 0 + self.state[i - 1, j]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n # left bottom corner\n elif i == self.width - 1 and j == self.height - 1:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i, j - 1] + self.state[i - 1, j]\\\n + 0 + 0\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n elif i == 0: # j is not 0\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + 0\\\n + self.state[i, j - 1] + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n elif j == 0:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + self.state[i - 1, j]\\\n + 0 + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n elif i == self.width - 1:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (0 + self.state[i - 1, j]\\\n + self.state[i, j - 1] + self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n elif j == self.height - 1:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + self.state[i - 1, j]\\\n + self.state[i, j - 1] + 0\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n else:\n next_state[i, j] = ((c * dt)/ dx)** 2\\\n * (self.state[i + 1, j] + self.state[i - 1, j]\\\n + self.state[i, j - 1] 
+ self.state[i, j + 1]\\\n - 4 * self.state[i, j])\\\n + 2 * self.state[i, j] - self.prev_state[i , j]\n\n self.prev_state = np.copy(self.state)\n self.state = np.copy(next_state)\n\n self.timestep += 1", "def step(self):\n self.game.step()", "def next_step():\n global fenetre, grid\n if len(l)>2:\n board_display(l[0])\n l.pop(0)\n steps.set(str(len(l))+\" steps remaining\")\n elif len(l)==2:\n board_display(l[0])\n l.pop(0)\n steps.set(str(len(l))+\" step remaining\")\n else:\n board_display(l[0])\n steps.set(\"No more steps remaining\")\n btn.set(\"Finished!\")", "def train(self, n_steps=1000, show=False):\n epsilon = 0.01\n state = self.env.reset()\n done = False\n episode_rewards = []\n episode_reward = 0\n losses = []\n for step_i in range(n_steps):\n action = self.act(state, epsilon)\n next_state, reward, done, _ = self.env.step(action)\n episode_reward += reward\n\n self.optimizer.zero_grad()\n loss = self._compute_loss(state, action, reward, next_state, done)\n losses.append(loss)\n loss.backward()\n self.optimizer.step()\n\n if done:\n state = self.env.reset()\n episode_rewards.append(episode_reward)\n episode_reward = 0\n else:\n state = next_state\n \n if show:\n self._plot(step_i, episode_rewards, losses)", "def Start(self): # this is used to start the object\n ani = anim.FuncAnimation(self.f, self.animate, interval=1000)\n # animating object wth 1 sec gap\n self.plt_0.tight_layout()\n self.plt_0.show()\n # showing the plot", "def step(self):\n r, c = self.ant_r, self.ant_c\n if self.grid[r][c] == LangtonAnt.WHITE:\n self.direction = rotate_counterclock(self.direction)\n self.grid[r][c] = LangtonAnt.BLACK\n else:\n self.direction = rotate_clockwise(self.direction)\n self.grid[r][c] = LangtonAnt.WHITE\n\n self.ant_r -= self.direction[1]\n self.ant_c += self.direction[0]", "def step(self):\n self.turn_on_corners()\n part_1.Grid.step(self)\n self.turn_on_corners()", "def step(self, amt=1):\n \n # For checking if all the animations have their framse looked at\n #activewormind = [i for i, x in enumerate(self._idlelist) if x == False]\n #print \"Worm {} at {:5g}\".format(activewormind, 1000*(time.time() - starttime))\n # save times activated for each worm \n [self.timedata[i].append(1000*(time.time() - starttime)) for i, x in enumerate(self._idlelist) if x == False]\n \n #self._led.buffer = [0] * 480\n self._led.pixheights = [-100] * self._led.numLEDs\n #print type(self._led.buffer)\n for ledcopy in self._ledcopies:\n # self._led.buffer = map(ixor, self._led.buffer, ledcopy.buffer)\n # use pixheights but assume all buffers same size\n # print ledcopy.driver[0].pixheights\n for pix in range(self._led.numLEDs):\n #for ledcopy in self._ledcopies:\n if self._led.pixheights[pix] == ledcopy.driver[0].pixheights[pix]:\n for i in range(3):\n self._led.buffer[3*pix + i] ^= ledcopy.buffer[3*pix + i]\n elif self._led.pixheights[pix] < ledcopy.driver[0].pixheights[pix]:\n for i in range(3):\n self._led.buffer[3*pix + i] = ledcopy.buffer[3*pix + i]\n self._led.pixheights[pix] = ledcopy.driver[0].pixheights[pix] \n self._step += 1", "def show_evolution_chromosome(to_show, steps):\n pattern = np.fromstring('1 1 0 1 0 1 0 1 0 1', dtype=int, sep=' ')\n board = Board(pattern, 2)\n if to_show:\n gui = Gui(board,steps, 50)\n tkinter.Button(text=\"Start\", command=lambda: TimerUpdate(gui)).pack()\n gui.window.mainloop()\n else:\n board.print_pattern()\n board.evolve(steps)\n board.print_data()", "def performSimulation(self):\n \n if self.parameters['verbose']:\n print(\"=====================\\nStarting 
simulation with parameters\\n\",self.parameters)\n print(\"=====================\\nInitial Graph\\n\")\n self.showState()\n print(\"=====================\")\n\n while self.parameters['steps'] > 0:\n if self.parameters['verbose']: print(\"Performing step\")\n self.performStep()\n if self.parameters['verbose']: self.showState()\n\n if self.parameters['verbose']:\n print(\"=====================\\nFinished Simulation\\n\\nResult graph:\")\n self.showState()\n #self.showGraph(self.parameters['file_name'])\n #self.showState()\n #self.showStats()", "def step(self):\n self.schedule.step()", "def step_button(self):\r\n self.update_settings()\r\n self.set_val(\"display_move\")\r\n if self.step_call is not None:\r\n self.step_call()", "def animate(self, save_path=None):\n interval = 100 # 1 frame per interval ms\n # frames = int(len(self.map_paths) / self.frame_skip) # times to call updatefig\n frames = int(min(len(self.map_paths) / self.frame_skip, len(self.experienced_states_kancks_paths) / self.frame_skip / 2))\n frames -= 1 # because of my bad logging\n blit = True # acceralate computation\n ani = animation.FuncAnimation(self.fig, self.updateifig, frames=frames,\n interval=interval, blit=blit)\n if save_path is not None:\n ani.save(os.path.join(save_path, self.save_name), writer='ffmpeg')\n else:\n plt.show()", "def setDisplayMode(self):\n self.step = (self.max_step + int(self.include))\n self.display = Fourier.inverseTransform(\n self.coefficients, self.display_number)", "def run(self):\n self.simulate_test_data()\n self.pipeline_test_data()\n self.plot_jump_flags_image()\n self.plot_groupdq_flags(pixel=[884, 550])\n self.plot_ramps_pre_post_correction(pixel=[884, 550])", "def simulate(self, num_steps=100, vis_steps=1, img_steps=2000,\n abscissa=None, ordinate=20000, colour_herb=None,\n colour_carn=None):\n self.vis_steps = vis_steps\n self.img_steps = img_steps\n self.plot_update(num_steps, abscissa, ordinate,\n colour_herb, colour_carn)\n plt.show()", "def fourLegSimulator(beta_list, gamma_list, beta_list2, gamma_list2, beta_list3, gamma_list3, beta_list4, gamma_list4, bodyHeight, femur, tibia):\n \n #import necessary packages\n import numpy as np \n import itertools # This package is specifically used for having multiple variable \"for\" loop using zip function\n from numpy import pi, sin, cos, sqrt\n import matplotlib.pyplot as plt\n import matplotlib.animation as animation\n get_ipython().run_line_magic('matplotlib', 'qt')\n\n\n\n # input parameters\n Femur_one_leg = femur # Length of femur (upper bone)\n Tibia_one_leg = tibia # Length of Tibia (lower bone)\n\n\n # Making arrays for containing value of respective coordinates\n X1 = np.zeros(len(beta_list)) # array for x_coordinates of moving point of femur\n Y1 = np.zeros(len(beta_list)) # array for y_coordinates of moving point of femur\n X2 = np.zeros(len(gamma_list)) # array for x_coordinates of moving point of tibia i.e end effector in our case\n Y2 = np.zeros(len(gamma_list)) # array for y_coordinates of moving point of tibia i.e end effector in our case\n \n X1_2 = np.zeros(len(beta_list2)) # array for x_coordinates of moving point of femur\n Y1_2 = np.zeros(len(beta_list2)) # array for y_coordinates of moving point of femur\n X2_2 = np.zeros(len(gamma_list2)) # array for x_coordinates of moving point of tibia i.e end effector in our case\n Y2_2 = np.zeros(len(gamma_list2)) # array for y_coordinates of moving point of tibia i.e end effector in our case\n\n X1_3 = np.zeros(len(beta_list3)) # array for x_coordinates of moving point of 
femur\n Y1_3 = np.zeros(len(beta_list3)) # array for y_coordinates of moving point of femur\n X2_3 = np.zeros(len(gamma_list3)) # array for x_coordinates of moving point of tibia i.e end effector in our case\n Y2_3 = np.zeros(len(gamma_list3)) # array for y_coordinates of moving point of tibia i.e end effector in our case \n \n \n X1_4 = np.zeros(len(beta_list4)) # array for x_coordinates of moving point of femur\n Y1_4 = np.zeros(len(beta_list4)) # array for y_coordinates of moving point of femur\n X2_4 = np.zeros(len(gamma_list4)) # array for x_coordinates of moving point of tibia i.e end effector in our case\n Y2_4 = np.zeros(len(gamma_list4)) # array for y_coordinates of moving point of tibia i.e end effector in our case\n \n \n #Populating the above defined arrays currently filled with zeros to respective coordinates\n #Here in the for loop zip function is used to iterate two variales simultaneously and enumerate function to return index numbers\n\n for index,(beta,gamma) in enumerate(zip(beta_list,gamma_list)):\n x1 = Femur_one_leg*cos(-beta - (pi/2)) # x-cooridnate of femur\n y1 = Femur_one_leg*sin(-beta - (pi/2)) # y-cooridnate of femur\n x2 = x1 + Tibia_one_leg*cos(-pi/2 - (beta + gamma)) # x-coordinate of tibia\n y2 = y1 + Tibia_one_leg*sin(-pi/2 - (beta + gamma)) # y-coordinate of tibia\n \n\n # using above used flag variables to replace zeros with respective corrdinates\n X1[index] = x1 \n Y1[index] = y1 \n X2[index] = x2 \n Y2[index] = y2 \n \n for index2,(beta2,gamma2) in enumerate(zip(beta_list2,gamma_list2)):\n x1_2 = Femur_one_leg*cos(-beta2 - (pi/2)) # x-cooridnate of femur\n y1_2 = Femur_one_leg*sin(-beta2 - (pi/2)) # y-cooridnate of femur\n x2_2 = x1_2 + Tibia_one_leg*cos(-pi/2 - (beta2 + gamma2)) # x-coordinate of tibia\n y2_2 = y1_2 + Tibia_one_leg*sin(-pi/2 - (beta2 + gamma2)) # y-coordinate of tibia\n \n\n # using above used flag variables to replace zeros with respective corrdinates\n X1_2[index2] = x1_2 \n Y1_2[index2] = y1_2 \n X2_2[index2] = x2_2 \n Y2_2[index2] = y2_2 \n\n for index3,(beta3,gamma3) in enumerate(zip(beta_list3,gamma_list3)):\n x1_3 = 40 + Femur_one_leg*cos(-beta3 - (pi/2)) # x-cooridnate of femur\n y1_3 = Femur_one_leg*sin(-beta3 - (pi/2)) # y-cooridnate of femur\n x2_3 = x1_3 + Tibia_one_leg*cos(-pi/2 - (beta3 + gamma3)) # x-coordinate of tibia\n y2_3 = y1_3 + Tibia_one_leg*sin(-pi/2 - (beta3 + gamma3)) # y-coordinate of tibia\n \n\n # using above used flag variables to replace zeros with respective corrdinates\n X1_3[index3] = x1_3 \n Y1_3[index3] = y1_3 \n X2_3[index3] = x2_3 \n Y2_3[index3] = y2_3\n \n for index4,(beta4,gamma4) in enumerate(zip(beta_list4,gamma_list4)):\n x1_4 = 40 + Femur_one_leg*cos(-beta4 - (pi/2)) # x-cooridnate of femur\n y1_4 = Femur_one_leg*sin(-beta4 - (pi/2)) # y-cooridnate of femur\n x2_4 = x1_4 + Tibia_one_leg*cos(-pi/2 - (beta4 + gamma4)) # x-coordinate of tibia\n y2_4 = y1_4 + Tibia_one_leg*sin(-pi/2 - (beta4 + gamma4)) # y-coordinate of tibia\n \n\n # using above used flag variables to replace zeros with respective corrdinates\n X1_4[index4] = x1_4 \n Y1_4[index4] = y1_4 \n X2_4[index4] = x2_4 \n Y2_4[index4] = y2_4 \n\n # Setting up figure and subplot\n\n fig = plt.figure()\n fig.canvas.set_window_title('One Leg trajectory Planning')\n ax = fig.add_subplot(111, aspect='equal', autoscale_on=False, xlim=(-30,70), ylim=(-50,50))\n ax.grid()\n ax.set_title('Leg Trajectory')\n ax.axes.xaxis.set_ticklabels([])\n ax.axes.yaxis.set_ticklabels([])\n \n line, = ax.plot([], [], 'o-', lw=5, color='#05143b')\n line2, = 
ax.plot([], [], 'o-', lw=5, color='#37acf0')\n line3, = ax.plot([], [], 'o-', lw=5, color='#05143b')\n line4, = ax.plot([], [], 'o-', lw=5, color='#37acf0')\n \n\n\n # initialization function\n def init():\n line.set_data([], [])\n line2.set_data([], [])\n line3.set_data([], [])\n line4.set_data([], [])\n return line,line2,line3,line4,\n\n # animation function\n def animate(i):\n x_points = [0, X1[i], X2[i]]\n y_points = [0, Y1[i], Y2[i]]\n \n x2_points = [0, X1_2[i], X2_2[i]]\n y2_points = [0, Y1_2[i], Y2_2[i]]\n \n x3_points = [40, X1_3[i], X2_3[i]]\n y3_points = [0, Y1_3[i], Y2_3[i]]\n \n x4_points = [40, X1_4[i], X2_4[i]]\n y4_points = [0, Y1_4[i], Y2_4[i]]\n \n\n line.set_data(x_points, y_points)\n line2.set_data(x2_points, y2_points)\n line3.set_data(x3_points, y3_points)\n line4.set_data(x4_points, y4_points)\n \n return line, line2, line3, line4\n\n # call the animation\n ani = animation.FuncAnimation(fig, animate, init_func=init, frames=len(X1), interval=100, blit=True, repeat=True)\n \n\n # plotting respective movement trajectories in the same plot\n plt.plot(X2,Y2, '#05143b')\n# plt.plot(X1,Y1)\n \n plt.plot(X2_2,Y2_2,'#37acf0')\n# plt.plot(X1_2,Y1_2)\n \n plt.plot(X2_3,Y2_3,'#05143b')\n# plt.plot(X1_3,Y1_3)\n \n plt.plot(X2_4,Y2_4,'#37acf0')\n# plt.plot(X1_4,Y1_4)\n \n \n \n plt.plot([-20,60],[-bodyHeight,-bodyHeight],'brown')\n plt.plot([-4,44],[0,0],'#010b24')\n plt.plot([-4,-4],[0,5],'#010b24')\n plt.plot([44,44],[0,5],'#010b24')\n plt.plot([-4,44],[5,5],'#010b24')\n \n for ind in range(100):\n plt.plot([-4,44],[ind*5/100,ind*5/100],'black')\n \n return None", "def runFrame(self):\n self._drawFrame(self._advanceTime())", "def animate():\n for c in itertools.cycle(['|', '/', '-', '\\\\']):\n if done:\n break\n sys.stdout.write('\\rLoading ' + c)\n sys.stdout.flush()\n time.sleep(0.1)\n #prefer sys.stdout instead of print for continuously updating\n #the Loading animation", "def setDisplay(self):\n self.graph_display=[self.complexCompose(self.coefficients,(t+1)/self.display_number)[-1] for t in range(self.display_number)]", "def step(self, move):", "def main(self):\n\t\t\tfactor = 0.0\n\t\t\tif self.time.time() > (self.startTime + self.showTime + self.displayTime + self.hideTime):\n\t\t\t\tself.end()\n\t\t\telif self.time.time() > (self.startTime + self.showTime + self.displayTime):\n\t\t\t\t#Hide animation will occur here.\n\t\t\t\tfactor = (self.hideTime - (self.time.time() - (self.startTime + self.showTime + self.displayTime))) / self.hideTime\n\t\t\t\tself.frame.colors = [(0.2, 0.2, 0.2, (0.8 * factor)) for i in range(4)]\n\t\t\t\tself.frame._update_position(self.frame._base_size, [0.7, (1.0 - (0.2 * factor))])\n\t\t\t\tself.display._update_position(self.display._base_size, self.display._base_pos)\n\t\t\t\tself.display.text = self.text\n\t\t\t\n\t\t\telif self.time.time() > (self.startTime + self.showTime):\n\t\t\t\tif self.frame._base_pos != [0.7, 0.8]:\n\t\t\t\t\tself.frame._update_position(self.frame._base_size, [0.7, 0.8])\n\t\t\t\t\tself.display._update_position(self.display._base_size, self.display._base_pos)\n\t\t\t\t\tself.display.text = self.text\n\t\t\t\n\t\t\telse:\n\t\t\t\t#play show animation.\n\t\t\t\tfactor = (self.time.time() - self.startTime) / self.showTime\n\t\t\t\tself.frame.colors = [(0.2, 0.2, 0.2, (0.8 * factor)) for i in range(4)]\n\t\t\t\tself.frame._update_position(self.frame._base_size, [0.7, (1.0 - (0.2 * factor))])\n\t\t\t\tself.display._update_position(self.display._base_size, self.display._base_pos)\n\t\t\t\tself.display.text = self.text", 
"def start_animation(self) -> None:\n increment_values = {0: 1, self.original_height: -1}\n self.increment = increment_values.get(self.current_height, 0) # Compressed if", "def display(self, agent):\n agent.prepare()\n self.env.render()\n while not self.ended():\n self.perform(agent.act(self, verbose=True), render=True, delay=FAST)\n self.update()\n agent.update(self)\n self.env.close()", "def animate_traj(traj, robot, pause=True, restore=True):\n if restore: _saver = openravepy.RobotStateSaver(robot)\n viewer = trajoptpy.GetViewer(robot.GetEnv())\n for (i,dofs) in enumerate(traj):\n print \"step %i/%i\"%(i+1,len(traj))\n robot.SetActiveDOFValues(dofs)\n if pause: viewer.Idle()\n else: viewer.Step()", "def _update_anim(self):\n if self._skip_frames > 1:\n # Do not render while _skip_frames is > 1\n self._skip_frames -= 1\n else:\n # Render frame\n self._visualization.taskMgr.step()\n # Calculate number of frames that need to be skipped\n self._skip_frames = int(1 / self._fps / self._dt)", "def step(self):\n for layer in self.layers:\n layer.step()", "def graphics_loop(self, font):\n self.screen.blit(self.background, (0, 0))\n if not self.scroll:\n self.all_sprites.draw(self.screen)\n else:\n self.draw_onscreen()\n #display which step we're on\n if pygame.font:\n text = font.render(str(self.stepid), 1, (255, 255, 255))\n textpos = text.get_rect(centerx = int(\n (self.screen.get_width() * 0.5)))\n self.screen.blit(text, textpos)\n pygame.display.flip()\n #cap at x fps\n self.clock.tick(self.max_fps)", "def display(self):\n count = 0\n self.displays[0].start() # call only once to support shift chain\n for d in self.displays:\n d.output(self.data[count])\n count += 1\n self.displays[0].latch() # call only once to support shift chain", "def show_progress(self, game):\n if self.verbose:\n if self.params.eval_interval is not None and (self.episode % self.params.eval_interval == 0):\n self.print_progress()\n # evaluate one run\n state = game.reset()\n self.tracker.reset()\n done = False\n while not done:\n q_values = self.sess.run(self.graph.target_q_values, feed_dict={self.graph.states: [state]})\n action = np.argmax(q_values)\n state, reward, done, _ = game.step(action)\n self.tracker.step(reward)\n print('****** ', self.tracker.total_reward, self.tracker.discounted_rewards, ' ******')", "def animate_menu(grid_display, cycle_img, cycle_rect, start_x, end_x, step, wall_color):\n\n if cycle_rect.right >= end_x:\n cycle_rect.left = start_x\n else:\n cycle_rect.move_ip(step, 0)\n\n pygame.draw.rect(grid_display, wall_color, (start_x, cycle_rect.top + 5, cycle_rect.left - start_x + 40,\n cycle_rect.height - 10))\n grid_display.blit(cycle_img, cycle_rect)", "def episode_step(self):\n self.nsteps += 1", "def step(self, steps):\n self._simulate(endStep=self.currentStep+steps)", "def anim(self):\n\n if self.anims.get(0):\n if self.anims.get(0).done:\n # Remove current animation when done.\n self.anims.pop()\n else:\n # Progress current animation.\n self.anims.get(0).step()\n\n elif self.solving and self.solved():\n # Reset cube after `Cube.solve()`.\n self.queue.items = []\n self.solving = False\n\n elif self.queue.get(0):\n if self.timing and not self.scrambling and not self.timer.ing:\n # Start the timer after scrambling.\n self.timer.start()\n\n # Add queued moves to the animation queues.\n self.move(self.queue.pop())\n\n elif self.timer.ing and self.solved():\n # Stop the timer.\n self.timer.end()\n self.timing = False\n\n if self.solved() and self.timer.times[-1] != 'DNF':\n self.disp = True\n\n 
self.timer.update()", "def __show_rules(self):\n self.clear_screen()\n self.__print_logo()\n self.__print_rules(left=20)\n input()", "def animation1(strip, wait_ms=1, range_begin=0, range_end=-1, iteration_step=-1):\n if range_end == 1:\n range_end = strip.numPixels()\n\n j = iteration_step\n\n pixel_to_change = iteration_step % (range_end - range_begin) + range_begin\n\n if pixel_to_change - range_begin == 0:\n for i in range(range_begin, range_end):\n strip.setPixelColor(i, Color(0, 0, 255))\n strip.show()\n time.sleep(wait_ms/1000)\n for i in range(range_begin, range_end):\n strip.setPixelColor(i, Color(255, 0, 0))\n strip.show()\n #time.sleep(wait_ms/1000.0)", "def playAGame(t_Param, t_Qtable, t_Movements, t_States, t_Rewards, t_Maze, t_line=None, t_point=None):\n # start from the position next to the entrance of maze.\n pos = np.array([1,1], np.int16)\n \n # a list to memorize history step with maximum memory length of 2\n path = [0,0]\n \n # update plot\n if t_line is not None and t_point is not None:\n xdata = [pos[1],]; ydata = [pos[0],]\n t_line.set_xdata(xdata); t_line.set_ydata(ydata)\n t_point.set_xdata([pos[1],]); t_point.set_ydata(pos[0,])\n #t_line.figure.canvas.draw()\n plt.pause(0.01)\n\n for k in range(t_Param[\"nStep_Max\"]):\n # calculate current state index\n state_idx = t_Param[\"ncol\"] * pos[0] + pos[1]\n\n # modify history\n path.append( state_idx ); path.remove( path[0] )\n\n # update current position , and then return choice\n choice = move(t_Qtable[state_idx, :], pos, t_Movements)\n\n # update plot\n if t_line is not None and t_point is not None:\n xdata.append(pos[1]); ydata.append(pos[0])\n t_line.set_xdata(xdata); t_line.set_ydata(ydata)\n t_point.set_xdata([pos[1],]); t_point.set_ydata(pos[0,])\n #t_line.figure.canvas.draw()\n plt.pause(0.01)\n\n # calculate new state index\n state_idx_new = t_Param[\"ncol\"] * pos[0] + pos[1]\n #print(f\"[{pos[0]:>2d}, {pos[1]:2d}]\", end=\" \")\n # get environment; based on the new position, get reward\n env = t_Maze[pos[0], pos[1]]\n \n # if is turning back, punish\n if state_idx_new in path:\n R = -2\n # get reward from the Maze pixel value of the new state\n else:\n R = t_Rewards[ env ]\n\n # update Qtable\n try:\n t_Qtable[state_idx,choice] = (1-Param[\"alpha\"]) * t_Qtable[state_idx,choice] + \\\n Param[\"alpha\"] * (R + Param[\"gamma\"] * t_Qtable[state_idx_new, :].max())\n except IndexError:\n print(pos[0],pos[1])\n break\n\n # whether game over\n if env != 3:\n break\n\n step = k+1\n \n # if reach maximum nStep, set env to 4\n if step == t_Param[\"nStep_Max\"]:\n env = 4\n\n return env, step, tuple(pos)", "def showState(self):\n for i in self.state[0]:\n for j in self.state[1]:\n print(self.table[i][j], end=\"\")\n print(\"\")", "def animation(self, t):\n self.program['u_clock'] = 2*t\n gloo.clear('black')\n self.program.draw('points')\n return _screenshot((0, 0, self.size[0], self.size[1]))[:,:,:3]", "def update_animations():\n\tt = _pg.time.get_ticks()\n\tfor a in _running:\n\t\ta._update(t)", "def run(self):\n self.axs[0][0].clear()\n simulate(params=self.params,plt=plt,callback=self.callback,home=self.home,work=self.work, positions=self.initial_positions, stopping_t=150)", "def display_gameclock(interpolation):\n ## GOTCHA: See the comment in update_gameclock().\n sprite_group.clear(screen, eraser_image)\n for ball in sprite_group:\n ball.predict(interpolation, USE_PREDICTION)\n sprite_group.draw(screen)\n pygame.display.update()", "def generate_mode_animation_html(self, i = 0 , is_3d = False ):\n \n from 
pyro.dynamic.statespace import linearize\n \n # \n ss = linearize( self )\n \n # Compute eigen decomposition\n ss.compute_eigen_modes()\n \n # Simulate one mode\n traj = ss.compute_eigen_mode_traj( i )\n \n # Animate mode\n animator = ss.get_animator()\n \n #label\n template = 'Mode %i \\n%0.1f+%0.1fj'\n label = template % (i, ss.poles[i].real, ss.poles[i].imag)\n animator.top_right_label = label\n \n animator.animate_simulation( traj, 3.0, is_3d , show = False)\n \n html_video = animator.ani.to_html5_video()\n \n return html_video", "def display(self, grid):\n for i in range(grid.height):\n print(\"-\" + \"-------\"*grid.width)\n for j in range(grid.width):\n if not j:\n print(\"|\", end=\"\") # begin row with vertical line\n a = self.actions.get((i, j), ' ')\n print(\" %s |\" % a, end=\"\")\n print(\"\") # new line\n print(\"-\" + \"-------\"*grid.width, end='\\n\\n')" ]
[ "0.62367564", "0.6091286", "0.6087436", "0.6078769", "0.6029971", "0.60093224", "0.59166366", "0.59036875", "0.581892", "0.5817786", "0.5748448", "0.5748111", "0.5671084", "0.5665676", "0.5663996", "0.5663854", "0.563546", "0.5605197", "0.5595019", "0.55923826", "0.55907106", "0.5575306", "0.55707794", "0.5565015", "0.5540911", "0.5534121", "0.55087346", "0.5486996", "0.5480954", "0.54609513", "0.5437686", "0.54240763", "0.5422127", "0.54112405", "0.54055935", "0.5397019", "0.53827214", "0.5376678", "0.5350047", "0.53484774", "0.53459394", "0.53402925", "0.5338425", "0.53287363", "0.5317491", "0.5315551", "0.5295117", "0.5286447", "0.52809143", "0.5280367", "0.5279281", "0.5274721", "0.52737397", "0.5269014", "0.52668554", "0.5253897", "0.5250586", "0.5249614", "0.52468854", "0.5236627", "0.52316374", "0.5228073", "0.5226304", "0.5222503", "0.5218401", "0.521601", "0.51964366", "0.51927", "0.51828676", "0.5174714", "0.5171577", "0.51654637", "0.51639533", "0.51557046", "0.5151694", "0.51312", "0.5123682", "0.5115981", "0.5113845", "0.5103895", "0.5103582", "0.5102609", "0.51002675", "0.5098604", "0.5097225", "0.5093791", "0.50933754", "0.5090332", "0.5074673", "0.5070899", "0.5059244", "0.5056044", "0.505498", "0.5054292", "0.5054214", "0.5041971", "0.50401604", "0.5039452", "0.5038921", "0.5038521" ]
0.603831
4
For Jupyter Notebook Display Only?
def grid_animation_quick(self, frames, iterations=10, fps=0.02, figsize=(6, 6)): color_map = matplotlib.colors.ListedColormap(['white', 'black']) fig, ax = plt.subplots(figsize=figsize) for r in np.arange(0, iterations): ax.cla() ax.axes.grid(False) ax.set_axis_off() im = ax.imshow(frames[0], cmap=color_map, animated=True) for image, step in zip(frames[1:], np.arange(1, len(frames[1:])+1)): time.sleep(fps) ax.title.set_text('Rule 942 | Step ' + str(step) + ' | Active ' + str(int(np.sum(image)))) im.set_data(image) fig.canvas.draw()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def jupyter():", "def notebook():\n pass", "def notebook():\n pass", "def _ipython_display_(self):\n with self._sc:\n self._box._ipython_display_()", "def in_notebook():\n from IPython import get_ipython\n return get_ipython() is not None", "def output_notebook(self):\n self._notebook = True", "def _ipython_display_(self):\n spec, render_type = self._get_spec_info()\n\n id = uuid.uuid4()\n publish_display_data(\n {'text/html': self._generate_html(id)},\n metadata={'jupyter-vega3': '#{0}'.format(id)}\n )\n publish_display_data(\n {'application/javascript':\n self._generate_js(id, spec, render_type)},\n metadata={'jupyter-vega3': '#{0}'.format(id)}\n )", "def is_notebook():\n return \"ipykernel\" in sys.modules", "def in_notebook() -> bool:\r\n try:\r\n if (\r\n str(type(get_ipython()))\r\n == \"<class 'ipykernel.zmqshell.ZMQInteractiveShell'>\"\r\n ):\r\n return True\r\n else:\r\n return False\r\n except NameError:\r\n return False", "def is_notebook():\n try:\n shell = get_ipython().__class__.__name__\n if shell == 'ZMQInteractiveShell':\n return True\n elif shell == 'TerminalInteractiveShell':\n return False\n else:\n return False\n except NameError:\n return False", "def is_notebook():\n if \"get_ipython\" not in globals():\n # Python shell\n return False\n env_name = get_ipython().__class__.__name__\n if env_name == \"TerminalInteractiveShell\":\n # IPython shell\n return False\n # Jupyter Notebook\n return True", "def show(self) -> None:\n\n # if not call from notebook environment, ref to show_browser function.\n if not is_notebook():\n print(\n \"The report will not show in a notebook environment, \"\n \"please try 'show_browser' if you want to open it in browser\",\n file=sys.stderr,\n )\n try:\n from IPython.display import ( # pylint: disable=import-outside-toplevel\n HTML,\n display,\n )\n\n display(HTML(self._repr_html_()))\n except ImportError:\n pass", "def show(self) -> None:\n # if not called from notebook environment, ref to show_browser function.\n if not is_notebook():\n print(\n \"The plot will not show in a notebook environment, \"\n \"please try 'show_browser' if you want to open it in browser\",\n file=sys.stderr,\n )\n try:\n from IPython.display import ( # pylint: disable=import-outside-toplevel\n HTML,\n display,\n )\n\n display(HTML(self._repr_html_()))\n except ImportError:\n pass", "def test_ipython_repr_no_nglview(self):\n molecule = Molecule().from_smiles(\"CCO\")\n molecule._ipython_display_()", "def export_notebook():\n #system(\"jupyter nbconvert --to HTML \\\"Look At Enron data set.ipynb\\\"\")\n system(\"jupyter nbconvert --to HTML --output=Look+At+Enron+data+set.html \\\"Look At Enron data set.ipynb\\\"\")\n return", "def shell(state):\n IPython.embed(colors=\"Neutral\", banner1=BANNER, user_ns={\"sdk\": state.sdk})", "def _in_notebook(): # pragma: no cover\n try:\n shell = get_ipython().__class__.__name__\n if shell == \"ZMQInteractiveShell\":\n return True\n\n return False\n except NameError:\n return False", "def _isnotebook():\n try:\n shell = get_ipython().__class__.__name__\n if shell == 'ZMQInteractiveShell':\n return True # Jupyter notebook or qtconsole\n elif shell == 'TerminalInteractiveShell':\n return False # Terminal running IPython\n else:\n return False # Other type (?)\n except NameError:\n return False # Probably standard Python interpreter", "def default_display_function(feature):\n # n_samples = min(n_samples, feature.shape[0])\n IPython.display.display(widgets.Box(layout=widgets.Layout(height=\"2.5%\")))\n 
IPython.display.display(feature)\n IPython.display.display(widgets.Box(layout=widgets.Layout(height=\"2.5%\")))", "def _in_ipynb():\n __VALID_NOTEBOOKS = [\n \"<class 'google.colab._shell.Shell'>\",\n \"<class 'ipykernel.zmqshell.ZMQInteractiveShell'>\",\n ]\n try:\n return str(type(get_ipython())) in __VALID_NOTEBOOKS\n except NameError:\n return False", "def show(self):\n import IPython\n if self._output is None:\n self.render()\n IPython.display.display(self._output, display_id=str(id(self)))", "def _is_jupyter():\n try:\n ipy = get_ipython()\n except NameError:\n return False\n\n # If jupyter, ipy is zmq shell\n return ipy.__class__.__name__ == 'ZMQInteractiveShell'", "def jupyter_inline(url):\n # note: `get_ipython` is available without import since ipython 5.1\n # (and it's fine to fail here, since the next viewer is attempted in that case)\n ipython = get_ipython()\n logger.debug('Running inside ipython: %r', ipython)\n if 'ZMQInteractiveShell' not in type(ipython).__name__:\n raise ValueError('non-gui interactive shell')\n\n # render URL/IFrame inline in jupyter notebook, or fail trying\n # note: since ipython 5.4/6.1 (May 2017) `display` is available without import\n rich_url = RichDisplayURL(url)\n display(rich_url)\n\n # don't block if gui interactive shell is used\n return False", "def show(self, notebook=notebook_display):\n print(\"\\nCluster Ensemble:\")\n if notebook is True:\n display(self._df)\n elif notebook is False:\n print(self._df)\n self.massrich_parameters()", "def _is_running_in_notebook():\n\n # apparently get_ipython is lost when this gets called from a callback of\n # an ipython widget. See https://github.com/jupyter/jupyter/issues/299\n try:\n from IPython import get_ipython\n except ImportError:\n return False\n\n try:\n shell = get_ipython().__class__.__name__\n # ZMQInteractiveShell is the standard Jupyter Kernel\n # Interpreter is used by pyiodide\n if shell in [\"ZMQInteractiveShell\", \"Interpreter\"]:\n return True\n elif shell == \"TerminalInteractiveShell\":\n return False\n else:\n return False\n except NameError:\n return False", "def renderMyNotebook():\n return render_template(\n \"myNotebook.html\",\n suspect_dict=session[\"game\"].detective_notebook[\"suspects\"],\n weapon_dict=session[\"game\"].detective_notebook[\"weapons\"],\n room_dict=session[\"game\"].detective_notebook[\"rooms\"],\n log=session[\"game\"].log\n )", "def interactive():\n IPython.start_ipython(argv=[])", "def show(self):\n import IPython.display\n disp = IPython.display.HTML(self.render())\n return IPython.display.display(disp, display_id=str(id(self)))", "def describe_notebook_instance(NotebookInstanceName=None):\n pass", "def test_ipython_display(self, r, rep, capsys):\n r._ipython_display_() # pylint: disable=protected-access\n captured = capsys.readouterr()\n assert rep in captured.out", "def should_show():", "def _init_display(self):\n raise NotImplementedError", "def show(self):\n #show the viewlet if we are not using Firefox\n user_agent = self.request.get('HTTP_USER_AGENT', '')\n display = not ('Firefox' in user_agent or 'Chrome' in user_agent)\n return display", "def show(self):\n\t\traise NotImplementedError()", "def show(self):\n raise NotImplementedError", "def show(self):\n raise NotImplementedError", "def main():\n app = guisupport.get_app_qt4()\n\n if INPROCESS:\n from qtconsole.inprocess import QtInProcessKernelManager\n km = QtInProcessKernelManager()\n else:\n from qtconsole.manager import QtKernelManager\n km = QtKernelManager()\n km.start_kernel()\n 
km.kernel.gui = 'qt4'\n kc = km.client()\n kc.start_channels()\n\n widget = RichJupyterWidget()\n widget.kernel_manager = km\n widget.kernel_client = kc\n if CLEANSHUTDOWN: # slow exit on CTRL+D\n def stop():\n kc.stop_channels()\n km.shutdown_kernel()\n app.exit()\n widget.exit_requested.connect(stop)\n else: # fast exit on CTRL+D\n widget.exit_requested.connect(app.quit)\n widget.show()\n guisupport.start_event_loop_qt4(app)", "def display_only(self):\n return self.__class__(None, self.display)", "def init_notebook_plotting(offline: bool = False) -> None:\n display_bundle = {\"text/html\": _wrap_js(_js_requires(offline=offline))}\n display(display_bundle, raw=True)\n logger.info(\"Injecting Plotly library into cell. Do not overwrite or delete cell.\")\n init_notebook_mode()", "def debug():\n # \"EMBEDDED_MODE\" is True precisely when the Sage notebook is running.\n from sage.plot.plot import EMBEDDED_MODE\n if not EMBEDDED_MODE:\n # Must be the command line, so suggest using the IPython debugger.\n print(\"You should use %debug on the command line.\")\n else:\n # Create the Debug object and make it interactive.\n Debug().interact()", "def show(self) -> None:", "def init_notebook():\n \n # Note: not using IPython Comm objects yet, since they seem rather\n # undocumented and I could not get them to work when I tried for a bit.\n # This means though, that flexx in the notebook only works on localhost.\n \n from IPython.display import display, clear_output, HTML\n # from .. import ui # noqa - make ui assets available\n \n # Make default log level warning instead of \"info\" to avoid spamming\n # This preserves the log level set by the user\n config.load_from_string('log_level = warning', 'init_notebook')\n set_log_level(config.log_level)\n \n # Get session or create new\n session = manager.get_default_session()\n if session is None:\n session = manager.create_default_session()\n \n # Open server - the notebook helper takes care of the JS resulting\n # from running a cell, but any interaction goes over the websocket.\n server = current_server()\n host, port = server.serving\n \n # Trigger loading phosphor assets\n if 'flexx.ui' in sys.modules:\n from flexx import ui\n session.register_model_class(ui.Widget)\n \n # Get assets, load all known modules to prevent dynamic loading as much as possible\n js_assets, css_assets = session.get_assets_in_order(css_reset=False, load_all=True)\n \n # Pop the first JS asset that sets flexx.app_name and flexx.session_id\n # We set these in a way that it does not end up in exported notebook.\n js_assets.pop(0)\n url = 'ws://%s:%i/flexx/ws/%s' % (host, port, session.app_name)\n flexx_pre_init = \"\"\"<script>window.flexx = window.flexx || {};\n window.flexx.app_name = \"%s\";\n window.flexx.session_id = \"%s\";\n window.flexx.ws_url = \"%s\";\n window.flexx.is_live_notebook = true;\n </script>\"\"\" % (session.app_name, session.id, url)\n \n # Check if already loaded, if so, re-connect\n if not getattr(session, 'init_notebook_done', False):\n session.init_notebook_done = True # also used in assetstore\n else:\n display(HTML(flexx_pre_init))\n clear_output()\n display(HTML(\"\"\"<script>\n flexx.is_exported = !flexx.is_live_notebook;\n flexx.init();\n </script>\n <i>Flexx already loaded. 
Reconnected.</i>\n \"\"\"))\n return # Don't inject Flexx twice\n # Note that exporting will not work anymore since out assets\n # are no longer in the outputs\n \n # Install helper to make things work in exported notebooks\n NoteBookHelper(session)\n \n # Compose HTML to inject\n t = \"<i>Injecting Flexx JS and CSS</i>\"\n t += '\\n\\n'.join([asset.to_html('{}', 0) for asset in css_assets + js_assets])\n t += \"\"\"<script>\n flexx.is_notebook = true;\n flexx.is_exported = !flexx.is_live_notebook;\n /* If Phosphor is already loaded, disable our Phosphor CSS. */\n if (window.jupyter && window.jupyter.lab) {\n document.getElementById('phosphor-all.css').disabled = true;\n }\n flexx.init();\n </script>\"\"\"\n \n display(HTML(flexx_pre_init)) # Create initial Flexx info dict\n clear_output() # Make sure the info dict is gone in exported notebooks\n display(HTML(t))\n \n # Note: the Widget._repr_html_() method is responsible for making\n # the widget show up in the notebook output area.", "def jupyter_notebook(ip, all_kernels, env, idle_timeout, args):\n if \"-h\" not in args and \"--help\" not in args:\n ipython_message(all_kernels)\n\n arguments = _build_jupyter_command(\n \"notebook\", ip=ip, all_kernels=all_kernels, args=args, idle_timeout=idle_timeout\n )\n\n python_call_kwargs = _build_jupyter_env(env)\n python_call(\"jupyter\", arguments, **python_call_kwargs)", "def test_JupyterNotebooks1(self):\n\n self.delayDisplay(\"Starting the test\")\n\n # TODO: implement test\n\n self.delayDisplay('Test passed!')", "def _in_ipython(self):\n import graphlab.canvas.target\n return isinstance(graphlab.canvas.get_target(), graphlab.canvas.target.IPythonTarget)", "def in_ipynb(debug=False):\n\n frame_info_list = get_frame_list()[1]\n filenames = [fi.filename for fi in frame_info_list]\n\n test_str = \"\\n\".join(filenames)\n # this should be made more reliable\n if \"ipykernel_launcher\" in test_str and \\\n \"ipykernel/kernelapp.py\" in test_str and \\\n \"zmqshell\" in test_str:\n res = True\n else:\n res = False\n\n if debug:\n dbgc = Container(fetch_locals=True)\n return dbgc\n else:\n return res", "def display_stdout_and_err_in_curr_cell(self):\n ipy_display(self.output_widget)", "def jupyter_is_interactive_run():\n return \"_JUPYTER_BAZEL_INTERACTIVE_RUNFILES_DIR\" in os.environ", "def display_html_snapshots_widget():\n if not get_ipython():\n print('The HTML snapshot widget cannot be display in environments other than IPython.')\n return\n\n # Configure notebook display preferences to better suit this UI. 
These display settings\n # will be in effect for all cells in the notebook run after this one is run.\n pd.set_option('display.max_colwidth', None)\n pd.set_option('display.max_rows', None)\n get_ipython().run_cell_magic(\n 'javascript',\n '',\n '''// Display cell outputs to full height (no vertical scroll bar)\n IPython.OutputArea.auto_scroll_threshold = 9999;''')\n\n # Retrieve the workspace metadata for the current user and environment.\n ws_meta = WorkspaceMetadata()\n workspace_names2id = collections.OrderedDict(sorted(\n ws_meta.get_workspace_name_to_id_mapping().items()))\n workspace_names2id_include_readonly = collections.OrderedDict(sorted(\n ws_meta.get_workspace_name_to_id_mapping(include_private_readonly=True).items()))\n workspace_ids2bucket_include_readonly = ws_meta.get_workspace_id_to_bucket_mapping(include_private_readonly=True)\n workspace_paths = {k: WorkspacePaths(workspace_bucket=v)\n for k, v in workspace_ids2bucket_include_readonly.items()}\n\n ui_output = widgets.Output()\n\n ui_tabs = widgets.Tab()\n ui_tabs.children = [create_html_snapshot_widget(ws_names2id=workspace_names2id,\n ws_paths=workspace_paths,\n output=ui_output),\n create_view_files_widget(ws_names2id=workspace_names2id_include_readonly,\n ws_paths=workspace_paths,\n output=ui_output),\n create_view_all_comments_widget(ws_names2id=workspace_names2id_include_readonly,\n ws_paths=workspace_paths,\n output=ui_output)]\n ui_tabs.set_title(title='Create', index=0)\n ui_tabs.set_title(title='View one', index=1)\n ui_tabs.set_title(title='View all', index=2)\n\n display(ui_tabs, ui_output)", "def showPlot2():\n raise NotImplementedError", "def show_figure(self):\n pylab.show()", "def run_from_ipython():\n try:\n __IPYTHON__\n return True\n except NameError:\n return False", "def driver_open_display(self):\n raise Exception(\"missing implementation \")", "def show(self):\n pass", "def __window_print(self):\n pass", "def disp(df):\n display(HTML(df.to_html(index=False)))", "def showPlot1(): \n raise NotImplementedError", "def subnotebook_show(self):\n if not self.subnotebook:\n logger.debug(\"Showing subnotebook\")\n self.subnotebook = self.add_subnotebook()", "def summary(self):\n if _have_ipython:\n IPython.display.display(IPython.display.HTML(self._repr_html_()))\n else:\n print(self)", "def notebook_display(self):\n time = self.out_channels['0'].samples / self.out_channels['0'].samprate\n\n vmax = 0.\n for c in range(len(self.out_channels)):\n vmax = max(\n abs(self.out_channels[str(c)].values.max()),\n abs(self.out_channels[str(c)].values.min()),\n vmax\n ) * 1.05\n \n for i in range(len(self.out_channels)):\n plt.plot(time[::20], self.out_channels[str(i)].values[::20]+2*i*vmax, label=self.channels.labels[i])\n\n plt.xlabel('Time (s)')\n plt.ylabel('Relative Amplitude')\n plt.legend(frameon=False, loc=5)\n plt.xlim(-time[-1]*0.05,time[-1]*1.2)\n for s in plt.gca().spines.values():\n s.set_visible(False)\n plt.gca().get_yaxis().set_visible(False)\n\n if len(self.channels.labels) == 1:\n # we have used 48000 Hz everywhere above as standard, but to quickly hear the sonification sped up / slowed down,\n # you can modify the 'rate' argument below (e.g. 
multiply by 0.5 for half speed, by 2 for double speed, etc)\n outfmt = np.column_stack([self.out_channels['0'].values, self.out_channels['0'].values]).T\n else:\n outfmt = np.column_stack([self.out_channels['0'].values, self.out_channels['1'].values]).T\n plt.show()\n display(ipd.Audio(outfmt,rate=self.out_channels['0'].samprate, autoplay=False))", "def _run_from_ipython():\n try:\n __IPYTHON__\n return True\n except NameError:\n return False", "def get_virtual_display(self):\n pass", "def displayNative(self):\n\n\t\tcmd.hide(\"lines\", \"native\")\n\t\tcmd.color(\"gray\", \"native & name CA\")\n\t\t#cmd.zoom(\"nearby\")", "def running_in_ipython() -> bool:\n try:\n from IPython import get_ipython\n\n return get_ipython() is not None\n except ImportError:\n return False", "def html_viewer(i):\n\n i['module_uoa']='experiment.tune.compiler.flags'\n i['module_cfg']=copy.deepcopy(cfg)\n i['module_work']=copy.deepcopy(work)\n return ck.access(i)", "def show(self):", "def show_df_ui(df,transpose=False,default=\"Hide\",message=\"Show dataframe: \"):\n def make_btn(val):\n btn_widget=widgets.Button(\n value=False,\n description=val,\n disabled=False,\n button_style='',\n layout=widgets.Layout(width=\"80px\"),\n )\n return btn_widget\n \n def show_head():\n if not transpose:\n display(df.head(10))\n else:\n display(df.head(10).transpose())\n def show_tail():\n if not transpose:\n display(df.tail(10))\n else:\n display(df.tail(10).transpose())\n def show_full():\n if not transpose:\n display(df)\n else:\n display(df.transpose())\n def show_random():\n if not transpose:\n display(Frames.smart_sample(df,10))\n else:\n display(Frames.smart_sample(df,10).transpose())\n def hide_output():\n pass\n \n def refresh():\n Notebook.clear()\n Widgets.show_df_ui(df,transpose=transpose,message=message)\n \n def show_head_refresh(b):\n refresh()\n show_head()\n def show_tail_refresh(b):\n refresh()\n show_tail()\n def show_full_refresh(b):\n refresh()\n show_full()\n def show_random_refresh(b):\n refresh()\n show_random()\n def hide_output_refresh(b):\n refresh()\n \n behaviors={\n \"Hide\": hide_output,\n \"Head\": show_head,\n \"Tail\": show_tail,\n \"Random\": show_random,\n \"Full\": show_full\n }\n \n btn_head=make_btn(\"Head\")\n btn_random=make_btn(\"Random\")\n btn_tail=make_btn(\"Tail\")\n btn_full=make_btn(\"Full\")\n btn_hide=make_btn(\"Hide\")\n \n btn_head.on_click(show_head_refresh)\n btn_tail.on_click(show_tail_refresh)\n btn_full.on_click(show_full_refresh)\n btn_random.on_click(show_random_refresh)\n btn_hide.on_click(hide_output_refresh)\n \n ui_group=widgets.HBox([\n widgets.Label(value=message),\n btn_head,\n btn_random,\n btn_tail,\n btn_full,\n btn_hide,\n ])\n display(ui_group)\n if default in behaviors:\n behaviors[default]()", "def execute_notebook(nb, resources):\n\n if is_ipython_3():\n from IPython.nbconvert.preprocessors import ExecutePreprocessor\n nb, resources = ExecutePreprocessor().preprocess(nb, resources)\n elif runipy_available:\n from runipy.notebook_runner import NotebookRunner\n r = NotebookRunner(nb)\n r.run_notebook(skip_exceptions=True)\n nb = r.nb\n else:\n raise ImportError(\"Can't execute notebooks. 
Please install IPython >= 3 or runipy.\")\n\n return nb", "def _save_nb():\n try:\n import IPython\n except ImportError:\n raise ImportError(\"You need to install IPython to use _save_nb\")\n from IPython.display import HTML, Javascript, display\n\n if is_lab():\n script = \"\"\"\n this.nextElementSibling.focus();\n this.dispatchEvent(new KeyboardEvent('keydown', {key:'s', keyCode: 83, metaKey: true}));\n \"\"\"\n display(HTML(('<img src onerror=\"{}\" style=\"display:none\">' '<input style=\"width:0;height:0;border:0\">').format(script)))\n else:\n display(Javascript(\"IPython.notebook.save_checkpoint();\"))", "def showPlot3():\n raise NotImplementedError", "def plot_notebook(cp_profile, *args, **kwargs):\n plot(cp_profile, *args, destination=\"notebook\", **kwargs)", "def export_html(self):\n self._svg_warning_displayed = False\n super(RichJupyterWidget, self).export_html()", "def imdisplay(filename, representation):\n img = read_image(filename, representation)\n if representation == GS_REP:\n plt.imshow(img, cmap=plt.cm.gray)\n else:\n plt.imshow(img)", "def run_shell(options):\n from IPython import embed\n embed()", "def start_displayhook(self):\n pass", "def matlab_console():\n from sage.repl.rich_output.display_manager import get_display_manager\n if not get_display_manager().is_in_terminal():\n raise RuntimeError('Can use the console only in the terminal. Try %%matlab magics instead.')\n os.system('matlab -nodisplay')", "def render_notebook_script(scene, sig=None, **kwargs):\n js = generate_notebook_js(scene, sig=sig, **kwargs)\n display(js)", "def display_df(df):\n with pd.option_context(\"display.max_rows\", 1000, \"display.max_columns\", 100):\n display(df.head(10))", "def _read_rendered_notebook(nb_str):\n # add debug cells\n nb = nbformat.reads(nb_str, as_version=nbformat.NO_CONVERT)\n nbformat_v = nbformat.versions[nb.nbformat]\n\n source = \"\"\"\n# Debugging settings (this cell will be removed before saving)\n# change the current working directory to directory of the session that\n# invoked the jupyter app to make relative paths work\nimport os\n{}\n\"\"\".format(chdir_code(Path('.').resolve()))\n\n cell = nbformat_v.new_code_cell(source,\n metadata={'tags': ['debugging-settings']})\n nb.cells.insert(0, cell)\n\n return nb", "def jntToDisplay():\n DISPLAY=\"DISPLAY\"\n # check obj exist\n if pm.objExists(DISPLAY) != 1:\n pm.error(\"no object call DISPLAY\")\n jnt=pm.ls(\"*_ikJnt*\",\"*_fkJnt*\",\"*_ctrlJnt*\",type ='joint')\n for obj in jnt:\n\n pm.delete(obj + \".overrideDisplayType\",icn=1)\n pm.setAttr(obj + \".overrideEnabled\",1)\n pm.setAttr(obj + \".overrideDisplayType\",0)\n pm.connectAttr(DISPLAY + \".ctrlJntDisplay\",obj + \".overrideDisplayType\",f=1)\n pm.setAttr(DISPLAY + \".ctrlJntDisplay\",0) # set to normal\n\n jnt=pm.ls(\"*_skinJnt*\",\"*:*_skinJnt*\",type ='joint')\n for obj in jnt:\n pm.delete(obj + \".overrideDisplayType\",icn=1)\n pm.setAttr(obj + \".overrideEnabled\",1)\n pm.setAttr(obj + \".overrideDisplayType\",0)\n pm.connectAttr(DISPLAY + \".skeletonDisplay\",obj + \".overrideDisplayType\",f=1)\n pm.setAttr(DISPLAY + \".skeletonDisplay\",0) # set to normal\n\n\n pm.setAttr(DISPLAY + \".geoDisplay\",0) # set to normal\n pm.setAttr((\"GEO.overrideEnabled\"),1)\n pm.setAttr((\"GEO.overrideDisplayType\"),0)\n pm.delete((\"GEO.overrideDisplayType\"),icn=1)\n pm.connectAttr((DISPLAY + \".geoDisplay\"),(\"GEO.overrideDisplayType\"),f=1)", "def ipython(self):\n try:\n __IPYTHON__\n return True\n except NameError:\n return False", "def 
dataframe_displayer(df):\n\n #On paramètre les options d'affichage du module pandas\n pd.set_option('display.max_rows', None)\n pd.set_option('display.max_columns', None)\n pd.set_option('display.width', None)\n pd.set_option('display.max_colwidth', -1)\n\n print(df)", "def __repr__(self):\n return self.display()", "def show_output(self, conn, rcode):\n\n if rcode:\n title, results = self.connector.get_results(0)\n scrolled = Gtk.ScrolledWindow()\n scrolled.add(results)\n tab_label = self.get_tab_label(title, scrolled)\n self.notebook.append_page(scrolled, tab_label)\n\n if not self.notebook.get_parent():\n child = self.output_window.get_children()[0]\n self.output_window.remove(child)\n self.output_window.add(self.notebook)\n self.notebook.show_all()\n del child\n\n tab_label.show_all()\n scrolled.show_all()\n else:\n self.connector.get_results(0)", "def in_console(self, mpl_backend: str) -> bool:\n return mpl_backend not in [\n \"module://ipykernel.pylab.backend_inline\",\n \"NbAgg\",\n ]", "def display_synth_editor(self, sid):\n pass", "def show():\n plt.show()", "def show():\n plt.show()", "def show():\n plt.show()", "def show_inline_matplotlib_plots():\n if 'matplotlib' not in sys.modules:\n # matplotlib hasn't been imported, nothing to do.\n return\n\n try:\n import matplotlib as mpl\n from ipykernel.pylab.backend_inline import flush_figures\n except ImportError:\n return\n\n if (mpl.get_backend() == 'module://ipykernel.pylab.backend_inline' or\n mpl.get_backend() == 'module://matplotlib_inline.backend_inline'):\n flush_figures()", "def show():\n setup()\n plt.show()", "def display(self, buffer = None):\n raise NotImplementedError", "def load_jupyter_server_extension(nbapp):\n resuseconfig = ResourceUseDisplay(parent=nbapp)\n nbapp.web_app.settings['nbresuse_display_config'] = resuseconfig\n route_pattern = url_path_join(nbapp.web_app.settings['base_url'], '/metrics')\n nbapp.web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])", "def command_show(problem):\r\n print problem.get_html()", "def show_plots():\n plt.show()", "def show():\n\tplt.show()", "def getImmediateRendering():\n\n\treturn False", "def ion():\n plt.ion()", "def is_ipython():\n return 'get_ipython' in globals()", "def _repr_html_(self) -> str:\n return self.all(pandas=True)._repr_html_() # type: ignore", "def showZoomToolbar():\n\treturn False" ]
[ "0.8027189", "0.7526061", "0.7526061", "0.7300398", "0.71970946", "0.6907858", "0.68669766", "0.6833837", "0.67673033", "0.6737627", "0.67263234", "0.66920686", "0.6689149", "0.66816413", "0.6628358", "0.65491647", "0.65302175", "0.6369754", "0.6367418", "0.6348409", "0.63246626", "0.6307671", "0.63021827", "0.61833525", "0.616384", "0.609693", "0.6082762", "0.6025215", "0.6001261", "0.5970714", "0.59579533", "0.58877605", "0.5870047", "0.58572173", "0.58433926", "0.58433926", "0.58179194", "0.5816548", "0.5802665", "0.5795343", "0.5789215", "0.57848006", "0.5769881", "0.57641375", "0.5763343", "0.5757674", "0.5755532", "0.57518643", "0.57393265", "0.57287216", "0.5702222", "0.56983685", "0.5685448", "0.5678559", "0.56712276", "0.5662902", "0.565813", "0.5633656", "0.56316847", "0.56267273", "0.5625852", "0.56100374", "0.5599264", "0.5598902", "0.5588617", "0.5585978", "0.5574294", "0.5571958", "0.55675733", "0.5565028", "0.55638915", "0.55595577", "0.5558875", "0.5551852", "0.55385506", "0.55276173", "0.5526358", "0.55256695", "0.5519017", "0.5515643", "0.55154896", "0.55132324", "0.549871", "0.549023", "0.5475638", "0.54692346", "0.5463385", "0.5463385", "0.5463385", "0.545176", "0.54266715", "0.54175967", "0.5410565", "0.54086363", "0.54060614", "0.5403256", "0.5400465", "0.5399308", "0.539308", "0.5389942", "0.5381485" ]
0.0
-1
Main function to get data into Splunk.
def collect_events(helper, ew): # pylint: disable=no-self-argument,invalid-name,too-many-statements,too-many-branches def clear_checkbox(session_key, stanza): """ Sets the 'reindex_data' value in the REST API to 0 to clear it. Splunk then automatically restarts the input.""" url = f'https://localhost:8089/servicesNS/nobody/TA-strava-for-splunk/data/inputs/strava_api/{stanza}' headers = {'Authorization': f'Splunk {session_key}'} payload = 'reindex_data=0' helper.send_http_request(url, "POST", headers=headers, payload=payload, verify=False, use_proxy=False) def get_activities(ts_activity, access_token): """Gets all activities, 30 per page as per Strava's default.""" params = {'after': ts_activity, 'access_token': access_token} url = "https://www.strava.com/api/v3/activities" response = return_json(url, "GET", parameters=params) return response def get_activity(activity, token): """Gets specific activity.""" url = f'https://www.strava.com/api/v3/activities/{activity}?include_all_efforts=true' params = {'access_token': token} response = return_json(url, "GET", parameters=params, timeout=10) return response def get_activity_stream(token, activity, types, series_type='time', resolution='high'): """Gets the activity stream for given activity id.""" types = ','.join(types) params = {'access_token': token} url = f'https://www.strava.com/api/v3/activities/{activity}/streams/{types}&series_type={series_type}&resolution={resolution}&key_by_type=' response = return_json(url, "GET", parameters=params, timeout=10) return response def get_athlete(token): """Gets details on currently logged in athlete.""" url = "https://www.strava.com/api/v3/athlete" params = {'access_token': token} response = return_json(url, "GET", parameters=params, timeout=10) return response def get_epoch(timestamp): """Converts Strava datetime to epoch timestamp""" timestamp_dt = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%SZ") epoch = calendar.timegm(timestamp_dt.timetuple()) return epoch def get_token(client_id, client_secret, token, renewal): """Get or refresh access token from Strava API.""" url = "https://www.strava.com/api/v3/oauth/token" if renewal: payload = { 'client_id': client_id, 'client_secret': client_secret, 'refresh_token': token, 'grant_type': 'refresh_token'} message = "Successfully refreshed Strava token." else: payload = { 'client_id': client_id, 'client_secret': client_secret, 'code': token, 'grant_type': 'authorization_code'} message = "Successfully authenticated with Strava using access code." 
response = return_json(url, "POST", payload=payload) helper.log_info(message) return response def kvstore_save_athlete(session_key, athlete_id, firstname, lastname, weight, ftp): # pylint: disable=too-many-arguments """Stores athlete's id, first name, last name, weight and ftp into strava_athlete KV Store collection.""" url = 'https://localhost:8089/servicesNS/nobody/TA-strava-for-splunk/storage/collections/data/strava_athlete/batch_save' headers = {'Content-Type': 'application/json', 'Authorization': f'Splunk {session_key}'} payload = [{"_key": athlete_id, "id": athlete_id, "firstname": firstname, "lastname": lastname, "fullname": firstname + " " + lastname, "weight": weight, "ftp": ftp}] helper.send_http_request(url, "POST", headers=headers, payload=payload, verify=False, use_proxy=False) def parse_data(data, activity_id, activity_start_date): """Gets raw JSON data, parses it into events and writes those to Splunk.""" data_dict = {} final_dict = {} for i in data: data_dict[i['type']] = i['data'] counter = 1 nrange = len(data_dict['time']) for item in range(1, nrange + 1): final_dict[item] = {} for key, value in data_dict.items(): counter = 1 for i in value: final_dict[counter][key] = i final_dict[counter]['activity_id'] = activity_id if 'time' in key: final_dict[counter]['time'] = final_dict[counter]['time'] + activity_start_date final_dict[counter]['time'] = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(final_dict[counter]['time'])) if 'latlng' in key: final_dict[counter]['lat'] = final_dict[counter]['latlng'][0] final_dict[counter]['lon'] = final_dict[counter]['latlng'][1] final_dict[counter].pop('latlng') counter += 1 result_list = [value for key, value in final_dict.items()] for event in result_list: write_to_splunk(index=helper.get_output_index(), sourcetype='strava:activities:stream', data=json.dumps(event)) helper.log_info(f'Added activity stream {activity_id} for {athlete_id}.') return True def return_json(url, method, **kwargs): """Gets JSON from URL and parses it for potential error messages.""" response = helper.send_http_request(url, method, use_proxy=False, **kwargs) try: response.raise_for_status() except requests.HTTPError as ex: # status code 429 means we hit Strava's API limit, wait till next 15 minute mark (+5 seconds) and try again if ex.response.status_code == 429: # Get the 15m/24h API limits for this user api_usage_15m = response.headers['X-RateLimit-Usage'].split(",")[0] api_usage_24h = response.headers['X-RateLimit-Usage'].split(",")[1] api_limit_15m = response.headers['X-RateLimit-Limit'].split(",")[0] api_limit_24h = response.headers['X-RateLimit-Limit'].split(",")[1] timestamp_now = int(time.time()) modulus_time = timestamp_now % 900 sleepy_time = 0 if modulus_time == 0 else (900 - modulus_time + 5) helper.log_warning(f'Strava API rate limit hit. Used {api_usage_15m}/15min (limit {api_limit_15m}), {api_usage_24h}/24h (limit {api_limit_24h}). Sleeping for {sleepy_time} seconds.') time.sleep(sleepy_time) response = return_json(url, method, **kwargs) helper.log_debug(f'429 detail: {response}') return response if ex.response.status_code in (400, 401): helper.log_error(f'{ex.response.status_code} Error: Strava API credentials invalid or session expired. 
Make sure Client ID & Client Secret have been added to the Configuration -> Add-On Parameters tab and your access code is valid.') sys.exit(1) if ex.response.status_code == 404: helper.log_warning(f'404 Error: no stream data for url {url}, can happen for manually added activities.') return False if ex.response.status_code == 500: helper.log_warning(f'500 Error: no data received from Strava API for url {url}, it might be corrupt or invalid. Skipping activity.') return False # In case there's any other error than the ones described above, log the error and exit. helper.log_error(f'Error: {ex}') sys.exit(1) # Must have been a 200 status code return response.json() def set_athlete(response): """Creates dict with athlete details, including token expiry.""" name = response['athlete']['firstname'] + " " + response['athlete']['lastname'] athlete = { 'id': response['athlete']['id'], 'name': name, 'access_token': response['access_token'], 'refresh_token': response['refresh_token'], 'expires_at': response['expires_at'], 'ts_activity': 0} return athlete def write_to_splunk(**kwargs): """Writes activity to Splunk index.""" event = helper.new_event(**kwargs) ew.write_event(event) # get configuration arguments client_id = helper.get_global_setting('client_id') client_secret = helper.get_global_setting('client_secret') access_code = helper.get_arg('access_code') start_time = helper.get_arg('start_time') or 0 types = ['time', 'distance', 'latlng', 'altitude', 'velocity_smooth', 'heartrate', 'cadence', 'watts', 'temp', 'moving', 'grade_smooth'] # stanza is the name of the input. This is a unique name and will be used as a checkpoint key to save/retrieve details about an athlete stanza = list(helper.get_input_stanza())[0] athlete = helper.get_check_point(stanza) helper.log_debug(f'Athlete: {athlete}') # if reindex_data checkbox is set, update the start_time to be the one specified and clear the checkbox. if helper.get_arg('reindex_data'): if int(helper.get_arg('reindex_data')) == 1: athlete.update({'ts_activity': start_time}) helper.save_check_point(stanza, athlete) # the clear_checkbox function will restart this input as soon as the change is made, so no further code required. clear_checkbox(helper.context_meta['session_key'], stanza) # if athlete is set, get details & tokens - otherwise fetch tokens with get_token() if athlete: athlete_id = athlete['id'] athlete_name = athlete['name'] expires_at = athlete['expires_at'] refresh_token = athlete['refresh_token'] else: expires_at = False refresh_token = False # Check if expires_at token is set and renew token if token expired. Otherwise fetch token with initial access code. 
if expires_at: if time.time() >= expires_at: response = get_token(client_id, client_secret, refresh_token, renewal=True) helper.log_debug(f"Access token: {response['access_token']}, refresh token: {response['refresh_token']}") athlete.update({'access_token': response['access_token'], 'refresh_token': response['refresh_token'], 'expires_at': response['expires_at']}) else: response = get_token(client_id, client_secret, access_code, renewal=False) athlete = set_athlete(response) athlete_id = athlete['id'] athlete_name = athlete['name'] helper.save_check_point(stanza, athlete) access_token = athlete['access_token'] athlete_detail = get_athlete(access_token) athlete_firstname = athlete_detail['firstname'] athlete_lastname = athlete_detail['lastname'] athlete_weight = '' athlete_ftp = '' if athlete_detail['resource_state'] == 3: athlete_weight = athlete_detail['weight'] athlete_ftp = athlete_detail['ftp'] helper.log_debug("Saving athlete's details to KV Store.") kvstore_save_athlete(helper.context_meta['session_key'], str(athlete_id), athlete_firstname, athlete_lastname, str(athlete_weight), str(athlete_ftp)) # For backwards compatibility with upgrades from pre-2.5.0, which uses athlete['ts_newest_activity']. If there, clean them up. if 'ts_newest_activity' in athlete: helper.log_info(f"Found existing timestamp {athlete['ts_newest_activity']}! Will remove it now.") ts_activity = athlete['ts_newest_activity'] athlete.update({'ts_activity': ts_activity}) athlete.pop('ts_newest_activity') athlete.pop('get_old_activities') athlete.pop('ts_oldest_activity') helper.save_check_point(stanza, athlete) else: ts_activity = athlete['ts_activity'] or start_time # webhook_updates contains updated activities that came in via webhook. webhook_updates = helper.get_check_point('webhook_updates') or {} if str(athlete_id) in webhook_updates: for activity in webhook_updates[str(athlete_id)][:]: helper.log_info(f'Received update via webhook for activity {activity} from athlete {athlete_id}') response = get_activity(activity, access_token) ts_activity = get_epoch(response['start_date']) # Store the event in Splunk write_to_splunk(index=helper.get_output_index(), sourcetype=helper.get_sourcetype(), data=json.dumps(response)) # Get stream data for this activity and write to Splunk stream_data = get_activity_stream(access_token, activity, types) if stream_data: parse_data(stream_data, activity, ts_activity) # Remove from dict and save dict webhook_updates[str(athlete_id)].remove(activity) helper.save_check_point('webhook_updates', webhook_updates) helper.log_info(f'Got all webhook events for athlete {athlete_id}') helper.log_info(f'Checking if there are new activities for {athlete_name} ({athlete_id})') while True: response_activities = get_activities(ts_activity, access_token) # if all activities retrieved, set get_old_activities, save checkpoint and end loop to finish if len(response_activities) == 0: # pylint: disable=no-else-break helper.log_info(f'All done, got all activities for {athlete_name} ({athlete_id})') break else: # Get more details from each activity for event in response_activities: activity_id = event['id'] response = get_activity(activity_id, access_token) # response = False for a 500 Error, which is likely an invalid Strava API file. In that case skip the activity and continue. 
if response: data = json.dumps(response) # Get start_date (UTC) and convert to UTC timestamp ts_activity = get_epoch(event['start_date']) # Store the event in Splunk write_to_splunk(index=helper.get_output_index(), sourcetype=helper.get_sourcetype(), data=data) helper.log_info(f'Added activity {activity_id} for {athlete_id}.') # Get stream data for this activity stream_data = get_activity_stream(access_token, activity_id, types) if stream_data: parse_data(stream_data, activity_id, ts_activity) # Save the timestamp of the last event to a checkpoint athlete.update({'ts_activity': ts_activity}) helper.save_check_point(stanza, athlete)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main():\n print(dumps(get_data()))\n return 0", "def main():\n print get_latest_data()", "def get_data():\n pass", "def get_data():\n pass", "def get_data():\n pass", "def get_data():\n pass", "def get_data():\n return", "def main():\n logger = logging.getLogger(__name__)\n logger.info('making final data set from raw data')\n get_user_config()", "def main() -> int:\n print(json.dumps(get_data())) # noqa: WPS421 (side-effect)\n return 0", "def main():\n\n parser = argparse.ArgumentParser('Download all data into a folder')\n parser.add_argument('--homepage_url', help='url for adap-kdb homepage', required=True)\n parser.add_argument('--msp_path', help='path for msv file', required=True)\n parser.add_argument('--user_name', help='username for login', required=True)\n parser.add_argument('--user_password', help='user password fpr login', required=True)\n args = parser.parse_args()\n\n homepage_url = args.homepage_url\n msp_path = args.msp_path\n user_name = args.user_name\n user_password = args.user_password\n\n individual_search_test(homepage_url, msp_path, user_name, user_password)", "async def main():\n async with aiohttp.ClientSession() as session:\n data = Luftdaten(SENSOR_ID, loop, session)\n await data.get_data()\n\n if not await data.validate_sensor():\n print(\"Station is not available:\", data.sensor_id)\n return\n\n if data.values and data.meta:\n # Print the sensor values\n print(\"Sensor values:\", data.values)\n\n # Print the coordinates fo the sensor\n print(\"Location:\", data.meta['latitude'], data.meta['longitude'])", "def main():\n\n data = get_data(URL)\n\n if not data:\n raise ValueError('No data to process')\n\n datacenters = [\n Datacenter(key, value)\n for key, value in data.items()\n ]\n\n pass # the rest of your logic here", "def main():\r\n mvip, user, user_pass, mvip_node = get_inputs()\r\n payload = build_payload()\r\n headers, url = build_auth(mvip, user, user_pass, mvip_node)\r\n response_json = connect_cluster(headers, url, payload)\r\n paired_vols = get_replication_status(response_json)\r\n payload = get_vol_stats(paired_vols)\r\n response_json = connect_cluster(headers, url, payload)\r\n parse_volume_stats(paired_vols, response_json)", "def execute():\n \n # URL to the datasets part of SHARKdata.\n sharkdata_url = u'http://sharkdata.se/datasets/'\n\n # Download a list of all available datasets. The JSON format is used.\n datasets = json.load(urllib2.urlopen(sharkdata_url + u'list.json'))\n \n # Exit if no datasets are found. \n if len(datasets) < 1:\n print(u'No datasets found. Script terminated.')\n return\n\n # Print some info for all available datasets.\n print(u'\\nAvailable datasets on SHARKdata:' + u'\\n')\n for dataset in datasets:\n print(u' Datatype: ' + dataset[u'datatype'] + u' Name: ' + dataset[u'dataset_name'])\n \n # Get the name of the first dataset in the list.\n dataset_name = datasets[0][u'dataset_name']\n \n # Download header and data and print the content. 
The text format is used.\n print(u'\\nPrint dataset content for: ' + dataset_name + u'\\n')\n header_and_data = urllib2.urlopen(sharkdata_url + dataset_name + u'/data.txt')\n \n for row in header_and_data:\n # The text format character encoding is cp1252 (equal to windows-1252).\n row = row.decode(u'cp1252')\n# print(row.strip())\n\n # Download header and data and save to file.\n dataset_name = datasets[0][u'dataset_name']\n filename = datasets[0][u'dataset_file_name'].replace(u'.zip', u'.txt')\n character_encoding = u'utf8' # Some alternatives: cp1252, utf-8, utf-16, ascii, latin1, macroman.\n row_delimiter = u'\\r\\n'\n print(u'\\nDataset content for: ' + dataset_name + u' to file: ' + filename + u'\\n')\n out_file = None\n try:\n out_file = codecs.open(filename, mode = 'w', encoding = character_encoding)\n header_and_data = urllib2.urlopen(sharkdata_url + dataset_name + u'/data.txt')\n for row in header_and_data:\n row = row.decode(u'cp1252')\n out_file.write(row.strip() + row_delimiter)\n finally:\n if out_file: out_file.close()", "def getData(version, scriptPath):\n print(\"getData\")\n popd=os.getcwd()\n dssvuePath = \"C:/Program Files (x86)/HEC/HEC-DSSVue/\"\n os.chdir(dssvuePath)\n # Path to script that extracts data from DSS file\n with open(scriptPath + \"version.txt\", 'wb') as vFile:\n pickle.dump([version], vFile)\n # Use HEC-DSSVue to run script (only way to use hec package that accesses DSS files)\n print(\"HEC-DSSVue.cmd\", \"-s\", scriptPath + \"getStageData.py\")\n call([\"HEC-DSSVue.cmd\", \"-s\", scriptPath + \"getStageData.py\"], shell=True)\n os.chdir(popd)", "def mover_get_data(lfns,\n path,\n sitename,\n queuename,\n stageinTries,\n inputpoolfcstring=\"xmlcatalog_file:PoolFileCatalog.xml\",\n ub=\"outdated\", # to be removed\n dsname=\"\",\n dsdict={},\n rucio_dataset_dictionary={},\n guids=[],\n analysisJob=False,\n usect=True,\n pinitdir=\"\",\n proxycheck=True,\n spsetup=\"\",\n tokens=[],\n userid=\"\",\n inputDir=\"\",\n jobId=None,\n jobDefId=\"\",\n access_dict=None,\n scope_dict=None,\n workDir=\"\",\n DN=None,\n dbh=None,\n jobPars=\"\",\n cmtconfig=\"\",\n filesizeIn=[],\n checksumIn=[],\n transferType=None,\n experiment=\"\",\n eventService=False,\n sourceSite=\"\"):\n\n tolog(\"Mover get data started\")\n\n statusPFCTurl = None\n pilotErrorDiag = \"\"\n\n # FAX counters (will be reported in jobMetrics; only relevant when FAX has been activated after a stage-in failure)\n N_filesWithoutFAX = 0\n N_filesWithFAX = 0\n bytesWithoutFAX = 0L\n bytesWithFAX = 0L\n\n # FAX control variable, if FAX is used as primary site mover in combination with direct I/O\n usedFAXandDirectIO = False\n\n # The FAX variables above will be stored in a dictionary, to be returned by this function\n FAX_dictionary = {}\n\n # Is the DBRelease file available locally?\n DBReleaseIsAvailable = handleDBRelease(dbh, lfns, jobPars, path)\n\n # Should stage-in be aborted? 
(if there are only locally available DBRelease files in the stage-in list)\n if abortStageIn(dbh, lfns, DBReleaseIsAvailable):\n return 0, pilotErrorDiag, statusPFCTurl, FAX_dictionary\n\n # Setup the dictionary necessary for all instrumentation\n report = getInitialTracingReport(userid, sitename, dsname, \"get_sm\", analysisJob, jobId, jobDefId, DN)\n\n if stageinTries != 0:\n get_RETRY = min(stageinTries, MAX_NUMBER_OF_RETRIES)\n else:\n get_RETRY = MAX_RETRY\n get_TIMEOUT = 5*3600/get_RETRY\n\n fail = 0\n guidfname = {}\n error = PilotErrors()\n\n region = readpar('region')\n\n # Space tokens currently not used for input files\n # # check if there is are any space tokens\n # _token = getProperSpaceTokenList(token, listSEs, len(lfns))\n\n # Select the correct mover\n copycmd, setup = getCopytool(mode=\"get\")\n\n # Get the sitemover object corresponding to the default copy command\n sitemover = getSiteMover(copycmd, setup)\n\n # Get the experiment object\n thisExperiment = getExperiment(experiment)\n\n # Get the name for the PFC file\n _path = path\n if eventService:\n # Update the path (create the PFC in one level above the payload workdir)\n path = os.path.abspath(os.path.join(path, '..'))\n pfc_name = getPFCName(path, inputpoolfcstring)\n # done with the event server modification (related to the PFC generation), reset the path again\n path = _path\n\n # Build the file info dictionary (use the filesize and checksum from the dispatcher if possible) and create the PFC\n # Format: fileInfoDic[file_nr] = (guid, gpfn, fsize, fchecksum, filetype, copytool)\n # replicas_dic[guid1] = [ replica1, .. ] where replicaN is an object of class replica\n ec, pilotErrorDiag, fileInfoDic, totalFileSize, replicas_dic = \\\n getFileInfo(region, ub, queuename, guids, dsname, dsdict, lfns, pinitdir, analysisJob, tokens, DN, sitemover, error, path, dbh, DBReleaseIsAvailable,\\\n scope_dict, pfc_name=pfc_name, filesizeIn=filesizeIn, checksumIn=checksumIn, thisExperiment=thisExperiment)\n if ec != 0:\n return ec, pilotErrorDiag, statusPFCTurl, FAX_dictionary\n\n # Until the Mover PFC file is no longer needed, call the TURL based PFC \"PoolFileCatalogTURL.xml\"\n pfc_name_turl = pfc_name.replace(\".xml\", \"TURL.xml\")\n\n # Create a SURL to space token dictionary\n tokens_dictionary = getSurlTokenDictionary(lfns, tokens)\n\n # Create a TURL based PFC if necessary/requested (i.e. 
if copy tool should not be used [useCT=False] and\n # if oldPrefix and newPrefix are not already set in copysetup [useSetPrefixes=False])\n ec, pilotErrorDiag, createdPFCTURL, usect = PFC4TURLs(analysisJob, transferType, fileInfoDic, pfc_name_turl, sitemover, sitename, usect, dsdict, eventService, tokens_dictionary, sitename, sourceSite, lfns)\n if ec != 0:\n return ec, pilotErrorDiag, statusPFCTurl, FAX_dictionary\n\n # Correct the total file size for the DBRelease file if necessary\n totalFileSize = correctTotalFileSize(totalFileSize, fileInfoDic, lfns, dbh, DBReleaseIsAvailable)\n\n # Only bother with the size checks if the copy tool is to be used (non-direct access mode)\n if usect:\n # Get a proper maxinputsize from schedconfig/default \n _maxinputsize = getMaxInputSize()\n\n # Check the total input file size\n ec, pilotErrorDiag = verifyInputFileSize(totalFileSize, _maxinputsize, error)\n if ec != 0:\n return ec, pilotErrorDiag, statusPFCTurl, FAX_dictionary\n\n # Do we have enough local space to stage in all data and run the job?\n ec, pilotErrorDiag = verifyAvailableSpace(sitemover, totalFileSize, path, error)\n if ec != 0:\n return ec, pilotErrorDiag, statusPFCTurl, FAX_dictionary\n\n # Get the replica dictionary from file (used when the primary replica can not be staged due to some temporary error)\n replica_dictionary = getReplicaDictionaryFile(path)\n\n # file counters\n N_files_on_tape = 0\n N_root_files = 0\n N_non_root_files = 0\n\n # If FAX is used as a primary site mover then set the default FAX mode to true, otherwise to false (normal mode)\n if copycmd == \"fax\":\n usedFAXMode = True\n else:\n usedFAXMode = False\n\n # Use isOneByOneFileTransfer() to determine if files should be transferred one by one or all at once\n if not sitemover.isOneByOneFileTransfer():\n\n # Note: this mode is used by the aria2c site mover only\n # Normal stage-in is below\n\n tolog(\"All files will be transferred at once\")\n\n # Extract the file info for the first file in the dictionary\n guid, gpfn, lfn, fsize, fchecksum, filetype, copytool = extractInputFileInfo(fileInfoDic[0], lfns)\n file_access = getFileAccess(access_dict, lfn)\n dsname = getDataset(lfn, dsdict)\n\n # Perform stage-in using the sitemover wrapper method\n s, pErrorText = sitemover_get_all_data(sitemover, error, gpfn, lfn, path, fsize=fsize, spsetup=spsetup, fchecksum=fchecksum,\\\n guid=guid, analysisJob=analysisJob, usect=usect, pinitdir=pinitdir, proxycheck=proxycheck,\\\n sitename=sitename, token=None, timeout=get_TIMEOUT, dsname=dsname, userid=userid, report=report,\\\n access=file_access, inputDir=inputDir, jobId=jobId, workDir=workDir, cmtconfig=cmtconfig, lfns=lfns,\\\n experiment=experiment, replicas_dic=replicas_dic, dsdict=dsdict, scope_dict=scope_dict)\n if s != 0:\n tolog('!!WARNING!!2999!! Failed during stage-in of multiple files: %s' % (error.getErrorStr(s)))\n tolog(\"Exit code: %s\" % (s))\n fail = s\n\n # Normal stage-in (one by one file transfers)\n if sitemover.isOneByOneFileTransfer() or fail != 0:\n \n tolog(\"Files will be transferred one by one\")\n\n # Reset any previous failure\n fail = 0\n\n # Loop over all files in the file info dictionary\n number_of_files = len(fileInfoDic.keys())\n tolog(\"Will process %d file(s)\" % (number_of_files))\n for nr in range(number_of_files):\n # Extract the file info from the dictionary\n guid, gpfn, lfn, fsize, fchecksum, filetype, copytool = extractInputFileInfo(fileInfoDic[nr], lfns)\n\n # Has the copycmd/copytool changed? (E.g. 
due to FAX) If so, update the sitemover object\n if copytool != copycmd:\n copycmd = copytool\n # Get the sitemover object corresponding to the new copy command\n sitemover = getSiteMover(copycmd, setup)\n tolog(\"Site mover object updated since copytool has changed\")\n\n # Update the dataset name\n dsname = getDataset(lfn, dsdict)\n proper_dsname = getDataset(lfn, rucio_dataset_dictionary)\n scope = getFileScope(scope_dict, lfn)\n\n # Update the tracing report with the proper container/dataset name\n report = updateReport(report, gpfn, proper_dsname, fsize, sitemover)\n report['scope'] = scope\n\n # The DBRelease file might already have been handled, go to next file\n if isDBReleaseFile(dbh, lfn) and DBReleaseIsAvailable:\n updateFileState(lfn, workDir, jobId, mode=\"transfer_mode\", state=\"no_transfer\", type=\"input\")\n guidfname[guid] = lfn # needed for verification below\n continue\n else:\n tolog(\"(Not a DBRelease file)\")\n\n tolog(\"Mover is preparing to copy file %d/%d (lfn: %s guid: %s dsname: %s)\" % (nr+1, number_of_files, lfn, guid, dsname))\n tolog('Copying %s to %s (file catalog checksum: \\\"%s\\\", fsize: %s) using %s (%s)' %\\\n (gpfn, path, fchecksum, fsize, sitemover.getID(), sitemover.getSetup()))\n\n # Get the number of replica retries\n get_RETRY_replicas = getNumberOfReplicaRetries(createdPFCTURL, replica_dictionary, guid)\n\n file_access = getFileAccess(access_dict, lfn)\n\n # Loop over get function to allow for multiple get attempts for a file\n will_use_direct_io = False\n get_attempt = 0\n\n #get_RETRY = 1 #2 #PN\n while get_attempt < get_RETRY:\n if get_attempt > 0:\n _rest = 5*60\n tolog(\"(Waiting %d seconds before next stage-in attempt)\" % (_rest))\n sleep(_rest)\n tolog(\"Get attempt %d/%d\" % (get_attempt + 1, get_RETRY))\n replica_number = 0\n replica_transferred = False\n s = 1\n\n # Loop over replicas\n while s != 0 and replica_number < get_RETRY_replicas:\n # Grab the gpfn from the replicas dictionary in case alternative replica stage-in is allowed\n gpfn = getAlternativeReplica(gpfn, guid, replica_number, createdPFCTURL, replica_dictionary)\n\n # Perform stage-in using the sitemover wrapper method\n s, pErrorText, N_files_on_tape, N_root_files, N_non_root_files, replica_transferred, will_use_direct_io = sitemover_get_data(sitemover, error,\\\n get_RETRY, get_RETRY_replicas, get_attempt,\\\n replica_number, N_files_on_tape, N_root_files,\\\n N_non_root_files, gpfn, lfn, path,\\\n fsize=fsize, spsetup=spsetup, fchecksum=fchecksum,\\\n guid=guid, analysisJob=analysisJob, usect=usect,\\\n pinitdir=pinitdir, proxycheck=proxycheck,\\\n sitename=sitename, token=None, timeout=get_TIMEOUT,\\\n dsname=dsname, userid=userid, report=report,\\\n access=file_access, inputDir=inputDir, jobId=jobId,\\\n workDir=workDir, cmtconfig=cmtconfig,\\\n experiment=experiment, scope_dict=scope_dict,\\\n sourceSite=sourceSite)\n # Get out of the multiple replica loop\n if replica_transferred:\n break\n\n # Increase the replica attempt counter in case the previous replica could not be transferred\n replica_number += 1\n\n # Get out of the multiple get attempt loop\n if replica_transferred:\n break\n\n # Increase the get attempt counter in case of failure to transfer the file\n get_attempt += 1\n\n # Increase the successful file transfer counter (used only when reporting FAX transfers)\n if s == 0:\n # note the special case if FAX is the primary site mover (normally FAX is the fallback)\n if sitemover.copyCommand == \"fax\":\n N_filesWithFAX += 1\n bytesWithFAX += 
long(fsize)\n else:\n # Normal case\n N_filesWithoutFAX += 1\n bytesWithoutFAX += long(fsize)\n\n if s != 0:\n # Normal stage-in failed, now try with FAX if possible\n if error.isPilotFAXErrorCode(s):\n if isFAXAllowed(filetype, gpfn) and transferType != \"fax\" and sitemover.copyCommand != \"fax\": # no point in trying to fallback to fax if the fax transfer above failed\n tolog(\"Normal stage-in failed, will attempt to use FAX\")\n usedFAXMode = True\n\n # Get the FAX site mover\n old_sitemover = sitemover\n sitemover = getSiteMover(\"fax\", \"\")\n\n # Perform stage-in using the sitemover wrapper method\n s, pErrorText, N_files_on_tape, N_root_files, N_non_root_files, replica_transferred, will_use_direct_io = sitemover_get_data(sitemover, error,\\\n get_RETRY, get_RETRY_replicas, get_attempt, replica_number,\\\n N_files_on_tape, N_root_files, N_non_root_files,\\\n gpfn, lfn, path,\\\n fsize=fsize, spsetup=spsetup, fchecksum=fchecksum,\\\n guid=guid, analysisJob=analysisJob, usect=usect,\\\n pinitdir=pinitdir, proxycheck=proxycheck,\\\n sitename=sitename, token=None, timeout=get_TIMEOUT,\\\n dsname=dsname, userid=userid, report=report,\\\n access=file_access, inputDir=inputDir, jobId=jobId,\\\n workDir=workDir, cmtconfig=cmtconfig, experiment=experiment)\n if replica_transferred:\n tolog(\"FAX site mover managed to transfer file from remote site (resetting error code to zero)\")\n pilotErrorDiag = \"\"\n s = 0\n\n # Increase the successful FAX transfer counter\n N_filesWithFAX += 1\n bytesWithFAX += long(fsize)\n else:\n tolog(\"FAX site mover also failed to transfer file from remote site, giving up\")\n\n # restore the old sitemover\n del sitemover\n sitemover = old_sitemover\n else:\n tolog(\"(Not an error code eligible for FAX fail-over)\")\n\n if s != 0:\n tolog('!!FAILED!!2999!! Failed to transfer %s: %s (%s)' % (os.path.basename(gpfn), s, error.getErrorStr(s)))\n tolog(\"Exit code: %s\" % (s))\n\n # report corrupt file to consistency server if needed\n if s == error.ERR_GETADMISMATCH or s == error.ERR_GETMD5MISMATCH or s == error.ERR_GETWRONGSIZE or s == error.ERR_NOSUCHFILE:\n reportFileCorruption(gpfn, sitemover)\n\n # exception for object stores\n if (gpfn.startswith(\"s3:\") or 'objectstore' in gpfn) and '.log.tgz' in gpfn:\n tolog(\"!!FAILED!!2999!! Failed to transfer a log file from S3 objectstore. Will skip it and continue the job.\")\n else:\n fail = s\n break\n\n # Build the dictionary used to create the PFC for the TRF\n # In the case of FAX, use the global paths if direct access is to be used for the particlar file\n if usedFAXMode and will_use_direct_io:\n # The site mover needed here is the FAX site mover since the global file path methods are defined there only\n old_sitemover = sitemover\n sitemover = getSiteMover(\"fax\", \"\")\n guidfname[guid] = sitemover.findGlobalFilePath(lfn, dsname, sitename, sourceSite)\n\n # Restore the old sitemover\n del sitemover\n sitemover = old_sitemover\n\n # If FAX is used as a primary site mover, in combination with direct access, set the usedFAXandDirectIO flag\n # this will later be used to update the run command (e.g. 
--lfcHost is not needed etc)\n if copycmd == \"fax\":\n usedFAXandDirectIO = True\n else:\n guidfname[guid] = lfn # local_file_name\n\n if fail == 0:\n # Make sure the PFC has the correct number of files\n fail, pilotErrorDiag = verifyPFCIntegrity(guidfname, lfns, dbh, DBReleaseIsAvailable, error)\n\n # Now that the Mover PFC file is no longer needed, back it up and rename the TURL based PFC if it exists\n # (the original PFC is no longer needed. Move it away, and then create the PFC for the trf/runAthena)\n # backupPFC4Mover(pfc_name)\n\n # Create a standard PFC with SURLs if needed (basically this is default)\n # note: if FAX was used as a primary site mover in combination with direct I/O, then the SURLs will actually be TURLs\n # but there is no need to use the special TURL creation method PFC4TURL used above (FAX will have returned the TURLs instead)\n createStandardPFC4TRF(createdPFCTURL, pfc_name_turl, pfc_name, guidfname)\n\n tolog(\"Number of identified root files : %d\" % (N_root_files))\n tolog(\"Number of transferred non-root files: %d\" % (N_non_root_files))\n\n if usedFAXMode:\n tolog(\"Number of files without FAX : %d (normal transfers)\" % (N_filesWithoutFAX))\n tolog(\"Number of files with FAX : %d (successful FAX transfers)\" % (N_filesWithFAX))\n tolog(\"Bytes without FAX : %d (normal transfers)\" % (bytesWithoutFAX))\n tolog(\"Bytes with FAX : %d (successful FAX transfers)\" % (bytesWithFAX))\n\n if N_files_on_tape > 0:\n tolog(\"!!WARNING!!2999!! Number of skipped files: %d (not staged)\" % (N_files_on_tape))\n if N_root_files == 0:\n # This should only happen for user jobs\n tolog(\"Mover get_data failed since no root files could be transferred\")\n fail = error.ERR_NOSTAGEDFILES\n else:\n tolog(\"Mover get_data finished (partial)\")\n else:\n if fail == 0:\n tolog(\"Get successful\")\n tolog(\"Mover get_data finished\")\n else:\n tolog(\"Mover get_data finished (failed)\")\n tolog(\"Will return exit code = %d, pilotErrorDiag = %s\" % (fail, pilotErrorDiag)) \n\n # Now populate the FAX dictionary before finishing\n FAX_dictionary = getFAXDictionary(N_filesWithoutFAX, N_filesWithFAX, bytesWithoutFAX, bytesWithFAX, usedFAXandDirectIO)\n\n return fail, pilotErrorDiag, statusPFCTurl, FAX_dictionary", "def main():\n\n # Setup `pysc` to use BASIC auth, with a username, and password. 
Also sets the endpoint to use.\n setup_sensorcloud_basic(CONSTS['SC_USERNAME'], CONSTS['SC_PASSWORD'],\n CONSTS['SC_ENDPOINT'], CONSTS['PYSC_DEBUG'])\n\n org_id = CONSTS['ORG_ID']\n\n # Ensure the organisation exists on the SensorCloud endpoint.\n try:\n organisation = pysc.models.Organisation.single(org_id)\n except KeyError:\n raise RuntimeWarning(\"\"\"The organisation named {:s} was not found.\\n\"\"\"\n \"\"\"Although the `pysc` api has functionality to create an organisation, it cannot \"\"\"\n \"\"\"do so on the sensor-cloud.io instance on AWS.\"\"\".format(org_id))\n # Ensure sanity, check we got the organisation that we asked for.\n assert (org_id == organisation.id)\n\n # Here we use the Group.resolve_all helper with organisation_id param to filter groups based on id\n # The resolve_all command is similar to .index() however it also calls .follow() on found link automatically,\n # _and_ it converts the resulting HAL objects into real valid `pysc` Group() objects.\n org_groups = pysc.models.Group.resolve_all(params={'organisation_id': org_id})\n # We are not likely to have more than 1000 groups, so we don't need to do return doc pagination here.\n for g in org_groups:\n group_id = g.id\n print(\"Found group: {:s}\".format(group_id))\n\n print(\"Found a total of {:d} groups for {:s} on that SensorCloud endpoint.\".format(len(org_groups), org_id))", "def get_data(self):", "def getStockData():\n pass", "def main() -> None:\n\n download_data_tools.initial_message()\n\n # S&P 500 companies, initial year and time step\n stocks: List[str] = download_data_tools.get_stocks(['all'])\n dates: List[str] = ['1992-01', '2012-12']\n time_step: str = '1d'\n\n # Basic folders\n download_data_tools.start_folders()\n\n # Run analysis\n # Download data\n portfolio_download_data(stocks, dates, time_step)\n\n print('Ay vamos!!!')", "def get_data_from_web():\n pass", "def main():\n try:\n init_file = open('keywords.json', 'r')\n init_file.close()\n except IOError:\n copy2('keywords.base', 'keywords.json')\n try:\n init_file = open('rsslist.json', 'r')\n init_file.close()\n except IOError:\n copy2('rsslist.base', 'rsslist.json')\n \n\n config_file = 'config.ini'\n config_section = 'dev'\n slack_token = load_config(config_file, config_section)\n slack_client = SlackClient(slack_token)\n feed_count = len(feed_db)\n feed_counter = feed_count\n while feed_counter > 0:\n url = feed_db.get(doc_id = feed_counter)['url']\n last_update_obj = feed_db.get(doc_id = feed_counter)['lastupdate']\n post_list, published_date = getfeed(url, last_update_obj)\n feed_counter = feed_counter - 1\n print(post_list)\n post_lastUpdate(url, published_date)\n post_to_slack(slack_client, post_list)", "def get_velib_data():\n api_url = \"https://api.jcdecaux.com/vls/v1/\"\n query_string = \"stations?contract=Paris&apiKey=\"\n api_key = \"ec29d3b17e5162e1459aaad45cddfe74fe832379\"\n my_url = api_url + query_string + api_key\n\n urlobj = URL.urlopen(my_url)\n data = json.load(urlobj)\n# data = urlobj.read()\n# help(data)\n return data", "def test_data_source_soaps_get(self):\n pass", "def main():\n\n # 1.0 DATA PREP\n\n # 1.1 Retrieve SWAPI representation of the Millenium Falcon (base)\n url = f\"{ENDPOINT}/starships\"\n params = {'search': 'falcon'}\n swapi_m_falcon = get_swapi_resource(url, params)[\"results\"][0]\n\n print(f\"\\nSWAPI = {swapi_m_falcon}\\n\")\n\n # 1.2 Read in additional Millenium Falcon data\n filepath = 'wookiee_m_falcon.json'\n wookiee_m_falcon = read_json(filepath)\n\n print(f\"\\nLocal = 
{wookiee_m_falcon}\\n\")\n\n # 1.3 Combine starship data dicts\n # Note: local vals replace swapi vals on matching keys\n\n # UNCOMMENT\n swapi_m_falcon.update(wookiee_m_falcon) # in-place (no assignment)\n\n print(f\"\\nCombined = {swapi_m_falcon}\\n\")\n\n\n # 2.0 WORK WITH CLASS INSTANCES\n\n # 2.1 Create Starship instance\n m_falcon = Starship(swapi_m_falcon['name'], swapi_m_falcon['url'])\n\n # 2.2 Bulk assign dictionary values to instance variables\n # assign_values() acts as a filter\n # Downside: overwrites init values\n\n #UNCOMMENT\n m_falcon.assign_values(swapi_m_falcon)\n\n print(f\"\\nm_falcon.armament = {m_falcon.armament}\\n\")\n\n # 3.0 ASSIGN CREW TO STARSHIP\n url = f\"{ENDPOINT}/people\"\n\n # 3.1 Get SWAPI Han Solo (Corellian smuggler, pilot)\n params = {'search': 'solo'}\n swapi_solo = get_swapi_resource(url, params)['results'][0]\n\n print(f\"\\nswapi_solo = {swapi_solo}\\n\")\n\n # Add instance variable values the conventional way)\n solo = Person(swapi_solo[\"name\"], swapi_solo['url']) # instantiate Person\n\n # UNCOMMENT\n solo.gender = swapi_solo['gender']\n solo.birth_year = swapi_solo[\"birth_year\"]\n solo.get_homeworld(swapi_solo[\"homeworld\"]) # fetch homeworld dict\n\n\n # 3.2 Get SWAPI Chewbacca (Wookiee, co-pilot)\n params = {'search': 'chewbacca'}\n swapi_chewie = get_swapi_resource(url, params)[\"results\"][0]\n\n print(f\"\\nswapi_chewie = {swapi_chewie}\\n\")\n\n chewie = Person(swapi_chewie[\"name\"], swapi_solo[\"url\"]) # instantiate Person\n\n # UNCOMMENT\n chewie.assign_values(swapi_chewie) # bulk assign\n chewie.get_homeworld(swapi_chewie['homeworld']) # fetch homeworld dicts\n\n # 3.3 Assign crew\n crew = {\"pilot\": solo, \"co-pilot\": chewie} # key = role (pilot), value = name\n\n # UNCOMMENT\n m_falcon.assign_crew(crew)\n\n\n # 4.0 WRITE TO FILE\n filepath = 'si506_m_falcon.json'\n\n # UNCOMMENT (fail)\n write_json(filepath, m_falcon) # raises TypeError exception\n\n # Serialize composite class instances (a complex object)\n # Implement ExtendEncoder(); reference in json.dump()\n\n # UNCOMMENT (success)\n write_complex_json(filepath, m_falcon)", "def fetch_data():\n if request.method == 'GET':\n return (\"Use this endpoint with POST method to fetch data\", 200)\n elif request.method == 'POST':\n # request data\n app.logger.info(\"Requesting data\")\n data = get_data('regulatorydecision')\n\n # write to file\n app.logger.info(\"Writing to file\")\n write_to_file(data)\n\n # upload to cloud storage\n app.logger.info(\"Uploading to GCS\")\n upload_to_gcs('data.json', 'health-ca-data-staging')\n\n # publish message to pubsub\n app.logger.info(\"Publishing status message to Pubsub\")\n message = \"Data uploaded to GCS\"\n pubsub_publish('projects/health-ca-data/topics/gcs_load', message, \"\")\n\n return (\"Fetching data\", 200)", "def ori_data_fbi(request):\n http = urllib3.PoolManager()\n\n\n #base_url=fbi_url(request)\n logging.Logger(base_url)\n print('I am inside the main function')\n # New request url\n request_url = base_url\n logging.Logger(request_url)\n\n\n payload = http.request('GET',\n request_url,\n headers={\n 'Content-Type': 'application/json',\n 'x-api-key': creds\n },\n fields={\n 'API_KEY':creds\n }\n )\n\n #*** only changing it for testing ***\n #return request_url\n # print(f'the type of payload is\\n {type(payload.data)}')\n print(payload.data)\n return load_into_bq(payload.data)\n #return payload.data", "def fetch_data(self):", "def pull_from_kvstore(helper, name, start, stats):\n resp = helper.send_http_request(\n 
url=_uri(helper),\n headers=_headers(helper),\n method='GET',\n verify=False,\n parameters={'query': json.dumps({'splunk_source': name})})\n resp.raise_for_status()\n\n ans = {}\n for v in resp.json():\n ans[v['indicator']] = {\n '_key': v['_key'],\n 'is_present': False,\n 'splunk_last_seen': v.get('splunk_last_seen', 0.0)}\n\n return ans", "def get_data(params):\n username = params['username']\n provider = params[\"provider\"]\n quality = params[\"quality\"]\n speed = params[\"speed\"]\n vpn = params[\"vpn\"]\n platform = params[\"platform\"]\n clean = params[\"clean\"]\n date = params[\"date\"]\n interface = params[\"interface\"]\n csvmode = params[\"csvmode\"]\n path = params['path']\n output_path = './data/collected/'\n\n output_file = '{}_{}_{}_{}_{}_{}_{}_{}.csv'.format(username, provider, quality, speed, vpn, platform, clean, date)\n command = 'python3.8 {} -i {} -s {} {}'.format(path, interface, csvmode, output_file)\n os.system(command)\n \n return", "def fetch_data():\n log = logging.getLogger(__name__)\n log.info('Checking data files...')\n if not os.path.isfile('CGN.txt'):\n params_cgn = {\n 'institute.code': ['NLD037'],\n # 'crops': ['tomato'],\n 'taxonomy.genus': ['Solanum', 'Lycopersicon'],\n 'taxonomy.species': species\n }\n cgn = GenesysParser(params_cgn)\n cgn.fetch2json('CGN.txt')\n log.info('CGN data has been saved.')\n else:\n log.info('CGN data file already exists.')\n\n if not os.path.isfile('USDA.txt'):\n params_usda = {\n 'institute.code': usda_all,\n # 'crops': ['tomato'],\n 'taxonomy.genus': ['Solanum', 'Lycopersicon'],\n 'taxonomy.species': species\n }\n usda = GenesysParser(params_usda)\n usda.fetch2json('USDA.txt')\n log.info('USDA data has been saved.')\n else:\n log.info('USDA data file already exists.')", "def main(unused_argv):\n make_dir(FLAGS.raw_dir)\n\n # Get paths of download/extracted training and evaluation files.\n print(\"Downloading data from source\")\n train_files = get_raw_files(FLAGS.raw_dir, constants.TRAIN_DATA_SOURCES)\n eval_files = get_raw_files(FLAGS.raw_dir, constants.EVAL_DATA_SOURCES)", "def main():\n if len(sys.argv) != 3:\n print(\"Usage: python season-get [year] [season, lowercase]\")\n return None\n\n year = sys.argv[1]\n season = sys.argv[2]\n print(\"Pulling from API\")\n\n API_url = \"http://api.jikan.moe/season/{}/{}\".format(year, season)\n\n # Error checking\n try:\n data = urllib.request.urlopen(API_url)\n except urllib.error.URLError as e:\n print(\"Error - {}\".format(e))\n return None\n print(\"Complete!\\n\")\n\n result = json.loads(data.read())['season']\n\n print(\"Loading IDs\")\n target_name = '{}-{}.txt'.format(year, season)\n target = open('./input/{}'.format(target_name), 'w')\n\n for anime in result:\n target.write(str(anime['mal_id']) + '\\n')\n print(\"Complete!\\n\")", "def retrieve_data(args):\n if args.model is None:\n error_exit(\"--model is required for retrieve!\")\n if args.grid_type is None:\n error_exit(\"--grid_type is required for retrieve!\")\n if args.init_time is None:\n error_exit(\"--init-time is required for retrieve!\")\n if args.variable is None:\n error_exit(\"--variable is required for retrieve!\")\n if args.level_type is None:\n error_exit(\"--level-type is required for retrieve!\")\n if args.levels is None:\n error_exit(\"--levels is required for retrieve!\")\n if args.lead_time is None:\n error_exit(\"--lead-time is required for retrieve!\")\n if args.dest is None:\n error_exit(\"--dest is required for retrieve!\")\n if not os.path.exists(args.dest):\n error_exit(f\"path not found: 
{args.dest}\")\n if not os.path.isdir(args.dest):\n error_exit(f\"path is not a directory: {args.dest}\")\n try:\n files = retrieve_nwp(model=args.model,\n grid_type=args.grid_type,\n variable=args.variable,\n level_type=args.level_type,\n levels=args.levels,\n init_time=args.init_time,\n forecast_hour=args.lead_time,\n dest=args.dest,\n merge_files=args.merge)\n for one_file in files:\n logging.info(f\"downloaded: {one_file}\")\n except ValueError as ve:\n error_exit(str(ve))", "def do_GET(s):\r\n print \"processing get request...\"\r\n s.send_response(200)\r\n s.send_header(\"Content-type\", \"application/json\")\r\n s.send_header(\"Access-Control-Allow-Origin\", \"*\")\r\n s.end_headers()\r\n\r\n inputUrl = s.path\r\n\r\n print \"####INPUT########\"\r\n print inputUrl\r\n print \"#####INPUT#######\"\r\n\r\n try:\r\n index = string.index(inputUrl, \"url=\") + 4\r\n urlParam = inputUrl[index:]\r\n print \"url request: \" + urlParam\r\n response = urllib.urlopen(urlParam).read()\r\n s.wfile.write(response)\r\n except:\r\n print \"no url specified\"\r\n\r\n try:\r\n index = string.index(inputUrl, \"random\") + 6\r\n randomParam = inputUrl[index:]\r\n scientists = open(\"C:\\Users\\ASUS 1\\Documents\\GitHub\\girlsWhoCode\\womenScientists\\women_scientists.txt\", 'r').readlines()\r\n size = len(scientists)\r\n fileIndex = random.randint(0, size)\r\n scientist = scientists[fileIndex]\r\n print \"random request returning: \" + scientist\r\n response = json.dumps(scientist)\r\n s.wfile.write(response)\r\n\r\n except Exception as e:\r\n print e.message\r\n print \"no female scientist requested\"\r\n\r\n try:\r\n index = string.index(inputUrl, \"wiki=\") + 5\r\n scientistParam = inputUrl[index:]\r\n scientist = ' '.join(scientistParam.split(\"%20\"))\r\n print \"wiki request for \" + scientist\r\n page = wikipedia.page(scientist)\r\n summary = page.summary\r\n response = json.dumps(summary)\r\n s.wfile.write(response)\r\n except:\r\n print \"no wiki request\"", "def main():\n # Create the flow\n with Flow('pickle flow') as flow:\n db_table = create_table()\n weather_data = get_weather(LAT_NYC, LON_NYC, 2018)\n parsed_data = parse_weather(weather_data)\n populated_table = store_weather(parsed_data)\n populated_table.set_upstream(db_table)\n\n # Run the flow\n flow.run()", "def _get_soundcloud_data(self):\n self.title, self.stream_url = get_track_info(self.URL)", "def main():\n spark = create_spark_session()\n\n input_data = config['STORAGE']['INPUT_DATA']\n output_data = config['STORAGE']['OUTPUT_DATA']\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def get_fred_data(url):\n pass", "def main():\n pods = openshift_object.get_running_pods()\n me = openshift_object.get_self()\n routes = openshift_object.get_routes()\n nodes = openshift_object.get_nodes()\n pvc = openshift_object.get_pvcs()\n pv = openshift_object.get_pv()\n project = openshift_object.get_projects()\n return jsonify({\n \"pods\": pods,\n \"me\": me,\n \"routes\": routes, \n \"nodes\":nodes,\n \"pvcs\":pvc,\n \"pv\":pv,\n \"projects\":project})", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"data/analytics\"\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def getInitialData(nsmapi):\r\n # Done 6-1-2020\r\n # TODO extract ids not using the regex?\r\n initData = {}\r\n\r\n url = f\"/healthcheck\"\r\n print(\"Running basic healthcheck\")\r\n healthcheckData = 
nsmapi.call(url, method=\"PUT\", message='{\"id\":[\"default\"]}')\r\n initData[\"healthcheck\"] = healthcheckData\r\n\r\n for i in range(20):\r\n print(f\".\", end=\"\", flush=True)\r\n time.sleep(.5)\r\n print(\"\")\r\n\r\n print(\"Getting initial sensor data\")\r\n url = \"/sensors\"\r\n basicData = json.dumps(nsmapi.call(url))\r\n dataType = url[1:].replace(\"/\", \"_\")\r\n initData[dataType] = []\r\n for id in re.findall(\"\\\"sensorId\\\":.*?, \\\"name\\\":.*?,\", basicData):\r\n if id[-1] == \",\":\r\n id = id[:-1]\r\n id = id.replace(\"\\\"\", \"\")\r\n id = id.replace(\": \", \":\")\r\n num, name = id.split(\",\")\r\n num = num.split(\":\")[-1]\r\n name = name.split(\":\")[-1]\r\n idName = f\"{num},{name}\"\r\n initData[dataType].append(idName)\r\n\r\n print(\"Getting initial domain data\")\r\n url = \"/domain\"\r\n basicData = json.dumps(nsmapi.call(url))\r\n dataType = url[1:].replace(\"/\", \"_\")\r\n initData[dataType] = []\r\n for id in re.findall(\"\\\"id\\\":.*?, \\\"name\\\":.*?,\", basicData):\r\n if id[-1] == \",\":\r\n id = id[:-1]\r\n id = id.replace(\"\\\"\", \"\")\r\n id = id.replace(\": \", \":\")\r\n num, name = id.split(\",\")\r\n num = num.split(\":\")[-1]\r\n name = name.split(\":\")[-1]\r\n idName = f\"{num},{name}\"\r\n initData[dataType].append(idName)\r\n\r\n policyURLs = [\r\n \"/domain/{domainId}/ipspolicies\",\r\n \"/domain/{domainId}/firewallpolicy\",\r\n \"/domain/{domainId}/connectionlimitingpolicies\",\r\n \"/domain/{domainId}/qospolicy\",\r\n \"/protectionoptionspolicy\",\r\n \"/domain/{domainId}/malwarepolicy\",\r\n \"/domain/{domainId}/policygroups\"\r\n ]\r\n\r\n print(\"Getting initial policy data\")\r\n initData[\"policy\"] = {}\r\n for domain in initData[\"domain\"]:\r\n domainId, domainName = domain.split(\",\")\r\n initData[\"policy\"][domainId] = {}\r\n for url in policyURLs:\r\n url = url.replace(\"{domainId}\", domainId)\r\n policyData = nsmapi.call(url)\r\n key = list(policyData.keys())[0]\r\n policyType = url.split(\"/\")[-1].replace(\"policy\", \"\").replace(\"policies\", \"\")\r\n initData[\"policy\"][domainId][policyType] = []\r\n for policy in policyData[key]:\r\n policy = json.dumps(policy)\r\n # pattern = \"\\\"([^\\\"]*?)(id|ID|iD|Id){0,1}(name){0,1}\\\": (.*?),\" - don't seem to work\r\n # extracted = re.findall(pattern, policy) - don'tens seem to works\r\n # initData[\"policy\"][domainId][policyType][\"full\"] = policy\r\n for polK, polV in json.loads(policy).items():\r\n if \"omain\" not in polK.lower():\r\n if \"name\" in polK.lower():\r\n name = polV\r\n elif \"id\" in polK.lower():\r\n id = polV\r\n initData[\"policy\"][domainId][policyType].append((id,name))\r\n\r\n print(\"Got Initial Data\")\r\n\r\n return initData", "def test():\n temp_data = fetch_temp_data(\n (\"https://opendata-download-metobs.smhi.se/api/version/\" +\n \"latest/parameter/1/station/52350/period/latest-day/data.json\"))\n data = temp_series(temp_data)\n print(data)", "def main(args):\n\n if args['verbose']:\n logging.basicConfig(level=logging.DEBUG)\n else:\n if args['quiet']:\n logging.basicConfig(level=logging.ERROR)\n else:\n logging.basicConfig(level=logging.WARNING)\n\n # unpack args\n\n json_file = args['JSONfile']\n data_dir = args['data_directory']\n temp_file = args['tmp']\n release = args['release']\n\n if json_file:\n json_data = get_json_data(json_file)\n else:\n logging.log(logging.DEBUG, \"Preparing to download JSONfile\")\n if os.path.isfile(temp_file):\n logging.log(logging.WARNING, \"Removing file %s\" % temp_file)\n 
os.remove(temp_file)\n logging.log(logging.DEBUG, \"Issuing wget for JSON file\")\n args = ['wget', 'https://security-tracker.debian.org/tracker/data/json',\n '-O', temp_file]\n if os.path.isdir('/etc/ssl'):\n if os.path.isdir('/etc/ssl/ca-debian'):\n args.insert(1, '--ca-directory=/etc/ssl/ca-debian')\n call(args)\n logging.log(logging.DEBUG, \"File %s received\" % temp_file)\n json_data = get_json_data(temp_file)\n if os.path.isfile(temp_file):\n logging.log(logging.DEBUG, \"Removing file %s\" % temp_file)\n os.remove(temp_file)\n\n parseJSON(json_data, release)\n parsedirs(data_dir, re.compile('^dsa.+\\.data$'), 2, release)\n parsedirs(data_dir, re.compile('^dla.+\\.data$'), 2, release)\n logging.log(logging.INFO, \"Finished parsing JSON data\")\n printdsas(ovals)", "def main():\n remote_folder = '/s3mnt/carla-rl/outputs'\n local_folder = '/home/jeremy/Insight/Project'\n dns = get_publicdns()\n\n print(\"fetching data from server...\")\n os.system('rsync=$(/home/jeremy/.scripts_custom/rsync_pull_aws \\\n ubuntu@{} {} {})'.format(dns, remote_folder,\n local_folder))\n print(\"pulled from {} to {}\".format(remote_folder,\n local_folder))", "def main():\n print('<<< Spotify Parser >>>')\n print('-'*101)\n spotify_api.get_json_file(JSON_TRACK_FILE, TRACK, 'track', CLIENT_ID, CLIENT_SECRET)\n parse_json_track(JSON_TRACK_FILE)\n print('-'*101)\n spotify_api.get_json_file(JSON_ARTIST_FILE, ARTIST, 'artist', CLIENT_ID, CLIENT_SECRET)\n parse_json_artist(JSON_ARTIST_FILE)\n print('-'*101)", "def download():\r\n reader = GSODDataReader()\r\n year_list = range(2001, 2012)\r\n austin = reader.collect_data(year_list, exact_station=True,\r\n station_name='AUSTIN CAMP MABRY', state='TX', country='US')\r\n houston = reader.collect_data(year_list, exact_station=True,\r\n station_name='HOUSTON/D.W. 
HOOKS', state='TX', country='US')\r\n new_york = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEW YORK/LA GUARDIA', state='NY', country='US')\r\n newark = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEWARK INTL AIRPORT', state='NJ', country='US')\r\n punta_arenas = reader.collect_data(year_list, exact_station=True,\r\n station_name='PUNTA ARENAS', country='CH')\r\n wellington = reader.collect_data(year_list, exact_station=True,\r\n station_name='WELLINGTON AIRPORT', country='NZ')\r\n store = HDFStore('weather.h5')\r\n store['austin'] = austin\r\n store['houston'] = houston\r\n store['nyc'] = new_york\r\n store['newark'] = newark\r\n store['punta_arenas'] = punta_arenas\r\n store['wellington'] = wellington\r\n store.close()", "def main():\n # First grab data from db\n engine = create_engine(\"sqlite:///cities.db\")\n session = Session(bind=engine)\n # Grab all data from flat stats table\n query = session.query(FlatStat).all()\n\n X = transform_to_np_array(query)\n\n # Run dbscan now\n results = DBSCAN(eps=2, min_samples=10).fit(X)\n\n # TODO: get matplotlib in here, but for now, just print all properties of dbscan\n print(results.__dict__)", "def main():\n #Set up objects\n client = BigQueryClient()\n writer = FileWriter()\n\n #Send data from big query to a given file.\n # 500 is the limit of data points fetched.\n client.produce_json_data(writer, 500)", "def fetch_zenodo(self):\n\n # full path to the stitches root directory where the example dir will be stored\n if self.data_dir is None:\n data_directory = pkg_resources.resource_filename('stitches', 'data')\n else:\n data_directory = self.data_dir\n\n # build needed subdirectories if they do not already exist\n tas_data_path = os.path.join(data_directory, \"tas-data\")\n temp_data_path = os.path.join(data_directory, \"temp-data\")\n if not os.path.exists(tas_data_path):\n os.mkdir(tas_data_path)\n if not os.path.exists(temp_data_path):\n os.mkdir(temp_data_path)\n\n # get the current version of stitches that is installed\n current_version = pkg_resources.get_distribution('stitches').version\n\n try:\n data_link = InstallPackageData.DATA_VERSION_URLS[current_version]\n\n except KeyError:\n msg = f\"Link to data missing for current version: {current_version}. Using default version: {InstallPackageData.DEFAULT_VERSION}\"\n\n data_link = InstallPackageData.DEFAULT_VERSION\n\n print(msg)\n\n # retrieve content from URL\n print(\"Downloading example data for stitches version {}. 
This may take a few minutes...\".format(current_version))\n response = requests.get(data_link)\n\n with zipfile.ZipFile(BytesIO(response.content)) as zipped:\n\n # extract each file in the zipped dir to the project\n for f in zipped.namelist():\n\n extension = os.path.splitext(f)[-1]\n\n # Extract only the csv and nc files\n if all([len(extension) > 0, extension in (\".csv\", \".nc\")]):\n\n basename = os.path.basename(f)\n\n # Check to see if tas-data is in the file path\n if \"tas-data\" in f:\n basename = os.path.join(\"tas-data\", basename)\n\n out_file = os.path.join(data_directory, basename)\n\n # extract to a temporary directory to be able to only keep the file out of the dir structure\n with tempfile.TemporaryDirectory() as tdir:\n\n # extract file to temporary directory\n zipped.extract(f, tdir)\n\n # construct temporary file full path with name\n tfile = os.path.join(tdir, f)\n\n print(f\"Unzipped: {out_file}\")\n # transfer only the file sans the parent directory to the data package\n shutil.copy(tfile, out_file)", "def sourcenews(source):\n\turlnews=urlsource\n\turl=urlnews+source\n\turlapi=url+'&'+'apiKey='\n\turlsour=urlapi+apikey\n\tresponse=requests.get(urlsour)\n\tdata=response.json()\n\treturn data", "def main():\n query_dp = SampleQueryDataPointsPredicate()\n query_dp.run()", "def GetData(self):\r\n if self.Error == False:\r\n Extra = {}\r\n try:\r\n result = {}\r\n temp = self.ScrapeMainWebpage()\r\n if temp != None:\r\n result.update(temp)\r\n temp = self.ScrapeParameters1Webpage()\r\n if temp != None:\r\n result.update(temp)\r\n temp = self.ScrapeParameters2Webpage()\r\n if temp != None:\r\n result.update(temp)\r\n temp = self.ScrapeStatusWebpage()\r\n if temp != None:\r\n result.update(temp)\r\n sqlArray = {}\r\n sqlArray[self.deviceDescr] = {}\r\n sqlArray[self.deviceDescr][self.devNumber] = {}\r\n sqlArray[self.deviceDescr][self.devNumber][\"General\"] = result\r\n sqlArray[self.deviceDescr][self.devNumber][\"_ExtractInfo\"] = {}\r\n sqlArray[self.deviceDescr][self.devNumber][\"_ExtractInfo\"][\"ExtractTime\"] = time.time()\r\n sqlArray[\"ReadError\"] = False \r\n return sqlArray\r\n \r\n except Exception as e: \r\n self.log.printError(\"ERROR in Retreiving Seatel VSAT Data,%s Module Error\" % sys._getframe().f_code.co_name) \r\n self.log.printError( str(e))\r\n self.Error = True\r\n Extra[\"ReadError\"] = True\r\n return Extra\r\n else:\r\n self.log.printWarning(\"%s skipped due to previous failure\" % sys._getframe().f_code.co_name)\r\n return None", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://udacity-data-lake/output/\"\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def main():\n\n # Start and import data\n print('>>>>> START')\n dates, prices, price_diff = get_data('data.csv')\n indices = [i for i in range(0, len(prices))]\n \n # Quick filtering to enable good visualization\n # This section can be commented if wished\n delete_outlayers(dates, price_diff)\n delete_outlayers(prices, price_diff)\n delete_outlayers(indices, price_diff)\n price_diff.remove(max(price_diff))\n price_diff.remove(min(price_diff))\n print('>>>>> STATUS: DATA FORMATTING DONE')\n\n # Model and prediction\n predicted_price = predict_price(indices, dates, price_diff, 1829)\n print('RESULTING PREDICTION = ', (predicted_price * -1) + prices[0])\n print('>>>>> DONE')", "def get_sdata(self):\n payload = self.get('data_request?id=sdata&output_format=json')\n return 
payload", "def on_get(self, req, resp):\n sample = load_data()\n\n print(sample.base.describe())\n resp.status = falcon.HTTP_200\n\n frame_str = str(sample.base.describe()) + \"\\n\"\n resp.body = frame_str\n # resp.body = sample.base.describe()", "def process_cmd():\n web_scraper = SainsburyWebscraper()\n logger.info(\"Sainsbury web scraper initialized and loaded data from SainsburyWebscraper\")\n\n json_data = web_scraper.get_product_data()\n logger.info(\"Found %s products with the following data:\" % len(json_data[\"results\"]))\n print json.dumps(json_data, indent=4, sort_keys=True)", "def _fetch_data(self, samples):\n pass", "def feed() -> None:\n ...", "def main() -> None:\n ROOT_DIR = dirname(abspath(__file__))\n spark = create_spark_session()\n input_data = 's3a://udacity-dend/'\n output_data = ROOT_DIR + '/data/'\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def main():\n\n print(\"Initiating Spark session...\")\n print('-' * 50)\n spark = create_spark_session()\n \n # Use these settings if you want to test on the full\n # dataset, but it takes a LONG time.\n song_input_data = config['AWS']['SONG_DATA']\n log_input_data = config['AWS']['LOG_DATA']\n \n # Uncomment the two lines if you want to test on\n # minimal data\n #song_input_data = config['AWS']['SINGLE_SONG_DATA']\n #log_input_data = config['AWS']['SINGLE_LOG_DATA']\n \n output_data = config['AWS']['OUTPUT_DATA']\n \n print('-' * 50)\n print(\"Processing song data...\")\n print('-' * 50)\n print('')\n process_song_data(spark, song_input_data, output_data)\n \n print('-' * 50) \n print(\"Processing log data...\")\n print('-' * 50)\n print('')\n process_log_data(spark, song_input_data, log_input_data, output_data)", "def _fetch_data(self):\n pass", "def get_data(self):\n pass", "def get_data(self):\n pass", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://udacity-nanodegree-data-engineer/\"\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def get():", "def get():", "def main():\n right_now = get_current_datetime()\n # print(right_now)\n existing_dict, unique_exist = get_sensor_dict()\n # print(type(existing_dict))\n # print()\n # print(sorted(unique_exist))\n whats_up_list = query_the_api()\n # print(whats_up_list)\n found = filter_json(whats_up_list)\n # print(found)\n lost_new_stillthere(sorted(unique_exist), found, existing_dict, right_now)", "def main():\n\n with open(\".auth_token\", mode=\"r\") as tokenfile:\n authtoken = tokenfile.read().strip()\n\n # Initialize connection to Archivist\n aconn = Archivist(\n \"https://soak-0-avid.engineering-k8s-stage-2.dev.wild.jitsuin.io\",\n auth=authtoken,\n )\n # Get all assets with required attributes and properties\n props = {\"confirmation_status\": \"CONFIRMED\"}\n attrs = {\"arc_display_type\": \"Traffic light\"}\n for event in aconn.events.list(asset_id=\"assets/-\", props=props, attrs=attrs):\n print(\"event\", event)\n\n # alternatively one could pull the list and cache locally...\n events = aconn.events.list(asset_id=\"assets/-\", props=props, attrs=attrs)\n for event in events:\n print(\"event\", event)", "def main(self):\n question = Parser(self.question)\n latitude = 0\n longitude = 0\n address = \"Bonjour, je vais bien merci !\"\n history = self._get_papy_sentence()\n article_title = \"\"\n location = question.get_location()\n google = GoogleApi(location, self.google_key)\n 
google_response = google.parse()\n if question.get_verification():\n latitude = google_response['latitude']\n longitude = google_response['longitude']\n address = google_response['address']\n if google_response['status'] == \"OK\":\n wiki = WikiApi(google_response['latitude'],\n google_response['longitude'])\n wiki_response = wiki.parse()\n if wiki_response['status'] == \"OK\":\n history = wiki_response['history']\n article_title = wiki_response['article_title']\n\n self.response_grandpy = {\n 'latitude': latitude,\n 'longitude': longitude,\n 'address': address,\n 'history': history,\n 'article_title': article_title\n }\n return self.response_grandpy", "def main(self):\n\n args = self.parser.parse_args()\n # Little magic/abomination to avoid a if-else\n # funct == search_all or search_any\n funct = getattr(self, \"search_\" + args.search_funct) \n cache_file_path = self.get_cache_file_path()\n\n # If cache is missing or user asked to force an update\n # then scrape the wiki\n if not os.path.isfile( cache_file_path ) or args.update:\n self.data_values = WikiTableScraper().scrapeTables()\n pickle.dump( self.data_values, open( cache_file_path, \"wb\" ) )\n # else load the minecraft objects from cache\n else:\n self.data_values = pickle.load( open( cache_file_path, \"rb\" ) )\n\n # call the search function (will return list of ids)\n for k in funct(args.words):\n print k.ljust(4), # Don't print a newline\n # Don't print the name if user requested only id's\n if not args.only_id:\n print self.data_values[k],\n print # print the newline now", "def get_data():\n log = common.LogFile('', LOGFILE)\n settings = load_settings()\n keywords = settings[\"keywords\"]\n api_key = settings[\"api_key\"]\n for keyword in keywords:\n print(\"[{}] : fetching data.\".format(keyword))\n filename = \"results_{0}.json\".format(keyword)\n results = {}\n hits_limit = 500\n start_at = 1\n counter = 0\n while True:\n url = create_url(keyword, hits_limit, start_at, api_key)\n records = get_records_from_url(url)\n total_results = get_total_hits(records)\n records = split_records(records)\n records_on_page = len(records)\n if records_on_page == 0:\n break\n else:\n for record in records:\n counter += 1\n id_no = extract_id_number(record)\n processed_dict = {'ID': id_no, 'problem': []}\n processed_record = parse_record(\n record, processed_dict, log)\n if id_no not in results:\n results[id_no] = processed_record\n if counter % 100 == 0:\n print(\"Processed {} out of {}\".format(\n counter, total_results))\n start_at += hits_limit\n time.sleep(THROTTLE)\n print(\"[{}] : fetched {} records to {}.\".format(\n keyword, len(results), filename))\n save_data(results, filename)", "def apicall():\r\n# try:\r\n print request.get_json()\r\n test_json = request.get_json()\r\n logger.info(\"input json object loaded\")\r\n logger.info(test_json)\r\n k=MetaData(test_json)\r\n int_res=k.getData()\r\n print '------------------------------'\r\n print int_res\r\n return jsonify(int_res)", "def main(self, name):\n\t\tapi_results = [] \n\t\tparams = self.get_search_parameters(name)\n\t\tapi_results.append(self.api_connect(params))\n\t\ttime.sleep(1.0)\n\t\tkey = api_results[0]['businesses'][0]\n\t\tbusiness_information = [key['name'], self.phone_number_organizer(key), key['rating'],\\\n\t\tkey['review_count']]\n\t\treturn business_information", "def main():\n\n # generate a token, we will be sending several queries off\n token = gen_token()\n # build the query string\n s_d, e_d = prev_quarter_boundaries(datetime.datetime.utcnow())\n s_str = 
s_d.strftime(\"%Y-%m-%d\")\n e_str = e_d.strftime(\"%Y-%m-%d\")\n query_str = (\n 'filingsource:\"Taiwan TWSE\" AND ' +\n 'enddate:[' + s_str + ' TO ' +\n e_str + ']'\n )\n # pull docs\n docs_res = documents_stringquery(query_str, False, token=token)\n # read out number of hits\n num_filings = docs_res['totalHits']\n # print it out\n print('Filing count from last quarter: ' + str(num_filings))", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"datafile\", help=\"Name of JSON file containing WP3-mapped metadata\")\n parser.add_argument(\"csvfile\", help=\"Corrected CSV file\")\n parser.add_argument(\"vcffile\", help=\"VCF file with samples\")\n args = parser.parse_args()\n\n samples = vcf_samples(args.vcffile)\n\n with open(args.datafile, 'r') as infile:\n data = json.load(infile)\n\n if not data:\n print(f\"Error reading data file {args.datafile}.\", file=sys.stderr)\n return\n\n with open(args.csvfile, newline='') as csvfile:\n reader = csv.DictReader(csvfile)\n for idx, row in enumerate(reader):\n data[idx][\"physiologicalMeasurements\"][\"anthropometry\"][\"weight\"] = [str(float(row['birth_weight_g'])/1000.), row['sbjt_weight_kg']]\n data[idx][\"physiologicalMeasurements\"][\"anthropometry\"][\"height\"] = [row['height_cm'], row['sbjt_length_cm']]\n data[idx][\"physiologicalMeasurements\"][\"circulationAndRespiration\"][\"bloodPressure\"] = [row['sbjt_blood_pressure_systolic']]\n if len(samples) > idx:\n data[idx][\"sample\"] = samples[idx]\n\n print(json.dumps(data))", "def main():\n # Take login credentials from the command line if provided\n # Otherwise, take them from your environment variables file ~/.profile\n description = ('Simple application that logs on to the APIC'\n ' and displays all of the Subnets.')\n creds = Credentials('apic', description)\n creds.add_argument('--tenant', help='The name of Tenant')\n args = creds.get()\n\n # Login to APIC\n session = Session(args.url, args.login, args.password)\n resp = session.login()\n if not resp.ok:\n print('%% Could not login to APIC')\n\n # Download all of the tenants, app profiles, and Subnets\n # and store the names as tuples in a list\n tenants = Tenant.get(session)\n for tenant in tenants:\n check_longest_name(tenant.name, \"Tenant\")\n if args.tenant is None:\n get_subnet(session, tenant)\n else:\n if tenant.name == args.tenant:\n get_subnet(session, tenant)\n\n # Display the data downloaded\n template = '{0:' + str(longest_names[\"Tenant\"]) + '} ' \\\n '{1:' + str(longest_names[\"Bridge Domain\"]) + '} ' \\\n '{2:' + str(longest_names[\"Subnet\"]) + '} ' \\\n '{3:' + str(longest_names[\"Scope\"]) + '}'\n print(template.format(\"Tenant\", \"Bridge Domain\", \"Subnet\", \"Scope\"))\n print(template.format('-' * longest_names[\"Tenant\"],\n '-' * longest_names[\"Bridge Domain\"],\n '-' * longest_names[\"Subnet\"],\n '-' * longest_names[\"Scope\"]))\n for rec in sorted(data):\n print(template.format(*rec))", "def get_data(self):\r\n pass", "def fetch_inspect_data(filename, output, db_url=None):\n r2dt.write_training_data(filename, db_url, output)", "def main():\n data = get_sales_data()\n sales_data = [int(num) for num in data]\n update_worksheet(sales_data, 'sales')\n new_surplus_data = calculate_surplus_sandwiches(sales_data)\n update_worksheet(new_surplus_data, 'surplus')\n list_of_last_five_sales = get_last_five_sales_entries()\n stock_data = get_average_sales(list_of_last_five_sales)\n update_worksheet(stock_data, 'stock')\n return stock_data", "def main():\n with requests.get(API_URL) as 
response:\n response.raise_for_status()\n data = response.json()\n\n title = data[\"title\"]\n extract = data[\"extract\"]\n\n click.secho(title, fg=\"green\")\n click.echo(textwrap.fill(extract))", "def main():\n #bearer_token = obtain_bearer_token(API_HOST, TOKEN_PATH)\n bearer_token ='SHdrjUqMJXqXBKUc7bGIplM8y6tnbwZbXXDbWPCd9wWMP8tX9PdJrC5MZHwJRhb7jMtLjXxT-hsWjNf2OkdiDWd30HsS84AVI5iRnrpxkak3HbWXAdUKvraQ_wgXWXYx'\n response = transaction_search(bearer_token, '1910 Entrepreneur Dr, Raleigh, NC 27518')\n response = response.get('businesses')\n print(json.dumps(response, indent=4))", "def retrieve(self, sid):\n zx = \"\".join([chr(random.randint(97,122)) for i in xrange(0, 11)])\n resdat = wavehttp.get(\"wave.google.com\", \n \"/wave/wfe/channel?VER=6&RID=rpc&SID=\"+sid+\n \"&CI=0&AID=0&TYPE=xmlhttp&zx=\"+zx+\"&t=1\").read()\n file(\"./tempdata\",\"w+\").write(resdat)\n print resdat", "def execute(self):\n logging.info(\"Gathering scorecard data for [%s]\", str(self.package_url))\n\n source_repo = self.get_source_repository()\n if not source_repo:\n return\n\n token = self.get_api_token(\"github\")\n if not token:\n logging.warning(\"Unable to retrieve Github token.\")\n return\n\n try:\n result = subprocess.run(\n f'docker run --rm -it --env \"GITHUB_AUTH_TOKEN={token}\" docker.io/library/scorecard --repo={source_repo} --format json',\n shell=True,\n stdout=subprocess.PIPE,\n )\n scorecard_output = result.stdout.decode(\"utf-8\")\n scorecard_output = scorecard_output[scorecard_output.find(\"{\") :]\n js = json.loads(scorecard_output)\n\n payloads = []\n\n for check in js.get(\"Checks\", []):\n check_name = check.get(\"CheckName\", \"\").lower().strip()\n if not check_name:\n continue\n pass_value = str(check.get(\"Pass\", False)).lower()\n\n payload = {\n \"package_url\": str(self.package_url),\n \"operation\": \"replace\",\n \"key\": f\"openssf.scorecard.raw.{check_name}\",\n \"values\": [{\"value\": pass_value, \"properties\": check}],\n }\n payloads.append(payload)\n\n res = requests.post(self.METRIC_API_ENDPOINT, json=payloads, timeout=120)\n if res.status_code == 200:\n logging.info(\"Success: %s\", res.text)\n else:\n logging.warning(\"Failure: status code: %s\", res.status_code)\n\n except Exception as msg:\n logging.warn(\"Error processing Scorecard data: %s\", msg)\n raise", "def get_input_data(sample):\n with checkpoints.query_portal.get(sample=sample).output[0].open() as f:\n data = json.read(f)\n return data", "def extract_data():\n args = arguments()\n\n if args.list is not None:\n songs = utility.get_songs(args.list)\n logger.debug(str(songs))\n if len(songs) != 0:\n logger.info(\"Downloading songs in {}\".format(args.list))\n for song_name in songs:\n logger.debug(song_name)\n args.SONG_NAME = [song_name]\n main(args)\n else:\n logger.info(\"{}: is empty\".format(args.list))\n elif args.SONG_NAME and yt.is_playlist(args.SONG_NAME[0]):\n logger.info(\"Youtube playlist passed...extracting!\")\n songs, playlist_name = yt.get_playlist(\n args.SONG_NAME[0],\n args.proxy,\n args.pl_start,\n args.pl_end,\n args.pl_items\n )\n\n # Check if data is actually returned\n if songs is None:\n logger.error(\"Couldn't extract playlist data!\")\n\n logger.info(\"Playlist: {}\".format(playlist_name))\n logger.info(\"{} songs found\".format(len(songs)))\n\n # Iterate and work on the data.\n url_base = \"https://www.youtube.com/watch?v=\"\n for song in songs:\n args.url = url_base + song[\"url\"]\n\n # Try to pass the title as well, if it's not there\n # that will be handled by ytmdl\n try:\n 
args.SONG_NAME = [stringutils.remove_yt_words(song[\"title\"])]\n except KeyError:\n pass\n\n main(args)\n else:\n main(args)", "def main():\n # the url for african daily and global daily\n african_dialy_url = \"https://data.chc.ucsb.edu/products/CHIRPS-2.0/africa_daily/tifs/p25/\"\n global_daily_url = \"https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_daily/tifs/p25/\"\n\n\n each_year_list = GetRasterYears(url=african_dialy_url)\n new_path = makenewdir(each_year_list)\n years_new_list = fecthrasterurl(url=african_dialy_url)\n downloadwithwget(each_year_list, years_new_list, new_path)", "def get_filters():\n print('Hello! Let\\'s explore some US bikeshare data!')", "def main():\r\n args = get_command_line_args()\r\n settings = read_settings(args)\r\n \r\n census_api_key = settings['census_api_key']\r\n state = settings['state']\r\n district = settings['district']\r\n leg_body = settings['leg_body']\r\n census_year = settings['census_year']\r\n election_year = settings['election_year']\r\n voting_precincts_file = settings['voting_precincts']\r\n voting_results_file = settings['voting_results']\r\n \r\n find_blockgroups_in_district(\r\n state=state,\r\n district=district,\r\n leg_body=leg_body,\r\n year=census_year\r\n )\r\n\r\n categories, district_data = make_district_data(\r\n api=census_api_key,\r\n state=state,\r\n district=district,\r\n leg_body=leg_body,\r\n year=census_year\r\n )\r\n\r\n # Estimate voting precinct data based on block group data\r\n district_data = make_voting_precinct_data(\r\n district_data=district_data, \r\n categories=categories,\r\n state=state,\r\n district=district,\r\n leg_body=leg_body,\r\n year=census_year,\r\n voting_precincts_file=voting_precincts_file\r\n )\r\n\r\n categories, district_data = make_voting_results_data(\r\n categories=categories, \r\n district_data=district_data, \r\n state=state, \r\n district=district, \r\n leg_body=leg_body, \r\n election_year=election_year,\r\n census_year=census_year,\r\n voting_precincts_file=voting_precincts_file, \r\n voting_results_file=voting_results_file\r\n )\r\n\r\n to_json(district_data, \"static/data/district-data.json\")\r\n to_json(categories, \"static/data/categories.json\")", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--allData', default='', action='store', nargs='+',\n help=\"add train test validation dataset together\")\n parser.add_argument('--topic', default='', help=\"target topic\")\n parser.add_argument('--contentWordNumber', default='', help=\"threshold for content Word Number\")\n parser.add_argument('--returnNSents', default='', help=\"top N sentences\")\n\n args = parser.parse_args()\n data = loadData(args.allData, args.topic, args.contentWordNumber)\n allDataDic, targetTweets = data.readData()\n \"\"\"\n ##modify at def_processing, re-run below line##\n outputDB = data.processing(allDataDic)\n with open(ROOT.DATA_ROOT+'/'+'allDataDic.txt', 'w') as file:\n file.write(json.dumps(outputDB))\n \"\"\"\n\n targData = data.processing(targetTweets)\n outputDB = json.load(open(ROOT.DATA_ROOT + '/allDataDic.txt'))\n contentWords = data.tfidf(outputDB, targData)\n return targetTweets,targData, contentWords", "def executeScriptToGetData():\n ulv = random.randrange(42, 420)\n llv = random.randrange(42, 420)\n urv = random.randrange(42, 420)\n lrv = ulv + llv + urv\n return {\n 'title': random.choice(['Sensors title', None]),\n 'description': random.choice(['Sensors description', None]),\n 'big-value': random.randrange(214, 514),\n 'upper-left-label': 'Critical:',\n 
'upper-left-value': ulv,\n 'lower-left-label': 'Major:',\n 'lower-left-value': llv,\n 'upper-right-label': 'Minor:',\n 'upper-right-value': urv,\n 'lower-right-label': 'All:',\n 'lower-right-value': lrv\n }", "def get_data(stage=0):\n return get_files(stage)[1]", "def main():\n spark = create_spark_session()\n\n # Used for local testing - commented out\n # input_data = \"./data/\"\n # output_data = \"./data/\"\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://allen-lesson4-datalake-bucket/\"\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)\n spark.stop()", "def main():\n app_environment = os.getenv(\"APP_ENV\", \"local\").strip().lower()\n config = load_config(app_environment)\n database_connection = mysql.connector.connect(**config[\"database\"])\n\n all_scores = retrieve_all_scores(database_connection)\n stats = calculate_stats(all_scores)\n print_stats(stats)\n\n grouped_scores = retrieve_grouped_scores(database_connection)\n print_score_spread(grouped_scores)\n\n return None", "def getFishData(species = \"none\"):\n #r = req.get(\"https://fishbase.ropensci.org/species?Species=\" + species)\n r = req.get(\"https://fishbase.ropensci.org/species\")\n my_dict = r.json()\n return my_dict", "def main():\n download_insert_title_basics()\n download_insert_title_principals()\n download_insert_name_basics()\n download_insert_title_ratings()\n scrap_keywords()\n create_and_insert_soup()\n return", "def main():\n ds = 72\n title = 'Journal'\n journal_name = 'my-journal'\n headers.dashes_line(ds)\n headers.print_header(title, ds)\n data = journal.load(journal_name)\n event_loop(journal_name, data)\n # list_entries(data)\n # add_entry(data)\n # journal.save(journal_name, data)", "def getResults():", "def get_data():\n try:\n with open(CONS[\"OUTPUT_FILE\"], \"r\") as file:\n data = json.load(file)[1]\n return data\n except FileNotFoundError:\n print(\"Data file not found.\")\n exit()", "def main():\n\n creds = None\n # The file token.json stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n\n # There was no need to do this with Google Sheets... I just felt like being extra.\n if os.path.exists('token.json'):\n creds = Credentials.from_authorized_user_file('token.json', SCOPES)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.json', 'w') as token:\n token.write(creds.to_json())\n\n service = build('sheets', 'v4', credentials=creds)\n\n # Call the Sheets API\n sheet = service.spreadsheets()\n map_df = pd.read_csv('data/rel_map.csv')\n \n for i, rows in map_df.iterrows():\n print(rows.show_season, rows.lead)\n first_url = rows.show_season\n write_from_URL(wiki_head + first_url, sheet, rows.lead, rows.show_season)" ]
[ "0.6757108", "0.63172305", "0.6213477", "0.6213477", "0.6213477", "0.6165671", "0.5924254", "0.5922558", "0.57879984", "0.57552046", "0.57526654", "0.5694825", "0.5639846", "0.563431", "0.55584145", "0.5550838", "0.55116856", "0.55113345", "0.55080515", "0.54997003", "0.54927516", "0.5474136", "0.54535395", "0.543659", "0.54038966", "0.5402808", "0.53993684", "0.5373335", "0.5370152", "0.5354122", "0.5350158", "0.5348618", "0.5347664", "0.5344709", "0.5325385", "0.5316728", "0.5310501", "0.5309305", "0.53040075", "0.5297039", "0.52939427", "0.52929664", "0.52827495", "0.5277507", "0.5267404", "0.5242603", "0.5240181", "0.5239137", "0.52365273", "0.5231257", "0.5219094", "0.5203438", "0.52026886", "0.5202155", "0.51955205", "0.51933306", "0.51911557", "0.51885474", "0.51884234", "0.51786786", "0.5173291", "0.51705956", "0.51660174", "0.5165454", "0.5165454", "0.516144", "0.5150534", "0.5150534", "0.51447725", "0.5135146", "0.5120877", "0.5113876", "0.51135", "0.51127636", "0.5112197", "0.5107421", "0.5106255", "0.51059496", "0.5103442", "0.510209", "0.5097731", "0.5094378", "0.50916314", "0.5083808", "0.5075849", "0.50745237", "0.5073821", "0.50703776", "0.50675374", "0.5062688", "0.5062507", "0.5062226", "0.5061161", "0.5057522", "0.5056712", "0.5056557", "0.5050686", "0.5047501", "0.50451946", "0.504488", "0.5044367" ]
0.0
-1