file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
12.1k
| suffix
large_stringlengths 0
12k
| middle
large_stringlengths 0
7.51k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
headers.js | var specify = require('specify')
, helpers = require('../helpers')
, timeout = helpers.timeout
, nano = helpers.nano
, nock = helpers.nock
;
var mock = nock(helpers.couch, "shared/headers")
, db = nano.use("shared_headers")
;
specify("shared_headers:setup", timeout, function (assert) {
nano.db.create("shared_headers", function (err) {
assert.equal(err, undefined, "Failed to create database");
});
});
specify("shared_headers:test", timeout, function (assert) {
db.attachment.insert("new", "att", "Hello", "text/plain",
function(error, hello) {
assert.equal(error, undefined, "Should store hello");
assert.equal(hello.ok, true, "Response should be ok");
assert.ok(hello.rev, "Should have a revision number");
nano.request({
db: "shared_headers",
doc: "new",
headers: { "If-None-Match": JSON.stringify(hello.rev) }
},
function (error, helloWorld, rh) {
assert.equal(error, undefined, "Should get the hello");
assert.equal(rh["status-code"], 304, "status is 'not modified'");
});
nano.request({
db: "shared_headers",
doc: "new",
att: "att" | function (error, helloWorld, rh) {
assert.equal(error, undefined, "Should get the hello");
assert.equal(rh["status-code"], 200, "status is 'ok'");
});
});
});
specify("shared_headers:teardown", timeout, function (assert) {
nano.db.destroy("shared_headers", function (err) {
assert.equal(err, undefined, "Failed to destroy database");
assert.ok(mock.isDone(), "Some mocks didn't run");
});
});
specify.run(process.argv.slice(2)); | }, | random_line_split |
fetch.ts | import Message from '../components/Message';
import socket from '../socket';
import { SEAL_TEXT, SEAL_USER_TIMEOUT } from '../../../utils/const';
/** 用户是否被封禁 */
let isSeal = false;
export default function fetch<T = any>(
event: string,
data = {},
{ toast = true } = {},
): Promise<[string | null, T | null]> {
if (isSe | }
resolve([res, null]);
} else {
resolve([null, res]);
}
});
});
}
| al) {
Message.error(SEAL_TEXT);
return Promise.resolve([SEAL_TEXT, null]);
}
return new Promise((resolve) => {
socket.emit(event, data, (res: any) => {
if (typeof res === 'string') {
if (toast) {
Message.error(res);
}
/**
* 服务端返回封禁状态后, 本地存储该状态
* 用户再触发接口请求时, 直接拒绝
*/
if (res === SEAL_TEXT) {
isSeal = true;
// 用户封禁和ip封禁时效不同, 这里用的短时间
setTimeout(() => {
isSeal = false;
}, SEAL_USER_TIMEOUT); | identifier_body |
fetch.ts | import Message from '../components/Message';
import socket from '../socket';
import { SEAL_TEXT, SEAL_USER_TIMEOUT } from '../../../utils/const';
/** 用户是否被封禁 */
let isSeal = false;
export default function fetch<T = any> | event: string,
data = {},
{ toast = true } = {},
): Promise<[string | null, T | null]> {
if (isSeal) {
Message.error(SEAL_TEXT);
return Promise.resolve([SEAL_TEXT, null]);
}
return new Promise((resolve) => {
socket.emit(event, data, (res: any) => {
if (typeof res === 'string') {
if (toast) {
Message.error(res);
}
/**
* 服务端返回封禁状态后, 本地存储该状态
* 用户再触发接口请求时, 直接拒绝
*/
if (res === SEAL_TEXT) {
isSeal = true;
// 用户封禁和ip封禁时效不同, 这里用的短时间
setTimeout(() => {
isSeal = false;
}, SEAL_USER_TIMEOUT);
}
resolve([res, null]);
} else {
resolve([null, res]);
}
});
});
}
| (
| identifier_name |
fetch.ts | import Message from '../components/Message';
import socket from '../socket'; | /** 用户是否被封禁 */
let isSeal = false;
export default function fetch<T = any>(
event: string,
data = {},
{ toast = true } = {},
): Promise<[string | null, T | null]> {
if (isSeal) {
Message.error(SEAL_TEXT);
return Promise.resolve([SEAL_TEXT, null]);
}
return new Promise((resolve) => {
socket.emit(event, data, (res: any) => {
if (typeof res === 'string') {
if (toast) {
Message.error(res);
}
/**
* 服务端返回封禁状态后, 本地存储该状态
* 用户再触发接口请求时, 直接拒绝
*/
if (res === SEAL_TEXT) {
isSeal = true;
// 用户封禁和ip封禁时效不同, 这里用的短时间
setTimeout(() => {
isSeal = false;
}, SEAL_USER_TIMEOUT);
}
resolve([res, null]);
} else {
resolve([null, res]);
}
});
});
} |
import { SEAL_TEXT, SEAL_USER_TIMEOUT } from '../../../utils/const';
| random_line_split |
fetch.ts | import Message from '../components/Message';
import socket from '../socket';
import { SEAL_TEXT, SEAL_USER_TIMEOUT } from '../../../utils/const';
/** 用户是否被封禁 */
let isSeal = false;
export default function fetch<T = any>(
event: string,
data = {},
{ toast = true } = {},
): Promise<[string | null, T | null]> {
if (isSeal) {
Message.error(SEAL_TEXT);
return Promise.resolve([SEAL_TEXT, null]);
}
return new Promise((resolve) => {
socket.emit(event, data, (res: any) => {
if (typeof res === 'string') {
if (toast) {
Message.error(res);
}
/**
* 服务端返回封禁状态后, 本地存储该状态
* 用户再触发接口请求时, 直接拒绝
*/
if (res === SEAL_TEXT) {
isSeal = true;
// 用户封禁和ip封禁时效不同, 这 | });
});
}
| 里用的短时间
setTimeout(() => {
isSeal = false;
}, SEAL_USER_TIMEOUT);
}
resolve([res, null]);
} else {
resolve([null, res]);
}
| conditional_block |
archive.py | from __future__ import print_function
import sys, time
import requests, urllib
import demjson, shelve
import os.path
class Archiver:
def __init__(self):
"""
A class for archiving URLS into the wayback machine
"""
self._machine = "http://archive.org/wayback/available?url="
self._arch = "https://web.archive.org/save/"
self.archived_urls = []
# load data
if os.path.isfile("archived_urls.dat"):
self.archived_urls = self.load_data()
def available(self, url, silent=False):
"""
:param: url
:param: silent=False
Checks if the given URL exists in the wayback machine.
The silent argument if set True does not print anything to the console
"""
print("[Checking]: %s\n" % url) if silent == False else 0
data = demjson.decode(requests.get(self._machine+url).text)["archived_snapshots"]
if "closest" in data:
print(self.print_item(data)) if silent == False else 0
return (data["closest"])["available"]
return False
def load_data(self):
"""
Loads the archived URLS from a file called archived_urls.dat
"""
return shelve.open("archived_urls.dat")["main"]
def out_text(self, filename):
"""
:param: filename
Outputs a list of archived urls into text format
"""
map(open(filename, 'w').write, map(lambda x : x+"\n",self.archived_urls))
print("Done.")
def save_data(self):
"""
Saves the archived urls into archived_urls.dat
"""
shelve.open("archived_urls.dat")["main"] = self.archived_urls
def archive(self, url):
"""
:param: url
Archves a url into the wayback machine.
"""
l = requests.get(self._arch+url)
print("Archiving...")
self.archived_urls.append(url)
self.save_data()
def print_item(self, data):
"""
:param: data
Print function for json data for archive data
"""
dat = data["closest"]
stamp = "Archived:%s\nAvailable:%s\nURL:%s\nStatus:%s" % (dat["timestamp"], dat['available'], dat['url'], dat['status'])
return stamp
def save_webpage(self, url, filename):
"""
:param: url
:param: filename
Saves a webpage
"""
print("[OK]: Saving webpage..")
if not os.path.isdir(os.getcwd()+"\\saved_webpages"): os.mkdir("saved_webpages")
open(os.getcwd()+"\\saved_webpages\\"+filename, 'w').write((requests.get(url).text).encode("utf-8"))
if os.path.isfile(os.getcwd()+"\\saved_webpages\\"+filename): print("Done.")
Help = \
" \
Usage: archive.py [option] [option2]\n \
\
Options:\n \
-CH/ch [url] - Check if a URL already exists in the wayback machine and return it's information if it does\n \
-ARCH/arch [url] - Archive a URL\n \
-CHARCH/charch [url] - Archive a url if it doesn't already exists\n \
-OUTT/outt [filename] - Output a list of archived urls in text format\n \
-H/h - Print this help message\n \
-LARCH/larch - print out a list of urls you archived\n \
-SAVE/save [url] [filename] - Save a url into a file"
def main():
global Help
A = Archiver()
args = map(lambda x : x.lower(), sys.argv[1:len(sys.argv)])
print(args)
if len(args) == 2:
print(args[0])
if args[0] == "-ch":
if A.available(args[1]) is True:
print("URL found.")
else:
print("URL not found in wayback machine.")
sys.exit(0)
elif args[0] == "-arch":
A.archive(args[1])
if A.available(args[1], True) is True:
print("[Success]: Archiving is successful")
else:
print("[Error]: Archiving failed!")
b = list(A.archived_urls[len(A.archived_urls)-1])
A.archived_urls.remove(A.archived_urls[len(A.archived_urls)-1])
b.insert(0, "FAILED TO ARCHIVE: ")
A.archived_urls.append(b)
sys.exit(0)
elif args[0] == "-charch":
main = A.available(args[1])
if main is True or main == "True":
|
elif main is False:
print("URL does not exist.")
A.archive(args[1])
sys.exit(0)
elif args[0] == "-outt":
A.out_text(args[1])
sys.exit(0)
elif len(args) == 3:
if args[0] == "-save":
A.save_webpage(args[1], args[2])
sys.exit(0)
elif len(args) == 1:
if args[0] == "-h":
print("-h")
print(Help)
sys.exit(0)
elif args[0] == "-larch":
print("-larch")
map(lambda x : print(x), A.archived_urls)
sys.exit(0)
else:
print("[Error]: Unknown argument \'%s\'" % args[0])
sys.exit(0)
else:
print("Archiver: No arguments found.\n Type '-h' for help")
sys.exit(0)
if __name__ == "__main__":
main()
| print("URL exists.") | conditional_block |
archive.py | from __future__ import print_function
import sys, time
import requests, urllib
import demjson, shelve
import os.path
class Archiver:
def __init__(self):
"""
A class for archiving URLS into the wayback machine
"""
self._machine = "http://archive.org/wayback/available?url="
self._arch = "https://web.archive.org/save/"
self.archived_urls = []
# load data
if os.path.isfile("archived_urls.dat"):
self.archived_urls = self.load_data()
def available(self, url, silent=False):
"""
:param: url
:param: silent=False
Checks if the given URL exists in the wayback machine.
The silent argument if set True does not print anything to the console
"""
print("[Checking]: %s\n" % url) if silent == False else 0
data = demjson.decode(requests.get(self._machine+url).text)["archived_snapshots"]
if "closest" in data:
print(self.print_item(data)) if silent == False else 0
return (data["closest"])["available"]
return False
def load_data(self):
"""
Loads the archived URLS from a file called archived_urls.dat
"""
return shelve.open("archived_urls.dat")["main"]
def | (self, filename):
"""
:param: filename
Outputs a list of archived urls into text format
"""
map(open(filename, 'w').write, map(lambda x : x+"\n",self.archived_urls))
print("Done.")
def save_data(self):
"""
Saves the archived urls into archived_urls.dat
"""
shelve.open("archived_urls.dat")["main"] = self.archived_urls
def archive(self, url):
"""
:param: url
Archves a url into the wayback machine.
"""
l = requests.get(self._arch+url)
print("Archiving...")
self.archived_urls.append(url)
self.save_data()
def print_item(self, data):
"""
:param: data
Print function for json data for archive data
"""
dat = data["closest"]
stamp = "Archived:%s\nAvailable:%s\nURL:%s\nStatus:%s" % (dat["timestamp"], dat['available'], dat['url'], dat['status'])
return stamp
def save_webpage(self, url, filename):
"""
:param: url
:param: filename
Saves a webpage
"""
print("[OK]: Saving webpage..")
if not os.path.isdir(os.getcwd()+"\\saved_webpages"): os.mkdir("saved_webpages")
open(os.getcwd()+"\\saved_webpages\\"+filename, 'w').write((requests.get(url).text).encode("utf-8"))
if os.path.isfile(os.getcwd()+"\\saved_webpages\\"+filename): print("Done.")
Help = \
" \
Usage: archive.py [option] [option2]\n \
\
Options:\n \
-CH/ch [url] - Check if a URL already exists in the wayback machine and return it's information if it does\n \
-ARCH/arch [url] - Archive a URL\n \
-CHARCH/charch [url] - Archive a url if it doesn't already exists\n \
-OUTT/outt [filename] - Output a list of archived urls in text format\n \
-H/h - Print this help message\n \
-LARCH/larch - print out a list of urls you archived\n \
-SAVE/save [url] [filename] - Save a url into a file"
def main():
global Help
A = Archiver()
args = map(lambda x : x.lower(), sys.argv[1:len(sys.argv)])
print(args)
if len(args) == 2:
print(args[0])
if args[0] == "-ch":
if A.available(args[1]) is True:
print("URL found.")
else:
print("URL not found in wayback machine.")
sys.exit(0)
elif args[0] == "-arch":
A.archive(args[1])
if A.available(args[1], True) is True:
print("[Success]: Archiving is successful")
else:
print("[Error]: Archiving failed!")
b = list(A.archived_urls[len(A.archived_urls)-1])
A.archived_urls.remove(A.archived_urls[len(A.archived_urls)-1])
b.insert(0, "FAILED TO ARCHIVE: ")
A.archived_urls.append(b)
sys.exit(0)
elif args[0] == "-charch":
main = A.available(args[1])
if main is True or main == "True":
print("URL exists.")
elif main is False:
print("URL does not exist.")
A.archive(args[1])
sys.exit(0)
elif args[0] == "-outt":
A.out_text(args[1])
sys.exit(0)
elif len(args) == 3:
if args[0] == "-save":
A.save_webpage(args[1], args[2])
sys.exit(0)
elif len(args) == 1:
if args[0] == "-h":
print("-h")
print(Help)
sys.exit(0)
elif args[0] == "-larch":
print("-larch")
map(lambda x : print(x), A.archived_urls)
sys.exit(0)
else:
print("[Error]: Unknown argument \'%s\'" % args[0])
sys.exit(0)
else:
print("Archiver: No arguments found.\n Type '-h' for help")
sys.exit(0)
if __name__ == "__main__":
main()
| out_text | identifier_name |
archive.py | from __future__ import print_function
import sys, time
import requests, urllib
import demjson, shelve
import os.path
class Archiver:
def __init__(self):
"""
A class for archiving URLS into the wayback machine
"""
self._machine = "http://archive.org/wayback/available?url="
self._arch = "https://web.archive.org/save/"
self.archived_urls = []
# load data
if os.path.isfile("archived_urls.dat"):
self.archived_urls = self.load_data()
def available(self, url, silent=False):
"""
:param: url
:param: silent=False
Checks if the given URL exists in the wayback machine.
The silent argument if set True does not print anything to the console
"""
print("[Checking]: %s\n" % url) if silent == False else 0
data = demjson.decode(requests.get(self._machine+url).text)["archived_snapshots"]
if "closest" in data:
print(self.print_item(data)) if silent == False else 0
return (data["closest"])["available"]
return False
def load_data(self):
"""
Loads the archived URLS from a file called archived_urls.dat
"""
return shelve.open("archived_urls.dat")["main"]
def out_text(self, filename):
"""
:param: filename
Outputs a list of archived urls into text format
"""
map(open(filename, 'w').write, map(lambda x : x+"\n",self.archived_urls))
print("Done.")
def save_data(self):
"""
Saves the archived urls into archived_urls.dat
"""
shelve.open("archived_urls.dat")["main"] = self.archived_urls
def archive(self, url):
"""
:param: url
Archves a url into the wayback machine.
"""
l = requests.get(self._arch+url)
print("Archiving...")
self.archived_urls.append(url)
self.save_data()
def print_item(self, data):
"""
:param: data
Print function for json data for archive data
"""
dat = data["closest"]
stamp = "Archived:%s\nAvailable:%s\nURL:%s\nStatus:%s" % (dat["timestamp"], dat['available'], dat['url'], dat['status'])
return stamp
def save_webpage(self, url, filename):
"""
:param: url
:param: filename
Saves a webpage
"""
print("[OK]: Saving webpage..")
if not os.path.isdir(os.getcwd()+"\\saved_webpages"): os.mkdir("saved_webpages")
open(os.getcwd()+"\\saved_webpages\\"+filename, 'w').write((requests.get(url).text).encode("utf-8"))
if os.path.isfile(os.getcwd()+"\\saved_webpages\\"+filename): print("Done.")
Help = \
" \
Usage: archive.py [option] [option2]\n \
\
Options:\n \
-CH/ch [url] - Check if a URL already exists in the wayback machine and return it's information if it does\n \
-ARCH/arch [url] - Archive a URL\n \
-CHARCH/charch [url] - Archive a url if it doesn't already exists\n \
-OUTT/outt [filename] - Output a list of archived urls in text format\n \
-H/h - Print this help message\n \
-LARCH/larch - print out a list of urls you archived\n \
-SAVE/save [url] [filename] - Save a url into a file"
def main():
global Help
A = Archiver()
args = map(lambda x : x.lower(), sys.argv[1:len(sys.argv)])
print(args)
if len(args) == 2:
print(args[0])
if args[0] == "-ch":
if A.available(args[1]) is True:
print("URL found.")
else:
print("URL not found in wayback machine.")
sys.exit(0)
elif args[0] == "-arch":
A.archive(args[1])
if A.available(args[1], True) is True:
print("[Success]: Archiving is successful")
else:
print("[Error]: Archiving failed!")
b = list(A.archived_urls[len(A.archived_urls)-1])
A.archived_urls.remove(A.archived_urls[len(A.archived_urls)-1])
b.insert(0, "FAILED TO ARCHIVE: ")
A.archived_urls.append(b)
sys.exit(0)
elif args[0] == "-charch":
main = A.available(args[1])
if main is True or main == "True":
print("URL exists.")
elif main is False:
| sys.exit(0)
elif args[0] == "-outt":
A.out_text(args[1])
sys.exit(0)
elif len(args) == 3:
if args[0] == "-save":
A.save_webpage(args[1], args[2])
sys.exit(0)
elif len(args) == 1:
if args[0] == "-h":
print("-h")
print(Help)
sys.exit(0)
elif args[0] == "-larch":
print("-larch")
map(lambda x : print(x), A.archived_urls)
sys.exit(0)
else:
print("[Error]: Unknown argument \'%s\'" % args[0])
sys.exit(0)
else:
print("Archiver: No arguments found.\n Type '-h' for help")
sys.exit(0)
if __name__ == "__main__":
main() | print("URL does not exist.")
A.archive(args[1])
| random_line_split |
archive.py | from __future__ import print_function
import sys, time
import requests, urllib
import demjson, shelve
import os.path
class Archiver:
def __init__(self):
"""
A class for archiving URLS into the wayback machine
"""
self._machine = "http://archive.org/wayback/available?url="
self._arch = "https://web.archive.org/save/"
self.archived_urls = []
# load data
if os.path.isfile("archived_urls.dat"):
self.archived_urls = self.load_data()
def available(self, url, silent=False):
|
def load_data(self):
"""
Loads the archived URLS from a file called archived_urls.dat
"""
return shelve.open("archived_urls.dat")["main"]
def out_text(self, filename):
"""
:param: filename
Outputs a list of archived urls into text format
"""
map(open(filename, 'w').write, map(lambda x : x+"\n",self.archived_urls))
print("Done.")
def save_data(self):
"""
Saves the archived urls into archived_urls.dat
"""
shelve.open("archived_urls.dat")["main"] = self.archived_urls
def archive(self, url):
"""
:param: url
Archves a url into the wayback machine.
"""
l = requests.get(self._arch+url)
print("Archiving...")
self.archived_urls.append(url)
self.save_data()
def print_item(self, data):
"""
:param: data
Print function for json data for archive data
"""
dat = data["closest"]
stamp = "Archived:%s\nAvailable:%s\nURL:%s\nStatus:%s" % (dat["timestamp"], dat['available'], dat['url'], dat['status'])
return stamp
def save_webpage(self, url, filename):
"""
:param: url
:param: filename
Saves a webpage
"""
print("[OK]: Saving webpage..")
if not os.path.isdir(os.getcwd()+"\\saved_webpages"): os.mkdir("saved_webpages")
open(os.getcwd()+"\\saved_webpages\\"+filename, 'w').write((requests.get(url).text).encode("utf-8"))
if os.path.isfile(os.getcwd()+"\\saved_webpages\\"+filename): print("Done.")
Help = \
" \
Usage: archive.py [option] [option2]\n \
\
Options:\n \
-CH/ch [url] - Check if a URL already exists in the wayback machine and return it's information if it does\n \
-ARCH/arch [url] - Archive a URL\n \
-CHARCH/charch [url] - Archive a url if it doesn't already exists\n \
-OUTT/outt [filename] - Output a list of archived urls in text format\n \
-H/h - Print this help message\n \
-LARCH/larch - print out a list of urls you archived\n \
-SAVE/save [url] [filename] - Save a url into a file"
def main():
global Help
A = Archiver()
args = map(lambda x : x.lower(), sys.argv[1:len(sys.argv)])
print(args)
if len(args) == 2:
print(args[0])
if args[0] == "-ch":
if A.available(args[1]) is True:
print("URL found.")
else:
print("URL not found in wayback machine.")
sys.exit(0)
elif args[0] == "-arch":
A.archive(args[1])
if A.available(args[1], True) is True:
print("[Success]: Archiving is successful")
else:
print("[Error]: Archiving failed!")
b = list(A.archived_urls[len(A.archived_urls)-1])
A.archived_urls.remove(A.archived_urls[len(A.archived_urls)-1])
b.insert(0, "FAILED TO ARCHIVE: ")
A.archived_urls.append(b)
sys.exit(0)
elif args[0] == "-charch":
main = A.available(args[1])
if main is True or main == "True":
print("URL exists.")
elif main is False:
print("URL does not exist.")
A.archive(args[1])
sys.exit(0)
elif args[0] == "-outt":
A.out_text(args[1])
sys.exit(0)
elif len(args) == 3:
if args[0] == "-save":
A.save_webpage(args[1], args[2])
sys.exit(0)
elif len(args) == 1:
if args[0] == "-h":
print("-h")
print(Help)
sys.exit(0)
elif args[0] == "-larch":
print("-larch")
map(lambda x : print(x), A.archived_urls)
sys.exit(0)
else:
print("[Error]: Unknown argument \'%s\'" % args[0])
sys.exit(0)
else:
print("Archiver: No arguments found.\n Type '-h' for help")
sys.exit(0)
if __name__ == "__main__":
main()
| """
:param: url
:param: silent=False
Checks if the given URL exists in the wayback machine.
The silent argument if set True does not print anything to the console
"""
print("[Checking]: %s\n" % url) if silent == False else 0
data = demjson.decode(requests.get(self._machine+url).text)["archived_snapshots"]
if "closest" in data:
print(self.print_item(data)) if silent == False else 0
return (data["closest"])["available"]
return False | identifier_body |
identity.rs | use std::ops::{Mul, MulAssign, Add, AddAssign, Div, DivAssign};
use std::marker::PhantomData;
use std::cmp::{PartialOrd, Ordering};
use std::fmt;
use num::{Num, Zero, One};
use num_complex::Complex;
use approx::ApproxEq;
use general::{AbstractMagma, AbstractGroup, AbstractLoop, AbstractMonoid, AbstractQuasigroup,
AbstractSemigroup, Operator, Inverse, AbstractGroupAbelian, SubsetOf, Additive,
Multiplicative, MeetSemilattice, JoinSemilattice, Lattice};
/// A type that is equipped with identity.
pub trait Identity<O: Operator> {
/// The identity element.
fn identity() -> Self;
/// Specific identity.
#[inline]
fn id(_: O) -> Self
where Self: Sized
{
Self::identity()
}
}
impl_ident!(Additive; 0; u8, u16, u32, u64, usize, i8, i16, i32, i64, isize);
impl_ident!(Additive; 0.; f32, f64);
#[cfg(decimal)]
impl_ident!(Additive; 0.; decimal::d128);
impl_ident!(Multiplicative; 1; u8, u16, u32, u64, usize, i8, i16, i32, i64, isize);
impl_ident!(Multiplicative; 1.; f32, f64);
#[cfg(decimal)]
impl_ident!(Multiplicative; 1.; decimal::d128);
impl<N: Identity<Additive>> Identity<Additive> for Complex<N> {
#[inline]
fn identity() -> Self {
Complex {
re: N::identity(),
im: N::identity()
}
}
}
impl<N: Num + Clone> Identity<Multiplicative> for Complex<N> {
#[inline]
fn identity() -> Self {
Complex::new(N::one(), N::zero())
}
}
/// The universal identity element wrt. a given operator, usually noted `Id` with a
/// context-dependent subscript.
///
/// By default, it is the multiplicative identity element. It represents the degenerate set
/// containing only the identity element of any group-like structure. It has no dimension known at
/// compile-time. All its operations are no-ops.
#[repr(C)]
#[derive(Debug)]
pub struct Id<O: Operator = Multiplicative> {
_op: PhantomData<O>
}
impl<O: Operator> Id<O> {
/// Creates a new identity element.
#[inline]
pub fn new() -> Id<O> {
Id {
_op: PhantomData
}
}
}
impl<O: Operator> Copy for Id<O> { }
impl<O: Operator> Clone for Id<O> {
#[inline]
fn clone(&self) -> Id<O> {
Id::new()
}
}
impl<O: Operator> fmt::Display for Id<O> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Identity element")
}
}
impl<O: Operator> PartialEq for Id<O> {
#[inline]
fn eq(&self, _: &Id<O>) -> bool {
true
}
}
impl<O: Operator> Eq for Id<O> { }
impl<O: Operator> PartialOrd for Id<O> {
#[inline]
fn partial_cmp(&self, _: &Id<O>) -> Option<Ordering> {
Some(Ordering::Equal)
}
}
impl<O: Operator> Identity<O> for Id<O> {
#[inline]
fn identity() -> Id<O> |
}
impl<O: Operator> ApproxEq for Id<O> {
type Epsilon = Id<O>;
#[inline]
fn default_epsilon() -> Self::Epsilon {
Id::new()
}
#[inline]
fn default_max_relative() -> Self::Epsilon {
Id::new()
}
#[inline]
fn default_max_ulps() -> u32 {
0
}
#[inline]
fn relative_eq(&self, _: &Self, _: Self::Epsilon, _: Self::Epsilon) -> bool {
true
}
#[inline]
fn ulps_eq(&self, _: &Self, _: Self::Epsilon, _: u32) -> bool {
true
}
}
/*
*
* Algebraic structures.
*
*/
impl Mul<Id> for Id {
type Output = Id;
fn mul(self, _: Id) -> Id {
self
}
}
impl MulAssign<Id> for Id {
fn mul_assign(&mut self, _: Id) {
// no-op
}
}
impl Div<Id> for Id {
type Output = Id;
fn div(self, _: Id) -> Id {
self
}
}
impl DivAssign<Id> for Id {
fn div_assign(&mut self, _: Id) {
// no-op
}
}
impl Add<Id> for Id {
type Output = Id;
fn add(self, _: Id) -> Id {
self
}
}
impl AddAssign<Id> for Id {
fn add_assign(&mut self, _: Id) {
// no-op
}
}
impl<O: Operator> AbstractMagma<O> for Id<O> {
#[inline]
fn operate(&self, _: &Self) -> Id<O> {
Id::new()
}
}
impl<O: Operator> Inverse<O> for Id<O> {
#[inline]
fn inverse(&self) -> Self {
Id::new()
}
#[inline]
fn inverse_mut(&mut self) {
// no-op
}
}
impl<O: Operator> AbstractSemigroup<O> for Id<O> { }
impl<O: Operator> AbstractQuasigroup<O> for Id<O> { }
impl<O: Operator> AbstractMonoid<O> for Id<O> { }
impl<O: Operator> AbstractLoop<O> for Id<O> { }
impl<O: Operator> AbstractGroup<O> for Id<O> { }
impl<O: Operator> AbstractGroupAbelian<O> for Id<O> { }
impl One for Id {
#[inline]
fn one() -> Id {
Id::new()
}
}
impl Zero for Id {
#[inline]
fn zero() -> Id {
Id::new()
}
#[inline]
fn is_zero(&self) -> bool {
true
}
}
/*
*
* Conversions.
*
*/
impl<O: Operator, T: PartialEq + Identity<O>> SubsetOf<T> for Id<O> {
#[inline]
fn to_superset(&self) -> T {
T::identity()
}
#[inline]
fn is_in_subset(t: &T) -> bool {
*t == T::identity()
}
#[inline]
unsafe fn from_superset_unchecked(_: &T) -> Self {
Id::new()
}
}
impl<O: Operator> MeetSemilattice for Id<O> {
#[inline]
fn meet(&self, _: &Self) -> Self {
Id::new()
}
}
impl<O: Operator> JoinSemilattice for Id<O> {
#[inline]
fn join(&self, _: &Self) -> Self {
Id::new()
}
}
impl<O: Operator> Lattice for Id<O> {
}
| {
Id::new()
} | identifier_body |
identity.rs | use std::ops::{Mul, MulAssign, Add, AddAssign, Div, DivAssign};
use std::marker::PhantomData;
use std::cmp::{PartialOrd, Ordering};
use std::fmt;
use num::{Num, Zero, One};
use num_complex::Complex;
use approx::ApproxEq;
use general::{AbstractMagma, AbstractGroup, AbstractLoop, AbstractMonoid, AbstractQuasigroup,
AbstractSemigroup, Operator, Inverse, AbstractGroupAbelian, SubsetOf, Additive,
Multiplicative, MeetSemilattice, JoinSemilattice, Lattice};
/// A type that is equipped with identity.
pub trait Identity<O: Operator> {
/// The identity element.
fn identity() -> Self;
/// Specific identity.
#[inline]
fn id(_: O) -> Self
where Self: Sized
{
Self::identity()
}
}
impl_ident!(Additive; 0; u8, u16, u32, u64, usize, i8, i16, i32, i64, isize);
impl_ident!(Additive; 0.; f32, f64);
#[cfg(decimal)]
impl_ident!(Additive; 0.; decimal::d128);
impl_ident!(Multiplicative; 1; u8, u16, u32, u64, usize, i8, i16, i32, i64, isize);
impl_ident!(Multiplicative; 1.; f32, f64);
#[cfg(decimal)]
impl_ident!(Multiplicative; 1.; decimal::d128);
impl<N: Identity<Additive>> Identity<Additive> for Complex<N> {
#[inline]
fn identity() -> Self {
Complex {
re: N::identity(),
im: N::identity()
}
}
}
impl<N: Num + Clone> Identity<Multiplicative> for Complex<N> {
#[inline]
fn identity() -> Self {
Complex::new(N::one(), N::zero())
}
}
/// The universal identity element wrt. a given operator, usually noted `Id` with a
/// context-dependent subscript.
///
/// By default, it is the multiplicative identity element. It represents the degenerate set
/// containing only the identity element of any group-like structure. It has no dimension known at
/// compile-time. All its operations are no-ops.
#[repr(C)]
#[derive(Debug)]
pub struct Id<O: Operator = Multiplicative> {
_op: PhantomData<O>
}
impl<O: Operator> Id<O> {
/// Creates a new identity element.
#[inline]
pub fn new() -> Id<O> {
Id {
_op: PhantomData
}
}
}
impl<O: Operator> Copy for Id<O> { }
impl<O: Operator> Clone for Id<O> {
#[inline]
fn clone(&self) -> Id<O> {
Id::new()
}
}
impl<O: Operator> fmt::Display for Id<O> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Identity element")
}
}
impl<O: Operator> PartialEq for Id<O> {
#[inline]
fn eq(&self, _: &Id<O>) -> bool {
true
}
}
impl<O: Operator> Eq for Id<O> { }
impl<O: Operator> PartialOrd for Id<O> {
#[inline]
fn partial_cmp(&self, _: &Id<O>) -> Option<Ordering> {
Some(Ordering::Equal)
}
}
impl<O: Operator> Identity<O> for Id<O> {
#[inline]
fn identity() -> Id<O> {
Id::new()
}
}
impl<O: Operator> ApproxEq for Id<O> {
type Epsilon = Id<O>;
#[inline]
fn default_epsilon() -> Self::Epsilon {
Id::new()
}
#[inline]
fn default_max_relative() -> Self::Epsilon {
Id::new()
}
#[inline]
fn default_max_ulps() -> u32 {
0
}
#[inline]
fn | (&self, _: &Self, _: Self::Epsilon, _: Self::Epsilon) -> bool {
true
}
#[inline]
fn ulps_eq(&self, _: &Self, _: Self::Epsilon, _: u32) -> bool {
true
}
}
/*
*
* Algebraic structures.
*
*/
impl Mul<Id> for Id {
type Output = Id;
fn mul(self, _: Id) -> Id {
self
}
}
impl MulAssign<Id> for Id {
fn mul_assign(&mut self, _: Id) {
// no-op
}
}
impl Div<Id> for Id {
type Output = Id;
fn div(self, _: Id) -> Id {
self
}
}
impl DivAssign<Id> for Id {
fn div_assign(&mut self, _: Id) {
// no-op
}
}
impl Add<Id> for Id {
type Output = Id;
fn add(self, _: Id) -> Id {
self
}
}
impl AddAssign<Id> for Id {
fn add_assign(&mut self, _: Id) {
// no-op
}
}
impl<O: Operator> AbstractMagma<O> for Id<O> {
#[inline]
fn operate(&self, _: &Self) -> Id<O> {
Id::new()
}
}
impl<O: Operator> Inverse<O> for Id<O> {
#[inline]
fn inverse(&self) -> Self {
Id::new()
}
#[inline]
fn inverse_mut(&mut self) {
// no-op
}
}
impl<O: Operator> AbstractSemigroup<O> for Id<O> { }
impl<O: Operator> AbstractQuasigroup<O> for Id<O> { }
impl<O: Operator> AbstractMonoid<O> for Id<O> { }
impl<O: Operator> AbstractLoop<O> for Id<O> { }
impl<O: Operator> AbstractGroup<O> for Id<O> { }
impl<O: Operator> AbstractGroupAbelian<O> for Id<O> { }
impl One for Id {
#[inline]
fn one() -> Id {
Id::new()
}
}
impl Zero for Id {
#[inline]
fn zero() -> Id {
Id::new()
}
#[inline]
fn is_zero(&self) -> bool {
true
}
}
/*
*
* Conversions.
*
*/
impl<O: Operator, T: PartialEq + Identity<O>> SubsetOf<T> for Id<O> {
#[inline]
fn to_superset(&self) -> T {
T::identity()
}
#[inline]
fn is_in_subset(t: &T) -> bool {
*t == T::identity()
}
#[inline]
unsafe fn from_superset_unchecked(_: &T) -> Self {
Id::new()
}
}
impl<O: Operator> MeetSemilattice for Id<O> {
#[inline]
fn meet(&self, _: &Self) -> Self {
Id::new()
}
}
impl<O: Operator> JoinSemilattice for Id<O> {
#[inline]
fn join(&self, _: &Self) -> Self {
Id::new()
}
}
impl<O: Operator> Lattice for Id<O> {
}
| relative_eq | identifier_name |
identity.rs | use std::ops::{Mul, MulAssign, Add, AddAssign, Div, DivAssign};
use std::marker::PhantomData;
use std::cmp::{PartialOrd, Ordering};
use std::fmt;
use num::{Num, Zero, One};
use num_complex::Complex;
use approx::ApproxEq;
use general::{AbstractMagma, AbstractGroup, AbstractLoop, AbstractMonoid, AbstractQuasigroup,
AbstractSemigroup, Operator, Inverse, AbstractGroupAbelian, SubsetOf, Additive,
Multiplicative, MeetSemilattice, JoinSemilattice, Lattice};
/// A type that is equipped with identity.
pub trait Identity<O: Operator> {
/// The identity element.
fn identity() -> Self;
/// Specific identity.
#[inline]
fn id(_: O) -> Self
where Self: Sized
{
Self::identity()
}
}
impl_ident!(Additive; 0; u8, u16, u32, u64, usize, i8, i16, i32, i64, isize);
impl_ident!(Additive; 0.; f32, f64);
#[cfg(decimal)]
impl_ident!(Additive; 0.; decimal::d128);
impl_ident!(Multiplicative; 1; u8, u16, u32, u64, usize, i8, i16, i32, i64, isize);
impl_ident!(Multiplicative; 1.; f32, f64);
#[cfg(decimal)]
impl_ident!(Multiplicative; 1.; decimal::d128);
impl<N: Identity<Additive>> Identity<Additive> for Complex<N> {
#[inline]
fn identity() -> Self {
Complex {
re: N::identity(),
im: N::identity()
}
}
}
impl<N: Num + Clone> Identity<Multiplicative> for Complex<N> {
#[inline]
fn identity() -> Self {
Complex::new(N::one(), N::zero())
}
}
/// The universal identity element wrt. a given operator, usually noted `Id` with a
/// context-dependent subscript.
///
/// By default, it is the multiplicative identity element. It represents the degenerate set
/// containing only the identity element of any group-like structure. It has no dimension known at
/// compile-time. All its operations are no-ops.
#[repr(C)]
#[derive(Debug)]
pub struct Id<O: Operator = Multiplicative> {
_op: PhantomData<O>
}
impl<O: Operator> Id<O> {
/// Creates a new identity element.
#[inline]
pub fn new() -> Id<O> {
Id {
_op: PhantomData
}
}
}
impl<O: Operator> Copy for Id<O> { }
impl<O: Operator> Clone for Id<O> {
#[inline]
fn clone(&self) -> Id<O> {
Id::new()
}
}
impl<O: Operator> fmt::Display for Id<O> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Identity element")
}
}
impl<O: Operator> PartialEq for Id<O> {
#[inline]
fn eq(&self, _: &Id<O>) -> bool {
true
}
}
impl<O: Operator> Eq for Id<O> { }
impl<O: Operator> PartialOrd for Id<O> {
#[inline]
fn partial_cmp(&self, _: &Id<O>) -> Option<Ordering> {
Some(Ordering::Equal)
}
}
impl<O: Operator> Identity<O> for Id<O> {
#[inline]
fn identity() -> Id<O> {
Id::new()
}
}
impl<O: Operator> ApproxEq for Id<O> {
type Epsilon = Id<O>;
#[inline]
fn default_epsilon() -> Self::Epsilon {
Id::new()
}
#[inline]
fn default_max_relative() -> Self::Epsilon {
Id::new()
}
#[inline]
fn default_max_ulps() -> u32 {
0
}
#[inline]
fn relative_eq(&self, _: &Self, _: Self::Epsilon, _: Self::Epsilon) -> bool {
true
}
#[inline]
fn ulps_eq(&self, _: &Self, _: Self::Epsilon, _: u32) -> bool {
true
}
}
/*
*
* Algebraic structures.
*
*/
impl Mul<Id> for Id {
type Output = Id;
fn mul(self, _: Id) -> Id {
self
}
}
impl MulAssign<Id> for Id {
fn mul_assign(&mut self, _: Id) {
// no-op
}
}
impl Div<Id> for Id {
type Output = Id;
fn div(self, _: Id) -> Id {
self
}
}
impl DivAssign<Id> for Id {
fn div_assign(&mut self, _: Id) {
// no-op
}
}
impl Add<Id> for Id {
type Output = Id;
fn add(self, _: Id) -> Id {
self
}
} | // no-op
}
}
impl<O: Operator> AbstractMagma<O> for Id<O> {
#[inline]
fn operate(&self, _: &Self) -> Id<O> {
Id::new()
}
}
impl<O: Operator> Inverse<O> for Id<O> {
#[inline]
fn inverse(&self) -> Self {
Id::new()
}
#[inline]
fn inverse_mut(&mut self) {
// no-op
}
}
impl<O: Operator> AbstractSemigroup<O> for Id<O> { }
impl<O: Operator> AbstractQuasigroup<O> for Id<O> { }
impl<O: Operator> AbstractMonoid<O> for Id<O> { }
impl<O: Operator> AbstractLoop<O> for Id<O> { }
impl<O: Operator> AbstractGroup<O> for Id<O> { }
impl<O: Operator> AbstractGroupAbelian<O> for Id<O> { }
impl One for Id {
#[inline]
fn one() -> Id {
Id::new()
}
}
impl Zero for Id {
#[inline]
fn zero() -> Id {
Id::new()
}
#[inline]
fn is_zero(&self) -> bool {
true
}
}
/*
*
* Conversions.
*
*/
impl<O: Operator, T: PartialEq + Identity<O>> SubsetOf<T> for Id<O> {
#[inline]
fn to_superset(&self) -> T {
T::identity()
}
#[inline]
fn is_in_subset(t: &T) -> bool {
*t == T::identity()
}
#[inline]
unsafe fn from_superset_unchecked(_: &T) -> Self {
Id::new()
}
}
impl<O: Operator> MeetSemilattice for Id<O> {
#[inline]
fn meet(&self, _: &Self) -> Self {
Id::new()
}
}
impl<O: Operator> JoinSemilattice for Id<O> {
#[inline]
fn join(&self, _: &Self) -> Self {
Id::new()
}
}
impl<O: Operator> Lattice for Id<O> {
} |
impl AddAssign<Id> for Id {
fn add_assign(&mut self, _: Id) { | random_line_split |
index.js | // external imports
import axios from 'axios'
import { Base64 } from 'js-base64'
import path from 'path'
import fm from 'front-matter'
// local imports
import zipObject from '../zipObject'
import decrypt from '../decrypt'
/** 从github调用并在本地缓存期刊内容,返回Promise。主要函数:
* getContent: 调取特定期刊特定内容。如获取第1期“心智”子栏目中第2篇文章正文:getContent(['issues', '1', '心智', '2', 'article.md'])。
* getCurrentIssue: 获取最新期刊号,无需参数。
* getAbstracts: 获取所有文章简介,需要提供期刊号。
*/
class magazineStorage {
/**
* 建立新期刊instance.
* @param {string} owner - github项目有所有者
* @param {string} repo - github项目名称
* @param {array} columns - 各子栏目列表
*/
constructor(owner = 'Perspicere', repo = 'PerspicereContent', columns = ['心智', '此岸', '梦境']) {
// github account settings
this.owner = owner
this.repo = repo
// array of submodules
this.columns = columns
// grap window storage
this.storage = window.sessionStorage
// keys to be replaced with content
this.urlReplace = ['img', 'image']
// github api
this.baseURL = 'https://api.github.com/'
// github api
// github 禁止使用明文储存access token,此处使用加密token
// 新生成token之后可以通过encrypt函数加密
// TODO: use OAuth?
this.github = axios.create({
baseURL: this.baseURL,
auth: {
username: 'guoliu',
password: decrypt(
'6a1975233d2505057cfced9c0c847f9c99f97f8f54df8f4cd90d4d3949d8dff02afdac79c3dec4a9135fad4a474f8288'
)
},
timeout: 10000
})
}
// get content from local storage
// 总数据大小如果超过5mb限制,需要优化存储
get content() {
let content = this.storage.getItem('content')
if (!content) {
return {}
}
return JSON.parse(content)
}
// cache content to local storage
set content(tree) {
this.storage.setItem('content', JSON.stringify(tree))
}
// get current issue number from local storage
get currentIssue() {
return this.storage.getItem('currentIssue')
}
// cache current issue number
set currentIssue(issue) {
this.storage.setItem('currentIssue', issue)
}
// locate leaf note in a tree
static locateLeaf(tree, location) {
if (location.length === 0) {
return tree
}
try {
return magazineStorage.locateLeaf(tree[location[0]], location.slice(1))
} catch (err) {
return null
}
}
| af node
static appendLeaf(tree, location, leaf) {
if (location.length === 0) {
return leaf
} else if (!tree) {
tree = {}
}
return {
...tree,
[location[0]]: magazineStorage.appendLeaf(tree[location[0]], location.slice(1), leaf)
}
}
// build url for image
imageURL = location => `https://github.com/${this.owner}/${this.repo}/raw/master/${path.join(...location)}`
// pull content from github with given path
pullContent = async location => {
try {
const res = await this.github.get(`/repos/${this.owner}/${this.repo}/contents/${path.join(...location)}`)
return res.data
} catch (err) {
console.warn(`Error pulling data from [${location.join(', ')}], null value will be returned instead`, err)
return null
}
}
// parse responce, returns an object
parseData = data => {
if (!data) {
return null
}
// if we get an array, parse every element
if (data.constructor === Array) {
return data.reduce(
(accumulated, current) => ({
...accumulated,
[current.name]: this.parseData(current)
}),
{}
)
}
if (data.content) {
const ext = path.extname(data.path)
const content = Base64.decode(data.content)
// if it's a markdown file, parse it and get meta info
if (ext === '.md') {
const { attributes, body } = fm(content)
// replace image paths
const bodyWithUrl = body.replace(
/(!\[.*?\]\()(.*)(\)\s)/,
(_, prev, url, post) => `${prev}${this.imageURL([...data.path.split('/').slice(0, -1), url])}${post}`
)
return {
...attributes,
body: bodyWithUrl
}
}
if (ext === '.json') {
// if it's a json, parse it
return JSON.parse(content)
}
return content
}
if (data.type === 'dir') {
// if we get a directory
return {}
}
return null
}
/**
* 调用期刊内容.
* @param {string} location - 内容位置,描述目标文档位置。例如第1期“心智”子栏目中第2篇文章正文:['issues', '1', '心智', '2', 'article.md']。
* @return {object} 目标内容
*/
getContent = async location => {
// 尝试从本地获取
let contentNode = magazineStorage.locateLeaf(this.content, location) || {}
// 本地无值,从远程调用
if (contentNode.constructor === Object && Object.keys(contentNode).length === 0) {
const data = await this.pullContent(location)
contentNode = this.parseData(data)
// 将json中路径替换为url,例如图片
if (contentNode && contentNode.constructor === Object && Object.keys(contentNode).length > 0) {
const URLkeys = Object.keys(contentNode).filter(field => this.urlReplace.includes(field))
const URLs = URLkeys.map(key => this.imageURL([...location.slice(0, -1), contentNode[key]]))
contentNode = { ...contentNode, ...zipObject(URLkeys, URLs) }
}
this.content = magazineStorage.appendLeaf(this.content, location, contentNode)
}
return contentNode
}
/**
* 获取最新期刊号。
* @return {int} 最新期刊号。
*/
getCurrentIssue = async () => {
if (!this.currentIssue) {
const data = await this.getContent(['issues'])
this.currentIssue = Object.keys(data)
.filter(
entry => data[entry] && data[entry].constructor === Object // is a directory
)
.reduce((a, b) => Math.max(parseInt(a), parseInt(b)))
}
return this.currentIssue
}
/**
* 获取期刊所有文章简介。
* @param {int} issue - 期刊号。
* @return {object} 该期所有文章简介。
*/
getIssueAbstract = async issue => {
// 默认获取最新一期
let issueNumber = issue
if (!issue) {
issueNumber = await this.getCurrentIssue()
}
const issueContent = await Promise.all(
this.columns.map(async column => {
// 栏目文章列表
const articleList = Object.keys(await this.getContent(['issues', issueNumber, column]))
// 各文章元信息
const columnContent = await Promise.all(
articleList.map(article => this.getContent(['issues', issueNumber, column, article, 'article.md']))
)
return zipObject(articleList, columnContent)
})
)
// 本期信息
const meta = await this.getContent(['issues', issueNumber, 'meta.json'])
return {
...meta,
content: zipObject(this.columns, issueContent)
}
}
/**
* 获取期刊所有单篇文章。
* @return {object} 所有单篇文章。
* TODO: 仅返回一定数量各子栏目最新文章
*/
getArticleAbstract = async () => {
// 各栏目
const articlesContent = await Promise.all(
this.columns.map(async column => {
// 栏目文章列表
const articleList = Object.keys((await this.getContent(['articles', column])) || {})
// 各文章元信息
const columnContent = await Promise.all(
articleList.map(article => this.getContent(['articles', column, article, 'article.md']))
)
return zipObject(articleList, columnContent)
})
)
return zipObject(this.columns, articlesContent)
}
}
export default new magazineStorage()
|
// helper function to return tree with an extra le | identifier_body |
index.js | // external imports
import axios from 'axios'
import { Base64 } from 'js-base64'
import path from 'path'
import fm from 'front-matter'
// local imports
import zipObject from '../zipObject'
import decrypt from '../decrypt'
/** 从github调用并在本地缓存期刊内容,返回Promise。主要函数:
* getContent: 调取特定期刊特定内容。如获取第1期“心智”子栏目中第2篇文章正文:getContent(['issues', '1', '心智', '2', 'article.md'])。
* getCurrentIssue: 获取最新期刊号,无需参数。
* getAbstracts: 获取所有文章简介,需要提供期刊号。
*/
class magazineStorage {
/**
* 建立新期刊instance.
* @param {string} owner - github项目有所有者
* @param {string} repo - github项目名称
* @param {array} columns - 各子栏目列表
*/
constructor(owner = 'Perspicere', repo = 'PerspicereContent', columns = ['心智', '此岸', '梦境']) {
// github account settings
this.owner = owner
this.repo = repo
// array of submodules
this.columns = columns
// grap window storage
this.storage = window.sessionStorage
// keys to be replaced with content
this.urlReplace = ['img', 'image']
// github api
this.baseURL = 'https://api.github.com/'
// github api
// github 禁止使用明文储存access token,此处使用加密token
// 新生成token之后可以通过encrypt函数加密
// TODO: use OAuth?
this.github = axios.create({
baseURL: this.baseURL,
auth: {
username: 'guoliu',
password: decrypt(
'6a1975233d2505057cfced9c0c847f9c99f97f8f54df8f4cd90d4d3949d8dff02afdac79c3dec4a9135fad4a474f8288'
)
},
timeout: 10000
})
}
// get content from local storage
// 总数据大小如果超过5mb限制,需要优化存储
get content() {
let content = this.storage.getItem('content')
if (!content) {
return {}
}
return JSON.parse(content)
}
// cache content to local storage
set content(tree) {
this.storage.setItem('content', JSON.stringify(tree))
}
// get current issue number from local storage
get currentIssue() {
return this.storage.getItem('currentIssue')
}
// cache current issue number
set currentIssue(issue) {
this.storage.setItem('currentIssue', issue)
}
// locate leaf note in a tree
static locateLeaf(tree, location) {
if (location.length === 0) {
return tree
}
try {
return magazineStorage.locateLeaf(tree[location[0]], location.slice(1))
} catch (err) {
return null
}
}
// helper function to return tree with an extra leaf node
static appendLeaf(tree, location, leaf) {
if (location.length === 0) {
return leaf
} else if (!tree) {
tree = {}
}
return {
...tree,
[location[0]]: magazineStorage.appendLeaf(tree[location[0]], location.slice(1), leaf)
}
}
// build url for image
imageURL = location => `https://github.com/${this.owner}/${this.repo}/raw/master/${path.join(...location)}`
// pull content from github with given path
pullContent = async location => {
try {
const res = await this.github.get(`/repos/${this.owner}/${this.repo}/contents/${path.join(...location)}`)
return res.data
} catch (err) {
console.warn(`Error pulling data from [${location.join(', ')}], null value will be returned instead`, err)
return null
}
}
// parse responce, returns an object
parseData = data => {
if (!data) {
return null
}
// if we get an array, parse every element
if (data.constructor === Array) {
return data.reduce(
(accumulated, current) => ({
...accumulated,
[current.name]: this.parseData(current)
}),
{}
)
}
if (data.content) {
const ext = path.extname(data.path)
const content = Base64.decode(data.content)
// if it's a markdown file, parse it and get meta info
if (ext === '.md') {
const { attributes, body } = fm(content)
// replace image paths
const bodyWithUrl = body.replace(
/(!\[.*?\]\()(.*)(\)\s)/,
(_, prev, url, post) => `${prev}${this.imageURL([...data.path.split('/').slice(0, -1), url])}${post}`
)
return {
...attributes,
body: bodyWithUrl
}
}
if (ext === '.json') {
// if it's a json, parse it
return JSON.parse(content)
}
return content
}
if (data.type === 'dir') {
// if we get a directory
return {}
}
return null
}
/**
* 调用期刊内容.
* @param {string} location - 内容位置,描述目标文档位置。例如第1期“心智”子栏目中第2篇文章正文:['issues', '1', '心智', '2', 'article.md']。
* @return {object} 目标内容
*/
getContent = async location => {
// 尝试从本地获取
let contentNode = magazineStorage.locateLeaf(this.content, location) || {}
// 本地无值,从远程调用
if (contentNode.constructor === Object && Object.keys(contentNode).length === 0) {
const data = await this.pullContent(location)
contentNode = this.parseData(data)
// 将json中路径替换为url,例如图片
if (contentNode && contentNode.constructor === Object && Object.keys(contentNode).length > 0) {
const URLkeys = Object.keys(contentNode).filter(field => this.urlReplace.includes(field))
const URLs = URLkeys.map(key => this.imageURL([...location.slice(0, -1), contentNode[key]]))
contentNode = { ...contentNode, ...zipObject(URLkeys, URLs) }
}
this.content = magazineStorage.appendLeaf(this.content, location, contentNode)
}
return contentNode
}
/**
* 获取最新期刊号。
* @return {int} 最新期刊号。
*/
getCurrentIssue = async () => {
if (!this.currentIssue) {
const data = await this.getContent(['issues'])
this.currentIssue = Object.keys(data)
.filter(
entry => data[entry] && data[entry].constructor === Object // is a directory
)
.reduce((a, b) => Math.max(parseInt(a), parseInt(b)))
}
return this.currentIssue
}
/**
* 获取期刊所有文章简介。
* @param {int} issue - 期刊号。
* @return {object} 该期所有文章简介。
*/
getIssueAbstract = async issue => {
// 默认获取最新一期
let issueNumber = issue
if (!issue) {
issueNumber = await this.getCurrentIssue()
}
const issueContent = await Promise.all(
this.columns.map(async column => {
// 栏目文章列表
const articleList = Object.keys(await this.getContent(['issues', issueNumber, column]))
// 各文章元信息
const columnContent = await Promise.all(
articleList.map(article => this.getContent(['issues', issueNumber, column, article, 'article.md']))
)
return zipObject(articleList, columnContent)
})
)
// 本期信息
const meta = await this.getContent(['issues', issueNumber, 'meta.json'])
return {
...meta,
content: zipObject(this.columns, issueContent)
}
}
/**
* 获取期刊所有单篇文章。 | * @return {object} 所有单篇文章。
* TODO: 仅返回一定数量各子栏目最新文章
*/
getArticleAbstract = async () => {
// 各栏目
const articlesContent = await Promise.all(
this.columns.map(async column => {
// 栏目文章列表
const articleList = Object.keys((await this.getContent(['articles', column])) || {})
// 各文章元信息
const columnContent = await Promise.all(
articleList.map(article => this.getContent(['articles', column, article, 'article.md']))
)
return zipObject(articleList, columnContent)
})
)
return zipObject(this.columns, articlesContent)
}
}
export default new magazineStorage() | random_line_split |
|
index.js | // external imports
import axios from 'axios'
import { Base64 } from 'js-base64'
import path from 'path'
import fm from 'front-matter'
// local imports
import zipObject from '../zipObject'
import decrypt from '../decrypt'
/** 从github调用并在本地缓存期刊内容,返回Promise。主要函数:
* getContent: 调取特定期刊特定内容。如获取第1期“心智”子栏目中第2篇文章正文:getContent(['issues', '1', '心智', '2', 'article.md'])。
* getCurrentIssue: 获取最新期刊号,无需参数。
* getAbstracts: 获取所有文章简介,需要提供期刊号。
*/
class magazineStorage {
/**
* 建立新期刊instance.
* @param {string} owner - github项目有所有者
* @param {string} repo - github项目名称
* @param {array} columns - 各子栏目列表
*/
constructor(owner = 'Perspicere', repo = 'PerspicereContent', columns = ['心智', '此岸', '梦境']) {
// github account settings
this.owner = owner
this.repo = repo
// array of submodules
this.columns = columns
// grap window storage
this.storage = window.sessionStorage
// keys to be replaced with content
this.urlReplace = ['img', 'image']
// github api
this.baseURL = 'https://api.github.com/'
// github api
// github 禁止使用明文储存access token,此处使用加密token
// 新生成token之后可以通过encrypt函数加密
// TODO: use OAuth?
this.github = axios.create({
baseURL: this.baseURL,
auth: {
username: 'guoliu',
password: decrypt(
'6a1975233d2505057cfced9c0c847f9c99f97f8f54df8f4cd90d4d3949d8dff02afdac79c3dec4a9135fad4a474f8288'
)
},
timeout: 10000
})
}
// get content from local storage
// 总数据大小如果超过5mb限制,需要优化存储
get content() {
let content = this.storage.getItem('content')
if (!content) {
return {}
}
return JSON.parse(content)
}
// cache content to local storage
set content(tree) {
this.storage.setItem('content', JSON.stringify(tree))
}
// get current issue number from local storage
get currentIssue() {
return this.storage.getItem('currentIssue')
}
// cache current issue number
set currentIssue(issue) {
this.storage.setItem('currentIssue', issue)
}
// locate leaf note in a tree
static locateLeaf(tree, location) {
if (location.length === 0) {
return tree
}
try {
return magazineStorage.locateLeaf(tree[location[0]], location.slice(1))
} catch (err) {
return null
}
}
// helper function to return tree with an extra leaf node
static appendLeaf(tree, location, leaf) {
if (location.length === 0) {
return leaf
} else if (!tree) {
tree = {}
}
return {
...tree,
[location[0]]: magazineStorage.appendLeaf(tree[location[0]], location.slice(1), leaf)
}
}
// build url for image
imageURL = location => `https://github.com/${this.owner}/${this.repo}/raw/master/${path.join( | content from github with given path
pullContent = async location => {
try {
const res = await this.github.get(`/repos/${this.owner}/${this.repo}/contents/${path.join(...location)}`)
return res.data
} catch (err) {
console.warn(`Error pulling data from [${location.join(', ')}], null value will be returned instead`, err)
return null
}
}
// parse responce, returns an object
parseData = data => {
if (!data) {
return null
}
// if we get an array, parse every element
if (data.constructor === Array) {
return data.reduce(
(accumulated, current) => ({
...accumulated,
[current.name]: this.parseData(current)
}),
{}
)
}
if (data.content) {
const ext = path.extname(data.path)
const content = Base64.decode(data.content)
// if it's a markdown file, parse it and get meta info
if (ext === '.md') {
const { attributes, body } = fm(content)
// replace image paths
const bodyWithUrl = body.replace(
/(!\[.*?\]\()(.*)(\)\s)/,
(_, prev, url, post) => `${prev}${this.imageURL([...data.path.split('/').slice(0, -1), url])}${post}`
)
return {
...attributes,
body: bodyWithUrl
}
}
if (ext === '.json') {
// if it's a json, parse it
return JSON.parse(content)
}
return content
}
if (data.type === 'dir') {
// if we get a directory
return {}
}
return null
}
/**
* 调用期刊内容.
* @param {string} location - 内容位置,描述目标文档位置。例如第1期“心智”子栏目中第2篇文章正文:['issues', '1', '心智', '2', 'article.md']。
* @return {object} 目标内容
*/
getContent = async location => {
// 尝试从本地获取
let contentNode = magazineStorage.locateLeaf(this.content, location) || {}
// 本地无值,从远程调用
if (contentNode.constructor === Object && Object.keys(contentNode).length === 0) {
const data = await this.pullContent(location)
contentNode = this.parseData(data)
// 将json中路径替换为url,例如图片
if (contentNode && contentNode.constructor === Object && Object.keys(contentNode).length > 0) {
const URLkeys = Object.keys(contentNode).filter(field => this.urlReplace.includes(field))
const URLs = URLkeys.map(key => this.imageURL([...location.slice(0, -1), contentNode[key]]))
contentNode = { ...contentNode, ...zipObject(URLkeys, URLs) }
}
this.content = magazineStorage.appendLeaf(this.content, location, contentNode)
}
return contentNode
}
/**
* 获取最新期刊号。
* @return {int} 最新期刊号。
*/
getCurrentIssue = async () => {
if (!this.currentIssue) {
const data = await this.getContent(['issues'])
this.currentIssue = Object.keys(data)
.filter(
entry => data[entry] && data[entry].constructor === Object // is a directory
)
.reduce((a, b) => Math.max(parseInt(a), parseInt(b)))
}
return this.currentIssue
}
/**
* 获取期刊所有文章简介。
* @param {int} issue - 期刊号。
* @return {object} 该期所有文章简介。
*/
getIssueAbstract = async issue => {
// 默认获取最新一期
let issueNumber = issue
if (!issue) {
issueNumber = await this.getCurrentIssue()
}
const issueContent = await Promise.all(
this.columns.map(async column => {
// 栏目文章列表
const articleList = Object.keys(await this.getContent(['issues', issueNumber, column]))
// 各文章元信息
const columnContent = await Promise.all(
articleList.map(article => this.getContent(['issues', issueNumber, column, article, 'article.md']))
)
return zipObject(articleList, columnContent)
})
)
// 本期信息
const meta = await this.getContent(['issues', issueNumber, 'meta.json'])
return {
...meta,
content: zipObject(this.columns, issueContent)
}
}
/**
* 获取期刊所有单篇文章。
* @return {object} 所有单篇文章。
* TODO: 仅返回一定数量各子栏目最新文章
*/
getArticleAbstract = async () => {
// 各栏目
const articlesContent = await Promise.all(
this.columns.map(async column => {
// 栏目文章列表
const articleList = Object.keys((await this.getContent(['articles', column])) || {})
// 各文章元信息
const columnContent = await Promise.all(
articleList.map(article => this.getContent(['articles', column, article, 'article.md']))
)
return zipObject(articleList, columnContent)
})
)
return zipObject(this.columns, articlesContent)
}
}
export default new magazineStorage()
| ...location)}`
// pull | conditional_block |
index.js | // external imports
import axios from 'axios'
import { Base64 } from 'js-base64'
import path from 'path'
import fm from 'front-matter'
// local imports
import zipObject from '../zipObject'
import decrypt from '../decrypt'
/** 从github调用并在本地缓存期刊内容,返回Promise。主要函数:
* getContent: 调取特定期刊特定内容。如获取第1期“心智”子栏目中第2篇文章正文:getContent(['issues', '1', '心智', '2', 'article.md'])。
* getCurrentIssue: 获取最新期刊号,无需参数。
* getAbstracts: 获取所有文章简介,需要提供期刊号。
*/
class magazineStorage {
/**
* 建立新期刊instance.
* @param {string} owner - github项目有所有者
* @param {string} repo - github项目名称
* @param {array} columns - 各子栏目列表
*/
constructor(owner = 'Perspicere', repo = 'PerspicereContent', columns = ['心智', '此岸', '梦境']) {
// github account settings
this.owner = owner
this.repo = repo
// array of submodules
this.columns = columns
// grap window storage
this.storage = window.sessionStorage
// keys to be replaced with content
this.urlReplace = ['img', 'image']
// github api
this.baseURL = 'https://api.github.com/'
// github api
// github 禁止使用明文储存access token,此处使用加密token
// 新生成token之后可以通过encrypt函数加密
// TODO: use OAuth?
this.github = axios.create({
baseURL: this.baseURL,
auth: {
username: 'guoliu',
password: decrypt(
'6a1975233d2505057cfced9c0c847f9c99f97f8f54df8f4cd90d4d3949d8dff02afdac79c3dec4a9135fad4a474f8288'
)
},
timeout: 10000
})
}
// get content from local storage
// 总数据大小如果超过5mb限制,需要优化存储
get content() {
let content = this.storage.getItem('content')
if (!content) {
return {}
}
return JSON.parse(content)
}
// cache content to local storage
set content(tree) {
this.storage.setItem('content', JSON.stringify(tree))
}
// get current issue number from local storage
get currentIssue() {
return this.storage.getItem('currentIssue')
}
// cache current issue number
set currentIssue(issue) {
this.storage.setItem('currentIssue', issue)
}
| ate leaf note in a tree
static locateLeaf(tree, location) {
if (location.length === 0) {
return tree
}
try {
return magazineStorage.locateLeaf(tree[location[0]], location.slice(1))
} catch (err) {
return null
}
}
// helper function to return tree with an extra leaf node
static appendLeaf(tree, location, leaf) {
if (location.length === 0) {
return leaf
} else if (!tree) {
tree = {}
}
return {
...tree,
[location[0]]: magazineStorage.appendLeaf(tree[location[0]], location.slice(1), leaf)
}
}
// build url for image
imageURL = location => `https://github.com/${this.owner}/${this.repo}/raw/master/${path.join(...location)}`
// pull content from github with given path
pullContent = async location => {
try {
const res = await this.github.get(`/repos/${this.owner}/${this.repo}/contents/${path.join(...location)}`)
return res.data
} catch (err) {
console.warn(`Error pulling data from [${location.join(', ')}], null value will be returned instead`, err)
return null
}
}
// parse responce, returns an object
parseData = data => {
if (!data) {
return null
}
// if we get an array, parse every element
if (data.constructor === Array) {
return data.reduce(
(accumulated, current) => ({
...accumulated,
[current.name]: this.parseData(current)
}),
{}
)
}
if (data.content) {
const ext = path.extname(data.path)
const content = Base64.decode(data.content)
// if it's a markdown file, parse it and get meta info
if (ext === '.md') {
const { attributes, body } = fm(content)
// replace image paths
const bodyWithUrl = body.replace(
/(!\[.*?\]\()(.*)(\)\s)/,
(_, prev, url, post) => `${prev}${this.imageURL([...data.path.split('/').slice(0, -1), url])}${post}`
)
return {
...attributes,
body: bodyWithUrl
}
}
if (ext === '.json') {
// if it's a json, parse it
return JSON.parse(content)
}
return content
}
if (data.type === 'dir') {
// if we get a directory
return {}
}
return null
}
/**
* 调用期刊内容.
* @param {string} location - 内容位置,描述目标文档位置。例如第1期“心智”子栏目中第2篇文章正文:['issues', '1', '心智', '2', 'article.md']。
* @return {object} 目标内容
*/
getContent = async location => {
// 尝试从本地获取
let contentNode = magazineStorage.locateLeaf(this.content, location) || {}
// 本地无值,从远程调用
if (contentNode.constructor === Object && Object.keys(contentNode).length === 0) {
const data = await this.pullContent(location)
contentNode = this.parseData(data)
// 将json中路径替换为url,例如图片
if (contentNode && contentNode.constructor === Object && Object.keys(contentNode).length > 0) {
const URLkeys = Object.keys(contentNode).filter(field => this.urlReplace.includes(field))
const URLs = URLkeys.map(key => this.imageURL([...location.slice(0, -1), contentNode[key]]))
contentNode = { ...contentNode, ...zipObject(URLkeys, URLs) }
}
this.content = magazineStorage.appendLeaf(this.content, location, contentNode)
}
return contentNode
}
/**
* 获取最新期刊号。
* @return {int} 最新期刊号。
*/
getCurrentIssue = async () => {
if (!this.currentIssue) {
const data = await this.getContent(['issues'])
this.currentIssue = Object.keys(data)
.filter(
entry => data[entry] && data[entry].constructor === Object // is a directory
)
.reduce((a, b) => Math.max(parseInt(a), parseInt(b)))
}
return this.currentIssue
}
/**
* 获取期刊所有文章简介。
* @param {int} issue - 期刊号。
* @return {object} 该期所有文章简介。
*/
getIssueAbstract = async issue => {
// 默认获取最新一期
let issueNumber = issue
if (!issue) {
issueNumber = await this.getCurrentIssue()
}
const issueContent = await Promise.all(
this.columns.map(async column => {
// 栏目文章列表
const articleList = Object.keys(await this.getContent(['issues', issueNumber, column]))
// 各文章元信息
const columnContent = await Promise.all(
articleList.map(article => this.getContent(['issues', issueNumber, column, article, 'article.md']))
)
return zipObject(articleList, columnContent)
})
)
// 本期信息
const meta = await this.getContent(['issues', issueNumber, 'meta.json'])
return {
...meta,
content: zipObject(this.columns, issueContent)
}
}
/**
* 获取期刊所有单篇文章。
* @return {object} 所有单篇文章。
* TODO: 仅返回一定数量各子栏目最新文章
*/
getArticleAbstract = async () => {
// 各栏目
const articlesContent = await Promise.all(
this.columns.map(async column => {
// 栏目文章列表
const articleList = Object.keys((await this.getContent(['articles', column])) || {})
// 各文章元信息
const columnContent = await Promise.all(
articleList.map(article => this.getContent(['articles', column, article, 'article.md']))
)
return zipObject(articleList, columnContent)
})
)
return zipObject(this.columns, articlesContent)
}
}
export default new magazineStorage()
| // loc | identifier_name |
125.rs | /// Quickest to just calculate sequences of squares less than the specified
/// limit and determine if they are palindromic.
///
/// The highest value needed to be calculated is 10^4, since 10^4*10^2 = 10^8.
use std::collections::HashSet;
fn is_palindrome(n: u64) -> bool {
let mut nn = n;
if n % 10 == 0 {
false
} else {
let mut r = 0;
while r < nn {
r = 10 * r + nn % 10;
nn /= 10;
}
nn == r || nn == r / 10
}
}
// Minor problem with something here
fn | () {
let mut seen = HashSet::new();
let mut total_sum = 0_u64;
// Compute sequences from the specified start point
'outer: for n in 1..100_000 {
let mut sum = n * n;
for i in (n+1).. {
// sequence must be at least length two
sum += i * i;
if sum >= 100_000_000 {
continue 'outer;
}
if is_palindrome(sum) && !seen.contains(&sum) {
total_sum += sum;
seen.insert(sum);
}
}
}
println!("{}", total_sum);
}
| main | identifier_name |
125.rs | /// Quickest to just calculate sequences of squares less than the specified
/// limit and determine if they are palindromic.
///
/// The highest value needed to be calculated is 10^4, since 10^4*10^2 = 10^8.
use std::collections::HashSet;
fn is_palindrome(n: u64) -> bool {
let mut nn = n;
if n % 10 == 0 {
false
} else {
let mut r = 0;
while r < nn {
r = 10 * r + nn % 10;
nn /= 10;
}
nn == r || nn == r / 10
}
}
// Minor problem with something here
fn main() {
let mut seen = HashSet::new();
let mut total_sum = 0_u64;
// Compute sequences from the specified start point
'outer: for n in 1..100_000 {
let mut sum = n * n;
for i in (n+1).. {
// sequence must be at least length two
sum += i * i;
if sum >= 100_000_000 {
continue 'outer;
}
if is_palindrome(sum) && !seen.contains(&sum) |
}
}
println!("{}", total_sum);
}
| {
total_sum += sum;
seen.insert(sum);
} | conditional_block |
125.rs | /// Quickest to just calculate sequences of squares less than the specified
/// limit and determine if they are palindromic.
///
/// The highest value needed to be calculated is 10^4, since 10^4*10^2 = 10^8.
use std::collections::HashSet;
fn is_palindrome(n: u64) -> bool {
let mut nn = n;
if n % 10 == 0 {
false
} else {
let mut r = 0;
while r < nn {
r = 10 * r + nn % 10;
nn /= 10;
}
nn == r || nn == r / 10
}
}
| let mut seen = HashSet::new();
let mut total_sum = 0_u64;
// Compute sequences from the specified start point
'outer: for n in 1..100_000 {
let mut sum = n * n;
for i in (n+1).. {
// sequence must be at least length two
sum += i * i;
if sum >= 100_000_000 {
continue 'outer;
}
if is_palindrome(sum) && !seen.contains(&sum) {
total_sum += sum;
seen.insert(sum);
}
}
}
println!("{}", total_sum);
} | // Minor problem with something here
fn main() { | random_line_split |
125.rs | /// Quickest to just calculate sequences of squares less than the specified
/// limit and determine if they are palindromic.
///
/// The highest value needed to be calculated is 10^4, since 10^4*10^2 = 10^8.
use std::collections::HashSet;
fn is_palindrome(n: u64) -> bool |
// Minor problem with something here
fn main() {
let mut seen = HashSet::new();
let mut total_sum = 0_u64;
// Compute sequences from the specified start point
'outer: for n in 1..100_000 {
let mut sum = n * n;
for i in (n+1).. {
// sequence must be at least length two
sum += i * i;
if sum >= 100_000_000 {
continue 'outer;
}
if is_palindrome(sum) && !seen.contains(&sum) {
total_sum += sum;
seen.insert(sum);
}
}
}
println!("{}", total_sum);
}
| {
let mut nn = n;
if n % 10 == 0 {
false
} else {
let mut r = 0;
while r < nn {
r = 10 * r + nn % 10;
nn /= 10;
}
nn == r || nn == r / 10
}
} | identifier_body |
GifPresenter.ts | /*!
gifken
Copyright (c) 2013 aaharu
This software is released under the MIT License.
https://raw.github.com/aaharu/gifken/master/LICENSE
*/
// @ts-ignore
import { isBrowser, isWebWorker } from "browser-or-node";
export class GifPresenter {
/**
* Convert Gif to Blob.
*
* @return {Blob} BLOB
*/
public static writeToBlob(bytes: Uint8Array[]): Blob {
if (isBrowser || isWebWorker || typeof Blob === "function") {
return new Blob(bytes, { type: "image/gif" });
}
throw new Error("writeToBlob is browser-only function");
}
/**
* Convert Gif to Data-URL string.
*
* @return {string} Data-URL string
*/
public static writeToDataUrl(bytes: Uint8Array[]): string |
}
| {
if (isBrowser || isWebWorker) {
let str = "";
bytes.forEach((buffer): void => {
const codes: number[] = [];
for (let i = 0, l = buffer.byteLength; i < l; ++i) {
codes.push(buffer[i]);
}
str += String.fromCharCode.apply(null, codes);
});
return "data:image/gif;base64," + btoa(str);
}
return "data:image/gif;base64," + Buffer.from(bytes).toString("base64");
} | identifier_body |
GifPresenter.ts | /*!
gifken
Copyright (c) 2013 aaharu
This software is released under the MIT License.
https://raw.github.com/aaharu/gifken/master/LICENSE
*/
// @ts-ignore
import { isBrowser, isWebWorker } from "browser-or-node";
export class | {
/**
* Convert Gif to Blob.
*
* @return {Blob} BLOB
*/
public static writeToBlob(bytes: Uint8Array[]): Blob {
if (isBrowser || isWebWorker || typeof Blob === "function") {
return new Blob(bytes, { type: "image/gif" });
}
throw new Error("writeToBlob is browser-only function");
}
/**
* Convert Gif to Data-URL string.
*
* @return {string} Data-URL string
*/
public static writeToDataUrl(bytes: Uint8Array[]): string {
if (isBrowser || isWebWorker) {
let str = "";
bytes.forEach((buffer): void => {
const codes: number[] = [];
for (let i = 0, l = buffer.byteLength; i < l; ++i) {
codes.push(buffer[i]);
}
str += String.fromCharCode.apply(null, codes);
});
return "data:image/gif;base64," + btoa(str);
}
return "data:image/gif;base64," + Buffer.from(bytes).toString("base64");
}
}
| GifPresenter | identifier_name |
GifPresenter.ts | /*!
gifken
Copyright (c) 2013 aaharu
This software is released under the MIT License.
https://raw.github.com/aaharu/gifken/master/LICENSE
*/
// @ts-ignore
import { isBrowser, isWebWorker } from "browser-or-node";
export class GifPresenter {
/**
* Convert Gif to Blob.
*
* @return {Blob} BLOB
*/
public static writeToBlob(bytes: Uint8Array[]): Blob {
if (isBrowser || isWebWorker || typeof Blob === "function") {
return new Blob(bytes, { type: "image/gif" });
}
throw new Error("writeToBlob is browser-only function");
}
/**
* Convert Gif to Data-URL string.
*
* @return {string} Data-URL string
*/
public static writeToDataUrl(bytes: Uint8Array[]): string {
if (isBrowser || isWebWorker) {
let str = "";
bytes.forEach((buffer): void => {
const codes: number[] = [];
for (let i = 0, l = buffer.byteLength; i < l; ++i) {
codes.push(buffer[i]);
} | });
return "data:image/gif;base64," + btoa(str);
}
return "data:image/gif;base64," + Buffer.from(bytes).toString("base64");
}
} | str += String.fromCharCode.apply(null, codes); | random_line_split |
GifPresenter.ts | /*!
gifken
Copyright (c) 2013 aaharu
This software is released under the MIT License.
https://raw.github.com/aaharu/gifken/master/LICENSE
*/
// @ts-ignore
import { isBrowser, isWebWorker } from "browser-or-node";
export class GifPresenter {
/**
* Convert Gif to Blob.
*
* @return {Blob} BLOB
*/
public static writeToBlob(bytes: Uint8Array[]): Blob {
if (isBrowser || isWebWorker || typeof Blob === "function") |
throw new Error("writeToBlob is browser-only function");
}
/**
* Convert Gif to Data-URL string.
*
* @return {string} Data-URL string
*/
public static writeToDataUrl(bytes: Uint8Array[]): string {
if (isBrowser || isWebWorker) {
let str = "";
bytes.forEach((buffer): void => {
const codes: number[] = [];
for (let i = 0, l = buffer.byteLength; i < l; ++i) {
codes.push(buffer[i]);
}
str += String.fromCharCode.apply(null, codes);
});
return "data:image/gif;base64," + btoa(str);
}
return "data:image/gif;base64," + Buffer.from(bytes).toString("base64");
}
}
| {
return new Blob(bytes, { type: "image/gif" });
} | conditional_block |
decoding.rs | use serde::de::DeserializeOwned;
use crate::algorithms::AlgorithmFamily;
use crate::crypto::verify;
use crate::errors::{new_error, ErrorKind, Result};
use crate::header::Header;
#[cfg(feature = "use_pem")]
use crate::pem::decoder::PemEncodedKey;
use crate::serialization::{b64_decode, DecodedJwtPartClaims};
use crate::validation::{validate, Validation};
/// The return type of a successful call to [decode](fn.decode.html).
#[derive(Debug)]
pub struct | <T> {
/// The decoded JWT header
pub header: Header,
/// The decoded JWT claims
pub claims: T,
}
/// Takes the result of a rsplit and ensure we only get 2 parts
/// Errors if we don't
macro_rules! expect_two {
($iter:expr) => {{
let mut i = $iter;
match (i.next(), i.next(), i.next()) {
(Some(first), Some(second), None) => (first, second),
_ => return Err(new_error(ErrorKind::InvalidToken)),
}
}};
}
#[derive(Clone)]
pub(crate) enum DecodingKeyKind {
SecretOrDer(Vec<u8>),
RsaModulusExponent { n: Vec<u8>, e: Vec<u8> },
}
/// All the different kind of keys we can use to decode a JWT
/// This key can be re-used so make sure you only initialize it once if you can for better performance
#[derive(Clone)]
pub struct DecodingKey {
pub(crate) family: AlgorithmFamily,
pub(crate) kind: DecodingKeyKind,
}
impl DecodingKey {
/// If you're using HMAC, use this.
pub fn from_secret(secret: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Hmac,
kind: DecodingKeyKind::SecretOrDer(secret.to_vec()),
}
}
/// If you're using HMAC with a base64 encoded secret, use this.
pub fn from_base64_secret(secret: &str) -> Result<Self> {
let out = base64::decode(&secret)?;
Ok(DecodingKey { family: AlgorithmFamily::Hmac, kind: DecodingKeyKind::SecretOrDer(out) })
}
/// If you are loading a public RSA key in a PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_rsa_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_rsa_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you have (n, e) RSA public key components as strings, use this.
pub fn from_rsa_components(modulus: &str, exponent: &str) -> Result<Self> {
let n = b64_decode(modulus)?;
let e = b64_decode(exponent)?;
Ok(DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::RsaModulusExponent { n, e },
})
}
/// If you have (n, e) RSA public key components already decoded, use this.
pub fn from_rsa_raw_components(modulus: &[u8], exponent: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::RsaModulusExponent { n: modulus.to_vec(), e: exponent.to_vec() },
}
}
/// If you have a ECDSA public key in PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_ec_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_ec_public_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Ec,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you have a EdDSA public key in PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_ed_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_ed_public_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Ed,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you know what you're doing and have a RSA DER encoded public key, use this.
pub fn from_rsa_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
/// If you know what you're doing and have a RSA EC encoded public key, use this.
pub fn from_ec_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Ec,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
/// If you know what you're doing and have a Ed DER encoded public key, use this.
pub fn from_ed_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Ed,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
pub(crate) fn as_bytes(&self) -> &[u8] {
match &self.kind {
DecodingKeyKind::SecretOrDer(b) => b,
DecodingKeyKind::RsaModulusExponent { .. } => unreachable!(),
}
}
}
/// Verify signature of a JWT, and return header object and raw payload
///
/// If the token or its signature is invalid, it will return an error.
fn verify_signature<'a>(
token: &'a str,
key: &DecodingKey,
validation: &Validation,
) -> Result<(Header, &'a str)> {
if validation.validate_signature && validation.algorithms.is_empty() {
return Err(new_error(ErrorKind::MissingAlgorithm));
}
if validation.validate_signature {
for alg in &validation.algorithms {
if key.family != alg.family() {
return Err(new_error(ErrorKind::InvalidAlgorithm));
}
}
}
let (signature, message) = expect_two!(token.rsplitn(2, '.'));
let (payload, header) = expect_two!(message.rsplitn(2, '.'));
let header = Header::from_encoded(header)?;
if validation.validate_signature && !validation.algorithms.contains(&header.alg) {
return Err(new_error(ErrorKind::InvalidAlgorithm));
}
if validation.validate_signature && !verify(signature, message.as_bytes(), key, header.alg)? {
return Err(new_error(ErrorKind::InvalidSignature));
}
Ok((header, payload))
}
/// Decode and validate a JWT
///
/// If the token or its signature is invalid or the claims fail validation, it will return an error.
///
/// ```rust
/// use serde::{Deserialize, Serialize};
/// use jsonwebtoken::{decode, DecodingKey, Validation, Algorithm};
///
/// #[derive(Debug, Serialize, Deserialize)]
/// struct Claims {
/// sub: String,
/// company: String
/// }
///
/// let token = "a.jwt.token".to_string();
/// // Claims is a struct that implements Deserialize
/// let token_message = decode::<Claims>(&token, &DecodingKey::from_secret("secret".as_ref()), &Validation::new(Algorithm::HS256));
/// ```
pub fn decode<T: DeserializeOwned>(
token: &str,
key: &DecodingKey,
validation: &Validation,
) -> Result<TokenData<T>> {
match verify_signature(token, key, validation) {
Err(e) => Err(e),
Ok((header, claims)) => {
let decoded_claims = DecodedJwtPartClaims::from_jwt_part_claims(claims)?;
let claims = decoded_claims.deserialize()?;
validate(decoded_claims.deserialize()?, validation)?;
Ok(TokenData { header, claims })
}
}
}
/// Decode a JWT without any signature verification/validations and return its [Header](struct.Header.html).
///
/// If the token has an invalid format (ie 3 parts separated by a `.`), it will return an error.
///
/// ```rust
/// use jsonwebtoken::decode_header;
///
/// let token = "a.jwt.token".to_string();
/// let header = decode_header(&token);
/// ```
pub fn decode_header(token: &str) -> Result<Header> {
let (_, message) = expect_two!(token.rsplitn(2, '.'));
let (_, header) = expect_two!(message.rsplitn(2, '.'));
Header::from_encoded(header)
}
| TokenData | identifier_name |
decoding.rs | use serde::de::DeserializeOwned;
use crate::algorithms::AlgorithmFamily;
use crate::crypto::verify;
use crate::errors::{new_error, ErrorKind, Result};
use crate::header::Header;
#[cfg(feature = "use_pem")]
use crate::pem::decoder::PemEncodedKey;
use crate::serialization::{b64_decode, DecodedJwtPartClaims};
use crate::validation::{validate, Validation};
/// The return type of a successful call to [decode](fn.decode.html).
#[derive(Debug)]
pub struct TokenData<T> {
/// The decoded JWT header
pub header: Header,
/// The decoded JWT claims
pub claims: T,
}
/// Takes the result of a rsplit and ensure we only get 2 parts
/// Errors if we don't
macro_rules! expect_two {
($iter:expr) => {{
let mut i = $iter;
match (i.next(), i.next(), i.next()) {
(Some(first), Some(second), None) => (first, second),
_ => return Err(new_error(ErrorKind::InvalidToken)),
}
}};
}
#[derive(Clone)]
pub(crate) enum DecodingKeyKind {
SecretOrDer(Vec<u8>),
RsaModulusExponent { n: Vec<u8>, e: Vec<u8> },
}
/// All the different kind of keys we can use to decode a JWT
/// This key can be re-used so make sure you only initialize it once if you can for better performance
#[derive(Clone)]
pub struct DecodingKey {
pub(crate) family: AlgorithmFamily,
pub(crate) kind: DecodingKeyKind,
}
impl DecodingKey {
/// If you're using HMAC, use this.
pub fn from_secret(secret: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Hmac,
kind: DecodingKeyKind::SecretOrDer(secret.to_vec()),
}
}
/// If you're using HMAC with a base64 encoded secret, use this.
pub fn from_base64_secret(secret: &str) -> Result<Self> {
let out = base64::decode(&secret)?;
Ok(DecodingKey { family: AlgorithmFamily::Hmac, kind: DecodingKeyKind::SecretOrDer(out) })
}
/// If you are loading a public RSA key in a PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_rsa_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_rsa_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you have (n, e) RSA public key components as strings, use this.
pub fn from_rsa_components(modulus: &str, exponent: &str) -> Result<Self> {
let n = b64_decode(modulus)?;
let e = b64_decode(exponent)?;
Ok(DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::RsaModulusExponent { n, e },
})
}
/// If you have (n, e) RSA public key components already decoded, use this.
pub fn from_rsa_raw_components(modulus: &[u8], exponent: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::RsaModulusExponent { n: modulus.to_vec(), e: exponent.to_vec() },
}
}
/// If you have a ECDSA public key in PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_ec_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_ec_public_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Ec,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you have a EdDSA public key in PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_ed_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_ed_public_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Ed,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you know what you're doing and have a RSA DER encoded public key, use this.
pub fn from_rsa_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
/// If you know what you're doing and have a RSA EC encoded public key, use this.
pub fn from_ec_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Ec,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
/// If you know what you're doing and have a Ed DER encoded public key, use this.
pub fn from_ed_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Ed,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
pub(crate) fn as_bytes(&self) -> &[u8] {
match &self.kind {
DecodingKeyKind::SecretOrDer(b) => b,
DecodingKeyKind::RsaModulusExponent { .. } => unreachable!(),
}
}
}
/// Verify signature of a JWT, and return header object and raw payload
///
/// If the token or its signature is invalid, it will return an error.
fn verify_signature<'a>(
token: &'a str,
key: &DecodingKey,
validation: &Validation,
) -> Result<(Header, &'a str)> {
if validation.validate_signature && validation.algorithms.is_empty() {
return Err(new_error(ErrorKind::MissingAlgorithm));
}
if validation.validate_signature {
for alg in &validation.algorithms {
if key.family != alg.family() {
return Err(new_error(ErrorKind::InvalidAlgorithm));
}
}
}
let (signature, message) = expect_two!(token.rsplitn(2, '.'));
let (payload, header) = expect_two!(message.rsplitn(2, '.'));
let header = Header::from_encoded(header)?;
if validation.validate_signature && !validation.algorithms.contains(&header.alg) {
return Err(new_error(ErrorKind::InvalidAlgorithm));
}
if validation.validate_signature && !verify(signature, message.as_bytes(), key, header.alg)? {
return Err(new_error(ErrorKind::InvalidSignature));
}
Ok((header, payload))
}
/// Decode and validate a JWT
///
/// If the token or its signature is invalid or the claims fail validation, it will return an error.
///
/// ```rust
/// use serde::{Deserialize, Serialize};
/// use jsonwebtoken::{decode, DecodingKey, Validation, Algorithm};
///
/// #[derive(Debug, Serialize, Deserialize)]
/// struct Claims {
/// sub: String,
/// company: String
/// }
/// | /// let token_message = decode::<Claims>(&token, &DecodingKey::from_secret("secret".as_ref()), &Validation::new(Algorithm::HS256));
/// ```
pub fn decode<T: DeserializeOwned>(
token: &str,
key: &DecodingKey,
validation: &Validation,
) -> Result<TokenData<T>> {
match verify_signature(token, key, validation) {
Err(e) => Err(e),
Ok((header, claims)) => {
let decoded_claims = DecodedJwtPartClaims::from_jwt_part_claims(claims)?;
let claims = decoded_claims.deserialize()?;
validate(decoded_claims.deserialize()?, validation)?;
Ok(TokenData { header, claims })
}
}
}
/// Decode a JWT without any signature verification/validations and return its [Header](struct.Header.html).
///
/// If the token has an invalid format (ie 3 parts separated by a `.`), it will return an error.
///
/// ```rust
/// use jsonwebtoken::decode_header;
///
/// let token = "a.jwt.token".to_string();
/// let header = decode_header(&token);
/// ```
pub fn decode_header(token: &str) -> Result<Header> {
let (_, message) = expect_two!(token.rsplitn(2, '.'));
let (_, header) = expect_two!(message.rsplitn(2, '.'));
Header::from_encoded(header)
} | /// let token = "a.jwt.token".to_string();
/// // Claims is a struct that implements Deserialize | random_line_split |
decoding.rs | use serde::de::DeserializeOwned;
use crate::algorithms::AlgorithmFamily;
use crate::crypto::verify;
use crate::errors::{new_error, ErrorKind, Result};
use crate::header::Header;
#[cfg(feature = "use_pem")]
use crate::pem::decoder::PemEncodedKey;
use crate::serialization::{b64_decode, DecodedJwtPartClaims};
use crate::validation::{validate, Validation};
/// The return type of a successful call to [decode](fn.decode.html).
#[derive(Debug)]
pub struct TokenData<T> {
/// The decoded JWT header
pub header: Header,
/// The decoded JWT claims
pub claims: T,
}
/// Takes the result of a rsplit and ensure we only get 2 parts
/// Errors if we don't
macro_rules! expect_two {
($iter:expr) => {{
let mut i = $iter;
match (i.next(), i.next(), i.next()) {
(Some(first), Some(second), None) => (first, second),
_ => return Err(new_error(ErrorKind::InvalidToken)),
}
}};
}
#[derive(Clone)]
pub(crate) enum DecodingKeyKind {
SecretOrDer(Vec<u8>),
RsaModulusExponent { n: Vec<u8>, e: Vec<u8> },
}
/// All the different kind of keys we can use to decode a JWT
/// This key can be re-used so make sure you only initialize it once if you can for better performance
#[derive(Clone)]
pub struct DecodingKey {
pub(crate) family: AlgorithmFamily,
pub(crate) kind: DecodingKeyKind,
}
impl DecodingKey {
/// If you're using HMAC, use this.
pub fn from_secret(secret: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Hmac,
kind: DecodingKeyKind::SecretOrDer(secret.to_vec()),
}
}
/// If you're using HMAC with a base64 encoded secret, use this.
pub fn from_base64_secret(secret: &str) -> Result<Self> {
let out = base64::decode(&secret)?;
Ok(DecodingKey { family: AlgorithmFamily::Hmac, kind: DecodingKeyKind::SecretOrDer(out) })
}
/// If you are loading a public RSA key in a PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_rsa_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_rsa_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you have (n, e) RSA public key components as strings, use this.
pub fn from_rsa_components(modulus: &str, exponent: &str) -> Result<Self> {
let n = b64_decode(modulus)?;
let e = b64_decode(exponent)?;
Ok(DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::RsaModulusExponent { n, e },
})
}
/// If you have (n, e) RSA public key components already decoded, use this.
pub fn from_rsa_raw_components(modulus: &[u8], exponent: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::RsaModulusExponent { n: modulus.to_vec(), e: exponent.to_vec() },
}
}
/// If you have a ECDSA public key in PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_ec_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_ec_public_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Ec,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you have a EdDSA public key in PEM format, use this.
/// Only exists if the feature `use_pem` is enabled.
#[cfg(feature = "use_pem")]
pub fn from_ed_pem(key: &[u8]) -> Result<Self> {
let pem_key = PemEncodedKey::new(key)?;
let content = pem_key.as_ed_public_key()?;
Ok(DecodingKey {
family: AlgorithmFamily::Ed,
kind: DecodingKeyKind::SecretOrDer(content.to_vec()),
})
}
/// If you know what you're doing and have a RSA DER encoded public key, use this.
pub fn from_rsa_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Rsa,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
/// If you know what you're doing and have a RSA EC encoded public key, use this.
pub fn from_ec_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Ec,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
/// If you know what you're doing and have a Ed DER encoded public key, use this.
pub fn from_ed_der(der: &[u8]) -> Self {
DecodingKey {
family: AlgorithmFamily::Ed,
kind: DecodingKeyKind::SecretOrDer(der.to_vec()),
}
}
pub(crate) fn as_bytes(&self) -> &[u8] |
}
/// Verify signature of a JWT, and return header object and raw payload
///
/// If the token or its signature is invalid, it will return an error.
fn verify_signature<'a>(
token: &'a str,
key: &DecodingKey,
validation: &Validation,
) -> Result<(Header, &'a str)> {
if validation.validate_signature && validation.algorithms.is_empty() {
return Err(new_error(ErrorKind::MissingAlgorithm));
}
if validation.validate_signature {
for alg in &validation.algorithms {
if key.family != alg.family() {
return Err(new_error(ErrorKind::InvalidAlgorithm));
}
}
}
let (signature, message) = expect_two!(token.rsplitn(2, '.'));
let (payload, header) = expect_two!(message.rsplitn(2, '.'));
let header = Header::from_encoded(header)?;
if validation.validate_signature && !validation.algorithms.contains(&header.alg) {
return Err(new_error(ErrorKind::InvalidAlgorithm));
}
if validation.validate_signature && !verify(signature, message.as_bytes(), key, header.alg)? {
return Err(new_error(ErrorKind::InvalidSignature));
}
Ok((header, payload))
}
/// Decode and validate a JWT
///
/// If the token or its signature is invalid or the claims fail validation, it will return an error.
///
/// ```rust
/// use serde::{Deserialize, Serialize};
/// use jsonwebtoken::{decode, DecodingKey, Validation, Algorithm};
///
/// #[derive(Debug, Serialize, Deserialize)]
/// struct Claims {
/// sub: String,
/// company: String
/// }
///
/// let token = "a.jwt.token".to_string();
/// // Claims is a struct that implements Deserialize
/// let token_message = decode::<Claims>(&token, &DecodingKey::from_secret("secret".as_ref()), &Validation::new(Algorithm::HS256));
/// ```
pub fn decode<T: DeserializeOwned>(
token: &str,
key: &DecodingKey,
validation: &Validation,
) -> Result<TokenData<T>> {
match verify_signature(token, key, validation) {
Err(e) => Err(e),
Ok((header, claims)) => {
let decoded_claims = DecodedJwtPartClaims::from_jwt_part_claims(claims)?;
let claims = decoded_claims.deserialize()?;
validate(decoded_claims.deserialize()?, validation)?;
Ok(TokenData { header, claims })
}
}
}
/// Decode a JWT without any signature verification/validations and return its [Header](struct.Header.html).
///
/// If the token has an invalid format (ie 3 parts separated by a `.`), it will return an error.
///
/// ```rust
/// use jsonwebtoken::decode_header;
///
/// let token = "a.jwt.token".to_string();
/// let header = decode_header(&token);
/// ```
pub fn decode_header(token: &str) -> Result<Header> {
let (_, message) = expect_two!(token.rsplitn(2, '.'));
let (_, header) = expect_two!(message.rsplitn(2, '.'));
Header::from_encoded(header)
}
| {
match &self.kind {
DecodingKeyKind::SecretOrDer(b) => b,
DecodingKeyKind::RsaModulusExponent { .. } => unreachable!(),
}
} | identifier_body |
trait-with-bounds-default.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub trait Clone2 {
/// Returns a copy of the value. The contents of owned pointers
/// are copied to maintain uniqueness, while the contents of
/// managed pointers are not copied.
fn clone(&self) -> Self;
}
trait Getter<T: Clone> {
fn do_get(&self) -> T;
fn do_get2(&self) -> (T, T) {
let x = self.do_get();
(x.clone(), x.clone())
}
}
impl Getter<int> for int {
fn do_get(&self) -> int { *self }
}
impl<T: Clone> Getter<T> for Option<T> {
fn | (&self) -> T { self.get_ref().clone() }
}
pub fn main() {
assert_eq!(3.do_get2(), (3, 3));
assert_eq!(Some(~"hi").do_get2(), (~"hi", ~"hi"));
}
| do_get | identifier_name |
trait-with-bounds-default.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub trait Clone2 {
/// Returns a copy of the value. The contents of owned pointers
/// are copied to maintain uniqueness, while the contents of
/// managed pointers are not copied.
fn clone(&self) -> Self;
}
trait Getter<T: Clone> {
fn do_get(&self) -> T;
fn do_get2(&self) -> (T, T) {
let x = self.do_get();
(x.clone(), x.clone())
}
}
impl Getter<int> for int {
fn do_get(&self) -> int |
}
impl<T: Clone> Getter<T> for Option<T> {
fn do_get(&self) -> T { self.get_ref().clone() }
}
pub fn main() {
assert_eq!(3.do_get2(), (3, 3));
assert_eq!(Some(~"hi").do_get2(), (~"hi", ~"hi"));
}
| { *self } | identifier_body |
trait-with-bounds-default.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub trait Clone2 {
/// Returns a copy of the value. The contents of owned pointers
/// are copied to maintain uniqueness, while the contents of
/// managed pointers are not copied.
fn clone(&self) -> Self;
}
trait Getter<T: Clone> {
fn do_get(&self) -> T;
fn do_get2(&self) -> (T, T) {
let x = self.do_get();
(x.clone(), x.clone())
}
}
impl Getter<int> for int {
fn do_get(&self) -> int { *self } |
pub fn main() {
assert_eq!(3.do_get2(), (3, 3));
assert_eq!(Some(~"hi").do_get2(), (~"hi", ~"hi"));
} | }
impl<T: Clone> Getter<T> for Option<T> {
fn do_get(&self) -> T { self.get_ref().clone() }
} | random_line_split |
ajax.py | from django.utils import simplejson
from dajaxice.decorators import dajaxice_register
from django.utils.translation import ugettext as _
from django.template.loader import render_to_string
from dajax.core import Dajax
from django.db import transaction
from darkoob.book.models import Book, Review
@dajaxice_register(method='POST')
@transaction.commit_manually
def rate(request, rate, book_id):
done = False
book = ''
try:
book = Book.objects.get(id = book_id)
book.rating.add(score=rate, user=request.user, ip_address=request.META['REMOTE_ADDR'])
except:
errors.append('An error occoured in record in database')
transaction.rollback()
else:
done = True
transaction.commit()
return simplejson.dumps({'done':done})
@dajaxice_register(method='POST')
@transaction.commit_manually
def | (request, rate, review_id):
print "review id",review_id
done = False
try:
review = Review.objects.get(id=review_id)
review.rating.add(score=rate, user=request.user, ip_address=request.META['REMOTE_ADDR'])
except:
errors.append('An error occoured in record in database')
transaction.rollback()
else:
done = True
transaction.commit()
return simplejson.dumps({'done': done})
@dajaxice_register(method='POST')
def submit_review(request, book_id, title, text):
dajax = Dajax()
#TODO: checks if you have permission for posting review
try:
book = Book.objects.get(id=book_id)
except Book.DoesNotExist:
dajax.script('''
$.pnotify({
title: 'Review',
type:'error',
text: 'This Book doesn\'t exsist.',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
else:
if len(text) < 200:
transaction.rollback()
dajax.script('''
$.pnotify({
title: 'Review',
type:'error',
text: 'Complete your review. We need some checks',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
else:
review = Review.objects.create(book=book, user=request.user, title=title, text=text)
t_rendered = render_to_string('book/review.html', {'review': review})
dajax.prepend('#id_new_post_position', 'innerHTML', t_rendered)
dajax.script('''
$.pnotify({
title: 'Review',
type:'success',
text: 'Your review record',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
return dajax.json()
@dajaxice_register(method='POST')
def ha(request, book_name):
print "book_name", book_name
return simplejson.dumps({'done': True})
| review_rate | identifier_name |
ajax.py | from django.utils import simplejson
from dajaxice.decorators import dajaxice_register
from django.utils.translation import ugettext as _
from django.template.loader import render_to_string
from dajax.core import Dajax
from django.db import transaction
from darkoob.book.models import Book, Review
@dajaxice_register(method='POST')
@transaction.commit_manually
def rate(request, rate, book_id):
done = False
book = ''
try:
book = Book.objects.get(id = book_id)
book.rating.add(score=rate, user=request.user, ip_address=request.META['REMOTE_ADDR'])
except:
errors.append('An error occoured in record in database')
transaction.rollback()
else:
done = True
transaction.commit()
return simplejson.dumps({'done':done})
|
@dajaxice_register(method='POST')
@transaction.commit_manually
def review_rate(request, rate, review_id):
print "review id",review_id
done = False
try:
review = Review.objects.get(id=review_id)
review.rating.add(score=rate, user=request.user, ip_address=request.META['REMOTE_ADDR'])
except:
errors.append('An error occoured in record in database')
transaction.rollback()
else:
done = True
transaction.commit()
return simplejson.dumps({'done': done})
@dajaxice_register(method='POST')
def submit_review(request, book_id, title, text):
dajax = Dajax()
#TODO: checks if you have permission for posting review
try:
book = Book.objects.get(id=book_id)
except Book.DoesNotExist:
dajax.script('''
$.pnotify({
title: 'Review',
type:'error',
text: 'This Book doesn\'t exsist.',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
else:
if len(text) < 200:
transaction.rollback()
dajax.script('''
$.pnotify({
title: 'Review',
type:'error',
text: 'Complete your review. We need some checks',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
else:
review = Review.objects.create(book=book, user=request.user, title=title, text=text)
t_rendered = render_to_string('book/review.html', {'review': review})
dajax.prepend('#id_new_post_position', 'innerHTML', t_rendered)
dajax.script('''
$.pnotify({
title: 'Review',
type:'success',
text: 'Your review record',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
return dajax.json()
@dajaxice_register(method='POST')
def ha(request, book_name):
print "book_name", book_name
return simplejson.dumps({'done': True}) | random_line_split |
|
ajax.py | from django.utils import simplejson
from dajaxice.decorators import dajaxice_register
from django.utils.translation import ugettext as _
from django.template.loader import render_to_string
from dajax.core import Dajax
from django.db import transaction
from darkoob.book.models import Book, Review
@dajaxice_register(method='POST')
@transaction.commit_manually
def rate(request, rate, book_id):
done = False
book = ''
try:
book = Book.objects.get(id = book_id)
book.rating.add(score=rate, user=request.user, ip_address=request.META['REMOTE_ADDR'])
except:
errors.append('An error occoured in record in database')
transaction.rollback()
else:
done = True
transaction.commit()
return simplejson.dumps({'done':done})
@dajaxice_register(method='POST')
@transaction.commit_manually
def review_rate(request, rate, review_id):
print "review id",review_id
done = False
try:
review = Review.objects.get(id=review_id)
review.rating.add(score=rate, user=request.user, ip_address=request.META['REMOTE_ADDR'])
except:
errors.append('An error occoured in record in database')
transaction.rollback()
else:
done = True
transaction.commit()
return simplejson.dumps({'done': done})
@dajaxice_register(method='POST')
def submit_review(request, book_id, title, text):
dajax = Dajax()
#TODO: checks if you have permission for posting review
try:
book = Book.objects.get(id=book_id)
except Book.DoesNotExist:
dajax.script('''
$.pnotify({
title: 'Review',
type:'error',
text: 'This Book doesn\'t exsist.',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
else:
if len(text) < 200:
transaction.rollback()
dajax.script('''
$.pnotify({
title: 'Review',
type:'error',
text: 'Complete your review. We need some checks',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
else:
review = Review.objects.create(book=book, user=request.user, title=title, text=text)
t_rendered = render_to_string('book/review.html', {'review': review})
dajax.prepend('#id_new_post_position', 'innerHTML', t_rendered)
dajax.script('''
$.pnotify({
title: 'Review',
type:'success',
text: 'Your review record',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
return dajax.json()
@dajaxice_register(method='POST')
def ha(request, book_name):
| print "book_name", book_name
return simplejson.dumps({'done': True}) | identifier_body |
|
ajax.py | from django.utils import simplejson
from dajaxice.decorators import dajaxice_register
from django.utils.translation import ugettext as _
from django.template.loader import render_to_string
from dajax.core import Dajax
from django.db import transaction
from darkoob.book.models import Book, Review
@dajaxice_register(method='POST')
@transaction.commit_manually
def rate(request, rate, book_id):
done = False
book = ''
try:
book = Book.objects.get(id = book_id)
book.rating.add(score=rate, user=request.user, ip_address=request.META['REMOTE_ADDR'])
except:
errors.append('An error occoured in record in database')
transaction.rollback()
else:
done = True
transaction.commit()
return simplejson.dumps({'done':done})
@dajaxice_register(method='POST')
@transaction.commit_manually
def review_rate(request, rate, review_id):
print "review id",review_id
done = False
try:
review = Review.objects.get(id=review_id)
review.rating.add(score=rate, user=request.user, ip_address=request.META['REMOTE_ADDR'])
except:
errors.append('An error occoured in record in database')
transaction.rollback()
else:
done = True
transaction.commit()
return simplejson.dumps({'done': done})
@dajaxice_register(method='POST')
def submit_review(request, book_id, title, text):
dajax = Dajax()
#TODO: checks if you have permission for posting review
try:
book = Book.objects.get(id=book_id)
except Book.DoesNotExist:
dajax.script('''
$.pnotify({
title: 'Review',
type:'error',
text: 'This Book doesn\'t exsist.',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
else:
if len(text) < 200:
|
else:
review = Review.objects.create(book=book, user=request.user, title=title, text=text)
t_rendered = render_to_string('book/review.html', {'review': review})
dajax.prepend('#id_new_post_position', 'innerHTML', t_rendered)
dajax.script('''
$.pnotify({
title: 'Review',
type:'success',
text: 'Your review record',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''')
return dajax.json()
@dajaxice_register(method='POST')
def ha(request, book_name):
print "book_name", book_name
return simplejson.dumps({'done': True})
| transaction.rollback()
dajax.script('''
$.pnotify({
title: 'Review',
type:'error',
text: 'Complete your review. We need some checks',
opacity: .8
});
$('#id_text').val('');
$('#id_title').val('');
''') | conditional_block |
mconf.py | . import coredata, environment, mesonlib, build, mintro, mlog
from .ast import AstIDGenerator
def add_arguments(parser):
coredata.register_builtin_arguments(parser)
parser.add_argument('builddir', nargs='?', default='.')
parser.add_argument('--clearcache', action='store_true', default=False,
help='Clear cached state (e.g. found dependencies)')
def make_lower_case(val):
if isinstance(val, bool):
return str(val).lower()
elif isinstance(val, list):
return [make_lower_case(i) for i in val]
else:
return str(val)
class ConfException(mesonlib.MesonException):
pass
class Conf:
def __init__(self, build_dir):
self.build_dir = os.path.abspath(os.path.realpath(build_dir))
if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
self.build_dir = os.path.dirname(self.build_dir)
self.build = None
self.max_choices_line_length = 60
self.name_col = []
self.value_col = []
self.choices_col = []
self.descr_col = []
self.has_choices = False
self.all_subprojects = set()
self.yielding_options = set()
if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
self.build = build.load(self.build_dir)
self.source_dir = self.build.environment.get_source_dir()
self.coredata = coredata.load(self.build_dir)
self.default_values_only = False
elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
# Make sure that log entries in other parts of meson don't interfere with the JSON output
mlog.disable()
self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
intr.analyze()
# Re-enable logging just in case
mlog.enable()
self.coredata = intr.coredata
self.default_values_only = True
else:
raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir))
def clear_cache(self):
self.coredata.deps.host.clear()
self.coredata.deps.build.clear()
def set_options(self, options):
self.coredata.set_options(options)
def save(self):
# Do nothing when using introspection
if self.default_values_only:
return
# Only called if something has changed so overwrite unconditionally.
coredata.save(self.coredata, self.build_dir)
# We don't write the build file because any changes to it
# are erased when Meson is executed the next time, i.e. when
# Ninja is run.
def print_aligned(self):
col_widths = (max([len(i) for i in self.name_col], default=0),
max([len(i) for i in self.value_col], default=0),
max([len(i) for i in self.choices_col], default=0))
for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
if self.has_choices:
print('{0:{width[0]}} {1:{width[1]}} {2:{width[2]}} {3}'.format(*line, width=col_widths))
else:
print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))
def | (self, options):
result = {}
for k, o in options.items():
subproject = ''
if ':' in k:
subproject, optname = k.split(':')
if o.yielding and optname in options:
self.yielding_options.add(k)
self.all_subprojects.add(subproject)
result.setdefault(subproject, {})[k] = o
return result
def _add_line(self, name, value, choices, descr):
self.name_col.append(' ' * self.print_margin + name)
self.value_col.append(value)
self.choices_col.append(choices)
self.descr_col.append(descr)
def add_option(self, name, descr, value, choices):
if isinstance(value, list):
value = '[{0}]'.format(', '.join(make_lower_case(value)))
else:
value = make_lower_case(value)
if choices:
self.has_choices = True
if isinstance(choices, list):
choices_list = make_lower_case(choices)
current = '['
while choices_list:
i = choices_list.pop(0)
if len(current) + len(i) >= self.max_choices_line_length:
self._add_line(name, value, current + ',', descr)
name = ''
value = ''
descr = ''
current = ' '
if len(current) > 1:
current += ', '
current += i
choices = current + ']'
else:
choices = make_lower_case(choices)
else:
choices = ''
self._add_line(name, value, choices, descr)
def add_title(self, title):
titles = {'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
if self.default_values_only:
titles['value'] = 'Default Value'
self._add_line('', '', '', '')
self._add_line(title, titles['value'], titles['choices'], titles['descr'])
self._add_line('-' * len(title), '-' * len(titles['value']), '-' * len(titles['choices']), '-' * len(titles['descr']))
def add_section(self, section):
self.print_margin = 0
self._add_line('', '', '', '')
self._add_line(section + ':', '', '', '')
self.print_margin = 2
def print_options(self, title, options):
if not options:
return
if title:
self.add_title(title)
for k, o in sorted(options.items()):
printable_value = o.printable_value()
if k in self.yielding_options:
printable_value = '<inherited from main project>'
self.add_option(k, o.description, printable_value, o.choices)
def print_conf(self):
def print_default_values_warning():
mlog.warning('The source directory instead of the build directory was specified.')
mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')
if self.default_values_only:
print_default_values_warning()
print('')
print('Core properties:')
print(' Source dir', self.source_dir)
if not self.default_values_only:
print(' Build dir ', self.build_dir)
dir_option_names = ['bindir',
'datadir',
'includedir',
'infodir',
'libdir',
'libexecdir',
'localedir',
'localstatedir',
'mandir',
'prefix',
'sbindir',
'sharedstatedir',
'sysconfdir']
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]
dir_options = {k: o for k, o in self.coredata.builtins.items() if k in dir_option_names}
test_options = {k: o for k, o in self.coredata.builtins.items() if k in test_option_names}
core_options = {k: o for k, o in self.coredata.builtins.items() if k in core_option_names}
def insert_build_prefix(k):
idx = k.find(':')
if idx < 0:
return 'build.' + k
return k[:idx + 1] + 'build.' + k[idx + 1:]
core_options = self.split_options_per_subproject(core_options)
host_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
self.coredata.compiler_options.host.items())))
build_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
(insert_build_prefix(k), o)
for k, o in self.coredata.compiler_options.build.items())))
project_options = self.split_options_per_subproject(self.coredata.user_options)
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
self.print_options('Core options', core_options[''])
self.print_options('', self.coredata.builtins_per_machine.host)
if show_build_options:
self.print_options('', {insert_build_prefix(k): o for k, o in self.coredata.builtins_per_machine.build.items()})
self.print_options('Backend options', self.coredata.backend_options)
self.print_options('Base options', self.coredata.base_options)
self.print_options('Compiler options', host_compiler_options.get('', {}))
if show_build_options:
self.print_options('', build_compiler_options.get('', {}))
self.print_options('Directories', dir_options)
self.print_options('Testing options', test_options)
self.print_options('Project options', project_options.get('', {}))
for subproject in sorted(self.all_subprojects):
if subproject == '':
continue
self.add_section('Subproject ' + subproject)
if subproject in core_options:
self.print_options('Core options', core_options[subproject])
if subproject in | split_options_per_subproject | identifier_name |
mconf.py | . import coredata, environment, mesonlib, build, mintro, mlog
from .ast import AstIDGenerator
def add_arguments(parser):
coredata.register_builtin_arguments(parser)
parser.add_argument('builddir', nargs='?', default='.')
parser.add_argument('--clearcache', action='store_true', default=False,
help='Clear cached state (e.g. found dependencies)')
def make_lower_case(val):
if isinstance(val, bool):
return str(val).lower()
elif isinstance(val, list):
return [make_lower_case(i) for i in val]
else:
return str(val)
class ConfException(mesonlib.MesonException):
pass
class Conf:
def __init__(self, build_dir):
self.build_dir = os.path.abspath(os.path.realpath(build_dir))
if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
self.build_dir = os.path.dirname(self.build_dir)
self.build = None
self.max_choices_line_length = 60
self.name_col = []
self.value_col = []
self.choices_col = []
self.descr_col = []
self.has_choices = False
self.all_subprojects = set()
self.yielding_options = set()
if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
self.build = build.load(self.build_dir)
self.source_dir = self.build.environment.get_source_dir()
self.coredata = coredata.load(self.build_dir)
self.default_values_only = False
elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
# Make sure that log entries in other parts of meson don't interfere with the JSON output
mlog.disable()
self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
intr.analyze()
# Re-enable logging just in case
mlog.enable()
self.coredata = intr.coredata
self.default_values_only = True
else:
raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir))
def clear_cache(self):
self.coredata.deps.host.clear()
self.coredata.deps.build.clear()
def set_options(self, options):
self.coredata.set_options(options)
def save(self):
# Do nothing when using introspection
if self.default_values_only:
return
# Only called if something has changed so overwrite unconditionally.
coredata.save(self.coredata, self.build_dir)
# We don't write the build file because any changes to it
# are erased when Meson is executed the next time, i.e. when
# Ninja is run.
def print_aligned(self):
col_widths = (max([len(i) for i in self.name_col], default=0),
max([len(i) for i in self.value_col], default=0),
max([len(i) for i in self.choices_col], default=0))
for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
if self.has_choices:
print('{0:{width[0]}} {1:{width[1]}} {2:{width[2]}} {3}'.format(*line, width=col_widths))
else:
print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))
def split_options_per_subproject(self, options):
result = {}
for k, o in options.items():
subproject = ''
if ':' in k:
subproject, optname = k.split(':')
if o.yielding and optname in options:
self.yielding_options.add(k)
self.all_subprojects.add(subproject)
result.setdefault(subproject, {})[k] = o
return result
def _add_line(self, name, value, choices, descr):
self.name_col.append(' ' * self.print_margin + name)
self.value_col.append(value)
self.choices_col.append(choices)
self.descr_col.append(descr)
def add_option(self, name, descr, value, choices):
if isinstance(value, list):
value = '[{0}]'.format(', '.join(make_lower_case(value)))
else:
value = make_lower_case(value)
if choices:
self.has_choices = True
if isinstance(choices, list):
choices_list = make_lower_case(choices)
current = '['
while choices_list:
i = choices_list.pop(0)
if len(current) + len(i) >= self.max_choices_line_length:
self._add_line(name, value, current + ',', descr)
name = ''
value = ''
descr = ''
current = ' '
if len(current) > 1:
current += ', '
current += i
choices = current + ']'
else:
choices = make_lower_case(choices)
else:
choices = ''
self._add_line(name, value, choices, descr)
def add_title(self, title):
titles = {'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
if self.default_values_only:
titles['value'] = 'Default Value'
self._add_line('', '', '', '')
self._add_line(title, titles['value'], titles['choices'], titles['descr'])
self._add_line('-' * len(title), '-' * len(titles['value']), '-' * len(titles['choices']), '-' * len(titles['descr']))
def add_section(self, section):
self.print_margin = 0
self._add_line('', '', '', '')
self._add_line(section + ':', '', '', '')
self.print_margin = 2
def print_options(self, title, options):
if not options:
return
if title:
self.add_title(title)
for k, o in sorted(options.items()):
printable_value = o.printable_value()
if k in self.yielding_options:
printable_value = '<inherited from main project>'
self.add_option(k, o.description, printable_value, o.choices)
def print_conf(self):
def print_default_values_warning():
mlog.warning('The source directory instead of the build directory was specified.')
mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')
if self.default_values_only:
print_default_values_warning()
print('')
print('Core properties:')
print(' Source dir', self.source_dir)
if not self.default_values_only:
print(' Build dir ', self.build_dir)
dir_option_names = ['bindir',
'datadir',
'includedir',
'infodir',
'libdir',
'libexecdir',
'localedir',
'localstatedir',
'mandir',
'prefix',
'sbindir',
'sharedstatedir',
'sysconfdir']
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]
dir_options = {k: o for k, o in self.coredata.builtins.items() if k in dir_option_names}
test_options = {k: o for k, o in self.coredata.builtins.items() if k in test_option_names}
core_options = {k: o for k, o in self.coredata.builtins.items() if k in core_option_names}
def insert_build_prefix(k):
idx = k.find(':')
if idx < 0:
return 'build.' + k
return k[:idx + 1] + 'build.' + k[idx + 1:]
core_options = self.split_options_per_subproject(core_options)
host_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
self.coredata.compiler_options.host.items())))
build_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
(insert_build_prefix(k), o)
for k, o in self.coredata.compiler_options.build.items())))
project_options = self.split_options_per_subproject(self.coredata.user_options)
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
self.print_options('Core options', core_options[''])
self.print_options('', self.coredata.builtins_per_machine.host)
if show_build_options:
self.print_options('', {insert_build_prefix(k): o for k, o in self.coredata.builtins_per_machine.build.items()})
self.print_options('Backend options', self.coredata.backend_options)
self.print_options('Base options', self.coredata.base_options)
self.print_options('Compiler options', host_compiler_options.get('', {}))
if show_build_options:
|
self.print_options('Directories', dir_options)
self.print_options('Testing options', test_options)
self.print_options('Project options', project_options.get('', {}))
for subproject in sorted(self.all_subprojects):
if subproject == '':
continue
self.add_section('Subproject ' + subproject)
if subproject in core_options:
self.print_options('Core options', core_options[subproject])
if subproject in | self.print_options('', build_compiler_options.get('', {})) | conditional_block |
mconf.py | . import coredata, environment, mesonlib, build, mintro, mlog
from .ast import AstIDGenerator
def add_arguments(parser):
coredata.register_builtin_arguments(parser)
parser.add_argument('builddir', nargs='?', default='.')
parser.add_argument('--clearcache', action='store_true', default=False,
help='Clear cached state (e.g. found dependencies)')
def make_lower_case(val):
if isinstance(val, bool):
return str(val).lower()
elif isinstance(val, list):
return [make_lower_case(i) for i in val]
else:
return str(val)
class ConfException(mesonlib.MesonException):
pass
class Conf:
def __init__(self, build_dir):
self.build_dir = os.path.abspath(os.path.realpath(build_dir))
if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
self.build_dir = os.path.dirname(self.build_dir)
self.build = None
self.max_choices_line_length = 60
self.name_col = []
self.value_col = []
self.choices_col = []
self.descr_col = []
self.has_choices = False
self.all_subprojects = set()
self.yielding_options = set()
if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
self.build = build.load(self.build_dir)
self.source_dir = self.build.environment.get_source_dir()
self.coredata = coredata.load(self.build_dir)
self.default_values_only = False
elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
# Make sure that log entries in other parts of meson don't interfere with the JSON output
mlog.disable()
self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
intr.analyze()
# Re-enable logging just in case
mlog.enable()
self.coredata = intr.coredata
self.default_values_only = True
else:
raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir))
def clear_cache(self):
self.coredata.deps.host.clear()
self.coredata.deps.build.clear()
def set_options(self, options):
self.coredata.set_options(options)
def save(self):
# Do nothing when using introspection
if self.default_values_only:
return
# Only called if something has changed so overwrite unconditionally.
coredata.save(self.coredata, self.build_dir)
# We don't write the build file because any changes to it
# are erased when Meson is executed the next time, i.e. when
# Ninja is run.
def print_aligned(self):
col_widths = (max([len(i) for i in self.name_col], default=0),
max([len(i) for i in self.value_col], default=0),
max([len(i) for i in self.choices_col], default=0))
for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
if self.has_choices:
print('{0:{width[0]}} {1:{width[1]}} {2:{width[2]}} {3}'.format(*line, width=col_widths))
else:
print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))
def split_options_per_subproject(self, options):
result = {}
for k, o in options.items():
subproject = ''
if ':' in k:
subproject, optname = k.split(':')
if o.yielding and optname in options:
self.yielding_options.add(k)
self.all_subprojects.add(subproject)
result.setdefault(subproject, {})[k] = o
return result
def _add_line(self, name, value, choices, descr):
self.name_col.append(' ' * self.print_margin + name)
self.value_col.append(value)
self.choices_col.append(choices)
self.descr_col.append(descr)
def add_option(self, name, descr, value, choices):
| current += i
choices = current + ']'
else:
choices = make_lower_case(choices)
else:
choices = ''
self._add_line(name, value, choices, descr)
def add_title(self, title):
titles = {'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
if self.default_values_only:
titles['value'] = 'Default Value'
self._add_line('', '', '', '')
self._add_line(title, titles['value'], titles['choices'], titles['descr'])
self._add_line('-' * len(title), '-' * len(titles['value']), '-' * len(titles['choices']), '-' * len(titles['descr']))
def add_section(self, section):
self.print_margin = 0
self._add_line('', '', '', '')
self._add_line(section + ':', '', '', '')
self.print_margin = 2
def print_options(self, title, options):
if not options:
return
if title:
self.add_title(title)
for k, o in sorted(options.items()):
printable_value = o.printable_value()
if k in self.yielding_options:
printable_value = '<inherited from main project>'
self.add_option(k, o.description, printable_value, o.choices)
def print_conf(self):
def print_default_values_warning():
mlog.warning('The source directory instead of the build directory was specified.')
mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')
if self.default_values_only:
print_default_values_warning()
print('')
print('Core properties:')
print(' Source dir', self.source_dir)
if not self.default_values_only:
print(' Build dir ', self.build_dir)
dir_option_names = ['bindir',
'datadir',
'includedir',
'infodir',
'libdir',
'libexecdir',
'localedir',
'localstatedir',
'mandir',
'prefix',
'sbindir',
'sharedstatedir',
'sysconfdir']
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]
dir_options = {k: o for k, o in self.coredata.builtins.items() if k in dir_option_names}
test_options = {k: o for k, o in self.coredata.builtins.items() if k in test_option_names}
core_options = {k: o for k, o in self.coredata.builtins.items() if k in core_option_names}
def insert_build_prefix(k):
idx = k.find(':')
if idx < 0:
return 'build.' + k
return k[:idx + 1] + 'build.' + k[idx + 1:]
core_options = self.split_options_per_subproject(core_options)
host_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
self.coredata.compiler_options.host.items())))
build_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
(insert_build_prefix(k), o)
for k, o in self.coredata.compiler_options.build.items())))
project_options = self.split_options_per_subproject(self.coredata.user_options)
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
self.print_options('Core options', core_options[''])
self.print_options('', self.coredata.builtins_per_machine.host)
if show_build_options:
self.print_options('', {insert_build_prefix(k): o for k, o in self.coredata.builtins_per_machine.build.items()})
self.print_options('Backend options', self.coredata.backend_options)
self.print_options('Base options', self.coredata.base_options)
self.print_options('Compiler options', host_compiler_options.get('', {}))
if show_build_options:
self.print_options('', build_compiler_options.get('', {}))
self.print_options('Directories', dir_options)
self.print_options('Testing options', test_options)
self.print_options('Project options', project_options.get('', {}))
for subproject in sorted(self.all_subprojects):
if subproject == '':
continue
self.add_section('Subproject ' + subproject)
if subproject in core_options:
self.print_options('Core options', core_options[subproject])
if subproject in host | if isinstance(value, list):
value = '[{0}]'.format(', '.join(make_lower_case(value)))
else:
value = make_lower_case(value)
if choices:
self.has_choices = True
if isinstance(choices, list):
choices_list = make_lower_case(choices)
current = '['
while choices_list:
i = choices_list.pop(0)
if len(current) + len(i) >= self.max_choices_line_length:
self._add_line(name, value, current + ',', descr)
name = ''
value = ''
descr = ''
current = ' '
if len(current) > 1:
current += ', ' | identifier_body |
mconf.py | from . import coredata, environment, mesonlib, build, mintro, mlog
from .ast import AstIDGenerator
def add_arguments(parser):
    """Register the 'meson configure' command-line options on *parser*."""
    # Expose every builtin Meson option (the -Dopt=value family) on the parser.
    coredata.register_builtin_arguments(parser)
    # Build directory defaults to the current working directory.
    parser.add_argument('builddir', nargs='?', default='.')
    parser.add_argument('--clearcache', action='store_true', default=False,
                        help='Clear cached state (e.g. found dependencies)')
def make_lower_case(val):
    """Render *val* as display text.

    Booleans become the lower-case strings 'true'/'false', lists are
    converted element-wise (recursively), and anything else is passed
    through str() unchanged (strings keep their original casing).
    """
    if isinstance(val, list):
        return [make_lower_case(item) for item in val]
    if isinstance(val, bool):
        return str(val).lower()
    return str(val)
class ConfException(mesonlib.MesonException):
    """Raised when the given directory is neither a Meson build directory
    nor a project source directory, or configuration otherwise fails."""
class Conf:
    def __init__(self, build_dir):
        """Load configuration state from *build_dir*.

        *build_dir* may be either a configured build directory or a project
        source directory; in the latter case only default option values are
        available and ``self.default_values_only`` is set.

        Raises ConfException if the path is neither kind of directory.
        """
        self.build_dir = os.path.abspath(os.path.realpath(build_dir))
        # Accept a path to (or named) meson.build and use its directory instead.
        if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
            self.build_dir = os.path.dirname(self.build_dir)
        self.build = None
        # Wrap the 'Possible Values' column once a line grows past this width.
        self.max_choices_line_length = 60
        # Parallel per-row output columns, filled by _add_line() and
        # flushed by print_aligned().
        self.name_col = []
        self.value_col = []
        self.choices_col = []
        self.descr_col = []
        self.has_choices = False
        self.all_subprojects = set()
        self.yielding_options = set()
        if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
            # A configured build directory: load its persisted state.
            self.build = build.load(self.build_dir)
            self.source_dir = self.build.environment.get_source_dir()
            self.coredata = coredata.load(self.build_dir)
            self.default_values_only = False
        elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
            # A source directory: introspect meson.build for default values only.
            # Make sure that log entries in other parts of meson don't interfere with the JSON output
            mlog.disable()
            self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
            intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
            intr.analyze()
            # Re-enable logging just in case
            mlog.enable()
            self.coredata = intr.coredata
            self.default_values_only = True
        else:
            raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir))
self.coredata.deps.host.clear()
self.coredata.deps.build.clear()
def set_options(self, options):
self.coredata.set_options(options)
def save(self):
# Do nothing when using introspection
if self.default_values_only:
return
# Only called if something has changed so overwrite unconditionally.
coredata.save(self.coredata, self.build_dir)
# We don't write the build file because any changes to it
# are erased when Meson is executed the next time, i.e. when
# Ninja is run.
def print_aligned(self):
col_widths = (max([len(i) for i in self.name_col], default=0),
max([len(i) for i in self.value_col], default=0),
max([len(i) for i in self.choices_col], default=0))
for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
if self.has_choices:
print('{0:{width[0]}} {1:{width[1]}} {2:{width[2]}} {3}'.format(*line, width=col_widths))
else:
print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))
def split_options_per_subproject(self, options):
result = {}
for k, o in options.items():
subproject = ''
if ':' in k:
subproject, optname = k.split(':')
if o.yielding and optname in options:
self.yielding_options.add(k)
self.all_subprojects.add(subproject)
result.setdefault(subproject, {})[k] = o
return result
def _add_line(self, name, value, choices, descr):
self.name_col.append(' ' * self.print_margin + name)
self.value_col.append(value)
self.choices_col.append(choices)
self.descr_col.append(descr)
def add_option(self, name, descr, value, choices):
if isinstance(value, list):
value = '[{0}]'.format(', '.join(make_lower_case(value)))
else:
value = make_lower_case(value)
if choices:
self.has_choices = True
if isinstance(choices, list):
choices_list = make_lower_case(choices)
current = '['
while choices_list:
i = choices_list.pop(0)
if len(current) + len(i) >= self.max_choices_line_length:
self._add_line(name, value, current + ',', descr)
name = ''
value = ''
descr = ''
current = ' '
if len(current) > 1:
current += ', '
current += i
choices = current + ']'
else:
choices = make_lower_case(choices)
else:
choices = ''
self._add_line(name, value, choices, descr)
def add_title(self, title):
titles = {'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
if self.default_values_only:
titles['value'] = 'Default Value'
self._add_line('', '', '', '')
self._add_line(title, titles['value'], titles['choices'], titles['descr'])
self._add_line('-' * len(title), '-' * len(titles['value']), '-' * len(titles['choices']), '-' * len(titles['descr']))
def add_section(self, section):
self.print_margin = 0
self._add_line('', '', '', '')
self._add_line(section + ':', '', '', '')
self.print_margin = 2
def print_options(self, title, options):
if not options:
return
if title:
self.add_title(title)
for k, o in sorted(options.items()):
printable_value = o.printable_value()
if k in self.yielding_options:
printable_value = '<inherited from main project>'
self.add_option(k, o.description, printable_value, o.choices)
def print_conf(self):
def print_default_values_warning():
mlog.warning('The source directory instead of the build directory was specified.')
mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')
if self.default_values_only:
print_default_values_warning()
print('')
print('Core properties:')
print(' Source dir', self.source_dir)
if not self.default_values_only:
print(' Build dir ', self.build_dir)
dir_option_names = ['bindir',
'datadir',
'includedir',
'infodir',
'libdir',
'libexecdir',
'localedir',
'localstatedir',
'mandir',
'prefix',
'sbindir',
'sharedstatedir',
'sysconfdir']
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]
dir_options = {k: o for k, o in self.coredata.builtins.items() if k in dir_option_names}
test_options = {k: o for k, o in self.coredata.builtins.items() if k in test_option_names}
core_options = {k: o for k, o in self.coredata.builtins.items() if k in core_option_names}
def insert_build_prefix(k):
idx = k.find(':')
if idx < 0:
return 'build.' + k
return k[:idx + 1] + 'build.' + k[idx + 1:]
core_options = self.split_options_per_subproject(core_options)
host_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
self.coredata.compiler_options.host.items())))
build_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
(insert_build_prefix(k), o)
for k, o in self.coredata.compiler_options.build.items())))
project_options = self.split_options_per_subproject(self.coredata.user_options)
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
self.print_options('Core options', core_options[''])
self.print_options('', self.coredata.builtins_per_machine.host)
if show_build_options:
self.print_options('', {insert_build_prefix(k): o for k, o in self.coredata.builtins_per_machine.build.items()})
self.print_options('Backend options', self.coredata.backend_options)
self.print_options('Base options', self.coredata.base_options)
self.print_options('Compiler options', host_compiler_options.get('', {}))
if show_build_options:
self.print_options('', build_compiler_options.get('', {})) | for subproject in sorted(self.all_subprojects):
if subproject == '':
continue
self.add_section('Subproject ' + subproject)
if subproject in core_options:
self.print_options('Core options', core_options[subproject])
if subproject in | self.print_options('Directories', dir_options)
self.print_options('Testing options', test_options)
self.print_options('Project options', project_options.get('', {})) | random_line_split |
vm-installation-image-post-update-version.py | #!/usr/bin/python3
import os
import sys
INSTALLER_VERSION = '"latest"'
def create_installer_config(path):
"""Create a basicl installation configuration file"""
config = u"template=file:///etc/ister.json\n"
jconfig = u'{"DestinationType" : "physical", "PartitionLayout" : \
[{"disk" : "vda", "partition" : 1, "size" : "512M", "type" : "EFI"}, \
{"disk" : "vda", "partition" : 2, \
"size" : "512M", "type" : "swap"}, {"disk" : "vda", "partition" : 3, \
"size" : "rest", "type" : "linux"}], \
"FilesystemTypes" : \
[{"disk" : "vda", "partition" : 1, "type" : "vfat"}, \
{"disk" : "vda", "partition" : 2, "type" : "swap"}, \
{"disk" : "vda", "partition" : 3, "type" : "ext4"}], \
"PartitionMountPoints" : \
[{"disk" : "vda", "partition" : 1, "mount" : "/boot"}, \
{"disk" : "vda", "partition" : 3, "mount" : "/"}], \
"Version" : 0, "Bundles" : ["kernel-native", "telemetrics", "os-core", "os-core-update"]}\n'
if not os.path.isdir("{}/etc".format(path)):
os.mkdir("{}/etc".format(path))
with open("{}/etc/ister.conf".format(path), "w") as cfile:
cfile.write(config)
with open("{}/etc/ister.json".format(path), "w") as jfile:
jfile.write(jconfig.replace('"Version" : 0',
'"Version" : ' + INSTALLER_VERSION))
def | (path):
"""Add a delay to the installer kernel commandline"""
entry_path = path + "/boot/loader/entries/"
entry_file = os.listdir(entry_path)
if len(entry_file) != 1:
raise Exception("Unable to find specific entry file in {0}, "
"found {1} instead".format(entry_path, entry_file))
file_full_path = entry_path + entry_file[0]
with open(file_full_path, "r") as entry:
entry_content = entry.readlines()
options_line = entry_content[-1]
if not options_line.startswith("options "):
raise Exception("Last line of entry file is not the kernel "
"commandline options")
# Account for newline at the end of the line
options_line = options_line[:-1] + " rootwait\n"
entry_content[-1] = options_line
os.unlink(file_full_path)
with open(file_full_path, "w") as entry:
entry.writelines(entry_content)
def disable_tty1_getty(path):
"""Add a symlink masking the systemd tty1 generator"""
os.makedirs(path + "/etc/systemd/system/getty.target.wants")
os.symlink("/dev/null", path + "/etc/systemd/system/getty.target.wants/[email protected]")
def add_installer_service(path):
os.symlink("{}/usr/lib/systemd/system/ister.service"
.format(path),
"{}/usr/lib/systemd/system/multi-user.target.wants/ister.service"
.format(path))
if __name__ == '__main__':
if len(sys.argv) != 2:
sys.exit(-1)
try:
create_installer_config(sys.argv[1])
append_installer_rootwait(sys.argv[1])
disable_tty1_getty(sys.argv[1])
add_installer_service(sys.argv[1])
except Exception as exep:
print(exep)
sys.exit(-1)
sys.exit(0)
| append_installer_rootwait | identifier_name |
vm-installation-image-post-update-version.py | #!/usr/bin/python3
import os
import sys
INSTALLER_VERSION = '"latest"'
def create_installer_config(path):
"""Create a basicl installation configuration file"""
config = u"template=file:///etc/ister.json\n"
jconfig = u'{"DestinationType" : "physical", "PartitionLayout" : \
[{"disk" : "vda", "partition" : 1, "size" : "512M", "type" : "EFI"}, \
{"disk" : "vda", "partition" : 2, \
"size" : "512M", "type" : "swap"}, {"disk" : "vda", "partition" : 3, \
"size" : "rest", "type" : "linux"}], \
"FilesystemTypes" : \
[{"disk" : "vda", "partition" : 1, "type" : "vfat"}, \
{"disk" : "vda", "partition" : 2, "type" : "swap"}, \
{"disk" : "vda", "partition" : 3, "type" : "ext4"}], \
"PartitionMountPoints" : \
[{"disk" : "vda", "partition" : 1, "mount" : "/boot"}, \
{"disk" : "vda", "partition" : 3, "mount" : "/"}], \
"Version" : 0, "Bundles" : ["kernel-native", "telemetrics", "os-core", "os-core-update"]}\n'
if not os.path.isdir("{}/etc".format(path)):
os.mkdir("{}/etc".format(path))
with open("{}/etc/ister.conf".format(path), "w") as cfile:
cfile.write(config)
with open("{}/etc/ister.json".format(path), "w") as jfile:
jfile.write(jconfig.replace('"Version" : 0',
'"Version" : ' + INSTALLER_VERSION))
def append_installer_rootwait(path):
"""Add a delay to the installer kernel commandline"""
entry_path = path + "/boot/loader/entries/"
entry_file = os.listdir(entry_path)
if len(entry_file) != 1:
|
file_full_path = entry_path + entry_file[0]
with open(file_full_path, "r") as entry:
entry_content = entry.readlines()
options_line = entry_content[-1]
if not options_line.startswith("options "):
raise Exception("Last line of entry file is not the kernel "
"commandline options")
# Account for newline at the end of the line
options_line = options_line[:-1] + " rootwait\n"
entry_content[-1] = options_line
os.unlink(file_full_path)
with open(file_full_path, "w") as entry:
entry.writelines(entry_content)
def disable_tty1_getty(path):
"""Add a symlink masking the systemd tty1 generator"""
os.makedirs(path + "/etc/systemd/system/getty.target.wants")
os.symlink("/dev/null", path + "/etc/systemd/system/getty.target.wants/[email protected]")
def add_installer_service(path):
os.symlink("{}/usr/lib/systemd/system/ister.service"
.format(path),
"{}/usr/lib/systemd/system/multi-user.target.wants/ister.service"
.format(path))
if __name__ == '__main__':
if len(sys.argv) != 2:
sys.exit(-1)
try:
create_installer_config(sys.argv[1])
append_installer_rootwait(sys.argv[1])
disable_tty1_getty(sys.argv[1])
add_installer_service(sys.argv[1])
except Exception as exep:
print(exep)
sys.exit(-1)
sys.exit(0)
| raise Exception("Unable to find specific entry file in {0}, "
"found {1} instead".format(entry_path, entry_file)) | conditional_block |
vm-installation-image-post-update-version.py | #!/usr/bin/python3
import os
import sys
INSTALLER_VERSION = '"latest"'
def create_installer_config(path):
"""Create a basicl installation configuration file"""
config = u"template=file:///etc/ister.json\n"
jconfig = u'{"DestinationType" : "physical", "PartitionLayout" : \
[{"disk" : "vda", "partition" : 1, "size" : "512M", "type" : "EFI"}, \
{"disk" : "vda", "partition" : 2, \
"size" : "512M", "type" : "swap"}, {"disk" : "vda", "partition" : 3, \
"size" : "rest", "type" : "linux"}], \
"FilesystemTypes" : \
[{"disk" : "vda", "partition" : 1, "type" : "vfat"}, \
{"disk" : "vda", "partition" : 2, "type" : "swap"}, \
{"disk" : "vda", "partition" : 3, "type" : "ext4"}], \
"PartitionMountPoints" : \
[{"disk" : "vda", "partition" : 1, "mount" : "/boot"}, \
{"disk" : "vda", "partition" : 3, "mount" : "/"}], \
"Version" : 0, "Bundles" : ["kernel-native", "telemetrics", "os-core", "os-core-update"]}\n'
if not os.path.isdir("{}/etc".format(path)):
os.mkdir("{}/etc".format(path))
with open("{}/etc/ister.conf".format(path), "w") as cfile:
cfile.write(config)
with open("{}/etc/ister.json".format(path), "w") as jfile: | jfile.write(jconfig.replace('"Version" : 0',
'"Version" : ' + INSTALLER_VERSION))
def append_installer_rootwait(path):
"""Add a delay to the installer kernel commandline"""
entry_path = path + "/boot/loader/entries/"
entry_file = os.listdir(entry_path)
if len(entry_file) != 1:
raise Exception("Unable to find specific entry file in {0}, "
"found {1} instead".format(entry_path, entry_file))
file_full_path = entry_path + entry_file[0]
with open(file_full_path, "r") as entry:
entry_content = entry.readlines()
options_line = entry_content[-1]
if not options_line.startswith("options "):
raise Exception("Last line of entry file is not the kernel "
"commandline options")
# Account for newline at the end of the line
options_line = options_line[:-1] + " rootwait\n"
entry_content[-1] = options_line
os.unlink(file_full_path)
with open(file_full_path, "w") as entry:
entry.writelines(entry_content)
def disable_tty1_getty(path):
"""Add a symlink masking the systemd tty1 generator"""
os.makedirs(path + "/etc/systemd/system/getty.target.wants")
os.symlink("/dev/null", path + "/etc/systemd/system/getty.target.wants/[email protected]")
def add_installer_service(path):
os.symlink("{}/usr/lib/systemd/system/ister.service"
.format(path),
"{}/usr/lib/systemd/system/multi-user.target.wants/ister.service"
.format(path))
if __name__ == '__main__':
if len(sys.argv) != 2:
sys.exit(-1)
try:
create_installer_config(sys.argv[1])
append_installer_rootwait(sys.argv[1])
disable_tty1_getty(sys.argv[1])
add_installer_service(sys.argv[1])
except Exception as exep:
print(exep)
sys.exit(-1)
sys.exit(0) | random_line_split |
|
vm-installation-image-post-update-version.py | #!/usr/bin/python3
import os
import sys
INSTALLER_VERSION = '"latest"'
def create_installer_config(path):
| jfile.write(jconfig.replace('"Version" : 0',
'"Version" : ' + INSTALLER_VERSION))
def append_installer_rootwait(path):
"""Add a delay to the installer kernel commandline"""
entry_path = path + "/boot/loader/entries/"
entry_file = os.listdir(entry_path)
if len(entry_file) != 1:
raise Exception("Unable to find specific entry file in {0}, "
"found {1} instead".format(entry_path, entry_file))
file_full_path = entry_path + entry_file[0]
with open(file_full_path, "r") as entry:
entry_content = entry.readlines()
options_line = entry_content[-1]
if not options_line.startswith("options "):
raise Exception("Last line of entry file is not the kernel "
"commandline options")
# Account for newline at the end of the line
options_line = options_line[:-1] + " rootwait\n"
entry_content[-1] = options_line
os.unlink(file_full_path)
with open(file_full_path, "w") as entry:
entry.writelines(entry_content)
def disable_tty1_getty(path):
"""Add a symlink masking the systemd tty1 generator"""
os.makedirs(path + "/etc/systemd/system/getty.target.wants")
os.symlink("/dev/null", path + "/etc/systemd/system/getty.target.wants/[email protected]")
def add_installer_service(path):
os.symlink("{}/usr/lib/systemd/system/ister.service"
.format(path),
"{}/usr/lib/systemd/system/multi-user.target.wants/ister.service"
.format(path))
if __name__ == '__main__':
if len(sys.argv) != 2:
sys.exit(-1)
try:
create_installer_config(sys.argv[1])
append_installer_rootwait(sys.argv[1])
disable_tty1_getty(sys.argv[1])
add_installer_service(sys.argv[1])
except Exception as exep:
print(exep)
sys.exit(-1)
sys.exit(0)
| """Create a basicl installation configuration file"""
config = u"template=file:///etc/ister.json\n"
jconfig = u'{"DestinationType" : "physical", "PartitionLayout" : \
[{"disk" : "vda", "partition" : 1, "size" : "512M", "type" : "EFI"}, \
{"disk" : "vda", "partition" : 2, \
"size" : "512M", "type" : "swap"}, {"disk" : "vda", "partition" : 3, \
"size" : "rest", "type" : "linux"}], \
"FilesystemTypes" : \
[{"disk" : "vda", "partition" : 1, "type" : "vfat"}, \
{"disk" : "vda", "partition" : 2, "type" : "swap"}, \
{"disk" : "vda", "partition" : 3, "type" : "ext4"}], \
"PartitionMountPoints" : \
[{"disk" : "vda", "partition" : 1, "mount" : "/boot"}, \
{"disk" : "vda", "partition" : 3, "mount" : "/"}], \
"Version" : 0, "Bundles" : ["kernel-native", "telemetrics", "os-core", "os-core-update"]}\n'
if not os.path.isdir("{}/etc".format(path)):
os.mkdir("{}/etc".format(path))
with open("{}/etc/ister.conf".format(path), "w") as cfile:
cfile.write(config)
with open("{}/etc/ister.json".format(path), "w") as jfile: | identifier_body |
pass-by-copy.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate debug;
| use std::gc::{GC, Gc};
fn magic(x: A) { println!("{:?}", x); }
fn magic2(x: Gc<int>) { println!("{:?}", x); }
struct A { a: Gc<int> }
pub fn main() {
let a = A {a: box(GC) 10};
let b = box(GC) 10;
magic(a); magic(A {a: box(GC) 20});
magic2(b); magic2(box(GC) 20);
} | random_line_split |
|
pass-by-copy.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate debug;
use std::gc::{GC, Gc};
fn magic(x: A) { println!("{:?}", x); }
fn magic2(x: Gc<int>) { println!("{:?}", x); }
struct A { a: Gc<int> }
pub fn main() | {
let a = A {a: box(GC) 10};
let b = box(GC) 10;
magic(a); magic(A {a: box(GC) 20});
magic2(b); magic2(box(GC) 20);
} | identifier_body |
|
pass-by-copy.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate debug;
use std::gc::{GC, Gc};
fn | (x: A) { println!("{:?}", x); }
fn magic2(x: Gc<int>) { println!("{:?}", x); }
struct A { a: Gc<int> }
pub fn main() {
let a = A {a: box(GC) 10};
let b = box(GC) 10;
magic(a); magic(A {a: box(GC) 20});
magic2(b); magic2(box(GC) 20);
}
| magic | identifier_name |
step1.component.spec.ts | /*
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import {DomainAlertNotifierCreationStep1Component} from './step1.component';
describe('Step1Component', () => {
let component: DomainAlertNotifierCreationStep1Component;
let fixture: ComponentFixture<DomainAlertNotifierCreationStep1Component>;
beforeEach(waitForAsync(() => {
TestBed.configureTestingModule({
declarations: [DomainAlertNotifierCreationStep1Component]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(DomainAlertNotifierCreationStep1Component);
component = fixture.componentInstance;
fixture.detectChanges(); |
it('should create', () => {
expect(component).toBeTruthy();
});
}); | }); | random_line_split |
entity.js | "use strict";
var fs = require('fs');
var mkdirSync = function (path) {
try {
fs.mkdirSync(path);
} catch (e) {
if (e.code != 'EEXIST') throw e;
}
};
mkdirSync(iris.configPath + "/" + "entity");
/**
* @file Includes for the entity module
*/
/**
* @namespace entity
*/
iris.registerModule("entity",__dirname);
require('./entity_create');
require('./entity_delete');
require('./entity_edit');
require('./entity_fetch');
// API hooks for getting schema information
// Get list of entity types
iris.route.get("/api/entitySchema", function (req, res) {
// If not admin, present 403 page
if (req.authPass.roles.indexOf('admin') === -1) {
res.status(403);
res.send("Access denied");
return false;
}
var output = {};
Object.keys(iris.entityTypes).forEach(function (entityType) {
output[entityType] = iris.entityTypes[entityType];
});
res.send(output);
});
iris.route.get("/api/entitySchema/:type", function (req, res) {
// If not admin, present 403 page
if (req.authPass.roles.indexOf('admin') === -1) |
res.send(iris.entityTypes[req.params.type]);
});
| {
res.status(403);
res.send("Access denied");
return false;
} | conditional_block |
entity.js | "use strict";
var fs = require('fs');
var mkdirSync = function (path) {
try {
fs.mkdirSync(path);
} catch (e) {
if (e.code != 'EEXIST') throw e;
}
};
mkdirSync(iris.configPath + "/" + "entity");
/**
* @file Includes for the entity module
*/
/**
* @namespace entity
*/
iris.registerModule("entity",__dirname);
require('./entity_create');
require('./entity_delete');
require('./entity_edit');
require('./entity_fetch');
// API hooks for getting schema information
// Get list of entity types
iris.route.get("/api/entitySchema", function (req, res) {
// If not admin, present 403 page
if (req.authPass.roles.indexOf('admin') === -1) {
res.status(403);
res.send("Access denied");
| return false;
}
var output = {};
Object.keys(iris.entityTypes).forEach(function (entityType) {
output[entityType] = iris.entityTypes[entityType];
});
res.send(output);
});
iris.route.get("/api/entitySchema/:type", function (req, res) {
// If not admin, present 403 page
if (req.authPass.roles.indexOf('admin') === -1) {
res.status(403);
res.send("Access denied");
return false;
}
res.send(iris.entityTypes[req.params.type]);
}); | random_line_split |
|
csvimporter.ts | // importcsv.ts
import {InfoElement} from './infoelement';
import { ITransformArray, IBaseItem, IItemFactory} from 'infodata';
//
//declare var Papa:any;
import Papa = require('papaparse');
//
export class CSVImporter extends InfoElement implements ITransformArray {
//
private _factory: IItemFactory; | //
constructor(fact:IItemFactory) {
super();
this._factory = fact;
}
protected get factory():IItemFactory {
return (this._factory !== undefined) ? this._factory : null;
}
public transform_map(oMap: any): IBaseItem {
if ((oMap === undefined) || (oMap === null)) {
return null;
}
return this.factory.create(oMap);
}// transform_map
public transform_file(file: File,stype:string): Promise<any> {
let oRet:any = [];
if ((file === undefined) || (file === null)) {
return Promise.resolve(oRet);
}
let self = this;
return new Promise((resolve, reject) => {
Papa.parse(file, {
header: true, // default: false
dynamicTyping: true, // default: false
skipEmptyLines: true, // default: false
chunk: (results: ParseResult, parser) => {
if ((results !== undefined) && (results !== null)){
if ((results.data !== undefined) && (results.data !== null)){
let maps = results.data;
if (maps.length > 0){
for (let x of maps){
x.type = stype;
let y = self.transform_map(x);
if ((y !== undefined) && (y !== null)){
oRet.push(y);
}
}// x
}
}
}
},
complete: (results: ParseResult, file?: File) => {
resolve(oRet);
}, // default: undefined
error: (error: ParseError, file?: File) => {
reject(new Error(error.message));
},
beforeFirstChunk :(s: string) => {
oRet = [];
} // default: undefined
});
});
}// transform_file
}// class CSVImporter | random_line_split |
|
csvimporter.ts | // importcsv.ts
import {InfoElement} from './infoelement';
import { ITransformArray, IBaseItem, IItemFactory} from 'infodata';
//
//declare var Papa:any;
import Papa = require('papaparse');
//
export class CSVImporter extends InfoElement implements ITransformArray {
//
private _factory: IItemFactory;
//
constructor(fact:IItemFactory) {
super();
this._factory = fact;
}
protected get factory():IItemFactory {
return (this._factory !== undefined) ? this._factory : null;
}
public transform_map(oMap: any): IBaseItem {
if ((oMap === undefined) || (oMap === null)) {
return null;
}
return this.factory.create(oMap);
}// transform_map
public transform_file(file: File,stype:string): Promise<any> {
let oRet:any = [];
if ((file === undefined) || (file === null)) |
let self = this;
return new Promise((resolve, reject) => {
Papa.parse(file, {
header: true, // default: false
dynamicTyping: true, // default: false
skipEmptyLines: true, // default: false
chunk: (results: ParseResult, parser) => {
if ((results !== undefined) && (results !== null)){
if ((results.data !== undefined) && (results.data !== null)){
let maps = results.data;
if (maps.length > 0){
for (let x of maps){
x.type = stype;
let y = self.transform_map(x);
if ((y !== undefined) && (y !== null)){
oRet.push(y);
}
}// x
}
}
}
},
complete: (results: ParseResult, file?: File) => {
resolve(oRet);
}, // default: undefined
error: (error: ParseError, file?: File) => {
reject(new Error(error.message));
},
beforeFirstChunk :(s: string) => {
oRet = [];
} // default: undefined
});
});
}// transform_file
}// class CSVImporter
| {
return Promise.resolve(oRet);
} | conditional_block |
csvimporter.ts | // importcsv.ts
import {InfoElement} from './infoelement';
import { ITransformArray, IBaseItem, IItemFactory} from 'infodata';
//
//declare var Papa:any;
import Papa = require('papaparse');
//
export class | extends InfoElement implements ITransformArray {
//
private _factory: IItemFactory;
//
constructor(fact:IItemFactory) {
super();
this._factory = fact;
}
protected get factory():IItemFactory {
return (this._factory !== undefined) ? this._factory : null;
}
public transform_map(oMap: any): IBaseItem {
if ((oMap === undefined) || (oMap === null)) {
return null;
}
return this.factory.create(oMap);
}// transform_map
public transform_file(file: File,stype:string): Promise<any> {
let oRet:any = [];
if ((file === undefined) || (file === null)) {
return Promise.resolve(oRet);
}
let self = this;
return new Promise((resolve, reject) => {
Papa.parse(file, {
header: true, // default: false
dynamicTyping: true, // default: false
skipEmptyLines: true, // default: false
chunk: (results: ParseResult, parser) => {
if ((results !== undefined) && (results !== null)){
if ((results.data !== undefined) && (results.data !== null)){
let maps = results.data;
if (maps.length > 0){
for (let x of maps){
x.type = stype;
let y = self.transform_map(x);
if ((y !== undefined) && (y !== null)){
oRet.push(y);
}
}// x
}
}
}
},
complete: (results: ParseResult, file?: File) => {
resolve(oRet);
}, // default: undefined
error: (error: ParseError, file?: File) => {
reject(new Error(error.message));
},
beforeFirstChunk :(s: string) => {
oRet = [];
} // default: undefined
});
});
}// transform_file
}// class CSVImporter
| CSVImporter | identifier_name |
lib.rs | extern crate bazel_protos;
extern crate bytes;
extern crate digest;
extern crate hashing;
extern crate protobuf;
extern crate sha2;
use bytes::Bytes;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
pub mod data;
pub mod file;
pub fn owned_string_vec(args: &[&str]) -> Vec<String> {
args.into_iter().map(|s| s.to_string()).collect()
}
pub fn as_byte_owned_vec(str: &str) -> Vec<u8> |
pub fn as_bytes(str: &str) -> Bytes {
Bytes::from(str.as_bytes())
}
pub fn make_file(path: &Path, contents: &[u8], mode: u32) {
let mut file = std::fs::File::create(&path).unwrap();
file.write(contents).unwrap();
let mut permissions = std::fs::metadata(path).unwrap().permissions();
permissions.set_mode(mode);
file.set_permissions(permissions).unwrap();
}
| {
Vec::from(str.as_bytes())
} | identifier_body |
lib.rs | extern crate bazel_protos;
extern crate bytes;
extern crate digest;
extern crate hashing;
extern crate protobuf;
extern crate sha2;
use bytes::Bytes;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
pub mod data; | pub fn owned_string_vec(args: &[&str]) -> Vec<String> {
args.into_iter().map(|s| s.to_string()).collect()
}
pub fn as_byte_owned_vec(str: &str) -> Vec<u8> {
Vec::from(str.as_bytes())
}
pub fn as_bytes(str: &str) -> Bytes {
Bytes::from(str.as_bytes())
}
pub fn make_file(path: &Path, contents: &[u8], mode: u32) {
let mut file = std::fs::File::create(&path).unwrap();
file.write(contents).unwrap();
let mut permissions = std::fs::metadata(path).unwrap().permissions();
permissions.set_mode(mode);
file.set_permissions(permissions).unwrap();
} | pub mod file;
| random_line_split |
lib.rs | extern crate bazel_protos;
extern crate bytes;
extern crate digest;
extern crate hashing;
extern crate protobuf;
extern crate sha2;
use bytes::Bytes;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
pub mod data;
pub mod file;
pub fn owned_string_vec(args: &[&str]) -> Vec<String> {
args.into_iter().map(|s| s.to_string()).collect()
}
pub fn as_byte_owned_vec(str: &str) -> Vec<u8> {
Vec::from(str.as_bytes())
}
pub fn | (str: &str) -> Bytes {
Bytes::from(str.as_bytes())
}
pub fn make_file(path: &Path, contents: &[u8], mode: u32) {
let mut file = std::fs::File::create(&path).unwrap();
file.write(contents).unwrap();
let mut permissions = std::fs::metadata(path).unwrap().permissions();
permissions.set_mode(mode);
file.set_permissions(permissions).unwrap();
}
| as_bytes | identifier_name |
pastetext.js | /*
Copyright (c) 2003-2011, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
(function()
{
CKEDITOR.dialog.add( 'pastetext', function( editor )
{
return {
title : editor.lang.pasteText.title,
minWidth : CKEDITOR.env.ie && CKEDITOR.env.quirks ? 368 : 350,
minHeight : 240,
| {
label : editor.lang.common.generalTab,
id : 'general',
elements :
[
{
type : 'html',
id : 'pasteMsg',
html : '<div style="white-space:normal;width:340px;">' + editor.lang.clipboard.pasteMsg + '</div>'
},
{
type : 'textarea',
id : 'content',
className : 'cke_pastetext',
onLoad : function()
{
var label = this.getDialog().getContentElement( 'general', 'pasteMsg' ).getElement(),
input = this.getElement().getElementsByTag( 'textarea' ).getItem( 0 );
input.setAttribute( 'aria-labelledby', label.$.id );
input.setStyle( 'direction', editor.config.contentsLangDirection );
},
focus : function()
{
this.getElement().focus();
},
setup : function()
{
this.setValue( '' );
},
commit : function()
{
var value = this.getValue();
setTimeout( function()
{
editor.fire( 'paste', { 'text' : value } );
}, 0 );
}
}
]
}
]
};
});
})(); | onShow : function(){ this.setupContent(); },
onOk : function(){ this.commitContent(); },
contents :
[ | random_line_split |
home-test.js | (function () {
'use strict';
/**
* @ngdoc function
* @name app.test:homeTest
* @description
* # homeTest |
describe('homeCtrl', function () {
var controller = null, $scope = null, $location;
beforeEach(function () {
module('g4mify-client-app');
});
beforeEach(inject(function ($controller, $rootScope, _$location_) {
$scope = $rootScope.$new();
$location = _$location_;
controller = $controller('HomeCtrl', {
$scope: $scope
});
}));
it('Should HomeCtrl must be defined', function () {
expect(controller).toBeDefined();
});
it('Should match the path Module name', function () {
$location.path('/home');
expect($location.path()).toBe('/home');
});
});
})(); | * Test of the app
*/ | random_line_split |
offer_011_xuan_zhuan_shu_zu_de_zui_xiao_shu_zi_lcof.rs | struct Solution;
impl Solution {
pub fn min_array(numbers: Vec<i32>) -> i32 {
// 使用二分法
let (mut left, mut right) = (0, numbers.len() - 1);
while left < right {
let mid = (left + right) / 2;
// 如果 mid 比 left 大,说明最小值在 [mid+1, right] 之间,也可能就是 left。
// 如果 mid 比 left 小,说明最小值在 [left+1, mid] 之间。
// 如果 mid 比 right 小,说明最小值在 [left, mid] 之间。
// 如果 mid 比 right 大,说明最小值在 [left, mid] 之间。
// 上面这个判断不出应该怎么缩小范围。
// 可以判断 mid 是在较小的那块上还是在较大的那块上吗?
// 如果 mid > left && mid > right, 说明 mid 是在较大的那块上,否则就是在较小的那块上。
// 如果 mid 就是最小值呢?那么有:mid < left && mid < right. 但反过来不一定。
// 还是有点繁琐。
// 如果 mid 比 right 大,是不是就说明 mid 在较大的那块上了。如果 mid <= right,那说明 mid 在小块上。
// 如果 mid == right,那么 mid 可能在大块上,也可能在小块上。参考单测4.
// 看了官方题解,当相等时直接把 right 减一 😳,这样就没有二分了呀。。。
if numbers[mid] > numbers[right] {
left = mid + 1;
} else if numbers[mid] < numbers[right] {
right = mid;
} else {
right -= 1;
}
}
numbers[left]
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_min_array1() {
assert_eq!(Solution::min_array(vec![3, 4, 5, 1, 2]), 1);
}
#[test]
fn test_min_array2() {
assert_eq!(Solution::min_array(vec![2, 2, 2, 0, 1]), 0);
}
#[test]
fn test_min_array3() {
assert_eq!(Solution::min_array(vec![1, 1]), 1); | fn test_min_array4() {
assert_eq!(Solution::min_array(vec![3, 3, 1, 3]), 1);
}
} | }
#[test] | random_line_split |
offer_011_xuan_zhuan_shu_zu_de_zui_xiao_shu_zi_lcof.rs | struct Solution;
impl Solution {
pub fn min_array(numbers: Vec<i32>) -> i32 {
// 使用二分法
let (mut left, mut right) = (0, numbers.len() - 1);
while left < right {
let mid = (left + right) / 2;
// 如果 mid 比 left 大,说明最小值在 [mid+1, right] 之间,也可能就是 left。
// 如果 mid 比 left 小,说明最小值在 [left+1, mid] 之间。
// 如果 mid 比 right 小,说明最小值在 [left, mid] 之间。
// 如果 mid 比 right 大,说明最小值在 [left, mid] 之间。
// 上面这个判断不出应该怎么缩小范围。
// 可以判断 mid 是在较小的那块上还是在较大的那块上吗?
// 如果 mid > left && mid > right, 说明 mid 是在较大的那块上,否则就是在较小的那块上。
// 如果 mid 就是最小值呢?那么有:mid < left && mid < right. 但反过来不一定。
// 还是有点繁琐。
// 如果 mid 比 right 大,是不是就说明 mid 在较大的那块上了。如果 mid <= right,那说明 mid 在小块上。
// 如果 mid == right,那么 mid 可能在大块上,也可能在小块上。参考单测4.
// 看了官方题解,当相等时直接把 right 减一 😳,这样就没有二分了呀。。。
if numbers[mid] > numbers[right] {
left = mid + 1;
} else if numbers[mid] < numbers[right] {
right = mid;
} else {
right -= 1;
}
}
numbers[left]
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_min_array1() {
assert_eq!(Solution::min_array(vec![3, 4, 5, 1, 2]), 1);
}
#[test]
fn test_min_array2() {
assert_eq!(Solution::min_array(vec![2, 2, 2, 0, 1]), 0);
}
#[test]
fn test_min_array3() {
assert_eq!(Solution::min_array(vec![1, 1]), | y4() {
assert_eq!(Solution::min_array(vec![3, 3, 1, 3]), 1);
}
}
| 1);
}
#[test]
fn test_min_arra | conditional_block |
offer_011_xuan_zhuan_shu_zu_de_zui_xiao_shu_zi_lcof.rs | struct | ;
impl Solution {
pub fn min_array(numbers: Vec<i32>) -> i32 {
// 使用二分法
let (mut left, mut right) = (0, numbers.len() - 1);
while left < right {
let mid = (left + right) / 2;
// 如果 mid 比 left 大,说明最小值在 [mid+1, right] 之间,也可能就是 left。
// 如果 mid 比 left 小,说明最小值在 [left+1, mid] 之间。
// 如果 mid 比 right 小,说明最小值在 [left, mid] 之间。
// 如果 mid 比 right 大,说明最小值在 [left, mid] 之间。
// 上面这个判断不出应该怎么缩小范围。
// 可以判断 mid 是在较小的那块上还是在较大的那块上吗?
// 如果 mid > left && mid > right, 说明 mid 是在较大的那块上,否则就是在较小的那块上。
// 如果 mid 就是最小值呢?那么有:mid < left && mid < right. 但反过来不一定。
// 还是有点繁琐。
// 如果 mid 比 right 大,是不是就说明 mid 在较大的那块上了。如果 mid <= right,那说明 mid 在小块上。
// 如果 mid == right,那么 mid 可能在大块上,也可能在小块上。参考单测4.
// 看了官方题解,当相等时直接把 right 减一 😳,这样就没有二分了呀。。。
if numbers[mid] > numbers[right] {
left = mid + 1;
} else if numbers[mid] < numbers[right] {
right = mid;
} else {
right -= 1;
}
}
numbers[left]
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_min_array1() {
assert_eq!(Solution::min_array(vec![3, 4, 5, 1, 2]), 1);
}
#[test]
fn test_min_array2() {
assert_eq!(Solution::min_array(vec![2, 2, 2, 0, 1]), 0);
}
#[test]
fn test_min_array3() {
assert_eq!(Solution::min_array(vec![1, 1]), 1);
}
#[test]
fn test_min_array4() {
assert_eq!(Solution::min_array(vec![3, 3, 1, 3]), 1);
}
}
| Solution | identifier_name |
offer_011_xuan_zhuan_shu_zu_de_zui_xiao_shu_zi_lcof.rs | struct Solution;
impl Solution {
pub fn min_array(numbers: Vec<i32>) -> i32 {
// 使用二分法
let (mut left, mut right) = (0, numbers.len() - 1);
while left < right {
let mid = (left + right) / 2;
// 如果 mid 比 left 大,说明最小值在 [mid+1, right] 之间,也可能就是 left。
// 如果 mid 比 left 小,说明最小值在 [left+1, mid] 之间。
// 如果 mid 比 right 小,说明最小值在 [left, mid] 之间。
// 如果 mid 比 right 大,说明最小值在 [left, mid] 之间。
// 上面这个判断不出应该怎么缩小范围。
// 可以判断 mid 是在较小的那块上还是在较大的那块上吗?
// 如果 mid > left && mid > right, 说明 mid 是在较大的那块上,否则就是在较小的那块上。
// 如果 mid 就是最小值呢?那么有:mid < left && mid < right. 但反过来不一定。
// 还是有点繁琐。
// 如果 mid 比 right 大,是不是就说明 mid 在较大的那块上了。如果 mid <= right,那说明 mid 在小块上。
// 如果 mid == right,那么 mid 可能在大块上,也可能在小块上。参考单测4.
// 看了官方题解,当相等时直接把 right 减一 😳,这样就没有二分了呀。。。
if numbers[mid] > numbers[right] {
left = mid + 1;
} else if numbers[mid] < numbers[right] {
right = mid;
} else {
right -= 1;
}
}
numbers[left]
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_min_array1() {
assert_eq!(Solution::min_array(vec![3, 4, 5, 1, 2]), 1);
}
#[test]
fn test_min_array2() {
assert_eq!(Solution::min_array(vec![2, 2, 2, 0, 1]), 0);
}
#[test]
fn test_min_array3() {
assert_eq!(Solution::min_array(vec![1, 1]), 1);
}
#[test]
fn test_min_array4() {
assert_eq!(Solution::min_array(vec![3, 3, 1, 3]), 1);
}
}
| identifier_body |
||
vortex_sensor_t.py | """LCM type definitions
This file automatically generated by lcm.
DO NOT MODIFY BY HAND!!!!
"""
try:
import cStringIO.StringIO as BytesIO
except ImportError:
from io import BytesIO
import struct
class vortex_sensor_t(object):
__slots__ = ["sensor1", "sensor2", "velocity"]
def __init__(self):
self.sensor1 = 0.0
self.sensor2 = 0.0
self.velocity = 0.0
def encode(self):
buf = BytesIO()
buf.write(vortex_sensor_t._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
buf.write(struct.pack(">ddd", self.sensor1, self.sensor2, self.velocity))
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = BytesIO(data)
if buf.read(8) != vortex_sensor_t._get_packed_fingerprint():
raise ValueError("Decode error")
return vortex_sensor_t._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = vortex_sensor_t()
self.sensor1, self.sensor2, self.velocity = struct.unpack(">ddd", buf.read(24))
return self
_decode_one = staticmethod(_decode_one)
_hash = None
def _get_hash_recursive(parents):
if vortex_sensor_t in parents: return 0
tmphash = (0x3525d46ae32101c3) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if vortex_sensor_t._packed_fingerprint is None: | vortex_sensor_t._packed_fingerprint = struct.pack(">Q", vortex_sensor_t._get_hash_recursive([]))
return vortex_sensor_t._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint) | random_line_split |
|
vortex_sensor_t.py | """LCM type definitions
This file automatically generated by lcm.
DO NOT MODIFY BY HAND!!!!
"""
try:
import cStringIO.StringIO as BytesIO
except ImportError:
from io import BytesIO
import struct
class vortex_sensor_t(object):
| buf = BytesIO(data)
if buf.read(8) != vortex_sensor_t._get_packed_fingerprint():
raise ValueError("Decode error")
return vortex_sensor_t._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = vortex_sensor_t()
self.sensor1, self.sensor2, self.velocity = struct.unpack(">ddd", buf.read(24))
return self
_decode_one = staticmethod(_decode_one)
_hash = None
def _get_hash_recursive(parents):
if vortex_sensor_t in parents: return 0
tmphash = (0x3525d46ae32101c3) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if vortex_sensor_t._packed_fingerprint is None:
vortex_sensor_t._packed_fingerprint = struct.pack(">Q", vortex_sensor_t._get_hash_recursive([]))
return vortex_sensor_t._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
| __slots__ = ["sensor1", "sensor2", "velocity"]
def __init__(self):
self.sensor1 = 0.0
self.sensor2 = 0.0
self.velocity = 0.0
def encode(self):
buf = BytesIO()
buf.write(vortex_sensor_t._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
buf.write(struct.pack(">ddd", self.sensor1, self.sensor2, self.velocity))
def decode(data):
if hasattr(data, 'read'):
buf = data
else: | identifier_body |
vortex_sensor_t.py | """LCM type definitions
This file automatically generated by lcm.
DO NOT MODIFY BY HAND!!!!
"""
try:
import cStringIO.StringIO as BytesIO
except ImportError:
from io import BytesIO
import struct
class vortex_sensor_t(object):
__slots__ = ["sensor1", "sensor2", "velocity"]
def __init__(self):
self.sensor1 = 0.0
self.sensor2 = 0.0
self.velocity = 0.0
def encode(self):
buf = BytesIO()
buf.write(vortex_sensor_t._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
buf.write(struct.pack(">ddd", self.sensor1, self.sensor2, self.velocity))
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = BytesIO(data)
if buf.read(8) != vortex_sensor_t._get_packed_fingerprint():
|
return vortex_sensor_t._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = vortex_sensor_t()
self.sensor1, self.sensor2, self.velocity = struct.unpack(">ddd", buf.read(24))
return self
_decode_one = staticmethod(_decode_one)
_hash = None
def _get_hash_recursive(parents):
if vortex_sensor_t in parents: return 0
tmphash = (0x3525d46ae32101c3) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if vortex_sensor_t._packed_fingerprint is None:
vortex_sensor_t._packed_fingerprint = struct.pack(">Q", vortex_sensor_t._get_hash_recursive([]))
return vortex_sensor_t._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
| raise ValueError("Decode error") | conditional_block |
vortex_sensor_t.py | """LCM type definitions
This file automatically generated by lcm.
DO NOT MODIFY BY HAND!!!!
"""
try:
import cStringIO.StringIO as BytesIO
except ImportError:
from io import BytesIO
import struct
class vortex_sensor_t(object):
__slots__ = ["sensor1", "sensor2", "velocity"]
def __init__(self):
self.sensor1 = 0.0
self.sensor2 = 0.0
self.velocity = 0.0
def encode(self):
buf = BytesIO()
buf.write(vortex_sensor_t._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
buf.write(struct.pack(">ddd", self.sensor1, self.sensor2, self.velocity))
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = BytesIO(data)
if buf.read(8) != vortex_sensor_t._get_packed_fingerprint():
raise ValueError("Decode error")
return vortex_sensor_t._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = vortex_sensor_t()
self.sensor1, self.sensor2, self.velocity = struct.unpack(">ddd", buf.read(24))
return self
_decode_one = staticmethod(_decode_one)
_hash = None
def | (parents):
if vortex_sensor_t in parents: return 0
tmphash = (0x3525d46ae32101c3) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if vortex_sensor_t._packed_fingerprint is None:
vortex_sensor_t._packed_fingerprint = struct.pack(">Q", vortex_sensor_t._get_hash_recursive([]))
return vortex_sensor_t._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
| _get_hash_recursive | identifier_name |
LineClipping.ts | /**
* Uses the Cohen Sutherland algorithm for clipping lines with the sceen bounds.
*
* @author Guido Krömer <mail 64 cacodaemon 46 de>
*/
class LineClipping {
private static INSIDE: number = 0;
private static LEFT: number = 1;
private static RIGHT: number = 2;
private static BOTTOM: number = 4;
private static TOP: number = 8;
constructor(public screen: GameScreen) {
}
/**
* Clips the line.
*
* @param line The line to clip.
* @return {boolean} True if the give line was clipped.
*/
public clipLine(line: Line): boolean {
return this.clip(line.a, line.b)
}
/**
* Clips the line.
*
* @param a The line's start point.
* @param b The line's end point.
* @return {boolean} True if the give line was clipped.
*/
public c | a: Vector3, b: Vector3): boolean { //http://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
var areaCodeA: number = this.areaCode(a);
var areaCodeB: number = this.areaCode(b);
while (true) {
if (!(areaCodeA | areaCodeB)) {
return true;
}
if (areaCodeA & areaCodeB) {
return false;
}
var tempPoint: Vector3 = new Vector3();
var tempAreaCode: number = areaCodeA ? areaCodeA : areaCodeB;
if (tempAreaCode & LineClipping.TOP) {
tempPoint.x = a.x + (b.x - a.x) * (this.screen.height - a.y) / (b.y - a.y);
tempPoint.y = this.screen.height;
} else if (tempAreaCode & LineClipping.BOTTOM) {
tempPoint.x = a.x + (b.x - a.x) * (0.0 - a.y) / (b.y - a.y);
tempPoint.y = 0.0;
} else if (tempAreaCode & LineClipping.RIGHT) {
tempPoint.y = a.y + (b.y - a.y) * (this.screen.width - a.x) / (b.x - a.x);
tempPoint.x = this.screen.width;
} else if (tempAreaCode & LineClipping.LEFT) {
tempPoint.y = a.y + (b.y - a.y) * (0.0 - a.x) / (b.x - a.x);
tempPoint.x = 0.0;
}
if (tempAreaCode == areaCodeA) {
a.x = tempPoint.x;
a.y = tempPoint.y;
areaCodeA = this.areaCode(a);
} else {
b.x = tempPoint.x;
b.y = tempPoint.y;
areaCodeB = this.areaCode(b);
}
}
}
/**
* Returns the area code determining which part of the screen has to be clipped.
*
* @param p The point to check.
* @return {number} The area code.
*/
private areaCode(p: Vector3): number {
var code = LineClipping.INSIDE;
if (p.x < 0) {
code |= LineClipping.LEFT;
} else if (p.x > this.screen.width) {
code |= LineClipping.RIGHT;
}
if (p.y < 0) {
code |= LineClipping.BOTTOM;
} else if (p.y > this.screen.height) {
code |= LineClipping.TOP;
}
return code;
}
} | lip( | identifier_name |
LineClipping.ts | /**
* Uses the Cohen Sutherland algorithm for clipping lines with the sceen bounds.
*
* @author Guido Krömer <mail 64 cacodaemon 46 de>
*/
class LineClipping {
private static INSIDE: number = 0;
private static LEFT: number = 1;
private static RIGHT: number = 2;
private static BOTTOM: number = 4;
private static TOP: number = 8;
constructor(public screen: GameScreen) {
}
/**
* Clips the line.
*
* @param line The line to clip.
* @return {boolean} True if the give line was clipped.
*/
public clipLine(line: Line): boolean {
return this.clip(line.a, line.b)
}
/**
* Clips the line.
*
* @param a The line's start point.
* @param b The line's end point.
* @return {boolean} True if the give line was clipped.
*/
public clip(a: Vector3, b: Vector3): boolean { //http://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
var areaCodeA: number = this.areaCode(a);
var areaCodeB: number = this.areaCode(b);
while (true) {
if (!(areaCodeA | areaCodeB)) {
return true;
}
if (areaCodeA & areaCodeB) {
return false;
}
var tempPoint: Vector3 = new Vector3();
var tempAreaCode: number = areaCodeA ? areaCodeA : areaCodeB;
if (tempAreaCode & LineClipping.TOP) { | else if (tempAreaCode & LineClipping.BOTTOM) {
tempPoint.x = a.x + (b.x - a.x) * (0.0 - a.y) / (b.y - a.y);
tempPoint.y = 0.0;
} else if (tempAreaCode & LineClipping.RIGHT) {
tempPoint.y = a.y + (b.y - a.y) * (this.screen.width - a.x) / (b.x - a.x);
tempPoint.x = this.screen.width;
} else if (tempAreaCode & LineClipping.LEFT) {
tempPoint.y = a.y + (b.y - a.y) * (0.0 - a.x) / (b.x - a.x);
tempPoint.x = 0.0;
}
if (tempAreaCode == areaCodeA) {
a.x = tempPoint.x;
a.y = tempPoint.y;
areaCodeA = this.areaCode(a);
} else {
b.x = tempPoint.x;
b.y = tempPoint.y;
areaCodeB = this.areaCode(b);
}
}
}
/**
* Returns the area code determining which part of the screen has to be clipped.
*
* @param p The point to check.
* @return {number} The area code.
*/
private areaCode(p: Vector3): number {
var code = LineClipping.INSIDE;
if (p.x < 0) {
code |= LineClipping.LEFT;
} else if (p.x > this.screen.width) {
code |= LineClipping.RIGHT;
}
if (p.y < 0) {
code |= LineClipping.BOTTOM;
} else if (p.y > this.screen.height) {
code |= LineClipping.TOP;
}
return code;
}
} |
tempPoint.x = a.x + (b.x - a.x) * (this.screen.height - a.y) / (b.y - a.y);
tempPoint.y = this.screen.height;
} | conditional_block |
LineClipping.ts | /**
* Uses the Cohen Sutherland algorithm for clipping lines with the sceen bounds.
*
* @author Guido Krömer <mail 64 cacodaemon 46 de>
*/
class LineClipping {
private static INSIDE: number = 0;
private static LEFT: number = 1;
private static RIGHT: number = 2;
private static BOTTOM: number = 4;
private static TOP: number = 8;
constructor(public screen: GameScreen) {
}
/**
* Clips the line.
*
* @param line The line to clip.
* @return {boolean} True if the give line was clipped.
*/
public clipLine(line: Line): boolean {
return this.clip(line.a, line.b)
}
/**
* Clips the line.
*
* @param a The line's start point.
* @param b The line's end point.
* @return {boolean} True if the give line was clipped.
*/
public clip(a: Vector3, b: Vector3): boolean { //http://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
var areaCodeA: number = this.areaCode(a);
var areaCodeB: number = this.areaCode(b); | if (!(areaCodeA | areaCodeB)) {
return true;
}
if (areaCodeA & areaCodeB) {
return false;
}
var tempPoint: Vector3 = new Vector3();
var tempAreaCode: number = areaCodeA ? areaCodeA : areaCodeB;
if (tempAreaCode & LineClipping.TOP) {
tempPoint.x = a.x + (b.x - a.x) * (this.screen.height - a.y) / (b.y - a.y);
tempPoint.y = this.screen.height;
} else if (tempAreaCode & LineClipping.BOTTOM) {
tempPoint.x = a.x + (b.x - a.x) * (0.0 - a.y) / (b.y - a.y);
tempPoint.y = 0.0;
} else if (tempAreaCode & LineClipping.RIGHT) {
tempPoint.y = a.y + (b.y - a.y) * (this.screen.width - a.x) / (b.x - a.x);
tempPoint.x = this.screen.width;
} else if (tempAreaCode & LineClipping.LEFT) {
tempPoint.y = a.y + (b.y - a.y) * (0.0 - a.x) / (b.x - a.x);
tempPoint.x = 0.0;
}
if (tempAreaCode == areaCodeA) {
a.x = tempPoint.x;
a.y = tempPoint.y;
areaCodeA = this.areaCode(a);
} else {
b.x = tempPoint.x;
b.y = tempPoint.y;
areaCodeB = this.areaCode(b);
}
}
}
/**
* Returns the area code determining which part of the screen has to be clipped.
*
* @param p The point to check.
* @return {number} The area code.
*/
private areaCode(p: Vector3): number {
var code = LineClipping.INSIDE;
if (p.x < 0) {
code |= LineClipping.LEFT;
} else if (p.x > this.screen.width) {
code |= LineClipping.RIGHT;
}
if (p.y < 0) {
code |= LineClipping.BOTTOM;
} else if (p.y > this.screen.height) {
code |= LineClipping.TOP;
}
return code;
}
} |
while (true) { | random_line_split |
LineClipping.ts | /**
* Uses the Cohen Sutherland algorithm for clipping lines with the sceen bounds.
*
* @author Guido Krömer <mail 64 cacodaemon 46 de>
*/
class LineClipping {
private static INSIDE: number = 0;
private static LEFT: number = 1;
private static RIGHT: number = 2;
private static BOTTOM: number = 4;
private static TOP: number = 8;
constructor(public screen: GameScreen) {
}
/**
* Clips the line.
*
* @param line The line to clip.
* @return {boolean} True if the give line was clipped.
*/
public clipLine(line: Line): boolean {
return this.clip(line.a, line.b)
}
/**
* Clips the line.
*
* @param a The line's start point.
* @param b The line's end point.
* @return {boolean} True if the give line was clipped.
*/
public clip(a: Vector3, b: Vector3): boolean { //http://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
var areaCodeA: number = this.areaCode(a);
var areaCodeB: number = this.areaCode(b);
while (true) {
if (!(areaCodeA | areaCodeB)) {
return true;
}
if (areaCodeA & areaCodeB) {
return false;
}
var tempPoint: Vector3 = new Vector3();
var tempAreaCode: number = areaCodeA ? areaCodeA : areaCodeB;
if (tempAreaCode & LineClipping.TOP) {
tempPoint.x = a.x + (b.x - a.x) * (this.screen.height - a.y) / (b.y - a.y);
tempPoint.y = this.screen.height;
} else if (tempAreaCode & LineClipping.BOTTOM) {
tempPoint.x = a.x + (b.x - a.x) * (0.0 - a.y) / (b.y - a.y);
tempPoint.y = 0.0;
} else if (tempAreaCode & LineClipping.RIGHT) {
tempPoint.y = a.y + (b.y - a.y) * (this.screen.width - a.x) / (b.x - a.x);
tempPoint.x = this.screen.width;
} else if (tempAreaCode & LineClipping.LEFT) {
tempPoint.y = a.y + (b.y - a.y) * (0.0 - a.x) / (b.x - a.x);
tempPoint.x = 0.0;
}
if (tempAreaCode == areaCodeA) {
a.x = tempPoint.x;
a.y = tempPoint.y;
areaCodeA = this.areaCode(a);
} else {
b.x = tempPoint.x;
b.y = tempPoint.y;
areaCodeB = this.areaCode(b);
}
}
}
/**
* Returns the area code determining which part of the screen has to be clipped.
*
* @param p The point to check.
* @return {number} The area code.
*/
private areaCode(p: Vector3): number { | } |
var code = LineClipping.INSIDE;
if (p.x < 0) {
code |= LineClipping.LEFT;
} else if (p.x > this.screen.width) {
code |= LineClipping.RIGHT;
}
if (p.y < 0) {
code |= LineClipping.BOTTOM;
} else if (p.y > this.screen.height) {
code |= LineClipping.TOP;
}
return code;
}
| identifier_body |
font.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use geom::{Point2D, Rect, Size2D};
use std::mem;
use std::string;
use std::rc::Rc;
use std::cell::RefCell;
use servo_util::cache::{Cache, HashCache};
use style::computed_values::{font_weight, font_style}; | use platform::font_context::FontContextHandle;
use platform::font::{FontHandle, FontTable};
use text::glyph::{GlyphStore, GlyphId};
use text::shaping::ShaperMethods;
use text::{Shaper, TextRun};
use font_template::FontTemplateDescriptor;
use platform::font_template::FontTemplateData;
// FontHandle encapsulates access to the platform's font API,
// e.g. quartz, FreeType. It provides access to metrics and tables
// needed by the text shaper as well as access to the underlying font
// resources needed by the graphics layer to draw glyphs.
pub trait FontHandleMethods {
fn new_from_template(fctx: &FontContextHandle, template: Arc<FontTemplateData>, pt_size: Option<f64>)
-> Result<Self,()>;
fn get_template(&self) -> Arc<FontTemplateData>;
fn family_name(&self) -> String;
fn face_name(&self) -> String;
fn is_italic(&self) -> bool;
fn boldness(&self) -> font_weight::T;
fn glyph_index(&self, codepoint: char) -> Option<GlyphId>;
fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>;
fn glyph_h_kerning(&self, GlyphId, GlyphId) -> FractionalPixel;
fn get_metrics(&self) -> FontMetrics;
fn get_table_for_tag(&self, FontTableTag) -> Option<FontTable>;
}
// Used to abstract over the shaper's choice of fixed int representation.
pub type FractionalPixel = f64;
pub type FontTableTag = u32;
pub trait FontTableTagConversions {
fn tag_to_str(&self) -> String;
}
impl FontTableTagConversions for FontTableTag {
fn tag_to_str(&self) -> String {
unsafe {
let reversed = string::raw::from_buf_len(mem::transmute(self), 4);
return String::from_chars([reversed.as_slice().char_at(3),
reversed.as_slice().char_at(2),
reversed.as_slice().char_at(1),
reversed.as_slice().char_at(0)]);
}
}
}
pub trait FontTableMethods {
fn with_buffer(&self, |*const u8, uint|);
}
#[deriving(Clone)]
pub struct FontMetrics {
pub underline_size: Au,
pub underline_offset: Au,
pub strikeout_size: Au,
pub strikeout_offset: Au,
pub leading: Au,
pub x_height: Au,
pub em_size: Au,
pub ascent: Au,
pub descent: Au,
pub max_advance: Au,
pub line_gap: Au,
}
// TODO(Issue #179): eventually this will be split into the specified
// and used font styles. specified contains uninterpreted CSS font
// property values, while 'used' is attached to gfx::Font to descript
// the instance's properties.
//
// For now, the cases are differentiated with a typedef
#[deriving(Clone, PartialEq)]
pub struct FontStyle {
pub pt_size: f64,
pub weight: font_weight::T,
pub style: font_style::T,
pub families: Vec<String>,
// TODO(Issue #198): font-stretch, text-decoration, font-variant, size-adjust
}
pub type SpecifiedFontStyle = FontStyle;
pub type UsedFontStyle = FontStyle;
pub struct Font {
pub handle: FontHandle,
pub metrics: FontMetrics,
pub descriptor: FontTemplateDescriptor,
pub pt_size: f64,
pub shaper: Option<Shaper>,
pub shape_cache: HashCache<String, Arc<GlyphStore>>,
pub glyph_advance_cache: HashCache<u32, FractionalPixel>,
}
impl Font {
pub fn shape_text(&mut self, text: String, is_whitespace: bool) -> Arc<GlyphStore> {
self.make_shaper();
let shaper = &self.shaper;
self.shape_cache.find_or_create(&text, |txt| {
let mut glyphs = GlyphStore::new(text.as_slice().char_len() as int, is_whitespace);
shaper.as_ref().unwrap().shape_text(txt.as_slice(), &mut glyphs);
Arc::new(glyphs)
})
}
fn make_shaper<'a>(&'a mut self) -> &'a Shaper {
// fast path: already created a shaper
match self.shaper {
Some(ref shaper) => {
let s: &'a Shaper = shaper;
return s;
},
None => {}
}
let shaper = Shaper::new(self);
self.shaper = Some(shaper);
self.shaper.as_ref().unwrap()
}
pub fn get_table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result = self.handle.get_table_for_tag(tag);
let status = if result.is_some() { "Found" } else { "Didn't find" };
debug!("{:s} font table[{:s}] with family={}, face={}",
status, tag.tag_to_str(),
self.handle.family_name(), self.handle.face_name());
return result;
}
pub fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
self.handle.glyph_index(codepoint)
}
pub fn glyph_h_kerning(&mut self, first_glyph: GlyphId, second_glyph: GlyphId) -> FractionalPixel {
self.handle.glyph_h_kerning(first_glyph, second_glyph)
}
pub fn glyph_h_advance(&mut self, glyph: GlyphId) -> FractionalPixel {
let handle = &self.handle;
self.glyph_advance_cache.find_or_create(&glyph, |glyph| {
match handle.glyph_h_advance(*glyph) {
Some(adv) => adv,
None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
}
})
}
}
pub struct FontGroup {
pub fonts: Vec<Rc<RefCell<Font>>>,
}
impl FontGroup {
pub fn new(fonts: Vec<Rc<RefCell<Font>>>) -> FontGroup {
FontGroup {
fonts: fonts
}
}
pub fn create_textrun(&self, text: String) -> TextRun {
assert!(self.fonts.len() > 0);
// TODO(Issue #177): Actually fall back through the FontGroup when a font is unsuitable.
TextRun::new(&mut *self.fonts[0].borrow_mut(), text.clone())
}
}
pub struct RunMetrics {
// may be negative due to negative width (i.e., kerning of '.' in 'P.T.')
pub advance_width: Au,
pub ascent: Au, // nonzero
pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au>
}
impl RunMetrics {
pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics {
let bounds = Rect(Point2D(Au(0), -ascent),
Size2D(advance, ascent + descent));
// TODO(Issue #125): support loose and tight bounding boxes; using the
// ascent+descent and advance is sometimes too generous and
// looking at actual glyph extents can yield a tighter box.
RunMetrics {
advance_width: advance,
bounding_box: bounds,
ascent: ascent,
descent: descent,
}
}
} | use sync::Arc;
use servo_util::geometry::Au; | random_line_split |
font.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use geom::{Point2D, Rect, Size2D};
use std::mem;
use std::string;
use std::rc::Rc;
use std::cell::RefCell;
use servo_util::cache::{Cache, HashCache};
use style::computed_values::{font_weight, font_style};
use sync::Arc;
use servo_util::geometry::Au;
use platform::font_context::FontContextHandle;
use platform::font::{FontHandle, FontTable};
use text::glyph::{GlyphStore, GlyphId};
use text::shaping::ShaperMethods;
use text::{Shaper, TextRun};
use font_template::FontTemplateDescriptor;
use platform::font_template::FontTemplateData;
// FontHandle encapsulates access to the platform's font API,
// e.g. quartz, FreeType. It provides access to metrics and tables
// needed by the text shaper as well as access to the underlying font
// resources needed by the graphics layer to draw glyphs.
pub trait FontHandleMethods {
fn new_from_template(fctx: &FontContextHandle, template: Arc<FontTemplateData>, pt_size: Option<f64>)
-> Result<Self,()>;
fn get_template(&self) -> Arc<FontTemplateData>;
fn family_name(&self) -> String;
fn face_name(&self) -> String;
fn is_italic(&self) -> bool;
fn boldness(&self) -> font_weight::T;
fn glyph_index(&self, codepoint: char) -> Option<GlyphId>;
fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>;
fn glyph_h_kerning(&self, GlyphId, GlyphId) -> FractionalPixel;
fn get_metrics(&self) -> FontMetrics;
fn get_table_for_tag(&self, FontTableTag) -> Option<FontTable>;
}
// Used to abstract over the shaper's choice of fixed int representation.
pub type FractionalPixel = f64;
pub type FontTableTag = u32;
pub trait FontTableTagConversions {
fn tag_to_str(&self) -> String;
}
impl FontTableTagConversions for FontTableTag {
fn tag_to_str(&self) -> String {
unsafe {
let reversed = string::raw::from_buf_len(mem::transmute(self), 4);
return String::from_chars([reversed.as_slice().char_at(3),
reversed.as_slice().char_at(2),
reversed.as_slice().char_at(1),
reversed.as_slice().char_at(0)]);
}
}
}
pub trait FontTableMethods {
fn with_buffer(&self, |*const u8, uint|);
}
#[deriving(Clone)]
pub struct FontMetrics {
pub underline_size: Au,
pub underline_offset: Au,
pub strikeout_size: Au,
pub strikeout_offset: Au,
pub leading: Au,
pub x_height: Au,
pub em_size: Au,
pub ascent: Au,
pub descent: Au,
pub max_advance: Au,
pub line_gap: Au,
}
// TODO(Issue #179): eventually this will be split into the specified
// and used font styles. specified contains uninterpreted CSS font
// property values, while 'used' is attached to gfx::Font to descript
// the instance's properties.
//
// For now, the cases are differentiated with a typedef
#[deriving(Clone, PartialEq)]
pub struct FontStyle {
pub pt_size: f64,
pub weight: font_weight::T,
pub style: font_style::T,
pub families: Vec<String>,
// TODO(Issue #198): font-stretch, text-decoration, font-variant, size-adjust
}
pub type SpecifiedFontStyle = FontStyle;
pub type UsedFontStyle = FontStyle;
pub struct Font {
pub handle: FontHandle,
pub metrics: FontMetrics,
pub descriptor: FontTemplateDescriptor,
pub pt_size: f64,
pub shaper: Option<Shaper>,
pub shape_cache: HashCache<String, Arc<GlyphStore>>,
pub glyph_advance_cache: HashCache<u32, FractionalPixel>,
}
impl Font {
pub fn shape_text(&mut self, text: String, is_whitespace: bool) -> Arc<GlyphStore> {
self.make_shaper();
let shaper = &self.shaper;
self.shape_cache.find_or_create(&text, |txt| {
let mut glyphs = GlyphStore::new(text.as_slice().char_len() as int, is_whitespace);
shaper.as_ref().unwrap().shape_text(txt.as_slice(), &mut glyphs);
Arc::new(glyphs)
})
}
fn make_shaper<'a>(&'a mut self) -> &'a Shaper {
// fast path: already created a shaper
match self.shaper {
Some(ref shaper) => {
let s: &'a Shaper = shaper;
return s;
},
None => {}
}
let shaper = Shaper::new(self);
self.shaper = Some(shaper);
self.shaper.as_ref().unwrap()
}
pub fn get_table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result = self.handle.get_table_for_tag(tag);
let status = if result.is_some() { "Found" } else { "Didn't find" };
debug!("{:s} font table[{:s}] with family={}, face={}",
status, tag.tag_to_str(),
self.handle.family_name(), self.handle.face_name());
return result;
}
pub fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
self.handle.glyph_index(codepoint)
}
pub fn glyph_h_kerning(&mut self, first_glyph: GlyphId, second_glyph: GlyphId) -> FractionalPixel {
self.handle.glyph_h_kerning(first_glyph, second_glyph)
}
pub fn glyph_h_advance(&mut self, glyph: GlyphId) -> FractionalPixel {
let handle = &self.handle;
self.glyph_advance_cache.find_or_create(&glyph, |glyph| {
match handle.glyph_h_advance(*glyph) {
Some(adv) => adv,
None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
}
})
}
}
pub struct FontGroup {
pub fonts: Vec<Rc<RefCell<Font>>>,
}
impl FontGroup {
pub fn new(fonts: Vec<Rc<RefCell<Font>>>) -> FontGroup {
FontGroup {
fonts: fonts
}
}
pub fn create_textrun(&self, text: String) -> TextRun {
assert!(self.fonts.len() > 0);
// TODO(Issue #177): Actually fall back through the FontGroup when a font is unsuitable.
TextRun::new(&mut *self.fonts[0].borrow_mut(), text.clone())
}
}
pub struct RunMetrics {
// may be negative due to negative width (i.e., kerning of '.' in 'P.T.')
pub advance_width: Au,
pub ascent: Au, // nonzero
pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au>
}
impl RunMetrics {
pub fn | (advance: Au, ascent: Au, descent: Au) -> RunMetrics {
let bounds = Rect(Point2D(Au(0), -ascent),
Size2D(advance, ascent + descent));
// TODO(Issue #125): support loose and tight bounding boxes; using the
// ascent+descent and advance is sometimes too generous and
// looking at actual glyph extents can yield a tighter box.
RunMetrics {
advance_width: advance,
bounding_box: bounds,
ascent: ascent,
descent: descent,
}
}
}
| new | identifier_name |
cart.service.ts | import { Injectable } from '@angular/core';
import { Movie } from './../moviesList/movie.model';
@Injectable()
export class CartService {
public moviesCart: Array<Movie> = [];
constructor() {
let storedCart = JSON.parse(localStorage.getItem('currentCart'));
this.moviesCart = storedCart || [];
}
updateLocalstorage() {
localStorage.setItem('currentCart', JSON.stringify(this.moviesCart));
}
addItem(movie: Movie): boolean {
this.moviesCart.push(movie);
this.updateLocalstorage();
return true;
}
removeOneItem(id: number): boolean {
if(id >= 0 && id < this.moviesCart.length) {
this.moviesCart.splice(id, 1);
this.updateLocalstorage();
return true;
} else {
return false;
}
}
removeAll(): boolean {
this.moviesCart = [];
this.updateLocalstorage();
return true;
}
countAll(): number |
getAll(): Array<Movie> {
return this.moviesCart;
}
} | {
return this.moviesCart.length;
} | identifier_body |
cart.service.ts | import { Injectable } from '@angular/core';
import { Movie } from './../moviesList/movie.model';
@Injectable() | public moviesCart: Array<Movie> = [];
constructor() {
let storedCart = JSON.parse(localStorage.getItem('currentCart'));
this.moviesCart = storedCart || [];
}
updateLocalstorage() {
localStorage.setItem('currentCart', JSON.stringify(this.moviesCart));
}
addItem(movie: Movie): boolean {
this.moviesCart.push(movie);
this.updateLocalstorage();
return true;
}
removeOneItem(id: number): boolean {
if(id >= 0 && id < this.moviesCart.length) {
this.moviesCart.splice(id, 1);
this.updateLocalstorage();
return true;
} else {
return false;
}
}
removeAll(): boolean {
this.moviesCart = [];
this.updateLocalstorage();
return true;
}
countAll(): number {
return this.moviesCart.length;
}
getAll(): Array<Movie> {
return this.moviesCart;
}
} | export class CartService {
| random_line_split |
cart.service.ts | import { Injectable } from '@angular/core';
import { Movie } from './../moviesList/movie.model';
@Injectable()
export class CartService {
public moviesCart: Array<Movie> = [];
constructor() {
let storedCart = JSON.parse(localStorage.getItem('currentCart'));
this.moviesCart = storedCart || [];
}
updateLocalstorage() {
localStorage.setItem('currentCart', JSON.stringify(this.moviesCart));
}
addItem(movie: Movie): boolean {
this.moviesCart.push(movie);
this.updateLocalstorage();
return true;
}
removeOneItem(id: number): boolean {
if(id >= 0 && id < this.moviesCart.length) {
this.moviesCart.splice(id, 1);
this.updateLocalstorage();
return true;
} else |
}
removeAll(): boolean {
this.moviesCart = [];
this.updateLocalstorage();
return true;
}
countAll(): number {
return this.moviesCart.length;
}
getAll(): Array<Movie> {
return this.moviesCart;
}
} | {
return false;
} | conditional_block |
cart.service.ts | import { Injectable } from '@angular/core';
import { Movie } from './../moviesList/movie.model';
@Injectable()
export class CartService {
public moviesCart: Array<Movie> = [];
| () {
let storedCart = JSON.parse(localStorage.getItem('currentCart'));
this.moviesCart = storedCart || [];
}
updateLocalstorage() {
localStorage.setItem('currentCart', JSON.stringify(this.moviesCart));
}
addItem(movie: Movie): boolean {
this.moviesCart.push(movie);
this.updateLocalstorage();
return true;
}
removeOneItem(id: number): boolean {
if(id >= 0 && id < this.moviesCart.length) {
this.moviesCart.splice(id, 1);
this.updateLocalstorage();
return true;
} else {
return false;
}
}
removeAll(): boolean {
this.moviesCart = [];
this.updateLocalstorage();
return true;
}
countAll(): number {
return this.moviesCart.length;
}
getAll(): Array<Movie> {
return this.moviesCart;
}
} | constructor | identifier_name |
description-formatter.d.ts | /**
* @license
* Copyright (c) 2017 Google Inc. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* Code distributed by Google as part of this project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
import { Particle, Handle, HandleConnection } from './recipe/lib-recipe.js';
import { Dictionary } from '../utils/lib-utils.js';
import { Entity } from './entity.js';
export declare type ParticleDescription = {
_particle: Particle;
pattern?: string;
_connections: Dictionary<HandleDescription>;
_rank?: number;
};
export declare type HandleDescription = {
pattern: string;
_handleConn: HandleConnection;
value: DescriptionValue;
};
export declare type DescriptionValue = {
entityValue?: string | {};
valueDescription?: string;
collectionValues?: Entity[];
bigCollectionValues?: string[];
interfaceValue?: string | {};
};
export declare type CombinedDescriptionsOptions = {
skipFormatting?: boolean;
};
export declare class DescriptionFormatter { | seenParticles: Set<Particle>;
excludeValues: boolean;
constructor(particleDescriptions?: ParticleDescription[], storeDescById?: Dictionary<string>);
getDescription(recipe: {
patterns: string[];
particles: Particle[];
}): any;
_isSelectedDescription(desc: ParticleDescription): boolean;
getHandleDescription(recipeHandle: Handle): string;
_combineSelectedDescriptions(selectedDescriptions: ParticleDescription[], options?: CombinedDescriptionsOptions): any;
_joinDescriptions(strings: any): any;
_joinTokens(tokens: any): any;
_capitalizeAndPunctuate(sentence: any): string;
patternToSuggestion(pattern: string, particleDescription: any): any;
static readonly tokensRegex: RegExp;
static readonly tokensInnerRegex: RegExp;
_initTokens(pattern: string, particleDescription: any): any[];
_initSubTokens(pattern: any, particleDescription: any): {}[];
tokenToString(token: any): any;
_particleTokenToString(token: any): any;
_handleTokenToString(token: any): any;
_combineDescriptionAndValue(token: any, description: any, storeValue: any): any;
_slotTokenToString(token: any): any;
_propertyTokenToString(handleName: string, value: DescriptionValue, properties: string[]): any;
_formatEntityProperty(handleName: any, properties: any, value: any): any;
_formatStoreValue(handleName: string, value: DescriptionValue): any;
_formatCollection(handleName: any, values: any): any;
_formatBigCollection(handleName: any, firstValue: any): any;
_formatSingleton(handleName: string, value: DescriptionValue): any;
_formatDescription(handleConnection: any): string;
_formatDescriptionPattern(handleConnection: any): string | undefined;
_formatStoreDescription(handleConn: any): string | undefined;
_formatHandleType(handleConnection: any): string;
_selectHandleConnection(recipeHandle: any): any;
static sort(p1: ParticleDescription, p2: ParticleDescription): number;
} | private readonly particleDescriptions;
private readonly storeDescById;
private seenHandles; | random_line_split |
description-formatter.d.ts | /**
* @license
* Copyright (c) 2017 Google Inc. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* Code distributed by Google as part of this project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
import { Particle, Handle, HandleConnection } from './recipe/lib-recipe.js';
import { Dictionary } from '../utils/lib-utils.js';
import { Entity } from './entity.js';
export declare type ParticleDescription = {
_particle: Particle;
pattern?: string;
_connections: Dictionary<HandleDescription>;
_rank?: number;
};
export declare type HandleDescription = {
pattern: string;
_handleConn: HandleConnection;
value: DescriptionValue;
};
export declare type DescriptionValue = {
entityValue?: string | {};
valueDescription?: string;
collectionValues?: Entity[];
bigCollectionValues?: string[];
interfaceValue?: string | {};
};
export declare type CombinedDescriptionsOptions = {
skipFormatting?: boolean;
};
export declare class | {
private readonly particleDescriptions;
private readonly storeDescById;
private seenHandles;
seenParticles: Set<Particle>;
excludeValues: boolean;
constructor(particleDescriptions?: ParticleDescription[], storeDescById?: Dictionary<string>);
getDescription(recipe: {
patterns: string[];
particles: Particle[];
}): any;
_isSelectedDescription(desc: ParticleDescription): boolean;
getHandleDescription(recipeHandle: Handle): string;
_combineSelectedDescriptions(selectedDescriptions: ParticleDescription[], options?: CombinedDescriptionsOptions): any;
_joinDescriptions(strings: any): any;
_joinTokens(tokens: any): any;
_capitalizeAndPunctuate(sentence: any): string;
patternToSuggestion(pattern: string, particleDescription: any): any;
static readonly tokensRegex: RegExp;
static readonly tokensInnerRegex: RegExp;
_initTokens(pattern: string, particleDescription: any): any[];
_initSubTokens(pattern: any, particleDescription: any): {}[];
tokenToString(token: any): any;
_particleTokenToString(token: any): any;
_handleTokenToString(token: any): any;
_combineDescriptionAndValue(token: any, description: any, storeValue: any): any;
_slotTokenToString(token: any): any;
_propertyTokenToString(handleName: string, value: DescriptionValue, properties: string[]): any;
_formatEntityProperty(handleName: any, properties: any, value: any): any;
_formatStoreValue(handleName: string, value: DescriptionValue): any;
_formatCollection(handleName: any, values: any): any;
_formatBigCollection(handleName: any, firstValue: any): any;
_formatSingleton(handleName: string, value: DescriptionValue): any;
_formatDescription(handleConnection: any): string;
_formatDescriptionPattern(handleConnection: any): string | undefined;
_formatStoreDescription(handleConn: any): string | undefined;
_formatHandleType(handleConnection: any): string;
_selectHandleConnection(recipeHandle: any): any;
static sort(p1: ParticleDescription, p2: ParticleDescription): number;
}
| DescriptionFormatter | identifier_name |
order.py | # -*- encoding: utf-8 -*-
from openerp.osv import fields, osv
from openerp.tools.translate import _
class sale_order_line(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order.line'
_columns = {
'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this
option, the first attachment related to the product_id marked as brochure will be printed
as extra info with sale order"""),
}
class | (osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order'
def print_with_attachment(self, cr, user, ids, context={}):
for o in self.browse(cr, user, ids, context):
for ol in o.order_line:
if ol.att_bro:
print "Im Here i will go to print %s " % ol.name
return True
def __get_company_object(self, cr, uid):
user = self.pool.get('res.users').browse(cr, uid, uid)
print user
if not user.company_id:
raise except_osv(_('ERROR !'), _(
'There is no company configured for this user'))
return user.company_id
def _get_report_name(self, cr, uid, context):
report = self.__get_company_object(cr, uid).sale_report_id
if not report:
rep_id = self.pool.get("ir.actions.report.xml").search(
cr, uid, [('model', '=', 'sale.order'), ], order="id")[0]
report = self.pool.get(
"ir.actions.report.xml").browse(cr, uid, rep_id)
return report.report_name
def print_quotation(self, cr, uid, ids, context=None):
pq = super(sale_order, self).print_quotation(cr,uid,ids, context)
return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid,
context), 'datas': pq['datas'], 'nodestroy': True}
| sale_order | identifier_name |
order.py | # -*- encoding: utf-8 -*-
from openerp.osv import fields, osv
from openerp.tools.translate import _
class sale_order_line(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order.line'
_columns = {
'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this
option, the first attachment related to the product_id marked as brochure will be printed
as extra info with sale order"""),
}
class sale_order(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order'
def print_with_attachment(self, cr, user, ids, context={}):
for o in self.browse(cr, user, ids, context):
for ol in o.order_line:
if ol.att_bro:
print "Im Here i will go to print %s " % ol.name
return True
def __get_company_object(self, cr, uid):
user = self.pool.get('res.users').browse(cr, uid, uid)
print user
if not user.company_id:
raise except_osv(_('ERROR !'), _(
'There is no company configured for this user'))
return user.company_id
def _get_report_name(self, cr, uid, context):
report = self.__get_company_object(cr, uid).sale_report_id
if not report:
|
return report.report_name
def print_quotation(self, cr, uid, ids, context=None):
pq = super(sale_order, self).print_quotation(cr,uid,ids, context)
return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid,
context), 'datas': pq['datas'], 'nodestroy': True}
| rep_id = self.pool.get("ir.actions.report.xml").search(
cr, uid, [('model', '=', 'sale.order'), ], order="id")[0]
report = self.pool.get(
"ir.actions.report.xml").browse(cr, uid, rep_id) | conditional_block |
order.py | # -*- encoding: utf-8 -*-
from openerp.osv import fields, osv
from openerp.tools.translate import _
class sale_order_line(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order.line'
_columns = {
'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this
option, the first attachment related to the product_id marked as brochure will be printed
as extra info with sale order"""),
}
class sale_order(osv.Model):
"""
OpenERP Model : sale_order_line | """
_inherit = 'sale.order'
def print_with_attachment(self, cr, user, ids, context={}):
for o in self.browse(cr, user, ids, context):
for ol in o.order_line:
if ol.att_bro:
print "Im Here i will go to print %s " % ol.name
return True
def __get_company_object(self, cr, uid):
user = self.pool.get('res.users').browse(cr, uid, uid)
print user
if not user.company_id:
raise except_osv(_('ERROR !'), _(
'There is no company configured for this user'))
return user.company_id
def _get_report_name(self, cr, uid, context):
report = self.__get_company_object(cr, uid).sale_report_id
if not report:
rep_id = self.pool.get("ir.actions.report.xml").search(
cr, uid, [('model', '=', 'sale.order'), ], order="id")[0]
report = self.pool.get(
"ir.actions.report.xml").browse(cr, uid, rep_id)
return report.report_name
def print_quotation(self, cr, uid, ids, context=None):
pq = super(sale_order, self).print_quotation(cr,uid,ids, context)
return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid,
context), 'datas': pq['datas'], 'nodestroy': True} | random_line_split |
|
order.py | # -*- encoding: utf-8 -*-
from openerp.osv import fields, osv
from openerp.tools.translate import _
class sale_order_line(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order.line'
_columns = {
'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this
option, the first attachment related to the product_id marked as brochure will be printed
as extra info with sale order"""),
}
class sale_order(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order'
def print_with_attachment(self, cr, user, ids, context={}):
|
def __get_company_object(self, cr, uid):
user = self.pool.get('res.users').browse(cr, uid, uid)
print user
if not user.company_id:
raise except_osv(_('ERROR !'), _(
'There is no company configured for this user'))
return user.company_id
def _get_report_name(self, cr, uid, context):
report = self.__get_company_object(cr, uid).sale_report_id
if not report:
rep_id = self.pool.get("ir.actions.report.xml").search(
cr, uid, [('model', '=', 'sale.order'), ], order="id")[0]
report = self.pool.get(
"ir.actions.report.xml").browse(cr, uid, rep_id)
return report.report_name
def print_quotation(self, cr, uid, ids, context=None):
pq = super(sale_order, self).print_quotation(cr,uid,ids, context)
return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid,
context), 'datas': pq['datas'], 'nodestroy': True}
| for o in self.browse(cr, user, ids, context):
for ol in o.order_line:
if ol.att_bro:
print "Im Here i will go to print %s " % ol.name
return True | identifier_body |
SignIn.component.ts | import {Component} from 'angular2/core';
import {UserService} from '../services/user.services';
@Component({
selector: 'Sign-in',
template: `
<div id="Signinmodal" class="modal fade" role="dialog" style="margin-left:4cm;">
<div class="modal-dialog">
<!-- Modal content-->
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal">×</button>
<h4 class="modal-title">Sign In to out web site </h4>
</div>
<div class="modal-body">
<img src="../../assets/svgimages/ripple.svg"><br>
<i class="mdi mdi-account">Login</i>
<input type="text" class="form-control" [(ngModel)]="email" /><br><br>
<i class="mdi mdi-account md-70">Password</i>
<input type="password" class="form-control" [(ngModel)]="Password" /><br><br>
<button type="submit"
class="mdc-button
mdc-button--raised
mdc-button--primary
mdc-ripple-surface"
(click)="Connect()"
data-mdc-auto-init="MDCRipple" id="but">
<div [innerHTML]="spinonbutt"></div>
| </button>
</div>
<div [innerHTML]="htmlStr" style="margin-left: 5cm;"></div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
</div>
</div>
</div>
`,
styleUrls: ["../../assets/css/signinstyle.css"],
providers: [UserService]
})
export class SignInComponent {
private spinonbutt='Sign In' :string;
private email:string;
private Password:string;
private htmlStr = ''
:
string;
constructor(private _userservice:UserService) {
}
Connect() {
this.spinonbutt = '<img src="../../assets/svgimages/ripple.svg" width="25px" height="25px">';
this._userservice.Authentification(this.email, this.Password).subscribe(
(data)=>
if (data == 0) {
this.spinonbutt = 'Sign In';
this.htmlStr = '<p style="color:red;">Error !! Please check your details</p>';
} else if (data == 2) {
this.spinonbutt = 'Sign In';
this.htmlStr = '<p style="color:red;">Error !! Your account is not activated yet !!' +
'please check your email and click on the link below to activate ' +
'your account </p>';
} else {
this.htmlStr = 'ahla wasahla';
}
)
;
}
} | random_line_split |
|
SignIn.component.ts | import {Component} from 'angular2/core';
import {UserService} from '../services/user.services';
@Component({
selector: 'Sign-in',
template: `
<div id="Signinmodal" class="modal fade" role="dialog" style="margin-left:4cm;">
<div class="modal-dialog">
<!-- Modal content-->
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal">×</button>
<h4 class="modal-title">Sign In to out web site </h4>
</div>
<div class="modal-body">
<img src="../../assets/svgimages/ripple.svg"><br>
<i class="mdi mdi-account">Login</i>
<input type="text" class="form-control" [(ngModel)]="email" /><br><br>
<i class="mdi mdi-account md-70">Password</i>
<input type="password" class="form-control" [(ngModel)]="Password" /><br><br>
<button type="submit"
class="mdc-button
mdc-button--raised
mdc-button--primary
mdc-ripple-surface"
(click)="Connect()"
data-mdc-auto-init="MDCRipple" id="but">
<div [innerHTML]="spinonbutt"></div>
</button>
</div>
<div [innerHTML]="htmlStr" style="margin-left: 5cm;"></div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
</div>
</div>
</div>
`,
styleUrls: ["../../assets/css/signinstyle.css"],
providers: [UserService]
})
export class SignInComponent {
private spinonbutt='Sign In' :string;
private email:string;
private Password:string;
private htmlStr = ''
:
string;
constructor(private _userservice:UserService) {
}
Connect() {
this.spinonbutt = '<img src="../../assets/svgimages/ripple.svg" width="25px" height="25px">';
this._userservice.Authentification(this.email, this.Password).subscribe(
(data)=>
if (data == 0) {
this.spinonbutt = 'Sign In';
this.htmlStr = '<p style="color:red;">Error !! Please check your details</p>';
} else if (data == 2) {
this.spinonbutt = 'Sign In';
this.htmlStr = '<p style="color:red;">Error !! Your account is not activated yet !!' +
'please check your email and click on the link below to activate ' +
'your account </p>';
} else |
)
;
}
}
| {
this.htmlStr = 'ahla wasahla';
} | conditional_block |
SignIn.component.ts | import {Component} from 'angular2/core';
import {UserService} from '../services/user.services';
@Component({
selector: 'Sign-in',
template: `
<div id="Signinmodal" class="modal fade" role="dialog" style="margin-left:4cm;">
<div class="modal-dialog">
<!-- Modal content-->
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal">×</button>
<h4 class="modal-title">Sign In to out web site </h4>
</div>
<div class="modal-body">
<img src="../../assets/svgimages/ripple.svg"><br>
<i class="mdi mdi-account">Login</i>
<input type="text" class="form-control" [(ngModel)]="email" /><br><br>
<i class="mdi mdi-account md-70">Password</i>
<input type="password" class="form-control" [(ngModel)]="Password" /><br><br>
<button type="submit"
class="mdc-button
mdc-button--raised
mdc-button--primary
mdc-ripple-surface"
(click)="Connect()"
data-mdc-auto-init="MDCRipple" id="but">
<div [innerHTML]="spinonbutt"></div>
</button>
</div>
<div [innerHTML]="htmlStr" style="margin-left: 5cm;"></div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
</div>
</div>
</div>
`,
styleUrls: ["../../assets/css/signinstyle.css"],
providers: [UserService]
})
export class | {
private spinonbutt='Sign In' :string;
private email:string;
private Password:string;
private htmlStr = ''
:
string;
constructor(private _userservice:UserService) {
}
Connect() {
this.spinonbutt = '<img src="../../assets/svgimages/ripple.svg" width="25px" height="25px">';
this._userservice.Authentification(this.email, this.Password).subscribe(
(data)=>
if (data == 0) {
this.spinonbutt = 'Sign In';
this.htmlStr = '<p style="color:red;">Error !! Please check your details</p>';
} else if (data == 2) {
this.spinonbutt = 'Sign In';
this.htmlStr = '<p style="color:red;">Error !! Your account is not activated yet !!' +
'please check your email and click on the link below to activate ' +
'your account </p>';
} else {
this.htmlStr = 'ahla wasahla';
}
)
;
}
}
| SignInComponent | identifier_name |
tests.rs | use std::cmp::Ordering;
use super::print_item::compare_names;
use super::{AllTypes, Buffer};
#[test]
fn test_compare_names() | assert_eq!(compare_names("u32", "u16"), Ordering::Greater);
assert_eq!(compare_names("u8_to_f64", "u16_to_f64"), Ordering::Less);
assert_eq!(compare_names("u32_to_f64", "u16_to_f64"), Ordering::Greater);
assert_eq!(compare_names("u16_to_f64", "u16_to_f64"), Ordering::Equal);
assert_eq!(compare_names("u16_to_f32", "u16_to_f64"), Ordering::Less);
}
#[test]
fn test_name_sorting() {
let names = [
"Apple", "Banana", "Fruit", "Fruit0", "Fruit00", "Fruit01", "Fruit1", "Fruit02", "Fruit2",
"Fruit20", "Fruit30x", "Fruit100", "Pear",
];
let mut sorted = names.to_owned();
sorted.sort_by(|&l, r| compare_names(l, r));
assert_eq!(names, sorted);
}
#[test]
fn test_all_types_prints_header_once() {
// Regression test for #82477
let all_types = AllTypes::new();
let mut buffer = Buffer::new();
all_types.print(&mut buffer);
assert_eq!(1, buffer.into_inner().matches("List of all items").count());
}
| {
for &(a, b) in &[
("hello", "world"),
("", "world"),
("123", "hello"),
("123", ""),
("123test", "123"),
("hello", ""),
("hello", "hello"),
("hello123", "hello123"),
("hello123", "hello12"),
("hello12", "hello123"),
("hello01abc", "hello01xyz"),
("hello0abc", "hello0"),
("hello0", "hello0abc"),
("01", "1"),
] {
assert_eq!(compare_names(a, b), a.cmp(b), "{:?} - {:?}", a, b);
}
assert_eq!(compare_names("u8", "u16"), Ordering::Less); | identifier_body |
tests.rs | use std::cmp::Ordering;
use super::print_item::compare_names;
use super::{AllTypes, Buffer};
#[test]
fn test_compare_names() {
for &(a, b) in &[
("hello", "world"),
("", "world"),
("123", "hello"),
("123", ""),
("123test", "123"),
("hello", ""),
("hello", "hello"),
("hello123", "hello123"),
("hello123", "hello12"),
("hello12", "hello123"),
("hello01abc", "hello01xyz"),
("hello0abc", "hello0"),
("hello0", "hello0abc"),
("01", "1"),
] {
assert_eq!(compare_names(a, b), a.cmp(b), "{:?} - {:?}", a, b);
}
assert_eq!(compare_names("u8", "u16"), Ordering::Less);
assert_eq!(compare_names("u32", "u16"), Ordering::Greater);
assert_eq!(compare_names("u8_to_f64", "u16_to_f64"), Ordering::Less);
assert_eq!(compare_names("u32_to_f64", "u16_to_f64"), Ordering::Greater);
assert_eq!(compare_names("u16_to_f64", "u16_to_f64"), Ordering::Equal);
assert_eq!(compare_names("u16_to_f32", "u16_to_f64"), Ordering::Less);
}
#[test]
fn test_name_sorting() {
let names = [
"Apple", "Banana", "Fruit", "Fruit0", "Fruit00", "Fruit01", "Fruit1", "Fruit02", "Fruit2",
"Fruit20", "Fruit30x", "Fruit100", "Pear",
];
let mut sorted = names.to_owned();
sorted.sort_by(|&l, r| compare_names(l, r));
assert_eq!(names, sorted);
}
#[test]
fn | () {
// Regression test for #82477
let all_types = AllTypes::new();
let mut buffer = Buffer::new();
all_types.print(&mut buffer);
assert_eq!(1, buffer.into_inner().matches("List of all items").count());
}
| test_all_types_prints_header_once | identifier_name |
tests.rs | use std::cmp::Ordering;
use super::print_item::compare_names;
use super::{AllTypes, Buffer};
#[test]
fn test_compare_names() {
for &(a, b) in &[
("hello", "world"),
("", "world"),
("123", "hello"),
("123", ""),
("123test", "123"),
("hello", ""),
("hello", "hello"),
("hello123", "hello123"), | ("hello01abc", "hello01xyz"),
("hello0abc", "hello0"),
("hello0", "hello0abc"),
("01", "1"),
] {
assert_eq!(compare_names(a, b), a.cmp(b), "{:?} - {:?}", a, b);
}
assert_eq!(compare_names("u8", "u16"), Ordering::Less);
assert_eq!(compare_names("u32", "u16"), Ordering::Greater);
assert_eq!(compare_names("u8_to_f64", "u16_to_f64"), Ordering::Less);
assert_eq!(compare_names("u32_to_f64", "u16_to_f64"), Ordering::Greater);
assert_eq!(compare_names("u16_to_f64", "u16_to_f64"), Ordering::Equal);
assert_eq!(compare_names("u16_to_f32", "u16_to_f64"), Ordering::Less);
}
#[test]
fn test_name_sorting() {
let names = [
"Apple", "Banana", "Fruit", "Fruit0", "Fruit00", "Fruit01", "Fruit1", "Fruit02", "Fruit2",
"Fruit20", "Fruit30x", "Fruit100", "Pear",
];
let mut sorted = names.to_owned();
sorted.sort_by(|&l, r| compare_names(l, r));
assert_eq!(names, sorted);
}
#[test]
fn test_all_types_prints_header_once() {
// Regression test for #82477
let all_types = AllTypes::new();
let mut buffer = Buffer::new();
all_types.print(&mut buffer);
assert_eq!(1, buffer.into_inner().matches("List of all items").count());
} | ("hello123", "hello12"),
("hello12", "hello123"), | random_line_split |
rt-set-exit-status.rs | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:whatever
#![feature(rustc_private, exit_status)]
#[macro_use] extern crate log;
use std::env;
fn main() {
error!("whatever");
// 101 is the code the runtime uses on thread panic and the value
// compiletest expects run-fail tests to return.
env::set_exit_status(101);
} | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. | random_line_split |
|
rt-set-exit-status.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:whatever
#![feature(rustc_private, exit_status)]
#[macro_use] extern crate log;
use std::env;
fn main() | {
error!("whatever");
// 101 is the code the runtime uses on thread panic and the value
// compiletest expects run-fail tests to return.
env::set_exit_status(101);
} | identifier_body |
|
rt-set-exit-status.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:whatever
#![feature(rustc_private, exit_status)]
#[macro_use] extern crate log;
use std::env;
fn | () {
error!("whatever");
// 101 is the code the runtime uses on thread panic and the value
// compiletest expects run-fail tests to return.
env::set_exit_status(101);
}
| main | identifier_name |
delivery-page.component.ts | import { Component, OnInit, Input, Inject, forwardRef } from '@angular/core';
import { Delivery, RoundService } from '../round.service'
import { DeliveryProductListPageComponent } from './delivery-product-list-page.component'
import { DeliveryOrderListPageComponent } from './delivery-order-list-page.component'
import { RouteParams } from '@angular/router-deprecated';
import { RouteConfig, ROUTER_DIRECTIVES, Router } from '@angular/router-deprecated';
import { SectionHeaderComponent } from '../../structure/section-header.component'
import { Observable } from 'rxjs/Observable'
import 'rxjs/add/observable/combineLatest'
import { ButtonComponent } from '../../shared/button.component'
import { RoundPageService } from './round-page.component'
import { DateStringPipe } from '../../shared/pipes'
export class DeliveryPageService {
delivery: Delivery;
}
@Component({
selector: 'cc-delivery-page',
templateUrl: 'app/rounds/round-page/delivery-page.component.html',
directives: [ROUTER_DIRECTIVES, SectionHeaderComponent, ButtonComponent],
pipes: [DateStringPipe],
providers: [DeliveryPageService]
})
@RouteConfig([
{
path: 'order-list',
name: 'OrderList',
component: DeliveryOrderListPageComponent
},
{
path: 'product-list',
name: 'ProductList',
component: DeliveryProductListPageComponent
}
])
export class DeliveryPageComponent implements OnInit {
delivery: Delivery;
loading = true;
constructor(
private roundService: RoundService,
@Inject(forwardRef(() => RoundPageService))
private roundPage: RoundPageService,
private page: DeliveryPageService,
private routeParams: RouteParams,
private router: Router) |
ngOnInit() {
let deliveryId = +this.routeParams.params['deliveryId'];
this.roundService.getDelivery(this.roundPage.round.id, deliveryId)
.subscribe(delivery => {
this.loading = false;
this.page.delivery = delivery;
this.delivery = delivery;
});
}
isCurrent(linkParams: any[]): boolean {
//router.isRouteActive() isn't working here for some reason :(
//need to switch to the new router anyway.
let cleanUp = (s:string) => s.replace(/;[^\/]+\/?$/, '');
let pathname = cleanUp(this.router.generate(linkParams).toLinkUrl());
let currentPathname = cleanUp(window.location.pathname);
return currentPathname == pathname;
}
} | {
} | identifier_body |
delivery-page.component.ts | import { Component, OnInit, Input, Inject, forwardRef } from '@angular/core';
import { Delivery, RoundService } from '../round.service'
import { DeliveryProductListPageComponent } from './delivery-product-list-page.component'
import { DeliveryOrderListPageComponent } from './delivery-order-list-page.component'
import { RouteParams } from '@angular/router-deprecated';
import { RouteConfig, ROUTER_DIRECTIVES, Router } from '@angular/router-deprecated';
import { SectionHeaderComponent } from '../../structure/section-header.component'
import { Observable } from 'rxjs/Observable'
import 'rxjs/add/observable/combineLatest'
import { ButtonComponent } from '../../shared/button.component'
import { RoundPageService } from './round-page.component'
import { DateStringPipe } from '../../shared/pipes'
export class DeliveryPageService {
delivery: Delivery;
}
@Component({
selector: 'cc-delivery-page',
templateUrl: 'app/rounds/round-page/delivery-page.component.html',
directives: [ROUTER_DIRECTIVES, SectionHeaderComponent, ButtonComponent],
pipes: [DateStringPipe],
providers: [DeliveryPageService]
})
@RouteConfig([
{
path: 'order-list',
name: 'OrderList',
component: DeliveryOrderListPageComponent
},
{
path: 'product-list',
name: 'ProductList',
component: DeliveryProductListPageComponent
}
])
export class DeliveryPageComponent implements OnInit {
delivery: Delivery;
loading = true;
constructor(
private roundService: RoundService,
@Inject(forwardRef(() => RoundPageService))
private roundPage: RoundPageService,
private page: DeliveryPageService,
private routeParams: RouteParams,
private router: Router) {
}
| () {
let deliveryId = +this.routeParams.params['deliveryId'];
this.roundService.getDelivery(this.roundPage.round.id, deliveryId)
.subscribe(delivery => {
this.loading = false;
this.page.delivery = delivery;
this.delivery = delivery;
});
}
isCurrent(linkParams: any[]): boolean {
//router.isRouteActive() isn't working here for some reason :(
//need to switch to the new router anyway.
let cleanUp = (s:string) => s.replace(/;[^\/]+\/?$/, '');
let pathname = cleanUp(this.router.generate(linkParams).toLinkUrl());
let currentPathname = cleanUp(window.location.pathname);
return currentPathname == pathname;
}
} | ngOnInit | identifier_name |
delivery-page.component.ts | import { Component, OnInit, Input, Inject, forwardRef } from '@angular/core';
import { Delivery, RoundService } from '../round.service'
import { DeliveryProductListPageComponent } from './delivery-product-list-page.component'
import { DeliveryOrderListPageComponent } from './delivery-order-list-page.component'
import { RouteParams } from '@angular/router-deprecated';
import { RouteConfig, ROUTER_DIRECTIVES, Router } from '@angular/router-deprecated';
import { SectionHeaderComponent } from '../../structure/section-header.component'
import { Observable } from 'rxjs/Observable'
import 'rxjs/add/observable/combineLatest'
import { ButtonComponent } from '../../shared/button.component'
import { RoundPageService } from './round-page.component'
import { DateStringPipe } from '../../shared/pipes'
export class DeliveryPageService {
delivery: Delivery;
}
@Component({
selector: 'cc-delivery-page',
templateUrl: 'app/rounds/round-page/delivery-page.component.html',
directives: [ROUTER_DIRECTIVES, SectionHeaderComponent, ButtonComponent],
pipes: [DateStringPipe],
providers: [DeliveryPageService]
})
@RouteConfig([
{
path: 'order-list',
name: 'OrderList',
component: DeliveryOrderListPageComponent
},
{
path: 'product-list',
name: 'ProductList',
component: DeliveryProductListPageComponent
}
])
export class DeliveryPageComponent implements OnInit {
delivery: Delivery;
loading = true;
constructor(
private roundService: RoundService,
@Inject(forwardRef(() => RoundPageService))
private roundPage: RoundPageService,
private page: DeliveryPageService,
private routeParams: RouteParams,
private router: Router) {
}
ngOnInit() {
let deliveryId = +this.routeParams.params['deliveryId'];
this.roundService.getDelivery(this.roundPage.round.id, deliveryId) | });
}
isCurrent(linkParams: any[]): boolean {
//router.isRouteActive() isn't working here for some reason :(
//need to switch to the new router anyway.
let cleanUp = (s:string) => s.replace(/;[^\/]+\/?$/, '');
let pathname = cleanUp(this.router.generate(linkParams).toLinkUrl());
let currentPathname = cleanUp(window.location.pathname);
return currentPathname == pathname;
}
} | .subscribe(delivery => {
this.loading = false;
this.page.delivery = delivery;
this.delivery = delivery; | random_line_split |
base.js | 'use strict';
let path = require('path');
let defaultSettings = require('./defaults');
// Additional npm or bower modules to include in builds
// Add all foreign plugins you may need into this array
// @example:
// let npmBase = path.join(__dirname, '../node_modules');
// let additionalPaths = [ path.join(npmBase, 'react-bootstrap') ];
let additionalPaths = [];
module.exports = {
additionalPaths: additionalPaths,
port: defaultSettings.port,
debug: true,
devtool: 'eval',
output: {
path: path.join(__dirname, '/../dist/assets'),
filename: 'app.js',
publicPath: defaultSettings.publicPath
},
devServer: {
contentBase: './src/',
| noInfo: false
},
resolve: {
extensions: ['', '.js', '.jsx', '.json'],
alias: {
api: `${defaultSettings.srcPath}/api/`,
components: `${defaultSettings.srcPath}/components/`,
containers: `${defaultSettings.srcPath}/containers/`,
constants: `${defaultSettings.srcPath}/constants/`,
stores: `${defaultSettings.srcPath}/stores/`,
styles: `${defaultSettings.srcPath}/styles/`,
config: `${defaultSettings.srcPath}/config/` + process.env.REACT_WEBPACK_ENV
}
},
module: {}
}; | historyApiFallback: true,
hot: true,
port: defaultSettings.port,
publicPath: defaultSettings.publicPath,
| random_line_split |
main.rs | use std::thread;
fn test1() {
let v = vec![1, 2, 3];
let handle = thread::spawn(move || {
println!("Here's a vector: {:?}", v);
});
handle.join().unwrap();
}
use std::sync::mpsc;
fn test2() {
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
let val = String::from("[sender say]> hi");
println!("[sender] before send: {}", val);
tx.send(val).unwrap();
// println!("[sender] before send: {}", val); //value borrowed here after move
});
let received = rx.recv().unwrap();
println!("[receiver] Got: {}", received);
}
// use std::sync::mpsc;
// use std::thread;
use std::time::Duration;
fn test3() {
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
let vals = vec![
String::from("hi"),
String::from("from"),
String::from("the"),
String::from("sub-thread"),
];
for val in vals {
tx.send(val).unwrap();
thread::sleep(Duration::from_secs(1));
}
});
for received in rx {
println!("Got: {}", received);
}
}
fn test4() {
let (tx, rx) = mpsc::channel();
let tx1 = mpsc::Sender::clone(&tx);
thread::spawn(move || {
let vals = vec![
String::from("hi"),
String::from("from"),
String::from("the"),
String::from("sub-thread"),
];
for val in vals {
tx1.send(val).unwrap();
thread::sleep(Duration::from_secs(1));
}
});
thread::spawn(move || {
let vals = vec![
String::from("more"),
String::from("messages"),
String::from("for"),
String::from("you"),
];
for val in vals {
tx.send(val).unwrap();
thread::sleep(Duration::from_secs(1));
}
});
for received in rx {
println!("Got: {}", received);
}
}
use std::sync::{Arc, Mutex};
fn test5() {
let counter = Arc::new(Mutex::new(0));
let mut handles = vec![];
for _ in 0..10 {
let counter = Arc::clone(&counter);
let handle = thread::spawn(move || {
let mut num = counter.lock().unwrap();
*num += 2;
});
handles.push(handle);
}
for handle in handles {
handle.join().unwrap();
}
println!("Result: {}", *counter.lock().unwrap());
}
//test entry
fn | () {
println!("main; -enter");
test5();
println!("main; -exit");
}
| main | identifier_name |
main.rs | use std::thread;
fn test1() {
let v = vec![1, 2, 3];
let handle = thread::spawn(move || {
println!("Here's a vector: {:?}", v);
});
handle.join().unwrap();
}
use std::sync::mpsc;
fn test2() |
// use std::sync::mpsc;
// use std::thread;
use std::time::Duration;
fn test3() {
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
let vals = vec![
String::from("hi"),
String::from("from"),
String::from("the"),
String::from("sub-thread"),
];
for val in vals {
tx.send(val).unwrap();
thread::sleep(Duration::from_secs(1));
}
});
for received in rx {
println!("Got: {}", received);
}
}
fn test4() {
let (tx, rx) = mpsc::channel();
let tx1 = mpsc::Sender::clone(&tx);
thread::spawn(move || {
let vals = vec![
String::from("hi"),
String::from("from"),
String::from("the"),
String::from("sub-thread"),
];
for val in vals {
tx1.send(val).unwrap();
thread::sleep(Duration::from_secs(1));
}
});
thread::spawn(move || {
let vals = vec![
String::from("more"),
String::from("messages"),
String::from("for"),
String::from("you"),
];
for val in vals {
tx.send(val).unwrap();
thread::sleep(Duration::from_secs(1));
}
});
for received in rx {
println!("Got: {}", received);
}
}
use std::sync::{Arc, Mutex};
fn test5() {
let counter = Arc::new(Mutex::new(0));
let mut handles = vec![];
for _ in 0..10 {
let counter = Arc::clone(&counter);
let handle = thread::spawn(move || {
let mut num = counter.lock().unwrap();
*num += 2;
});
handles.push(handle);
}
for handle in handles {
handle.join().unwrap();
}
println!("Result: {}", *counter.lock().unwrap());
}
//test entry
fn main() {
println!("main; -enter");
test5();
println!("main; -exit");
}
| {
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
let val = String::from("[sender say]> hi");
println!("[sender] before send: {}", val);
tx.send(val).unwrap();
// println!("[sender] before send: {}", val); //value borrowed here after move
});
let received = rx.recv().unwrap();
println!("[receiver] Got: {}", received);
} | identifier_body |
main.rs | use std::thread;
fn test1() {
let v = vec![1, 2, 3];
let handle = thread::spawn(move || {
println!("Here's a vector: {:?}", v);
});
handle.join().unwrap();
}
use std::sync::mpsc;
fn test2() {
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
let val = String::from("[sender say]> hi");
println!("[sender] before send: {}", val);
tx.send(val).unwrap();
// println!("[sender] before send: {}", val); //value borrowed here after move
});
let received = rx.recv().unwrap();
println!("[receiver] Got: {}", received);
}
// use std::sync::mpsc;
// use std::thread;
use std::time::Duration;
fn test3() {
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
let vals = vec![
String::from("hi"),
String::from("from"),
String::from("the"),
String::from("sub-thread"),
];
for val in vals {
tx.send(val).unwrap();
thread::sleep(Duration::from_secs(1));
}
});
for received in rx {
println!("Got: {}", received);
}
}
fn test4() {
let (tx, rx) = mpsc::channel();
let tx1 = mpsc::Sender::clone(&tx);
thread::spawn(move || {
let vals = vec![
String::from("hi"),
String::from("from"),
String::from("the"),
String::from("sub-thread"),
];
for val in vals {
tx1.send(val).unwrap();
thread::sleep(Duration::from_secs(1));
}
});
thread::spawn(move || {
let vals = vec![
String::from("more"),
String::from("messages"),
String::from("for"),
String::from("you"),
];
| thread::sleep(Duration::from_secs(1));
}
});
for received in rx {
println!("Got: {}", received);
}
}
use std::sync::{Arc, Mutex};
fn test5() {
let counter = Arc::new(Mutex::new(0));
let mut handles = vec![];
for _ in 0..10 {
let counter = Arc::clone(&counter);
let handle = thread::spawn(move || {
let mut num = counter.lock().unwrap();
*num += 2;
});
handles.push(handle);
}
for handle in handles {
handle.join().unwrap();
}
println!("Result: {}", *counter.lock().unwrap());
}
//test entry
fn main() {
println!("main; -enter");
test5();
println!("main; -exit");
} | for val in vals {
tx.send(val).unwrap(); | random_line_split |
day_6.rs | use std::borrow::Cow;
use std::iter::Peekable;
use std::num::ParseFloatError;
use std::str::Chars;
pub fn calculate<'s>(src: Cow<'s, str>) -> Result<f64, ParseFloatError> {
let mut iter = src.chars().peekable();
parse_expression(&mut iter)
}
fn parse_expression(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut ret = parse_term(iter.by_ref());
loop {
match iter.peek().cloned() {
Some('+') => {
iter.next();
ret = ret.and_then(|ret| parse_term(iter.by_ref()).map(|num| ret + num))
},
Some('-') => {
iter.next();
ret = ret.and_then(|ret| parse_term(iter.by_ref()).map(|num| ret - num))
}
_ => break
}
}
ret
}
fn parse_term(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut ret = parse_num(iter.by_ref());
loop {
match iter.peek().cloned() {
Some('×') => {
iter.next();
ret = ret.and_then(|ret| parse_num(iter.by_ref()).map(|num| ret * num))
},
Some('÷') => {
iter.next(); | }
ret
}
fn parse_num(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut num = String::new();
loop {
match iter.peek().cloned() {
Some('+') | Some('×') | Some('÷') | Some(')') | None => break,
Some('-') if !num.is_empty() => break,
Some('(') => {
iter.next();
let ret = parse_expression(iter.by_ref());
iter.next();
return ret;
}
Some(d) => num.push(d)
}
iter.next();
}
num.parse()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn evaluate_negative_number() {
assert_eq!(calculate(Cow::Borrowed("-54")), Ok(-54.0));
}
#[test]
fn evaluate_addition() {
assert_eq!(calculate(Cow::Borrowed("14+23")), Ok(37.0));
}
#[test]
fn evaluate_subtraction() {
assert_eq!(calculate(Cow::Borrowed("3-45")), Ok(-42.0));
}
#[test]
fn evaluate_multiplication() {
assert_eq!(calculate(Cow::Borrowed("4×9")), Ok(36.0));
}
#[test]
fn evaluate_division() {
assert_eq!(calculate(Cow::Borrowed("21÷3")), Ok(7.0));
}
#[test]
fn evaluate_many_operations() {
assert_eq!(calculate(Cow::Borrowed("3+12÷2-3×7+2")), Ok(-10.0));
}
#[test]
fn evaluate_operation_with_parenthesis() {
assert_eq!(calculate(Cow::Borrowed("3+18÷(2-(3+7)×2)")), Ok(2.0))
}
} | ret = ret.and_then(|ret| parse_num(iter.by_ref()).map(|num| ret / num))
}
_ => break
} | random_line_split |
day_6.rs | use std::borrow::Cow;
use std::iter::Peekable;
use std::num::ParseFloatError;
use std::str::Chars;
pub fn calculate<'s>(src: Cow<'s, str>) -> Result<f64, ParseFloatError> {
let mut iter = src.chars().peekable();
parse_expression(&mut iter)
}
fn parse_expression(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut ret = parse_term(iter.by_ref());
loop {
match iter.peek().cloned() {
Some('+') => {
iter.next();
ret = ret.and_then(|ret| parse_term(iter.by_ref()).map(|num| ret + num))
},
Some('-') => {
iter.next();
ret = ret.and_then(|ret| parse_term(iter.by_ref()).map(|num| ret - num))
}
_ => break
}
}
ret
}
fn parse_term(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut ret = parse_num(iter.by_ref());
loop {
match iter.peek().cloned() {
Some('×') => {
iter.next();
ret = ret.and_then(|ret| parse_num(iter.by_ref()).map(|num| ret * num))
},
Some('÷') => {
iter.next();
ret = ret.and_then(|ret| parse_num(iter.by_ref()).map(|num| ret / num))
}
_ => break
}
}
ret
}
fn pa | ter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut num = String::new();
loop {
match iter.peek().cloned() {
Some('+') | Some('×') | Some('÷') | Some(')') | None => break,
Some('-') if !num.is_empty() => break,
Some('(') => {
iter.next();
let ret = parse_expression(iter.by_ref());
iter.next();
return ret;
}
Some(d) => num.push(d)
}
iter.next();
}
num.parse()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn evaluate_negative_number() {
assert_eq!(calculate(Cow::Borrowed("-54")), Ok(-54.0));
}
#[test]
fn evaluate_addition() {
assert_eq!(calculate(Cow::Borrowed("14+23")), Ok(37.0));
}
#[test]
fn evaluate_subtraction() {
assert_eq!(calculate(Cow::Borrowed("3-45")), Ok(-42.0));
}
#[test]
fn evaluate_multiplication() {
assert_eq!(calculate(Cow::Borrowed("4×9")), Ok(36.0));
}
#[test]
fn evaluate_division() {
assert_eq!(calculate(Cow::Borrowed("21÷3")), Ok(7.0));
}
#[test]
fn evaluate_many_operations() {
assert_eq!(calculate(Cow::Borrowed("3+12÷2-3×7+2")), Ok(-10.0));
}
#[test]
fn evaluate_operation_with_parenthesis() {
assert_eq!(calculate(Cow::Borrowed("3+18÷(2-(3+7)×2)")), Ok(2.0))
}
}
| rse_num(i | identifier_name |
day_6.rs | use std::borrow::Cow;
use std::iter::Peekable;
use std::num::ParseFloatError;
use std::str::Chars;
pub fn calculate<'s>(src: Cow<'s, str>) -> Result<f64, ParseFloatError> {
let mut iter = src.chars().peekable();
parse_expression(&mut iter)
}
fn parse_expression(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut ret = parse_term(iter.by_ref());
loop {
match iter.peek().cloned() {
Some('+') => {
iter.next();
ret = ret.and_then(|ret| parse_term(iter.by_ref()).map(|num| ret + num))
},
Some('-') => {
iter.next();
ret = ret.and_then(|ret| parse_term(iter.by_ref()).map(|num| ret - num))
}
_ => break
}
}
ret
}
fn parse_term(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut ret = parse_num(iter.by_ref());
loop {
match iter.peek().cloned() {
Some('×') => { |
Some('÷') => {
iter.next();
ret = ret.and_then(|ret| parse_num(iter.by_ref()).map(|num| ret / num))
}
_ => break
}
}
ret
}
fn parse_num(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut num = String::new();
loop {
match iter.peek().cloned() {
Some('+') | Some('×') | Some('÷') | Some(')') | None => break,
Some('-') if !num.is_empty() => break,
Some('(') => {
iter.next();
let ret = parse_expression(iter.by_ref());
iter.next();
return ret;
}
Some(d) => num.push(d)
}
iter.next();
}
num.parse()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn evaluate_negative_number() {
assert_eq!(calculate(Cow::Borrowed("-54")), Ok(-54.0));
}
#[test]
fn evaluate_addition() {
assert_eq!(calculate(Cow::Borrowed("14+23")), Ok(37.0));
}
#[test]
fn evaluate_subtraction() {
assert_eq!(calculate(Cow::Borrowed("3-45")), Ok(-42.0));
}
#[test]
fn evaluate_multiplication() {
assert_eq!(calculate(Cow::Borrowed("4×9")), Ok(36.0));
}
#[test]
fn evaluate_division() {
assert_eq!(calculate(Cow::Borrowed("21÷3")), Ok(7.0));
}
#[test]
fn evaluate_many_operations() {
assert_eq!(calculate(Cow::Borrowed("3+12÷2-3×7+2")), Ok(-10.0));
}
#[test]
fn evaluate_operation_with_parenthesis() {
assert_eq!(calculate(Cow::Borrowed("3+18÷(2-(3+7)×2)")), Ok(2.0))
}
}
|
iter.next();
ret = ret.and_then(|ret| parse_num(iter.by_ref()).map(|num| ret * num))
}, | conditional_block |
day_6.rs | use std::borrow::Cow;
use std::iter::Peekable;
use std::num::ParseFloatError;
use std::str::Chars;
pub fn calculate<'s>(src: Cow<'s, str>) -> Result<f64, ParseFloatError> |
fn parse_expression(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut ret = parse_term(iter.by_ref());
loop {
match iter.peek().cloned() {
Some('+') => {
iter.next();
ret = ret.and_then(|ret| parse_term(iter.by_ref()).map(|num| ret + num))
},
Some('-') => {
iter.next();
ret = ret.and_then(|ret| parse_term(iter.by_ref()).map(|num| ret - num))
}
_ => break
}
}
ret
}
fn parse_term(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut ret = parse_num(iter.by_ref());
loop {
match iter.peek().cloned() {
Some('×') => {
iter.next();
ret = ret.and_then(|ret| parse_num(iter.by_ref()).map(|num| ret * num))
},
Some('÷') => {
iter.next();
ret = ret.and_then(|ret| parse_num(iter.by_ref()).map(|num| ret / num))
}
_ => break
}
}
ret
}
fn parse_num(iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
let mut num = String::new();
loop {
match iter.peek().cloned() {
Some('+') | Some('×') | Some('÷') | Some(')') | None => break,
Some('-') if !num.is_empty() => break,
Some('(') => {
iter.next();
let ret = parse_expression(iter.by_ref());
iter.next();
return ret;
}
Some(d) => num.push(d)
}
iter.next();
}
num.parse()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn evaluate_negative_number() {
assert_eq!(calculate(Cow::Borrowed("-54")), Ok(-54.0));
}
#[test]
fn evaluate_addition() {
assert_eq!(calculate(Cow::Borrowed("14+23")), Ok(37.0));
}
#[test]
fn evaluate_subtraction() {
assert_eq!(calculate(Cow::Borrowed("3-45")), Ok(-42.0));
}
#[test]
fn evaluate_multiplication() {
assert_eq!(calculate(Cow::Borrowed("4×9")), Ok(36.0));
}
#[test]
fn evaluate_division() {
assert_eq!(calculate(Cow::Borrowed("21÷3")), Ok(7.0));
}
#[test]
fn evaluate_many_operations() {
assert_eq!(calculate(Cow::Borrowed("3+12÷2-3×7+2")), Ok(-10.0));
}
#[test]
fn evaluate_operation_with_parenthesis() {
assert_eq!(calculate(Cow::Borrowed("3+18÷(2-(3+7)×2)")), Ok(2.0))
}
}
| {
let mut iter = src.chars().peekable();
parse_expression(&mut iter)
} | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.