| file_name (large_string, length 4-140) | prefix (large_string, length 0-12.1k) | suffix (large_string, length 0-12k) | middle (large_string, length 0-7.51k) | fim_type (large_string, 4 classes: identifier_body, identifier_name, conditional_block, random_line_split) |
---|---|---|---|---|
term_gui.rs | rustbox.print_char(col, row, style, Color::Default, Color::Default, c);
}
row += 1;
if row >= rustbox.height() {
break;
}
}
}
}
struct BufferViewWidget {
view_id: String,
client: client::ThinClient,
cursor_id: String,
}
impl BufferViewWidget {
pub fn new(view_id: String, client: client::ThinClient) -> Self {
BufferViewWidget {
view_id: view_id,
client: client,
cursor_id: String::new(),
}
}
fn draw(&mut self, buffer_view: &buffer_views::BufferView, rustbox: &rustbox::RustBox) {
let mut row = 0;
let top_line_index = buffer_view.top_line_index as usize;
self.cursor_id = buffer_view.cursor.id().to_string();
let mut cursor_drawn = false;
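// Track whether the cursor cell gets painted while rendering the visible
// lines; if it never is (e.g. the cursor sits past the end of its line),
// it is drawn separately as a red block after the loop below.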
while row < rustbox.height() {
let line_index = top_line_index + row;
if let Some(line) = buffer_view.lines.get(line_index) {
for (col, c) in line.chars().enumerate() {
if col >= rustbox.width() {
break;
}
let bg = if buffer_view.cursor.position.line_index == line_index as isize &&
buffer_view.cursor.position.column_index as usize == col {
cursor_drawn = true;
Color::Red
} else {
Color::Default
};
rustbox.print_char(col, row, rustbox::RB_NORMAL, Color::Default, bg, c);
}
}
row += 1;
}
if !cursor_drawn {
let row = buffer_view.cursor.position.line_index - top_line_index as isize;
rustbox.print_char(buffer_view.cursor.position.column_index as usize,
row as usize, rustbox::RB_NORMAL,
Color::Default, Color::Red, ' ');
}
}
}
#[derive(Debug)]
struct Options {
socket: String,
config_file: path::PathBuf,
}
struct TerminalGui {
config_file_runner: Box<gui::config_file::ConfigFileRunner>,
client: client::Client,
rustbox: rustbox::RustBox,
buffer_views: Arc<RwLock<gui::buffer_views::BufferViews>>,
last_key_down_event: time::PreciseTime,
completer: Option<CompleterWidget>,
buffer_view_widget: Option<BufferViewWidget>,
// NOCOM(#sirver): GuiCommand in namespace gui is very duplicated
gui_commands: mpsc::Receiver<gui::command::GuiCommand>,
}
impl TerminalGui {
fn new(options: &Options) -> swiboe::Result<Self> {
let mut client = match net::SocketAddr::from_str(&options.socket) {
Ok(value) => {
client::Client::connect_tcp(&value).unwrap()
}
Err(_) => {
let socket_path = path::PathBuf::from(&options.socket);
client::Client::connect_unix(&socket_path).unwrap()
}
};
let mut config_file_runner = gui::config_file::ConfigFileRunner::new(
try!(client.clone()));
config_file_runner.run(&options.config_file);
let rustbox = match RustBox::init(rustbox::InitOptions {
input_mode: rustbox::InputMode::Current,
buffer_stderr: true,
}) {
Result::Ok(v) => v,
Result::Err(e) => panic!("{}", e),
};
let gui_id: String = Uuid::new_v4().to_hyphenated_string();
let (gui_commands_tx, gui_commands_rx) = mpsc::channel();
let buffer_views = try!(gui::buffer_views::BufferViews::new(&gui_id, gui_commands_tx, &mut client));
Ok(TerminalGui {
config_file_runner: config_file_runner,
client: client,
rustbox: rustbox,
buffer_views: buffer_views,
last_key_down_event: time::PreciseTime::now(),
completer: None,
buffer_view_widget: None,
gui_commands: gui_commands_rx,
})
}
fn handle_events(&mut self) -> swiboe::Result<bool> {
match self.rustbox.peek_event(std::time::Duration::from_millis(5), false) {
Ok(rustbox::Event::KeyEvent(key)) => {
if self.completer.is_some() {
let rv = self.completer.as_mut().unwrap().on_key(key);
match rv {
CompleterState::Running => (),
CompleterState::Canceled => {
self.completer = None;
},
CompleterState::Selected(result) => {
self.completer = None;
let mut rpc = try!(self.client.call("buffer.open", &swiboe::plugin::buffer::open::Request {
uri: format!("file://{}", result),
}));
let response: swiboe::plugin::buffer::open::Response = rpc.wait_for().unwrap();
let mut buffer_views = self.buffer_views.write().unwrap();
let view_id = buffer_views.new_view(response.buffer_index, self.rustbox.width(), self.rustbox.height());
self.buffer_view_widget = Some(BufferViewWidget::new(view_id, try!(self.client.clone())));
},
}
} else {
if !try!(self.handle_key(key)) {
return Ok(false);
}
}
},
Err(e) => panic!("{}", e),
_ => { }
}
while let Ok(command) = self.gui_commands.try_recv() {
match command {
gui::command::GuiCommand::Quit => return Ok(false),
gui::command::GuiCommand::Redraw => (),
}
}
return Ok(true);
}
fn handle_key(&mut self, key: rustbox::Key) -> swiboe::Result<bool> {
let delta_t = {
let now = time::PreciseTime::now();
let delta_t = self.last_key_down_event.to(now);
self.last_key_down_event = now;
delta_t
};
let delta_t_in_seconds = delta_t.num_nanoseconds().unwrap() as f64 / 1e9;
match key {
// NOCOM(#sirver): should be handled through plugins.
rustbox::Key::Char('q') => return Ok(false),
rustbox::Key::Ctrl('t') => {
self.completer = Some(try!(CompleterWidget::new(&mut self.client)))
},
rustbox::Key::Esc => {
self.config_file_runner.keymap_handler.timeout();
},
rustbox::Key::Char(a) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Char(a));
},
rustbox::Key::Up => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Up);
},
rustbox::Key::Down => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Down);
},
rustbox::Key::Left => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Left);
},
rustbox::Key::Right => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Right);
},
rustbox::Key::Tab => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Tab);
},
rustbox::Key::Ctrl(some_other_key) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Ctrl);
try!(self.handle_key(rustbox::Key::Char(some_other_key)));
}
_ => (),
}
Ok(true)
}
fn draw(&mut self) {
self.rustbox.clear();
if let Some(ref mut widget) = self.buffer_view_widget {
let buffer_views = self.buffer_views.read().unwrap();
let buffer_view = buffer_views.get(&widget.view_id).unwrap();
widget.draw(&buffer_view, &self.rustbox);
}
if let Some(ref mut completer) = self.completer {
completer.draw(&self.rustbox);
}
self.rustbox.present();
}
}
fn parse_options() -> Options {
let matches = clap::App::new("term_gui")
.about("Terminal client for Swiboe")
.version(&crate_version!()[..])
.arg(clap::Arg::with_name("SOCKET")
.short("s")
.long("socket")
.help("Socket at which the master listens.")
.required(true)
.takes_value(true))
.arg(clap::Arg::with_name("CONFIG_FILE")
.short("c")
.long("config_file")
.help("The config file to run when the GUI starts up.")
.takes_value(true))
.get_matches();
Options {
config_file: path::PathBuf::from(matches.value_of("CONFIG_FILE").unwrap_or("config.lua")),
socket: matches.value_of("SOCKET").unwrap().into(),
}
}
fn main() | {
let options = parse_options();
let mut gui = TerminalGui::new(&options).unwrap();
while gui.handle_events().unwrap() {
gui.draw();
}
} | identifier_body |
|
term_gui.rs | of the server, since the server
// might run on a different machine than the client - and certainly in a different
// directory.
let current_dir = env::current_dir().unwrap();
let rpc = try!(client.call("list_files", &swiboe::plugin::list_files::ListFilesRequest {
directory: current_dir.to_string_lossy().into_owned(),
}));
Ok(CompleterWidget {
candidates: subsequence_match::CandidateSet::new(),
rpc: Some(rpc),
query: "".into(),
results: Vec::new(),
selection_index: 0,
})
}
fn | (&mut self, key: rustbox::Key) -> CompleterState {
match key {
rustbox::Key::Char(c) => {
self.query.push(c);
self.results.clear();
CompleterState::Running
},
rustbox::Key::Backspace => {
self.query.pop();
self.results.clear();
CompleterState::Running
},
rustbox::Key::Down => {
self.selection_index += 1;
CompleterState::Running
},
rustbox::Key::Up => {
self.selection_index -= 1;
CompleterState::Running
},
rustbox::Key::Esc => {
self.rpc.take().unwrap().cancel().unwrap();
CompleterState::Canceled
},
rustbox::Key::Enter => {
self.rpc.take().unwrap().cancel().unwrap();
if self.results.is_empty() {
CompleterState::Canceled
} else {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
CompleterState::Selected(self.results[self.selection_index as usize].text.clone())
}
}
_ => CompleterState::Running,
}
}
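// `clamp` is a helper defined elsewhere in this file; a plausible sketch
// (an assumption: only the call sites above are visible here):
// fn clamp(min: isize, max: isize, value: &mut isize) {
//     *value = cmp::max(min, cmp::min(max, *value));
// }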
fn draw(&mut self, rustbox: &rustbox::RustBox) {
while let Some(b) = self.rpc.as_mut().unwrap().try_recv().unwrap() {
self.results.clear();
let b: swiboe::plugin::list_files::ListFilesUpdate = serde_json::from_value(b).unwrap();
for file in &b.files {
self.candidates.insert(file);
}
}
if self.results.is_empty() {
let query_to_use: String = self.query.chars().filter(|c| !c.is_whitespace()).collect();
self.candidates.query(&query_to_use, subsequence_match::MatchCase::No, &mut self.results);
}
if !self.results.is_empty() {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
}
rustbox.print(0, 0, rustbox::RB_BOLD, Color::Yellow, Color::Default, &self.query);
let len_string = format!("{}/{} matching ({})", self.results.len(), self.candidates.len(),
if self.rpc.as_ref().unwrap().done() { "done" } else { "scanning" } );
rustbox.print(rustbox.width() - len_string.len() - 1, 0, rustbox::RB_BOLD, Color::Blue, Color::Default, &len_string);
let mut row = 1usize;
for result in &self.results {
let mut matching_indices = result.matching_indices.iter().peekable();
for (col, c) in result.text.chars().enumerate() {
let matches = match matching_indices.peek() {
Some(val) if **val == col => true,
_ => false,
};
let mut style = if matches {
matching_indices.next();
rustbox::RB_BOLD
} else {
rustbox::RB_NORMAL
};
if row as isize == self.selection_index + 1 {
style = style | rustbox::RB_REVERSE;
}
rustbox.print_char(col, row, style, Color::Default, Color::Default, c);
}
row += 1;
if row >= rustbox.height() {
break;
}
}
}
}
struct BufferViewWidget {
view_id: String,
client: client::ThinClient,
cursor_id: String,
}
impl BufferViewWidget {
pub fn new(view_id: String, client: client::ThinClient) -> Self {
BufferViewWidget {
view_id: view_id,
client: client,
cursor_id: String::new(),
}
}
fn draw(&mut self, buffer_view: &buffer_views::BufferView, rustbox: &rustbox::RustBox) {
let mut row = 0;
let top_line_index = buffer_view.top_line_index as usize;
self.cursor_id = buffer_view.cursor.id().to_string();
let mut cursor_drawn = false;
while row < rustbox.height() {
let line_index = top_line_index + row;
if let Some(line) = buffer_view.lines.get(line_index) {
for (col, c) in line.chars().enumerate() {
if col >= rustbox.width() {
break;
}
let bg = if buffer_view.cursor.position.line_index == line_index as isize &&
buffer_view.cursor.position.column_index as usize == col {
cursor_drawn = true;
Color::Red
} else {
Color::Default
};
rustbox.print_char(col, row, rustbox::RB_NORMAL, Color::Default, bg, c);
}
}
row += 1;
}
if !cursor_drawn {
let row = buffer_view.cursor.position.line_index - top_line_index as isize;
rustbox.print_char(buffer_view.cursor.position.column_index as usize,
row as usize, rustbox::RB_NORMAL,
Color::Default, Color::Red, ' ');
}
}
}
#[derive(Debug)]
struct Options {
socket: String,
config_file: path::PathBuf,
}
struct TerminalGui {
config_file_runner: Box<gui::config_file::ConfigFileRunner>,
client: client::Client,
rustbox: rustbox::RustBox,
buffer_views: Arc<RwLock<gui::buffer_views::BufferViews>>,
last_key_down_event: time::PreciseTime,
completer: Option<CompleterWidget>,
buffer_view_widget: Option<BufferViewWidget>,
// NOCOM(#sirver): GuiCommand in namespace gui is very duplicated
gui_commands: mpsc::Receiver<gui::command::GuiCommand>,
}
impl TerminalGui {
fn new(options: &Options) -> swiboe::Result<Self> {
let mut client = match net::SocketAddr::from_str(&options.socket) {
Ok(value) => {
client::Client::connect_tcp(&value).unwrap()
}
Err(_) => {
let socket_path = path::PathBuf::from(&options.socket);
client::Client::connect_unix(&socket_path).unwrap()
}
};
let mut config_file_runner = gui::config_file::ConfigFileRunner::new(
try!(client.clone()));
config_file_runner.run(&options.config_file);
let rustbox = match RustBox::init(rustbox::InitOptions {
input_mode: rustbox::InputMode::Current,
buffer_stderr: true,
}) {
Result::Ok(v) => v,
Result::Err(e) => panic!("{}", e),
};
let gui_id: String = Uuid::new_v4().to_hyphenated_string();
let (gui_commands_tx, gui_commands_rx) = mpsc::channel();
let buffer_views = try!(gui::buffer_views::BufferViews::new(&gui_id, gui_commands_tx, &mut client));
Ok(TerminalGui {
config_file_runner: config_file_runner,
client: client,
rustbox: rustbox,
buffer_views: buffer_views,
last_key_down_event: time::PreciseTime::now(),
completer: None,
buffer_view_widget: None,
gui_commands: gui_commands_rx,
})
}
fn handle_events(&mut self) -> swiboe::Result<bool> {
match self.rustbox.peek_event(std::time::Duration::from_millis(5), false) {
Ok(rustbox::Event::KeyEvent(key)) => {
if self.completer.is_some() {
let rv = self.completer.as_mut().unwrap().on_key(key);
match rv {
CompleterState::Running => (),
CompleterState::Canceled => {
self.completer = None;
},
CompleterState::Selected(result) => {
self.completer = None;
let mut rpc = try!(self.client.call("buffer.open", &swiboe::plugin::buffer::open::Request {
uri: format!("file://{}", result),
}));
let response: swiboe::plugin::buffer::open::Response = rpc.wait_for().unwrap();
let mut buffer_views = self.buffer_views.write().unwrap();
let view_id = buffer_views.new_view(response.buffer_index, self.rustbox.width(), self.rustbox.height());
self.buffer_view_widget = Some(BufferViewWidget::new(view_id, try!(self.client.clone())));
},
}
} else {
if !try!(self.handle_key(key)) {
return Ok(false);
}
}
},
Err(e) => panic!("{}", e),
_ => { }
}
while let Ok(command) = self.gui_commands.try_recv() {
match command {
gui::command::GuiCommand::Quit => return Ok(false),
gui::command::GuiCommand::Redraw => (),
| on_key | identifier_name |
wechat_task.py | .join(self.out_path,"qrcode")
if not os.path.exists(self.out_qrcode_path):
os.makedirs(self.out_qrcode_path)
self.wx_cookie_path = os.path.join(self.out_path,"wx.info")
def __start_threads__(self):
for thread_id in range(1,self.threads):
thread_name = "Thread - " + str(thread_id)
thread = HtmlToPdfThreads(self.task_queue,thread_id,thread_name)
thread.start()
self.thread_list.append(thread)
def __data__(self,map=None):
data = {"userlang":"zh_CN","redirect_url":"","login_type":"3","token":"","lang":"","f":"json","ajax":"1"}
if map:
for key,value in map.items():
data[key] = value
return data
def __head__(self,heads=None):
head ={
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0",
"Referer": self.referer
}
if self.cookie:
head["Cookie"] = self.cookie
if heads:
for key,value in heads.items():
head[key] = value
return head
def __start_login__(self):
data = {"sessionid":str(time.time()).replace(".","")}
session,result = self.__http_request__(url=self.start_login_url,data=self.__data__(data),wait=1)
if result:
self.getqrcode(session)
def getqrcode(self,session):
time_str = str(time.time()).replace(".","")
new_getqrcode_url = self.getqrcode_url.replace("%s",time_str)
qrcode_path = os.path.join(self.out_qrcode_path,time_str + ".png")
self.__http_io_request__(url=new_getqrcode_url,session=session,path=qrcode_path)
log.warn("请使用微信扫描弹出的二维码图片用于登录微信公众号!")
try:
image = Image.open(qrcode_path)
image.show()
except Exception as e:
log.error(e)
raise Exception("获取二维码失败,请重试!")
self.getqrcodeStatus(session)
def getqrcodeStatus(self,session,t=6):
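# Poll the login-status endpoint until the QR code is resolved: status "3"
# means the code expired and a fresh one is generated, status "1" means the
# scan was confirmed and the actual login proceeds. The poll interval
# alternates between 6 and 7 seconds.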
while True:
session,result = self.__http_request__(method='get',url=self.ask_url,wait=t)
if not result:
return
if result.get("status") == "3":
log.warn("二维码已失效,请重新使用微信进行扫码!")
self.getqrcode(session)
return
if str(result.get("status")) == "1":
self.login(session)
return
if t == 6:
t = 7
else:
t = 6
def login(self,session):
data = {"lang":"zh_CN"}
session,result = self.__http_request__(url=self.login_url,data=self.__data__(data))
if not result:
return
redirect_url = result.get("redirect_url")
if not redirect_url:
return
token_compile = re.compile(r'.*token=(.*).*')
token = token_compile.findall(redirect_url)
if len(token) == 0:
return
token = token[0]
names = self.name.split(",")
self.__save_cookie__(session,token)
for name in names:
self.search_biz(session,token,name)
# Search for the official account by name
def search_biz(self,session,token,name,no=1,begin=0,count=5,total=0):
data = {
"action":"search_biz",
"begin":begin,
"count":count,
"query":name,
"token":token,
"lang":"zh_CN",
"f":"json",
"ajax":1
}
self.referer = ("https://mp.weixin.qq.com/cgi-bin/appmsg?t=media/appmsg_edit_v2&action=edit&isNew=1&type=10&createType=0&token=%s&lang=zh_CN") % (token)
session,result = self.__http_request__(method='get',url=self.search_biz_url,data=data)
if not result:
return
biz_list = result.get("list") # 公众号列表
biz_total = result.get("total") # 公众号总数量
if len(biz_list) == 0:
return
for biz in biz_list:
fakeid = biz.get("fakeid")
nickname = biz.get("nickname")
alias = biz.get("alias")
if nickname != name:
continue
wi_id = WechatSql.insert_info(fakeid,alias,nickname)
out_dir = os.path.join(self.out_path , nickname)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
begin = WechatSql.select_list_num(wi_id)
app_msg_cnt = self.list_ex(session,fakeid,token,out_dir,wi_id)
diz_dict ={}
if app_msg_cnt != 0:
diz_dict["wi_id"] = wi_id
diz_dict["name"] = name
diz_dict["total"] = app_msg_cnt
diz_dict["current"] = str(app_msg_cnt - begin)
diz_dict["html"] = os.path.join(out_dir,"html")
diz_dict["pdf"] = os.path.join(out_dir,"pdf")
self.diz_list.append(diz_dict)
return
begin = count + begin
if no <= biz_total:
self.search_biz(session,token,name,no,begin,count,biz_total)
def list_ex(self,session,fakeid,token,out_dir,wi_id,no=0,begin=0,count=5,app_msg_cnt=0):
data ={
"action":"list_ex",
"begin":str(begin),
"count":str(count),
"fakeid":str(fakeid),
"type":"9",
"query":"",
"token":str(token),
"lang":"zh_CN",
"f":"json",
"ajax":"1"
}
if begin < 0: # guard against a negative offset
return app_msg_cnt
if app_msg_cnt == 0: # first fetch the total article count
session,result = self.__http_request__(method='get',url=self.appmsg_url,data=data,session=session)
if not result:
return app_msg_cnt
app_msg_cnt = result.get("app_msg_cnt")
nums = str(app_msg_cnt/10).split(".")
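# The string gymnastics below round app_msg_cnt down to a multiple of the
# page size (5): e.g. 127 -> "12.7" -> start = 127 - 7 + 5 = 125.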
if int(nums[1]) >= 5:
start = app_msg_cnt - int(nums[1]) + 5
else:
start = app_msg_cnt - int(nums[1])
self.list_ex(session,fakeid,token,out_dir,wi_id,begin=start, app_msg_cnt = app_msg_cnt) # set the starting article offset and the total article count
return app_msg_cnt
session,result = self.__http_request__(method='get',url=self.appmsg_url,data=data,session=session)
if not result:
return app_msg_cnt
app_msg_cnt = result.get("app_msg_cnt")
app_msg_list = result.get("app_msg_list")
if len(app_msg_list) == 0:
return app_msg_cnt
for app in list(reversed(app_msg_list)):
link = app.get("link")
title = app.get("title")
digest = app.get("digest")
title_list = WechatSql.select_list_title(wi_id,begin)
if title in title_list:
continue
i_date = str(time.time()).replace(".","")
WechatSql.insert_list(wi_id,no,title,link,digest,i_date)
self.__get_article_details__(no,title,link,out_dir)
no = no + 1
begin = begin - count
self.list_ex(session,fakeid,token,out_dir,wi_id,no,begin,count,app_msg_cnt)
def __get_article_details__(self,no,title,link,out_dir):
filters = {'/','\\','?','*',':','"','<','>','|',' ','?','(',')','!',',','“',"”"}
for filter in filters:
title = title.replace(filter,"")
html_path = os.path.join(out_dir,"html")
pdf_path = os.path.join(out_dir,"pdf")
image_path = os.path.join(html_path,"image")
if not os.path.exist | os.makedirs(image_path)
if not os.path.exists(pdf_path):
os.makedirs(pdf_path)
html_file = os.path.join(html_path,str(no)+ "-" +title+".html")
pdf_file = os.path.join(pdf_path,str(no)+ "-" +title+".pdf")
if os.path.exists(pdf_file): # if the PDF already exists, skip regenerating it; otherwise continue
return
if not os.path.exists(html_file):
content = self.__get_content__(link,image_path)
with open(html_file,"w") as f:
f.write(content)
f.flush()
f.close()
task_info = {"html":html_file,"pdf":pdf_file}
self.task_queue.put(task_info)
def __get_content__(self,link,image_path):
self.referer = link
session,content = self.__http_request__(method="get",url=link,flag=True)
if not content | s(image_path):
| conditional_block |
wechat_task.py | 请使用微信扫描弹出的二维码图片用于登录微信公众号!")
try:
image = Image.open(qrcode_path)
image.show()
except Exception as e:
log.error(e)
raise Exception("获取二维码失败,请重试!")
self.getqrcodeStatus(session)
def getqrcodeStatus(self,session,t=6):
while True:
session,result = self.__http_request__(method='get',url=self.ask_url,wait=t)
if not result:
return
if result.get("status") == "3":
log.warn("二维码已失效,请重新使用微信进行扫码!")
self.getqrcode(session)
return
if str(result.get("status")) == "1":
self.login(session)
return
if t == 6:
t = 7
else:
t = 6
def login(self,session):
data = {"lang":"zh_CN"}
session,result = self.__http_request__(url=self.login_url,data=self.__data__(data))
if not result:
return
redirect_url = result.get("redirect_url")
if not redirect_url:
return
token_compile = re.compile(r'.*token=(.*).*')
token = token_compile.findall(redirect_url)
if len(token) == 0:
return
token = token[0]
names = self.name.split(",")
self.__save_cookie__(session,token)
for name in names:
self.search_biz(session,token,name)
# Search for the official account by name
def search_biz(self,session,token,name,no=1,begin=0,count=5,total=0):
data = {
"action":"search_biz",
"begin":begin,
"count":count,
"query":name,
"token":token,
"lang":"zh_CN",
"f":"json",
"ajax":1
}
self.referer = ("https://mp.weixin.qq.com/cgi-bin/appmsg?t=media/appmsg_edit_v2&action=edit&isNew=1&type=10&createType=0&token=%s&lang=zh_CN") % (token)
session,result = self.__http_request__(method='get',url=self.search_biz_url,data=data)
if not result:
return
biz_list = result.get("list") # 公众号列表
biz_total = result.get("total") # 公众号总数量
if len(biz_list) == 0:
return
for biz in biz_list:
fakeid = biz.get("fakeid")
nickname = biz.get("nickname")
alias = biz.get("alias")
if nickname != name:
continue
wi_id = WechatSql.insert_info(fakeid,alias,nickname)
out_dir = os.path.join(self.out_path , nickname)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
begin = WechatSql.select_list_num(wi_id)
app_msg_cnt = self.list_ex(session,fakeid,token,out_dir,wi_id)
diz_dict ={}
if app_msg_cnt != 0:
diz_dict["wi_id"] = wi_id
diz_dict["name"] = name
diz_dict["total"] = app_msg_cnt
diz_dict["current"] = str(app_msg_cnt - begin)
diz_dict["html"] = os.path.join(out_dir,"html")
diz_dict["pdf"] = os.path.join(out_dir,"pdf")
self.diz_list.append(diz_dict)
return
begin = count + begin
if no <= biz_total:
self.search_biz(session,token,name,no,begin,count,biz_total)
def list_ex(self,session,fakeid,token,out_dir,wi_id,no=0,begin=0,count=5,app_msg_cnt=0):
data ={
"action":"list_ex",
"begin":str(begin),
"count":str(count),
"fakeid":str(fakeid),
"type":"9",
"query":"",
"token":str(token),
"lang":"zh_CN",
"f":"json",
"ajax":"1"
}
if begin < 0: # guard against a negative offset
return app_msg_cnt
if app_msg_cnt == 0: # first fetch the total article count
session,result = self.__http_request__(method='get',url=self.appmsg_url,data=data,session=session)
if not result:
return app_msg_cnt
app_msg_cnt = result.get("app_msg_cnt")
nums = str(app_msg_cnt/10).split(".")
if int(nums[1]) >= 5:
start = app_msg_cnt - int(nums[1]) + 5
else:
start = app_msg_cnt - int(nums[1])
self.list_ex(session,fakeid,token,out_dir,wi_id,begin=start, app_msg_cnt = app_msg_cnt) # set the starting article offset and the total article count
return app_msg_cnt
session,result = self.__http_request__(method='get',url=self.appmsg_url,data=data,session=session)
if not result:
return app_msg_cnt
app_msg_cnt = result.get("app_msg_cnt")
app_msg_list = result.get("app_msg_list")
if len(app_msg_list) == 0:
return app_msg_cnt
for app in list(reversed(app_msg_list)):
link = app.get("link")
title = app.get("title")
digest = app.get("digest")
title_list = WechatSql.select_list_title(wi_id,begin)
if title in title_list:
continue
i_date = str(time.time()).replace(".","")
WechatSql.insert_list(wi_id,no,title,link,digest,i_date)
self.__get_article_details__(no,title,link,out_dir)
no = no + 1
begin = begin - count
self.list_ex(session,fakeid,token,out_dir,wi_id,no,begin,count,app_msg_cnt)
def __get_article_details__(self,no,title,link,out_dir):
filters = {'/','\\','?','*',':','"','<','>','|',' ','?','(',')','!',',','“',"”"}
for filter in filters:
title = title.replace(filter,"")
html_path = os.path.join(out_dir,"html")
pdf_path = os.path.join(out_dir,"pdf")
image_path = os.path.join(html_path,"image")
if not os.path.exists(image_path):
os.makedirs(image_path)
if not os.path.exists(pdf_path):
os.makedirs(pdf_path)
html_file = os.path.join(html_path,str(no)+ "-" +title+".html")
pdf_file = os.path.join(pdf_path,str(no)+ "-" +title+".pdf")
if os.path.exists(pdf_file): # if the PDF already exists, skip regenerating it; otherwise continue
return
if not os.path.exists(html_file):
content = self.__get_content__(link,image_path)
with open(html_file,"w") as f:
f.write(content)
f.flush()
f.close()
task_info = {"html":html_file,"pdf":pdf_file}
self.task_queue.put(task_info)
def __get_content__(self,link,image_path):
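# Fetch the article HTML, download every image referenced via data-src into
# the local ./image/ directory, then rewrite the HTML so the saved page
# loads the local copies instead of the remote URLs.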
self.referer = link
session,content = self.__http_request__(method="get",url=link,flag=True)
if not content:
return
src_compile = re.compile(r'data-src=\"(.*?)\"')
src_urls = src_compile.findall(content)
if len(src_urls) == 0:
return
for img_url in src_urls:
if not (img_url.startswith("http://") or img_url.startswith("https://")):
continue
img_url_compile = re.compile("wx_fmt=(.*)?")
img = img_url_compile.findall(img_url)
suffix = ".png"
if len(img)>0:
suffix = "."+ str(img[0])
img_name = str(time.time()).replace(".","") + suffix
img_file = os.path.join(image_path,img_name)
self.__http_io_request__(url=img_url,path=img_file)
self.img_path_dict[img_url] = "./image/"+img_name
content = content.replace("data-src","src")
for key,value in self.img_path_dict.items():
content = content.replace(key,value)
return content
def __http_io_request__(self,method='get',url=None,data=None,headers=None,session=requests.session(),stream=True,path=None):
if method =='get':
resp = session.get(url=url,params=data,headers=self.__head__(headers),stream=stream)
else:
resp = session.post(url=url,data=data,headers=self.__head__(headers),stream=stream)
if resp.statu | s_code == 200:
with open(path, 'wb+') as f:
for chunk in resp.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
f.flush()
f.close()
return session,True
time.sleep(1)
return session,False
def __http_request__(self,method='post',url=None,data=None,headers=None,session=requests.session(),wait=5,flag=False):
time.sleep(wait)
if method == "get":
resp = session.get(url = url, params = data, headers = sel | identifier_body |
|
wechat_task.py | .referer
}
if self.cookie:
head["Cookie"] = self.cookie
if heads:
for key,value in heads.items():
head[key] = value
return head
def __start_login__(self):
data = {"sessionid":str(time.time()).replace(".","")}
session,result = self.__http_request__(url=self.start_login_url,data=self.__data__(data),wait=1)
if result:
self.getqrcode(session)
def getqrcode(self,session):
time_str = str(time.time()).replace(".","")
new_getqrcode_url = self.getqrcode_url.replace("%s",time_str)
qrcode_path = os.path.join(self.out_qrcode_path,time_str + ".png")
self.__http_io_request__(url=new_getqrcode_url,session=session,path=qrcode_path)
log.warn("请使用微信扫描弹出的二维码图片用于登录微信公众号!")
try:
image = Image.open(qrcode_path)
image.show()
except Exception as e:
log.error(e)
raise Exception("获取二维码失败,请重试!")
self.getqrcodeStatus(session)
def getqrcodeStatus(self,session,t=6):
while True:
session,result = self.__http_request__(method='get',url=self.ask_url,wait=t)
if not result:
return
if result.get("status") == "3":
log.warn("二维码已失效,请重新使用微信进行扫码!")
self.getqrcode(session)
return
if str(result.get("status")) == "1":
self.login(session)
return
if t == 6:
t = 7
else:
t = 6
def login(self,session):
data = {"lang":"zh_CN"}
session,result = self.__http_request__(url=self.login_url,data=self.__data__(data))
if not result:
return
redirect_url = result.get("redirect_url")
if not redirect_url:
return
token_compile = re.compile(r'.*token=(.*).*')
token = token_compile.findall(redirect_url)
if len(token) == 0:
return
token = token[0]
names = self.name.split(",")
self.__save_cookie__(session,token)
for name in names:
self.search_biz(session,token,name)
# Search for the official account by name
def search_biz(self,session,token,name,no=1,begin=0,count=5,total=0):
data = {
"action":"search_biz",
"begin":begin,
"count":count,
"query":name,
"token":token,
"lang":"zh_CN",
"f":"json",
"ajax":1
}
self.referer = ("https://mp.weixin.qq.com/cgi-bin/appmsg?t=media/appmsg_edit_v2&action=edit&isNew=1&type=10&createType=0&token=%s&lang=zh_CN") % (token)
session,result = self.__http_request__(method='get',url=self.search_biz_url,data=data)
if not result:
return
biz_list = result.get("list") # 公众号列表
biz_total = result.get("total") # 公众号总数量
if len(biz_list) == 0:
return
for biz in biz_list:
fakeid = biz.get("fakeid")
nickname = biz.get("nickname")
alias = biz.get("alias")
if nickname != name:
continue
wi_id = WechatSql.insert_info(fakeid,alias,nickname)
out_dir = os.path.join(self.out_path , nickname)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
begin = WechatSql.select_list_num(wi_id)
app_msg_cnt = self.list_ex(session,fakeid,token,out_dir,wi_id)
diz_dict ={}
if app_msg_cnt != 0:
diz_dict["wi_id"] = wi_id
diz_dict["name"] = name
diz_dict["total"] = app_msg_cnt
diz_dict["current"] = str(app_msg_cnt - begin)
diz_dict["html"] = os.path.join(out_dir,"html")
diz_dict["pdf"] = os.path.join(out_dir,"pdf")
self.diz_list.append(diz_dict)
return
begin = count + begin
if no <= biz_total:
self.search_biz(session,token,name,no,begin,count,biz_total)
def list_ex(self,session,fakeid,token,out_dir,wi_id,no=0,begin=0,count=5,app_msg_cnt=0):
data ={
"action":"list_ex",
"begin":str(begin),
"count":str(count),
"fakeid":str(fakeid),
"type":"9",
"query":"",
"token":str(token),
"lang":"zh_CN",
"f":"json",
"ajax":"1"
}
if begin < 0: # guard against a negative offset
return app_msg_cnt
if app_msg_cnt == 0: # first fetch the total article count
session,result = self.__http_request__(method='get',url=self.appmsg_url,data=data,session=session)
if not result:
return app_msg_cnt
app_msg_cnt = result.get("app_msg_cnt")
nums = str(app_msg_cnt/10).split(".")
if int(nums[1]) >= 5:
start = app_msg_cnt - int(nums[1]) + 5
else:
start = app_msg_cnt - int(nums[1])
self.list_ex(session,fakeid,token,out_dir,wi_id,begin=start, app_msg_cnt = app_msg_cnt) # set the starting article offset and the total article count
return app_msg_cnt
session,result = self.__http_request__(method='get',url=self.appmsg_url,data=data,session=session)
if not result:
return app_msg_cnt
app_msg_cnt = result.get("app_msg_cnt")
app_msg_list = result.get("app_msg_list")
if len(app_msg_list) == 0:
return app_msg_cnt
for app in list(reversed(app_msg_list)):
link = app.get("link")
title = app.get("title")
digest = app.get("digest")
title_list = WechatSql.select_list_title(wi_id,begin)
if title in title_list:
continue
i_date = str(time.time()).replace(".","")
WechatSql.insert_list(wi_id,no,title,link,digest,i_date)
self.__get_article_details__(no,title,link,out_dir)
no = no + 1
begin = begin - count
self.list_ex(session,fakeid,token,out_dir,wi_id,no,begin,count,app_msg_cnt)
def __get_article_details__(self,no,title,link,out_dir):
filters = {'/','\\','?','*',':','"','<','>','|',' ','?','(',')','!',',','“',"”"}
for filter in filters:
title = title.replace(filter,"")
html_path = os.path.join(out_dir,"html")
pdf_path = os.path.join(out_dir,"pdf")
image_path = os.path.join(html_path,"image")
if not os.path.exists(image_path):
os.makedirs(image_path)
if not os.path.exists(pdf_path):
os.makedirs(pdf_path)
html_file = os.path.join(html_path,str(no)+ "-" +title+".html")
pdf_file = os.path.join(pdf_path,str(no)+ "-" +title+".pdf")
if os.path.exists(pdf_file): # if the PDF already exists, skip regenerating it; otherwise continue
return
if not os.path.exists(html_file):
content = self.__get_content__(link,image_path)
with open(html_file,"w") as f:
f.write(content)
f.flush()
f.close()
task_info = {"html":html_file,"pdf":pdf_file}
self.task_queue.put(task_info)
def __get_content__(self,link,image_path):
self.referer = link
session,content = self.__http_request__(method="get",url=link,flag=True)
if not content:
return
src_compile = re.compile(r'data-src=\"(.*?)\"')
src_urls = src_compile.findall(content)
if len(src_urls) == 0:
return
for img_url in src_urls:
if not (img_url.startswith("http://") or img_url.startswith("https://")):
continue
img_url_compile = re.compile("wx_fmt=(.*)?")
img = img_url_compile.findall(img_url)
suffix = ".png"
if len(img)>0:
suffix = "."+ str(img[0])
img_name = str(time.time()).replace(".","") + suffix
img_file = os.path.join(image_path,img_name)
self.__http_io_request__(url=img_url,path=img_file)
self.img_path_dict[img_url] = "./image/"+img_name
content = content.replace("data-src","src")
for key,value in self.img_path_dict.items():
content = content.replace(key,value)
return content
| def __http_io_request__(self,method='get',url=None,data=None,headers=None,session=requests.session(),stream=True,path=None): | random_line_split |
|
wechat_task.py | .join(self.out_path,"qrcode")
if not os.path.exists(self.out_qrcode_path):
os.makedirs(self.out_qrcode_path)
self.wx_cookie_path = os.path.join(self.out_path,"wx.info")
def __start_threads__(self):
for thread_id in range(1,self.threads):
thread_name = "Thread - " + str(thread_id)
thread = HtmlToPdfThreads(self.task_queue,thread_id,thread_name)
thread.start()
self.thread_list.append(thread)
def __data__(self,map=None):
data = {"userlang":"zh_CN","redirect_url":"","login_type":"3","token":"","lang":"","f":"json","ajax":"1"}
if map:
for key,value in map.items():
data[key] = value
return data
def __head__(self,heads=None):
head ={
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0",
"Referer": self.referer
}
if self.cookie:
head["Cookie"] = self.cookie
if heads:
for key,value in heads.items():
head[key] = value
return head
def __start_login__(self):
data = {"sessionid":str(time.time()).replace(".","")}
session,result = self.__http_request__(url=self.start_login_url,data=self.__data__(data),wait=1)
if result:
self.getqrcode(session)
def getqrcode(self,session):
time_str = str(time.time()).replace(".","")
new_getqrcode_url = self.getqrcode_url.replace("%s",time_str)
qrcode_path = os.path.join(self.out_qrcode_path,time_str + ".png")
self.__http_io_request__(url=new_getqrcode_url,session=session,path=qrcode_path)
log.warn("请使用微信扫描弹出的二维码图片用于登录微信公众号!")
try:
image = Image.open(qrcode_path)
image.show()
except Exception as e:
log.error(e)
raise Exception("获取二维码失败,请重试!")
self.getqrcodeStatus(session)
def getqrcodeStatus(self,session,t=6):
while True:
session,result = self.__http_request__(method='get',url=self.ask_url,wait=t)
if not result:
return
if result.get("status") == "3":
log.warn("二维码已失效,请重新使用微信进行扫码!")
self.getqrcode(session)
return
if str(result.get("status")) == "1":
self.login(session)
return
if t == 6:
t = 7
else:
t = 6
def login(self,session):
data = {"lang":"zh_CN"}
session,result = self.__http_request__(url=self.login_url,data=self.__data__(data))
if not result:
return
redirect_url = result.get("redirect_url")
if not redirect_url:
return
token_compile = re.compile(r'.*token=(.*).*')
token = token_compile.findall(redirect_url)
if len(token) == 0:
return
token = token[0]
names = self.name.split(",")
self.__save_cookie__(session,token)
for name in names:
self.search_biz(session,token,name)
# Search for the official account by name
def search_biz(self,session,token,name,no=1,begin=0,count=5,total=0):
data = {
"action":"search_biz",
"begin":begin,
"count":count,
"query":name,
"token":token,
"lang":"zh_CN",
"f":"json",
"ajax":1
}
self.referer = ("https://mp.weixin.qq.com/cgi-bin/appmsg?t=media/appmsg_edit_v2&action=edit&isNew=1&type=10&createType=0&token=%s&lang=zh_CN") % (token)
session,result = self.__http_request__(method='get',url=self.search_biz_url,data=data)
if not result:
return
biz_list = result.get("list") # 公众号列表
biz_total = result.get("total") # 公众号总数量
if len(biz_list) == 0:
return
for biz in biz_list:
fakeid = biz.get("fakeid")
nickname = biz.get("nickname")
alias = biz.get("alias")
if nickname != name:
continue
wi_id = WechatSql.insert_info(fakeid,alias,nickname)
out_dir = os.path.join(self.out_path , nickname)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
begin = WechatSql.select_list_num(wi_id)
app_msg_cnt = self.list_ex(session,fakeid,token,out_dir,wi_id)
diz_dict ={}
if app_msg_cnt != 0:
diz_dict["wi_id"] = wi_id
diz_dict["name"] = name
diz_dict["total"] = app_msg_cnt
diz_dict["current"] = str(app_msg_cnt - begin)
diz_dict["html"] = os.path.join(out_dir,"html")
diz_dict["pdf"] = os.path.join(out_dir,"pdf")
self.diz_list.append(diz_dict)
return
begin = count + begin
if no <= biz_total:
self.search_biz(session,token,name,no,begin,count,biz_total)
def list_ex(self,session,fakeid,token,out_dir,wi_id,no=0,begin=0,count=5,app_msg_cnt=0):
data ={
"action":"list_ex",
"begin":str(begin),
"count":str(count),
"fakeid":str(fakeid),
"type":"9",
"query":"",
"token":str(token),
"lang":"zh_CN",
"f":"json",
"ajax":"1"
}
if begin < 0: # guard against a negative offset
return app_msg_cnt
if app_msg_cnt == 0: # first fetch the total article count
session,result = self.__http_request__(method='get',url=self.appmsg_url,data=data,session=session)
if not result:
return app_msg_cnt
app_msg_cnt = result.get("app_msg_cnt")
nums = str(app_msg_cnt/10).split(".")
if int(nums[1]) >= 5:
start = app_msg_cnt - int(nums[1]) + 5
else:
start = app_msg_cnt - int(nums[1])
self.list_ex(session,fakeid,token,out_dir,wi_id,begin=start, app_msg_cnt = app_msg_cnt) # set the starting article offset and the total article count
return app_msg_cnt
session,result = self.__http_request__(method='get',url=self.appmsg_url,data=data,session=session)
if not result:
return app_msg_cnt
app_msg_cnt = result.get("app_msg_cnt")
app_msg_list = result.get("app_msg_list")
if len(app_msg_list) == 0:
return app_msg_cnt
for app in list(reversed(app_msg_list)):
link = app.get("link")
title = app.get("title")
digest = app.get("digest")
title_list = WechatSql.select_list_title(wi_id,begin)
if title in title_list:
continue
i_date = str(time.time()).replace(".","")
WechatSql.insert_list(wi_id,no,title,link,digest,i_date)
self.__get_article_details__(no,title,link,out_dir)
no = no + 1
begin = begin - count
self.list_ex(session,fakeid,token,out_dir,wi_id,no,begin,count,app_msg_cnt)
def __get_article_details__(self,no,title,link,out_dir):
filters = {'/','\\','?','*',':','"','<','>','|',' ','?','(',')','!',',','“',"”"}
for filter in filters:
title = title | html_path = os.path.join(out_dir,"html")
pdf_path = os.path.join(out_dir,"pdf")
image_path = os.path.join(html_path,"image")
if not os.path.exists(image_path):
os.makedirs(image_path)
if not os.path.exists(pdf_path):
os.makedirs(pdf_path)
html_file = os.path.join(html_path,str(no)+ "-" +title+".html")
pdf_file = os.path.join(pdf_path,str(no)+ "-" +title+".pdf")
if os.path.exists(pdf_file): # if the PDF already exists, skip regenerating it; otherwise continue
return
if not os.path.exists(html_file):
content = self.__get_content__(link,image_path)
with open(html_file,"w") as f:
f.write(content)
f.flush()
f.close()
task_info = {"html":html_file,"pdf":pdf_file}
self.task_queue.put(task_info)
def __get_content__(self,link,image_path):
self.referer = link
session,content = self.__http_request__(method="get",url=link,flag=True)
if not content:
| .replace(filter,"")
| identifier_name |
write.rs | }
/// Creates a new builder instance for a binary STL file.
pub fn | () -> Self {
Self::new(Encoding::Binary)
}
/// Creates a new builder instance for an ASCII STL file.
///
/// **Note**: please don't use this. STL ASCII files are even more space
/// inefficient than binary STL files. If you can avoid it, never use ASCII
/// STL. In fact, consider not using STL at all.
pub fn ascii() -> Self {
Self::new(Encoding::Ascii)
}
/// Sets the solid name for this file.
///
/// The given name must be an ASCII string (otherwise the function panics).
/// If a binary file is written, only 76 bytes of the string are written to
/// the file.
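///
/// # Example
///
/// An illustrative sketch only (the import path is a hypothetical
/// assumption): a non-ASCII name trips the `assert!` below.
///
/// ```should_panic
/// # use lox::io::stl::Config; // hypothetical import path
/// Config::binary().with_solid_name("Würfel");
/// ```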
pub fn with_solid_name(self, name: impl Into<String>) -> Self {
let solid_name = name.into();
assert!(solid_name.is_ascii());
Self {
solid_name,
.. self
}
}
/// Creates a [`Writer`] with `self` as config.
pub fn into_writer<W: io::Write>(self, writer: W) -> Writer<W> {
Writer::new(self, writer)
}
}
// ===============================================================================================
// ===== STL Writer
// ===============================================================================================
/// A writer able to write binary and ASCII STL files. Implements
/// [`StreamSink`].
#[derive(Debug)]
pub struct Writer<W: io::Write> {
config: Config,
writer: W,
}
impl<W: io::Write> Writer<W> {
/// Creates a new STL writer with the given STL config which will write to
/// the given `io::Write` instance.
pub fn new(config: Config, writer: W) -> Self {
Self { config, writer }
}
/// Low level function to write STL files.
///
/// You usually don't need to use this function directly and instead use a
/// high level interface. This function is still exposed to give you more
/// or less complete control.
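///
/// # Example
///
/// A minimal sketch, not taken from the crate's docs (the import path is a
/// hypothetical assumption): writes a single triangle into an in-memory
/// `Vec<u8>` sink.
///
/// ```no_run
/// # use lox::io::stl::{Config, RawTriangle}; // hypothetical import path
/// let tri = RawTriangle {
///     vertices: [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
///     normal: [0.0, 0.0, 1.0],
///     attribute_byte_count: 0,
/// };
/// let writer = Config::binary().into_writer(Vec::new());
/// writer.write_raw(1, std::iter::once(Ok(tri))).unwrap();
/// ```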
pub fn write_raw(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
}
#[inline(never)]
pub fn write_raw_binary(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
// First, a useless 80-byte header that must not begin with "solid".
// We try to fit the solid name in it.
let name_len = cmp::min(config.solid_name.len(), 76);
let signature = format!("LOX {}", &config.solid_name[..name_len]);
let padding = vec![b' '; 80 - signature.len()];
w.write_all(signature.as_bytes())?;
w.write_all(&padding)?;
// Next, number of triangles
w.write_u32::<LittleEndian>(num_triangles)?;
const TRI_SIZE: usize = 4 * 3 * 4 + 2;
let mut buf = [0; TRI_SIZE];
for triangle in triangles {
let triangle = triangle?;
// Write face normal
LittleEndian::write_f32(&mut buf[00..04], triangle.normal[0]);
LittleEndian::write_f32(&mut buf[04..08], triangle.normal[1]);
LittleEndian::write_f32(&mut buf[08..12], triangle.normal[2]);
LittleEndian::write_f32(&mut buf[12..16], triangle.vertices[0][0]);
LittleEndian::write_f32(&mut buf[16..20], triangle.vertices[0][1]);
LittleEndian::write_f32(&mut buf[20..24], triangle.vertices[0][2]);
LittleEndian::write_f32(&mut buf[24..28], triangle.vertices[1][0]);
LittleEndian::write_f32(&mut buf[28..32], triangle.vertices[1][1]);
LittleEndian::write_f32(&mut buf[32..36], triangle.vertices[1][2]);
LittleEndian::write_f32(&mut buf[36..40], triangle.vertices[2][0]);
LittleEndian::write_f32(&mut buf[40..44], triangle.vertices[2][1]);
LittleEndian::write_f32(&mut buf[44..48], triangle.vertices[2][2]);
LittleEndian::write_u16(&mut buf[48..50], triangle.attribute_byte_count);
w.write_all(&buf)?;
}
Ok(())
}
#[inline(never)]
pub fn write_raw_ascii(
self,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
writeln!(w, "solid {}", config.solid_name)?;
for triangle in triangles {
let triangle = triangle?;
// Write face normal
write!(w, " facet normal ")?;
write_ascii_vector(&mut w, triangle.normal)?;
writeln!(w, "")?;
// Write all vertex positions
writeln!(w, " outer loop")?;
for &vertex_pos in &triangle.vertices {
write!(w, " vertex ")?;
write_ascii_vector(&mut w, vertex_pos)?;
writeln!(w, "")?;
}
writeln!(w, " endloop")?;
writeln!(w, " endfacet")?;
}
writeln!(w, "endsolid {}", config.solid_name)?;
Ok(())
}
}
impl<W: io::Write> StreamSink for Writer<W> {
#[inline(never)]
fn transfer_from<S: MemSource>(self, src: &S) -> Result<(), Error> {
// Make sure we have positions
if src.vertex_position_type().is_none() {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
}
let mesh = src.core_mesh();
let has_normals = src.face_normal_type().is_some();
// The triangle iterator
let triangles = mesh.face_handles().map(|fh| {
let mut it = mesh.vertices_around_face(fh);
let va = it.next().expect("bug: less than 3 vertices around face");
let vb = it.next().expect("bug: less than 3 vertices around face");
let vc = it.next().expect("bug: less than 3 vertices around face");
// Make sure this is a triangle face. Note: we do not check
// `mesh.is_tri_mesh()` in the beginning, as we also want to be
// able to serialize triangle meshes whose type does not implement
// `TriMesh`. We only want to error if there is actually a non-tri
// face.
if it.next().is_some() {
return Err(Error::new(|| ErrorKind::StreamSinkDoesNotSupportPolygonFaces));
}
// Get positions from map and convert them to array
let get_v = |vh| -> Result<[f32; 3], Error> {
src.vertex_position::<f32>(vh)
.and_then(|opt| {
opt.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: format!("no position for {:?} while writing STL", vh),
}))
})
.map(|p| p.convert()) // to array form
};
let vertices = [get_v(va)?, get_v(vb)?, get_v(vc)?];
let normal = if has_normals {
src.face_normal::<f32>(fh)?
.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::FaceNormal,
msg: format!("no normal for {:?} while writing STL", fh),
}))?
.convert() // to array form
} else {
calc_normal(&vertices)
};
Ok(RawTriangle {
vertices,
normal,
// As Wikipedia beautifully put it: "this should be zero
// because most software does not understand anything else."
// Great. Some people abuse this to store color or other
// information. This is terrible, we won't do that.
attribute_byte_count: 0,
})
});
let face_count = mesh.num_faces().try_into().map_err(|_| {
Error::new(|| ErrorKind::SinkIncompatible(
"STL only supports 2^32 triangles, but mesh contains more faces".into()
))
})?;
self.write_raw(face_count, triangles)
}
}
// ===============================================================================================
// ===== Helper functions
// ===============================================================================================
/// Calculates the normal of the face defined by the three vertices in CCW order.
fn calc_normal(positions: &[[f32; 3]; 3]) -> [f32; 3] {
let pos_a = positions[0].to_point3();
| binary | identifier_name |
write.rs | /// Creates a new builder instance for a binary STL file.
pub fn binary() -> Self {
Self::new(Encoding::Binary)
}
/// Creates a new builder instance for an ASCII STL file.
///
/// **Note**: please don't use this. STL ASCII files are even more space
/// inefficient than binary STL files. If you can avoid it, never use ASCII
/// STL. In fact, consider not using STL at all.
pub fn ascii() -> Self {
Self::new(Encoding::Ascii)
}
/// Sets the solid name for this file.
///
/// The given name must be an ASCII string (otherwise the function panics).
/// If a binary file is written, only 76 bytes of the string are written to
/// the file.
pub fn with_solid_name(self, name: impl Into<String>) -> Self {
let solid_name = name.into();
assert!(solid_name.is_ascii());
Self {
solid_name,
.. self
}
}
/// Creates a [`Writer`] with `self` as config.
pub fn into_writer<W: io::Write>(self, writer: W) -> Writer<W> {
Writer::new(self, writer)
}
}
// ===============================================================================================
// ===== STL Writer
// ===============================================================================================
/// A writer able to write binary and ASCII STL files. Implements
/// [`StreamSink`].
#[derive(Debug)]
pub struct Writer<W: io::Write> {
config: Config,
writer: W,
}
impl<W: io::Write> Writer<W> {
/// Creates a new STL writer with the given STL config which will write to
/// the given `io::Write` instance.
pub fn new(config: Config, writer: W) -> Self {
Self { config, writer }
}
/// Low level function to write STL files.
///
/// You usually don't need to use this function directly and instead use a
/// high level interface. This function is still exposed to give you more
/// or less complete control.
pub fn write_raw(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> |
#[inline(never)]
pub fn write_raw_binary(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
// First, a useless 80-byte header that must not begin with "solid".
// We try to fit the solid name in it.
let name_len = cmp::min(config.solid_name.len(), 76);
let signature = format!("LOX {}", &config.solid_name[..name_len]);
let padding = vec![b' '; 80 - signature.len()];
w.write_all(signature.as_bytes())?;
w.write_all(&padding)?;
// Next, number of triangles
w.write_u32::<LittleEndian>(num_triangles)?;
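// Each triangle record is 50 bytes, all little-endian: 12 bytes face
// normal (3 x f32), 36 bytes vertex positions (3 x 3 x f32), and a 2 byte
// attribute byte count; hence `4 * 3 * 4 + 2` below.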
const TRI_SIZE: usize = 4 * 3 * 4 + 2;
let mut buf = [0; TRI_SIZE];
for triangle in triangles {
let triangle = triangle?;
// Write face normal
LittleEndian::write_f32(&mut buf[00..04], triangle.normal[0]);
LittleEndian::write_f32(&mut buf[04..08], triangle.normal[1]);
LittleEndian::write_f32(&mut buf[08..12], triangle.normal[2]);
LittleEndian::write_f32(&mut buf[12..16], triangle.vertices[0][0]);
LittleEndian::write_f32(&mut buf[16..20], triangle.vertices[0][1]);
LittleEndian::write_f32(&mut buf[20..24], triangle.vertices[0][2]);
LittleEndian::write_f32(&mut buf[24..28], triangle.vertices[1][0]);
LittleEndian::write_f32(&mut buf[28..32], triangle.vertices[1][1]);
LittleEndian::write_f32(&mut buf[32..36], triangle.vertices[1][2]);
LittleEndian::write_f32(&mut buf[36..40], triangle.vertices[2][0]);
LittleEndian::write_f32(&mut buf[40..44], triangle.vertices[2][1]);
LittleEndian::write_f32(&mut buf[44..48], triangle.vertices[2][2]);
LittleEndian::write_u16(&mut buf[48..50], triangle.attribute_byte_count);
w.write_all(&buf)?;
}
Ok(())
}
#[inline(never)]
pub fn write_raw_ascii(
self,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
writeln!(w, "solid {}", config.solid_name)?;
for triangle in triangles {
let triangle = triangle?;
// Write face normal
write!(w, " facet normal ")?;
write_ascii_vector(&mut w, triangle.normal)?;
writeln!(w, "")?;
// Write all vertex positions
writeln!(w, " outer loop")?;
for &vertex_pos in &triangle.vertices {
write!(w, " vertex ")?;
write_ascii_vector(&mut w, vertex_pos)?;
writeln!(w, "")?;
}
writeln!(w, " endloop")?;
writeln!(w, " endfacet")?;
}
writeln!(w, "endsolid {}", config.solid_name)?;
Ok(())
}
}
impl<W: io::Write> StreamSink for Writer<W> {
#[inline(never)]
fn transfer_from<S: MemSource>(self, src: &S) -> Result<(), Error> {
// Make sure we have positions
if src.vertex_position_type().is_none() {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
}
let mesh = src.core_mesh();
let has_normals = src.face_normal_type().is_some();
// The triangle iterator
let triangles = mesh.face_handles().map(|fh| {
let mut it = mesh.vertices_around_face(fh);
let va = it.next().expect("bug: less than 3 vertices around face");
let vb = it.next().expect("bug: less than 3 vertices around face");
let vc = it.next().expect("bug: less than 3 vertices around face");
// Make sure this is a triangle face. Note: we do not check
// `mesh.is_tri_mesh()` in the beginning, as we also want to be
// able to serialize triangle meshes whose type does not implement
// `TriMesh`. We only want to error if there is actually a non-tri
// face.
if it.next().is_some() {
return Err(Error::new(|| ErrorKind::StreamSinkDoesNotSupportPolygonFaces));
}
// Get positions from map and convert them to array
let get_v = |vh| -> Result<[f32; 3], Error> {
src.vertex_position::<f32>(vh)
.and_then(|opt| {
opt.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: format!("no position for {:?} while writing STL", vh),
}))
})
.map(|p| p.convert()) // to array form
};
let vertices = [get_v(va)?, get_v(vb)?, get_v(vc)?];
let normal = if has_normals {
src.face_normal::<f32>(fh)?
.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::FaceNormal,
msg: format!("no normal for {:?} while writing STL", fh),
}))?
.convert() // to array form
} else {
calc_normal(&vertices)
};
Ok(RawTriangle {
vertices,
normal,
// As Wikipedia beautifully put it: "this should be zero
// because most software does not understand anything else."
// Great. Some people abuse this to store color or other
// information. This is terrible, we won't do that.
attribute_byte_count: 0,
})
});
let face_count = mesh.num_faces().try_into().map_err(|_| {
Error::new(|| ErrorKind::SinkIncompatible(
"STL only supports 2^32 triangles, but mesh contains more faces".into()
))
})?;
self.write_raw(face_count, triangles)
}
}
// ===============================================================================================
// ===== Helper functions
// ===============================================================================================
/// Calculates the normal of the face defined by the three vertices in CCW order.
fn calc_normal(positions: &[[f32; 3]; 3]) -> [f32; 3] {
let pos_a = positions[0].to_point3();
| {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
} | identifier_body |
write.rs | use cgmath::prelude::*;
use crate::{
prelude::*,
io::{PropKind, Error, ErrorKind},
};
use super::{Encoding, RawTriangle};
// ----------------------------------------------------------------------------
/// The solid name used when the user didn't specify one.
const DEFAULT_SOLID_NAME: &str = "mesh";
// ===============================================================================================
// ===== STL Config
// ===============================================================================================
/// Used to configure and create a [`Writer`].
///
/// This is used to configure basic settings for the file to be written. You
/// can use the [`Config::into_writer`] method to create a [`Writer`] that can
/// be used as a streaming sink.
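///
/// # Example
///
/// A minimal sketch, not taken from the crate's docs (the import path is a
/// hypothetical assumption), writing into memory:
///
/// ```no_run
/// # use lox::io::stl::Config; // hypothetical import path
/// let config = Config::binary().with_solid_name("cube");
/// let writer = config.into_writer(Vec::new());
/// // `writer` implements `StreamSink` and can consume any `MemSource`.
/// ```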
#[derive(Clone, Debug)]
pub struct Config {
solid_name: String,
encoding: Encoding,
}
impl Config {
/// Creates a new builder instance from the given encoding. For
/// convenience, you can use [`Config::binary()`] or [`Config::ascii()`]
/// directly.
pub fn new(encoding: Encoding) -> Self {
Self {
solid_name: DEFAULT_SOLID_NAME.into(),
encoding,
}
}
/// Creates a new builder instance for a binary STL file.
pub fn binary() -> Self {
Self::new(Encoding::Binary)
}
/// Creates a new builder instance for an ASCII STL file.
///
/// **Note**: please don't use this. STL ASCII files are even more space
/// inefficient than binary STL files. If you can avoid it, never use ASCII
/// STL. In fact, consider not using STL at all.
pub fn ascii() -> Self {
Self::new(Encoding::Ascii)
}
/// Sets the solid name for this file.
///
/// The given name must be an ASCII string (otherwise the function panics).
/// If a binary file is written, only 76 bytes of the string are written to
/// the file.
pub fn with_solid_name(self, name: impl Into<String>) -> Self {
let solid_name = name.into();
assert!(solid_name.is_ascii());
Self {
solid_name,
.. self
}
}
/// Creates a [`Writer`] with `self` as config.
pub fn into_writer<W: io::Write>(self, writer: W) -> Writer<W> {
Writer::new(self, writer)
}
}
// ===============================================================================================
// ===== STL Writer
// ===============================================================================================
/// A writer able to write binary and ASCII STL files. Implements
/// [`StreamSink`].
#[derive(Debug)]
pub struct Writer<W: io::Write> {
config: Config,
writer: W,
}
impl<W: io::Write> Writer<W> {
/// Creates a new STL writer with the given STL config which will write to
/// the given `io::Write` instance.
pub fn new(config: Config, writer: W) -> Self {
Self { config, writer }
}
/// Low level function to write STL files.
///
/// You usually don't need to use this function directly and instead use a
/// high level interface. This function is still exposed to give you more
/// or less complete control.
pub fn write_raw(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
}
#[inline(never)]
pub fn write_raw_binary(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
// First, an 80-byte header that readers ignore; it must not begin with
// "solid", since that prefix marks a file as ASCII STL. We try to fit the
// solid name into it.
let name_len = cmp::min(config.solid_name.len(), 76);
let signature = format!("LOX {}", &config.solid_name[..name_len]);
let padding = vec![b' '; 80 - signature.len()];
w.write_all(signature.as_bytes())?;
w.write_all(&padding)?;
// Next, number of triangles
w.write_u32::<LittleEndian>(num_triangles)?;
// Each triangle record is 50 bytes: 12 little-endian f32s (the normal plus
// three vertices) followed by the u16 attribute byte count.
const TRI_SIZE: usize = 4 * 3 * 4 + 2;
let mut buf = [0; TRI_SIZE];
for triangle in triangles {
let triangle = triangle?;
// Write face normal
LittleEndian::write_f32(&mut buf[00..04], triangle.normal[0]);
LittleEndian::write_f32(&mut buf[04..08], triangle.normal[1]);
LittleEndian::write_f32(&mut buf[08..12], triangle.normal[2]);
LittleEndian::write_f32(&mut buf[12..16], triangle.vertices[0][0]);
LittleEndian::write_f32(&mut buf[16..20], triangle.vertices[0][1]);
LittleEndian::write_f32(&mut buf[20..24], triangle.vertices[0][2]);
LittleEndian::write_f32(&mut buf[24..28], triangle.vertices[1][0]);
LittleEndian::write_f32(&mut buf[28..32], triangle.vertices[1][1]);
LittleEndian::write_f32(&mut buf[32..36], triangle.vertices[1][2]);
LittleEndian::write_f32(&mut buf[36..40], triangle.vertices[2][0]);
LittleEndian::write_f32(&mut buf[40..44], triangle.vertices[2][1]);
LittleEndian::write_f32(&mut buf[44..48], triangle.vertices[2][2]);
LittleEndian::write_u16(&mut buf[48..50], triangle.attribute_byte_count);
w.write_all(&buf)?;
}
Ok(())
}
#[inline(never)]
pub fn write_raw_ascii(
self,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
writeln!(w, "solid {}", config.solid_name)?;
for triangle in triangles {
let triangle = triangle?;
// Write face normal
write!(w, " facet normal ")?;
write_ascii_vector(&mut w, triangle.normal)?;
writeln!(w, "")?;
// Write all vertex positions
writeln!(w, " outer loop")?;
for &vertex_pos in &triangle.vertices {
write!(w, " vertex ")?;
write_ascii_vector(&mut w, vertex_pos)?;
writeln!(w, "")?;
}
writeln!(w, " endloop")?;
writeln!(w, " endfacet")?;
}
writeln!(w, "endsolid {}", config.solid_name)?;
Ok(())
}
}
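// For reference: with the default solid name, `write_raw_ascii` above emits
// the standard STL ASCII layout, e.g.:
//
//   solid mesh
//     facet normal 0 0 1
//       outer loop
//         vertex 0 0 0
//         vertex 1 0 0
//         vertex 0 1 0
//       endloop
//     endfacet
//   endsolid mesh
//
// The exact float formatting comes from `write_ascii_vector`, which is
// defined elsewhere in this module.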
impl<W: io::Write> StreamSink for Writer<W> {
#[inline(never)]
fn transfer_from<S: MemSource>(self, src: &S) -> Result<(), Error> {
// Make sure we have positions
if src.vertex_position_type().is_none() {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
}
let mesh = src.core_mesh();
let has_normals = src.face_normal_type().is_some();
// The triangle iterator
let triangles = mesh.face_handles().map(|fh| {
let mut it = mesh.vertices_around_face(fh);
let va = it.next().expect("bug: less than 3 vertices around face");
let vb = it.next().expect("bug: less than 3 vertices around face");
let vc = it.next().expect("bug: less than 3 vertices around face");
// Make sure this is a triangle face. Note: we do not check
// `mesh.is_tri_mesh()` in the beginning, as we also want to be
// able to serialize triangle meshes whose type does not implement
// `TriMesh`. We only want to error if there is actually a non-tri
// face.
if it.next().is_some() {
return Err(Error::new(|| ErrorKind::StreamSinkDoesNotSupportPolygonFaces));
}
// Get positions from map and convert them to array
let get_v = |vh| -> Result<[f32; 3], Error> {
src.vertex_position::<f32>(vh)
.and_then(|opt| {
opt.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: format!("no position for {:?} while writing STL", vh),
}))
})
.map(|p| p.convert()) // to array form
};
let vertices = [get_v(va)?, get_v(vb)?, get_v(vc)?];
let normal = if has_normals {
src.face_normal::<f32>(fh)?
.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::FaceNormal,
msg: format!("no normal for {:?} while writing STL", fh |
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt}; | random_line_split |
|
write.rs | /// Creates a new builder instance for a binary STL file.
pub fn binary() -> Self {
Self::new(Encoding::Binary)
}
/// Creates a new builder instance for an ASCII STL file.
///
/// **Note**: please don't use this. STL ASCII files are even more space
/// inefficient than binary STL files. If you can avoid it, never use ASCII
/// STL. In fact, consider not using STL at all.
pub fn ascii() -> Self {
Self::new(Encoding::Ascii)
}
/// Sets the solid name for this file.
///
/// The given name must be an ASCII string (otherwise the function panics).
/// If a binary file is written, only 76 bytes of the string are written to
/// the file.
pub fn with_solid_name(self, name: impl Into<String>) -> Self {
let solid_name = name.into();
assert!(solid_name.is_ascii());
Self {
solid_name,
.. self
}
}
/// Creates a [`Writer`] with `self` as config.
pub fn into_writer<W: io::Write>(self, writer: W) -> Writer<W> {
Writer::new(self, writer)
}
}
// ===============================================================================================
// ===== STL Writer
// ===============================================================================================
/// A writer able to write binary and ASCII STL files. Implements
/// [`StreamSink`].
#[derive(Debug)]
pub struct Writer<W: io::Write> {
config: Config,
writer: W,
}
impl<W: io::Write> Writer<W> {
/// Creates a new STL writer with the given STL config which will write to
/// the given `io::Write` instance.
pub fn new(config: Config, writer: W) -> Self {
Self { config, writer }
}
/// Low level function to write STL files.
///
/// You usually don't need to use this function directly and instead use a
/// high level interface. This function is still exposed to give you more
/// or less complete control.
pub fn write_raw(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
}
#[inline(never)]
pub fn write_raw_binary(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
// First, an 80-byte header that readers ignore; it must not begin with
// "solid", since that prefix marks a file as ASCII STL. We try to fit the
// solid name into it.
let name_len = cmp::min(config.solid_name.len(), 76);
let signature = format!("LOX {}", &config.solid_name[..name_len]);
let padding = vec![b' '; 80 - signature.len()];
w.write_all(signature.as_bytes())?;
w.write_all(&padding)?;
// Next, number of triangles
w.write_u32::<LittleEndian>(num_triangles)?;
// Each triangle record is 50 bytes: 12 little-endian f32s (the normal plus
// three vertices) followed by the u16 attribute byte count.
const TRI_SIZE: usize = 4 * 3 * 4 + 2;
let mut buf = [0; TRI_SIZE];
for triangle in triangles {
let triangle = triangle?;
// Write face normal
LittleEndian::write_f32(&mut buf[00..04], triangle.normal[0]);
LittleEndian::write_f32(&mut buf[04..08], triangle.normal[1]);
LittleEndian::write_f32(&mut buf[08..12], triangle.normal[2]);
LittleEndian::write_f32(&mut buf[12..16], triangle.vertices[0][0]);
LittleEndian::write_f32(&mut buf[16..20], triangle.vertices[0][1]);
LittleEndian::write_f32(&mut buf[20..24], triangle.vertices[0][2]);
LittleEndian::write_f32(&mut buf[24..28], triangle.vertices[1][0]);
LittleEndian::write_f32(&mut buf[28..32], triangle.vertices[1][1]);
LittleEndian::write_f32(&mut buf[32..36], triangle.vertices[1][2]);
LittleEndian::write_f32(&mut buf[36..40], triangle.vertices[2][0]);
LittleEndian::write_f32(&mut buf[40..44], triangle.vertices[2][1]);
LittleEndian::write_f32(&mut buf[44..48], triangle.vertices[2][2]);
LittleEndian::write_u16(&mut buf[48..50], triangle.attribute_byte_count);
w.write_all(&buf)?;
}
Ok(())
}
#[inline(never)]
pub fn write_raw_ascii(
self,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
writeln!(w, "solid {}", config.solid_name)?;
for triangle in triangles {
let triangle = triangle?;
// Write face normal
write!(w, " facet normal ")?;
write_ascii_vector(&mut w, triangle.normal)?;
writeln!(w, "")?;
// Write all vertex positions
writeln!(w, " outer loop")?;
for &vertex_pos in &triangle.vertices {
write!(w, " vertex ")?;
write_ascii_vector(&mut w, vertex_pos)?;
writeln!(w, "")?;
}
writeln!(w, " endloop")?;
writeln!(w, " endfacet")?;
}
writeln!(w, "endsolid {}", config.solid_name)?;
Ok(())
}
}
impl<W: io::Write> StreamSink for Writer<W> {
#[inline(never)]
fn transfer_from<S: MemSource>(self, src: &S) -> Result<(), Error> {
// Make sure we have positions
if src.vertex_position_type().is_none() |
let mesh = src.core_mesh();
let has_normals = src.face_normal_type().is_some();
// The triangle iterator
let triangles = mesh.face_handles().map(|fh| {
let mut it = mesh.vertices_around_face(fh);
let va = it.next().expect("bug: less than 3 vertices around face");
let vb = it.next().expect("bug: less than 3 vertices around face");
let vc = it.next().expect("bug: less than 3 vertices around face");
// Make sure this is a triangle face. Note: we do not check
// `mesh.is_tri_mesh()` in the beginning, as we also want to be
// able to serialize triangle meshes whose type does not implement
// `TriMesh`. We only want to error if there is actually a non-tri
// face.
if it.next().is_some() {
return Err(Error::new(|| ErrorKind::StreamSinkDoesNotSupportPolygonFaces));
}
// Get positions from map and convert them to array
let get_v = |vh| -> Result<[f32; 3], Error> {
src.vertex_position::<f32>(vh)
.and_then(|opt| {
opt.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: format!("no position for {:?} while writing STL", vh),
}))
})
.map(|p| p.convert()) // to array form
};
let vertices = [get_v(va)?, get_v(vb)?, get_v(vc)?];
let normal = if has_normals {
src.face_normal::<f32>(fh)?
.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::FaceNormal,
msg: format!("no normal for {:?} while writing STL", fh),
}))?
.convert() // to array form
} else {
calc_normal(&vertices)
};
Ok(RawTriangle {
vertices,
normal,
// As Wikipedia beautifully put it: "this should be zero
// because most software does not understand anything else."
// Great. Some people abuse this to store color or other
// information. This is terrible, we won't do that.
attribute_byte_count: 0,
})
});
let face_count = mesh.num_faces().try_into().map_err(|_| {
Error::new(|| ErrorKind::SinkIncompatible(
"STL only supports 2^32 triangles, but mesh contains more faces".into()
))
})?;
self.write_raw(face_count, triangles)
}
}
// ===============================================================================================
// ===== Helper functions
// ===============================================================================================
/// Calculates the normal of the face defined by the three vertices, given in CCW order.
fn calc_normal(positions: &[[f32; 3]; 3]) -> [f32; 3] {
let pos_a = positions[0].to_point3();
| {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
} | conditional_block |
jh.py | 3, 0x0758df38, 0x65655e4e, 0x897cfcf2, 0x8e5086fc,
0x442e7031, 0x86ca0bd0, 0xa20940f0, 0x4e477830, 0x39eea065, 0x8338f7d1,
0x37e95ef7, 0xbd3a2ce4, 0x26b29721, 0x6ff81301, 0xd1ed44a3, 0xe7de9fef,
0x15dfa08b, 0xd9922576, 0xf6f7853c, 0xbe42dc12, 0x7ceca7d8, 0x7eb027ab,
0xda7d8d53, 0xdea83eaa, 0x93ce25aa, 0xd86902bd, 0xfd43f65a, 0xf908731a,
0xdaef5fc0, 0xa5194a17, 0x33664d97, 0x6a21fd4c, 0x3198b435, 0x701541db,
0xbb0f1eea, 0x9b54cded, 0xa163d09a, 0x72409751, 0xbf9d75f6, 0xe26f4791,
]
# Bit-sliced JH S-box layer: x holds four 32-bit words, c is the
# round-constant word folded into the substitution.
def Sb(x, c):
x[3] = ~x[3]
x[0] ^= (c) & ~x[2]
tmp = (c) ^ (x[0] & x[1])
x[0] ^= x[2] & x[3]
x[3] ^= ~x[1] & x[2]
x[1] ^= x[0] & x[2]
x[2] ^= x[0] & ~x[3]
x[0] ^= x[1] | x[3]
x[3] ^= x[1] & x[2]
x[1] ^= tmp & x[0]
x[2] ^= tmp
return x
# JH's linear diffusion layer over eight 32-bit words.
def Lb(x):
x[4] ^= x[1]
x[5] ^= x[2]
x[6] ^= x[3] ^ x[0]
x[7] ^= x[0]
x[0] ^= x[5]
x[1] ^= x[6]
x[2] ^= x[7] ^ x[4]
x[3] ^= x[4]
return x
def Ceven(n, r):
return C[((r) << 3) + 3 - n]
def Codd(n, r):
return C[((r) << 3) + 7 - n]
def S(x0, x1, x2, x3, cb, r):
x = Sb([x0[3], x1[3], x2[3], x3[3]], cb(3, r))
x0[3] = x[0]
x1[3] = x[1]
x2[3] = x[2]
x3[3] = x[3]
x = Sb([x0[2], x1[2], x2[2], x3[2]], cb(2, r))
x0[2] = x[0]
x1[2] = x[1]
x2[2] = x[2]
x3[2] = x[3]
x = Sb([x0[1], x1[1], x2[1], x3[1]], cb(1, r))
x0[1] = x[0]
x1[1] = x[1]
x2[1] = x[2]
x3[1] = x[3]
x = Sb([x0[0], x1[0], x2[0], x3[0]], cb(0, r))
x0[0] = x[0]
x1[0] = x[1]
x2[0] = x[2]
x3[0] = x[3]
def L(x0, x1, x2, x3, x4, x5, x6, x7):
x = Lb([x0[3], x1[3], x2[3], x3[3], x4[3], x5[3], x6[3], x7[3]])
x0[3] = x[0]
x1[3] = x[1]
x2[3] = x[2]
x3[3] = x[3]
x4[3] = x[4]
x5[3] = x[5]
x6[3] = x[6]
x7[3] = x[7]
x = Lb([x0[2], x1[2], x2[2], x3[2], x4[2], x5[2], x6[2], x7[2]])
x0[2] = x[0]
x1[2] = x[1]
x2[2] = x[2]
x3[2] = x[3]
x4[2] = x[4]
x5[2] = x[5]
x6[2] = x[6]
x7[2] = x[7]
x = Lb([x0[1], x1[1], x2[1], x3[1], x4[1], x5[1], x6[1], x7[1]])
x0[1] = x[0]
x1[1] = x[1]
x2[1] = x[2]
x3[1] = x[3]
x4[1] = x[4]
x5[1] = x[5]
x6[1] = x[6]
x7[1] = x[7]
x = Lb([x0[0], x1[0], x2[0], x3[0], x4[0], x5[0], x6[0], x7[0]])
x0[0] = x[0]
x1[0] = x[1]
x2[0] = x[2]
x3[0] = x[3]
x4[0] = x[4]
x5[0] = x[5]
x6[0] = x[6]
x7[0] = x[7]
def Wz(x, c, n):
t = (x[3] & (c)) << (n)
x[3] = ((x[3] >> (n)) & (c)) | t
t = (x[2] & (c)) << (n)
x[2] = ((x[2] >> (n)) & (c)) | t
t = (x[1] & (c)) << (n)
x[1] = ((x[1] >> (n)) & (c)) | t
t = (x[0] & (c)) << (n)
x[0] = ((x[0] >> (n)) & (c)) | t
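# Wz swaps adjacent n-bit groups inside each 32-bit word; the mask c selects
# the low member of every pair. For example, Wz(x, 0x55555555, 1) exchanges
# each even-position bit with its odd neighbour. W() below dispatches the
# seven variants (ro = 0..6) used by the round function.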
def W(ro, x):
if ro == 0:
return Wz(x, (0x55555555), 1)
elif ro == 1:
return Wz(x, (0x33333333), 2)
elif ro == 2:
return Wz(x, (0x0F0F0F0F), 4)
elif ro == 3:
return Wz(x, (0x00FF00FF), 8)
elif ro == 4:
return Wz(x, (0x0000FFFF), 16)
elif ro == 5:
t = x[3]
x[3] = x[2]
x[2] = t
t = x[1]
x[1] = x[0]
x[0] = t
return
elif ro == 6:
t = x[3]
x[3] = x[1]
x[1] = t
t = x[2]
x[2] = x[0]
x[0] = t
def | SL | identifier_name |
|
jh.py | ]
x[0] ^= x[5]
x[1] ^= x[6]
x[2] ^= x[7] ^ x[4]
x[3] ^= x[4]
return x
def Ceven(n, r):
return C[((r) << 3) + 3 - n]
def Codd(n, r):
return C[((r) << 3) + 7 - n]
def S(x0, x1, x2, x3, cb, r):
x = Sb([x0[3], x1[3], x2[3], x3[3]], cb(3, r))
x0[3] = x[0]
x1[3] = x[1]
x2[3] = x[2]
x3[3] = x[3]
x = Sb([x0[2], x1[2], x2[2], x3[2]], cb(2, r))
x0[2] = x[0]
x1[2] = x[1]
x2[2] = x[2]
x3[2] = x[3]
x = Sb([x0[1], x1[1], x2[1], x3[1]], cb(1, r))
x0[1] = x[0]
x1[1] = x[1]
x2[1] = x[2]
x3[1] = x[3]
x = Sb([x0[0], x1[0], x2[0], x3[0]], cb(0, r))
x0[0] = x[0]
x1[0] = x[1]
x2[0] = x[2]
x3[0] = x[3]
def L(x0, x1, x2, x3, x4, x5, x6, x7):
x = Lb([x0[3], x1[3], x2[3], x3[3], x4[3], x5[3], x6[3], x7[3]])
x0[3] = x[0]
x1[3] = x[1]
x2[3] = x[2]
x3[3] = x[3]
x4[3] = x[4]
x5[3] = x[5]
x6[3] = x[6]
x7[3] = x[7]
x = Lb([x0[2], x1[2], x2[2], x3[2], x4[2], x5[2], x6[2], x7[2]])
x0[2] = x[0]
x1[2] = x[1]
x2[2] = x[2]
x3[2] = x[3]
x4[2] = x[4]
x5[2] = x[5]
x6[2] = x[6]
x7[2] = x[7]
x = Lb([x0[1], x1[1], x2[1], x3[1], x4[1], x5[1], x6[1], x7[1]])
x0[1] = x[0]
x1[1] = x[1]
x2[1] = x[2]
x3[1] = x[3]
x4[1] = x[4]
x5[1] = x[5]
x6[1] = x[6]
x7[1] = x[7]
x = Lb([x0[0], x1[0], x2[0], x3[0], x4[0], x5[0], x6[0], x7[0]])
x0[0] = x[0]
x1[0] = x[1]
x2[0] = x[2]
x3[0] = x[3]
x4[0] = x[4]
x5[0] = x[5]
x6[0] = x[6]
x7[0] = x[7]
def Wz(x, c, n):
t = (x[3] & (c)) << (n)
x[3] = ((x[3] >> (n)) & (c)) | t
t = (x[2] & (c)) << (n)
x[2] = ((x[2] >> (n)) & (c)) | t
t = (x[1] & (c)) << (n)
x[1] = ((x[1] >> (n)) & (c)) | t
t = (x[0] & (c)) << (n)
x[0] = ((x[0] >> (n)) & (c)) | t
def W(ro, x):
if ro == 0:
return Wz(x, (0x55555555), 1)
elif ro == 1:
return Wz(x, (0x33333333), 2)
elif ro == 2:
return Wz(x, (0x0F0F0F0F), 4)
elif ro == 3:
return Wz(x, (0x00FF00FF), 8)
elif ro == 4:
return Wz(x, (0x0000FFFF), 16)
elif ro == 5:
t = x[3]
x[3] = x[2]
x[2] = t
t = x[1]
x[1] = x[0]
x[0] = t
return
elif ro == 6:
t = x[3]
x[3] = x[1]
x[1] = t
t = x[2]
x[2] = x[0]
x[0] = t
def SL(h, r, ro):
S(h[0], h[2], h[4], h[6], Ceven, r)
S(h[1], h[3], h[5], h[7], Codd, r)
L(h[0], h[2], h[4], h[6], h[1], h[3], h[5], h[7])
W(ro, h[1])
W(ro, h[3])
W(ro, h[5])
W(ro, h[7])
def READ_STATE(h, state):
h[0][3] = state[0]
h[0][2] = state[1]
h[0][1] = state[2]
h[0][0] = state[3]
h[1][3] = state[4]
h[1][2] = state[5]
h[1][1] = state[6]
h[1][0] = state[7]
h[2][3] = state[8]
h[2][2] = state[9]
h[2][1] = state[10]
h[2][0] = state[11]
h[3][3] = state[12]
h[3][2] = state[13]
h[3][1] = state[14]
h[3][0] = state[15]
h[4][3] = state[16]
h[4][2] = state[17]
h[4][1] = state[18]
h[4][0] = state[19]
h[5][3] = state[20]
h[5][2] = state[21]
h[5][1] = state[22]
h[5][0] = state[23]
h[6][3] = state[24]
h[6][2] = state[25]
h[6][1] = state[26]
h[6][0] = state[27]
h[7][3] = state[28]
h[7][2] = state[29]
h[7][1] = state[30]
h[7][0] = state[31]
def WRITE_STATE(h, state):
state[0] = h[0][3]
state[1] = h[0][2]
state[2] = h[0][1]
state[3] = h[0][0]
state[4] = h[1][3] | state[5] = h[1][2]
state[6] = h[1][1]
state[7] = h[1][0]
state[8] = h[2][3] | random_line_split |
|
jh.py | ] = x[5]
x6[1] = x[6]
x7[1] = x[7]
x = Lb([x0[0], x1[0], x2[0], x3[0], x4[0], x5[0], x6[0], x7[0]])
x0[0] = x[0]
x1[0] = x[1]
x2[0] = x[2]
x3[0] = x[3]
x4[0] = x[4]
x5[0] = x[5]
x6[0] = x[6]
x7[0] = x[7]
def Wz(x, c, n):
t = (x[3] & (c)) << (n)
x[3] = ((x[3] >> (n)) & (c)) | t
t = (x[2] & (c)) << (n)
x[2] = ((x[2] >> (n)) & (c)) | t
t = (x[1] & (c)) << (n)
x[1] = ((x[1] >> (n)) & (c)) | t
t = (x[0] & (c)) << (n)
x[0] = ((x[0] >> (n)) & (c)) | t
def W(ro, x):
if ro == 0:
return Wz(x, (0x55555555), 1)
elif ro == 1:
return Wz(x, (0x33333333), 2)
elif ro == 2:
return Wz(x, (0x0F0F0F0F), 4)
elif ro == 3:
return Wz(x, (0x00FF00FF), 8)
elif ro == 4:
return Wz(x, (0x0000FFFF), 16)
elif ro == 5:
t = x[3]
x[3] = x[2]
x[2] = t
t = x[1]
x[1] = x[0]
x[0] = t
return
elif ro == 6:
t = x[3]
x[3] = x[1]
x[1] = t
t = x[2]
x[2] = x[0]
x[0] = t
def SL(h, r, ro):
S(h[0], h[2], h[4], h[6], Ceven, r)
S(h[1], h[3], h[5], h[7], Codd, r)
L(h[0], h[2], h[4], h[6], h[1], h[3], h[5], h[7])
W(ro, h[1])
W(ro, h[3])
W(ro, h[5])
W(ro, h[7])
def READ_STATE(h, state):
h[0][3] = state[0]
h[0][2] = state[1]
h[0][1] = state[2]
h[0][0] = state[3]
h[1][3] = state[4]
h[1][2] = state[5]
h[1][1] = state[6]
h[1][0] = state[7]
h[2][3] = state[8]
h[2][2] = state[9]
h[2][1] = state[10]
h[2][0] = state[11]
h[3][3] = state[12]
h[3][2] = state[13]
h[3][1] = state[14]
h[3][0] = state[15]
h[4][3] = state[16]
h[4][2] = state[17]
h[4][1] = state[18]
h[4][0] = state[19]
h[5][3] = state[20]
h[5][2] = state[21]
h[5][1] = state[22]
h[5][0] = state[23]
h[6][3] = state[24]
h[6][2] = state[25]
h[6][1] = state[26]
h[6][0] = state[27]
h[7][3] = state[28]
h[7][2] = state[29]
h[7][1] = state[30]
h[7][0] = state[31]
def WRITE_STATE(h, state):
state[0] = h[0][3]
state[1] = h[0][2]
state[2] = h[0][1]
state[3] = h[0][0]
state[4] = h[1][3]
state[5] = h[1][2]
state[6] = h[1][1]
state[7] = h[1][0]
state[8] = h[2][3]
state[9] = h[2][2]
state[10] = h[2][1]
state[11] = h[2][0]
state[12] = h[3][3]
state[13] = h[3][2]
state[14] = h[3][1]
state[15] = h[3][0]
state[16] = h[4][3]
state[17] = h[4][2]
state[18] = h[4][1]
state[19] = h[4][0]
state[20] = h[5][3]
state[21] = h[5][2]
state[22] = h[5][1]
state[23] = h[5][0]
state[24] = h[6][3]
state[25] = h[6][2]
state[26] = h[6][1]
state[27] = h[6][0]
state[28] = h[7][3]
state[29] = h[7][2]
state[30] = h[7][1]
state[31] = h[7][0]
def E8(h):
for r in range(0, 42, 7):
SL(h, r + 0, 0)
SL(h, r + 1, 1)
SL(h, r + 2, 2)
SL(h, r + 3, 3)
SL(h, r + 4, 4)
SL(h, r + 5, 5)
SL(h, r + 6, 6)
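# E8 above is the 42-round JH permutation: six groups of seven rounds, where
# round r + k uses word-permutation variant W(k) for k = 0..6. A hypothetical
# driver (illustrative only):
#
#   state = [[0] * 4 for _ in range(8)]   # eight rows of four 32-bit words
#   E8(state)                             # permutes the 1024-bit state in place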
def bufferXORInsertBackwards(buf, data, x, y, bufferOffsetX=0, bufferOffsetY=0):
    # XOR an x-by-y block of words from `data` into `buf`, reversed along the
    # y axis. `j` indexes the y axis, so the loop bound and the data stride
    # use y (callers in this module always pass x == y == 4).
    for i in range(x):
        for j in range(y):
            m = i + bufferOffsetX
            n = bufferOffsetY + y - 1 - j
            buf[m][n] = buf[m][n] ^ data[i * y + j]
def jh_update(ctx, msg, msg_len=None):
buf = ctx['buffer']
buf_len = len(buf)
ptr = ctx['ptr']
if msg_len is None:
msg_len = len(msg)
if msg_len < buf_len - ptr:
op.buffer_insert(buf, ptr, msg, msg_len)
ptr += msg_len
ctx['ptr'] = ptr
return
V = [None] * JH_HX
for i in range(JH_HX):
V[i] = [None] * JH_HY
READ_STATE(V, ctx['state'])
while msg_len > 0:
clen = buf_len - ptr
if clen > msg_len:
clen = msg_len
op.buffer_insert(buf, ptr, msg, clen)
ptr += clen
msg = msg[clen:]
msg_len -= clen
if ptr == buf_len:
| buf32 = op.swap32_list(op.bytes_to_i32_list(buf))
bufferXORInsertBackwards(V, buf32, 4, 4)
E8(V)
bufferXORInsertBackwards(V, buf32, 4, 4, 4, 0)
blockCountLow = ctx['blockCountLow']
blockCountLow = op.t32(blockCountLow + 1)
ctx['blockCountLow'] = blockCountLow
if blockCountLow == 0:
ctx['blockCountHigh'] += 1
ptr = 0 | conditional_block |
|
jh.py | f2c9d2, 0x0cd29b00, 0x30ceaa5f, 0x300cd4b7,
0x16512a74, 0x9832e0f2, 0xd830eb0d, 0x9af8cee3, 0x7b9ec54b, 0x9279f1b5,
0x6ee651ff, 0xd3688604, 0x574d239b, 0x316796e6, 0xf3a6e6cc, 0x05750a17,
0xd98176b1, 0xce6c3213, 0x8452173c, 0x62a205f8, 0xb3cb2bf4, 0x47154778,
0x825446ff, 0x486a9323, 0x0758df38, 0x65655e4e, 0x897cfcf2, 0x8e5086fc,
0x442e7031, 0x86ca0bd0, 0xa20940f0, 0x4e477830, 0x39eea065, 0x8338f7d1,
0x37e95ef7, 0xbd3a2ce4, 0x26b29721, 0x6ff81301, 0xd1ed44a3, 0xe7de9fef,
0x15dfa08b, 0xd9922576, 0xf6f7853c, 0xbe42dc12, 0x7ceca7d8, 0x7eb027ab,
0xda7d8d53, 0xdea83eaa, 0x93ce25aa, 0xd86902bd, 0xfd43f65a, 0xf908731a,
0xdaef5fc0, 0xa5194a17, 0x33664d97, 0x6a21fd4c, 0x3198b435, 0x701541db,
0xbb0f1eea, 0x9b54cded, 0xa163d09a, 0x72409751, 0xbf9d75f6, 0xe26f4791,
]
def Sb(x, c):
x[3] = ~x[3]
x[0] ^= (c) & ~x[2]
tmp = (c) ^ (x[0] & x[1])
x[0] ^= x[2] & x[3]
x[3] ^= ~x[1] & x[2]
x[1] ^= x[0] & x[2]
x[2] ^= x[0] & ~x[3]
x[0] ^= x[1] | x[3]
x[3] ^= x[1] & x[2]
x[1] ^= tmp & x[0]
x[2] ^= tmp
return x
def Lb(x):
x[4] ^= x[1]
x[5] ^= x[2]
x[6] ^= x[3] ^ x[0]
x[7] ^= x[0]
x[0] ^= x[5]
x[1] ^= x[6]
x[2] ^= x[7] ^ x[4]
x[3] ^= x[4]
return x
def Ceven(n, r):
return C[((r) << 3) + 3 - n]
def Codd(n, r):
return C[((r) << 3) + 7 - n]
def S(x0, x1, x2, x3, cb, r):
x = Sb([x0[3], x1[3], x2[3], x3[3]], cb(3, r))
x0[3] = x[0]
x1[3] = x[1]
x2[3] = x[2]
x3[3] = x[3]
x = Sb([x0[2], x1[2], x2[2], x3[2]], cb(2, r))
x0[2] = x[0]
x1[2] = x[1]
x2[2] = x[2]
x3[2] = x[3]
x = Sb([x0[1], x1[1], x2[1], x3[1]], cb(1, r))
x0[1] = x[0]
x1[1] = x[1]
x2[1] = x[2]
x3[1] = x[3]
x = Sb([x0[0], x1[0], x2[0], x3[0]], cb(0, r))
x0[0] = x[0]
x1[0] = x[1]
x2[0] = x[2]
x3[0] = x[3]
def L(x0, x1, x2, x3, x4, x5, x6, x7):
x = Lb([x0[3], x1[3], x2[3], x3[3], x4[3], x5[3], x6[3], x7[3]])
x0[3] = x[0]
x1[3] = x[1]
x2[3] = x[2]
x3[3] = x[3]
x4[3] = x[4]
x5[3] = x[5]
x6[3] = x[6]
x7[3] = x[7]
x = Lb([x0[2], x1[2], x2[2], x3[2], x4[2], x5[2], x6[2], x7[2]])
x0[2] = x[0]
x1[2] = x[1]
x2[2] = x[2]
x3[2] = x[3]
x4[2] = x[4]
x5[2] = x[5]
x6[2] = x[6]
x7[2] = x[7]
x = Lb([x0[1], x1[1], x2[1], x3[1], x4[1], x5[1], x6[1], x7[1]])
x0[1] = x[0]
x1[1] = x[1]
x2[1] = x[2]
x3[1] = x[3]
x4[1] = x[4]
x5[1] = x[5]
x6[1] = x[6]
x7[1] = x[7]
x = Lb([x0[0], x1[0], x2[0], x3[0], x4[0], x5[0], x6[0], x7[0]])
x0[0] = x[0]
x1[0] = x[1]
x2[0] = x[2]
x3[0] = x[3]
x4[0] = x[4]
x5[0] = x[5]
x6[0] = x[6]
x7[0] = x[7]
def Wz(x, c, n):
| t = (x[3] & (c)) << (n)
x[3] = ((x[3] >> (n)) & (c)) | t
t = (x[2] & (c)) << (n)
x[2] = ((x[2] >> (n)) & (c)) | t
t = (x[1] & (c)) << (n)
x[1] = ((x[1] >> (n)) & (c)) | t
t = (x[0] & (c)) << (n)
x[0] = ((x[0] >> (n)) & (c)) | t | identifier_body |
|
parse.rs | ']'", 0),
'{' => {
match split_in_parens(s.clone(), CURLY_BRACKETS) {
Some((rep, newst)) => {
if let Some(p) = stack.pop() {
let rep = parse_specific_repetition(rep, p)?;
stack.push(rep);
s = newst;
} else {
return s.err("repetition {} without pattern to repeat", 0);
}
}
None => return s.err("unmatched {", s.len()),
};
}
c => {
stack.push(Pattern::Char(c));
s = s.from(1);
}
}
}
Ok((stack.to_pattern(), s))
}
/// parse_char_set parses the character set at the start of the input state.
/// Valid states are [a], [ab], [a-z], [-a-z], [a-z-] and [a-fh-kl].
fn parse_char_set<'a>(s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
if let Some((cs, rest)) = split_in_parens(s.clone(), SQUARE_BRACKETS) {
let mut chars: Vec<char> = vec![];
let mut ranges: Vec<Pattern> = vec![];
let mut st = cs;
loop {
// Try to match a range "a-z" by looking for the dash; if no dash, add character to set
// and advance.
if st.len() >= 3 && st[1] == '-' {
ranges.push(Pattern::CharRange(st[0], st[2]));
st = st.from(3);
} else if st.len() > 0 {
chars.push(st[0]);
st = st.from(1);
} else {
break;
}
}
assert_eq!(st.len(), 0);
if chars.len() == 1 {
ranges.push(Pattern::Char(chars.pop().unwrap()));
} else if !chars.is_empty() {
ranges.push(Pattern::CharSet(chars));
}
if ranges.len() == 1 {
Ok((ranges.pop().unwrap(), rest))
} else {
let pat = Pattern::Alternate(ranges);
Ok((pat, rest))
}
} else {
s.err("unmatched [", s.len())
}
}
/// Parse a repetition spec inside curly braces: {1} | {1,} | {,1} | {1,2}
fn parse_specific_repetition<'a>(rep: ParseState<'a>, p: Pattern) -> Result<Pattern, String> {
let mut nparts = 0;
let mut parts: [Option<&[char]>; 2] = Default::default();
// Split the spec on commas into at most two parts ({min,max}).
for part in rep[..].split(|c| *c == ',') {
parts[nparts] = Some(part);
nparts += 1;
if nparts == 2 {
break;
}
}
if nparts == 0 {
// {}
return rep.err("empty {} spec", 0);
} else if nparts == 1 {
// {1}
if let Ok(n) = u32::from_str(&String::from_iter(parts[0].unwrap().iter())) {
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p, n, None,
))));
} else {
return Err(format!(
"invalid repetition '{}'",
String::from_iter(rep[..].iter())
));
}
} else if nparts == 2 {
fn errtostr(r: Result<u32, std::num::ParseIntError>) -> Result<u32, String> {
match r {
Ok(u) => Ok(u),
Err(e) => Err(format!("{}", e)),
}
}
let (p0, p1) = (parts[0].unwrap(), parts[1].unwrap());
// {2,3}
if !p0.is_empty() && !p1.is_empty() {
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if p0.is_empty() && !p1.is_empty() {
// {,3}
let min = 0;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if !p0.is_empty() && p1.is_empty() {
// {3,}
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let repetition =
Pattern::Repeated(Box::new(Repetition::Specific(p.clone(), min, None)));
return Ok(Pattern::Concat(vec![
repetition,
Pattern::Repeated(Box::new(Repetition::ZeroOrMore(p))),
]));
}
}
Err(format!("invalid repetition pattern {:?}", &rep[..]))
}
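// Quick reference for the accepted forms, following the branches above:
//   {3}   -> Repetition::Specific(p, 3, None)
//   {2,5} -> Repetition::Specific(p, 2, Some(5))
//   {,5}  -> Repetition::Specific(p, 0, Some(5))
//   {3,}  -> Specific(p, 3, None) concatenated with ZeroOrMore(p)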
/// Constants for generalizing parsing of parentheses.
const ROUND_PARENS: (char, char) = ('(', ')');
/// Constants for generalizing parsing of parentheses.
const SQUARE_BRACKETS: (char, char) = ('[', ']');
/// Constants for generalizing parsing of parentheses.
const CURLY_BRACKETS: (char, char) = ('{', '}');
/// split_in_parens returns two new ParseStates; the first one containing the contents of the
/// parenthesized clause starting at s[0], the second one containing the rest.
fn split_in_parens<'a>(
s: ParseState<'a>,
parens: (char, char),
) -> Option<(ParseState<'a>, ParseState<'a>)> {
if let Some(end) = find_closing_paren(s.clone(), parens) {
Some((s.sub(1, end), s.from(end + 1)))
} else {
None
}
}
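// For example, given the state "(ab)cd" and ROUND_PARENS, `split_in_parens`
// yields the inner state "ab" and the remainder "cd". The opening paren must
// sit at index 0, otherwise `find_closing_paren` below returns None.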
/// find_closing_paren returns the index of the parenthesis closing the opening parenthesis at the
/// beginning of the state's string.
fn find_closing_paren<'a>(s: ParseState<'a>, parens: (char, char)) -> Option<usize> {
if s[0] != parens.0 {
return None;
}
let mut count = 0;
for i in 0..s.len() {
if s[i] == parens.0 {
count += 1;
} else if s[i] == parens.1 {
count -= 1;
}
if count == 0 {
return Some(i);
}
}
None
}
#[cfg(test)]
mod tests {
use super::*;
use crate::compile::*;
use crate::repr::*;
use crate::state::dot;
#[test]
fn test_find_closing_paren() {
for case in &[
("(abc)de", Some(4)),
("()a", Some(1)),
("(abcd)", Some(5)),
("(abc", None),
] {
let src: Vec<char> = case.0.chars().collect();
assert_eq!(
find_closing_paren(ParseState::new(src.as_ref()), ROUND_PARENS),
case.1
);
}
}
#[test]
fn test_parse_charset() {
for case in &[
("[a]", Pattern::Char('a')),
("[ab]", Pattern::CharSet(vec!['a', 'b'])),
("[ba-]", Pattern::CharSet(vec!['b', 'a', '-'])),
("[a-z]", Pattern::CharRange('a', 'z')),
(
"[a-z-]",
Pattern::Alternate(vec![Pattern::CharRange('a', 'z'), Pattern::Char('-')]),
),
(
"[-a-z-]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharSet(vec!['-', '-']),
]),
),
(
"[a-zA-Z]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharRange('A', 'Z'),
]),
),
(
"[a-zA-Z-]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharRange('A', 'Z'),
Pattern::Char('-'),
]),
),
] {
let src: Vec<char> = case.0.chars().collect();
let st = ParseState::new(&src);
assert_eq!(parse_char_set(st).unwrap().0, case.1);
}
}
#[test]
fn test_parse_subs() | {
let case1 = (
"a(b)c",
Pattern::Concat(vec![
Pattern::Char('a'),
Pattern::Submatch(Box::new(Pattern::Char('b'))),
Pattern::Char('c'),
]),
);
let case2 = ("(b)", Pattern::Submatch(Box::new(Pattern::Char('b'))));
for c in &[case1, case2] {
assert_eq!(c.1, parse(c.0).unwrap());
}
} | identifier_body |
|
parse.rs | (from, self.len())
}
/// pos returns the overall position within the input regex.
fn pos(&self) -> usize {
self.pos
}
/// sub returns a sub-ParseState containing [from..to] of the current one.
fn sub(&self, from: usize, to: usize) -> ParseState<'a> {
ParseState {
src: &self.src[from..to],
pos: self.pos + from,
}
}
/// len returns how many characters this ParseState contains.
fn len(&self) -> usize {
self.src.len()
}
/// err returns a formatted error string containing the specified message and the overall
/// position within the original input string.
fn err<T>(&self, s: &str, i: usize) -> Result<T, String> {
Err(format!("{} at :{}", s, self.pos + i))
}
}
impl<'a> Index<Range<usize>> for ParseState<'a> {
type Output = [char];
fn index(&self, r: Range<usize>) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<RangeFull> for ParseState<'a> {
type Output = [char];
fn index(&self, r: RangeFull) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<usize> for ParseState<'a> {
type Output = char;
fn index(&self, i: usize) -> &Self::Output {
&self.src[i]
}
}
impl<'a> Clone for ParseState<'a> {
fn clone(&self) -> ParseState<'a> {
ParseState {
src: self.src,
pos: self.pos,
}
}
}
/// parse_re is the parser entry point; like all parser functions, it returns either a pair of
/// (parsed pattern, new ParseState) or an error string.
fn parse_re<'a>(mut s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
// The stack assists us in parsing the linear parts of a regular expression, e.g. non-pattern
// characters, or character sets.
let mut stack = ParseStack::new();
loop {
if s.len() == 0 {
break;
}
match s[0] {
'.' => {
stack.push(Pattern::Any);
s = s.from(1);
}
'$' => {
if s.len() == 1 {
stack.push(Pattern::Anchor(AnchorLocation::End));
} else {
stack.push(Pattern::Char('$'))
}
s = s.from(1);
}
'^' => {
if s.pos() == 0 {
stack.push(Pattern::Anchor(AnchorLocation::Begin));
} else {
stack.push(Pattern::Char('^'));
}
s = s.from(1);
}
r @ '+' | r @ '*' | r @ '?' => {
if let Some(p) = stack.pop() {
let rep = match r {
'+' => Repetition::OnceOrMore(p),
'*' => Repetition::ZeroOrMore(p),
'?' => Repetition::ZeroOrOnce(p),
_ => unimplemented!(),
};
stack.push(Pattern::Repeated(Box::new(rep)));
s = s.from(1);
} else {
return s.err("+ without pattern to repeat", 0);
}
}
// Alternation: Parse the expression on the right of the pipe sign and push an
// alternation between what we've already seen and the stuff on the right.
'|' => {
let (rest, newst) = parse_re(s.from(1))?;
let left = stack.to_pattern();
stack = ParseStack::new();
stack.push(Pattern::Alternate(vec![left, rest]));
s = newst;
}
'(' => {
match split_in_parens(s.clone(), ROUND_PARENS) {
Some((parens, newst)) => {
// Parse the sub-regex within parentheses.
let (pat, rest) = parse_re(parens)?;
assert!(rest.len() == 0);
stack.push(Pattern::Submatch(Box::new(pat)));
// Set the current state to contain the string after the parentheses.
s = newst;
}
None => return s.err("unmatched (", s.len()),
}
}
')' => return s.err("unopened ')'", 0),
'[' => match parse_char_set(s) {
Ok((pat, newst)) => {
stack.push(pat);
s = newst;
}
Err(e) => return Err(e),
},
']' => return s.err("unopened ']'", 0),
'{' => {
match split_in_parens(s.clone(), CURLY_BRACKETS) {
Some((rep, newst)) => { | } else {
return s.err("repetition {} without pattern to repeat", 0);
}
}
None => return s.err("unmatched {", s.len()),
};
}
c => {
stack.push(Pattern::Char(c));
s = s.from(1);
}
}
}
Ok((stack.to_pattern(), s))
}
/// parse_char_set parses the character set at the start of the input state.
/// Valid states are [a], [ab], [a-z], [-a-z], [a-z-] and [a-fh-kl].
fn parse_char_set<'a>(s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
if let Some((cs, rest)) = split_in_parens(s.clone(), SQUARE_BRACKETS) {
let mut chars: Vec<char> = vec![];
let mut ranges: Vec<Pattern> = vec![];
let mut st = cs;
loop {
// Try to match a range "a-z" by looking for the dash; if no dash, add character to set
// and advance.
if st.len() >= 3 && st[1] == '-' {
ranges.push(Pattern::CharRange(st[0], st[2]));
st = st.from(3);
} else if st.len() > 0 {
chars.push(st[0]);
st = st.from(1);
} else {
break;
}
}
assert_eq!(st.len(), 0);
if chars.len() == 1 {
ranges.push(Pattern::Char(chars.pop().unwrap()));
} else if !chars.is_empty() {
ranges.push(Pattern::CharSet(chars));
}
if ranges.len() == 1 {
Ok((ranges.pop().unwrap(), rest))
} else {
let pat = Pattern::Alternate(ranges);
Ok((pat, rest))
}
} else {
s.err("unmatched [", s.len())
}
}
/// Parse a repetition spec inside curly braces: {1} | {1,} | {,1} | {1,2}
fn parse_specific_repetition<'a>(rep: ParseState<'a>, p: Pattern) -> Result<Pattern, String> {
let mut nparts = 0;
let mut parts: [Option<&[char]>; 2] = Default::default();
// Split the spec on commas into at most two parts ({min,max}).
for part in rep[..].split(|c| *c == ',') {
parts[nparts] = Some(part);
nparts += 1;
if nparts == 2 {
break;
}
}
if nparts == 0 {
// {}
return rep.err("empty {} spec", 0);
} else if nparts == 1 {
// {1}
if let Ok(n) = u32::from_str(&String::from_iter(parts[0].unwrap().iter())) {
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p, n, None,
))));
} else {
return Err(format!(
"invalid repetition '{}'",
String::from_iter(rep[..].iter())
));
}
} else if nparts == 2 {
fn errtostr(r: Result<u32, std::num::ParseIntError>) -> Result<u32, String> {
match r {
Ok(u) => Ok(u),
Err(e) => Err(format!("{}", e)),
}
}
let (p0, p1) = (parts[0].unwrap(), parts[1].unwrap());
// {2,3}
if !p0.is_empty() && !p1.is_empty() {
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if p0.is_empty() && !p1.is_empty() {
// {,3}
let min = 0;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
| if let Some(p) = stack.pop() {
let rep = parse_specific_repetition(rep, p)?;
stack.push(rep);
s = newst; | random_line_split |
parse.rs | <'a> {
/// The string to parse. This may be a substring of the "overall" matched string.
src: &'a [char],
/// The position within the overall string (for error reporting).
pos: usize,
}
impl<'a> ParseState<'a> {
/// new returns a new ParseState operating on the specified input string.
fn new(s: &'a [char]) -> ParseState<'a> {
ParseState { src: s, pos: 0 }
}
/// from returns a new ParseState operating on the [from..] sub-string of the current
/// ParseState.
fn from(&self, from: usize) -> ParseState<'a> {
self.sub(from, self.len())
}
/// pos returns the overall position within the input regex.
fn pos(&self) -> usize {
self.pos
}
/// sub returns a sub-ParseState containing [from..to] of the current one.
fn sub(&self, from: usize, to: usize) -> ParseState<'a> {
ParseState {
src: &self.src[from..to],
pos: self.pos + from,
}
}
/// len returns how many characters this ParseState contains.
fn len(&self) -> usize {
self.src.len()
}
/// err returns a formatted error string containing the specified message and the overall
/// position within the original input string.
fn err<T>(&self, s: &str, i: usize) -> Result<T, String> {
Err(format!("{} at :{}", s, self.pos + i))
}
}
impl<'a> Index<Range<usize>> for ParseState<'a> {
type Output = [char];
fn index(&self, r: Range<usize>) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<RangeFull> for ParseState<'a> {
type Output = [char];
fn index(&self, r: RangeFull) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<usize> for ParseState<'a> {
type Output = char;
fn index(&self, i: usize) -> &Self::Output {
&self.src[i]
}
}
impl<'a> Clone for ParseState<'a> {
fn clone(&self) -> ParseState<'a> {
ParseState {
src: self.src,
pos: self.pos,
}
}
}
/// parse_re is the parser entry point; like all parser functions, it returns either a pair of
/// (parsed pattern, new ParseState) or an error string.
fn parse_re<'a>(mut s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
// The stack assists us in parsing the linear parts of a regular expression, e.g. non-pattern
// characters, or character sets.
let mut stack = ParseStack::new();
loop {
if s.len() == 0 {
break;
}
match s[0] {
'.' => {
stack.push(Pattern::Any);
s = s.from(1);
}
'$' => {
if s.len() == 1 {
stack.push(Pattern::Anchor(AnchorLocation::End));
} else {
stack.push(Pattern::Char('$'))
}
s = s.from(1);
}
'^' => {
if s.pos() == 0 {
stack.push(Pattern::Anchor(AnchorLocation::Begin));
} else {
stack.push(Pattern::Char('^'));
}
s = s.from(1);
}
r @ '+' | r @ '*' | r @ '?' => {
if let Some(p) = stack.pop() {
let rep = match r {
'+' => Repetition::OnceOrMore(p),
'*' => Repetition::ZeroOrMore(p),
'?' => Repetition::ZeroOrOnce(p),
_ => unimplemented!(),
};
stack.push(Pattern::Repeated(Box::new(rep)));
s = s.from(1);
} else {
return s.err("+ without pattern to repeat", 0);
}
}
// Alternation: Parse the expression on the right of the pipe sign and push an
// alternation between what we've already seen and the stuff on the right.
'|' => {
let (rest, newst) = parse_re(s.from(1))?;
let left = stack.to_pattern();
stack = ParseStack::new();
stack.push(Pattern::Alternate(vec![left, rest]));
s = newst;
}
'(' => {
match split_in_parens(s.clone(), ROUND_PARENS) {
Some((parens, newst)) => {
// Parse the sub-regex within parentheses.
let (pat, rest) = parse_re(parens)?;
assert!(rest.len() == 0);
stack.push(Pattern::Submatch(Box::new(pat)));
// Set the current state to contain the string after the parentheses.
s = newst;
}
None => return s.err("unmatched (", s.len()),
}
}
')' => return s.err("unopened ')'", 0),
'[' => match parse_char_set(s) {
Ok((pat, newst)) => {
stack.push(pat);
s = newst;
}
Err(e) => return Err(e),
},
']' => return s.err("unopened ']'", 0),
'{' => {
match split_in_parens(s.clone(), CURLY_BRACKETS) {
Some((rep, newst)) => {
if let Some(p) = stack.pop() {
let rep = parse_specific_repetition(rep, p)?;
stack.push(rep);
s = newst;
} else {
return s.err("repetition {} without pattern to repeat", 0);
}
}
None => return s.err("unmatched {", s.len()),
};
}
c => {
stack.push(Pattern::Char(c));
s = s.from(1);
}
}
}
Ok((stack.to_pattern(), s))
}
/// parse_char_set parses the character set at the start of the input state.
/// Valid states are [a], [ab], [a-z], [-a-z], [a-z-] and [a-fh-kl].
fn parse_char_set<'a>(s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
if let Some((cs, rest)) = split_in_parens(s.clone(), SQUARE_BRACKETS) {
let mut chars: Vec<char> = vec![];
let mut ranges: Vec<Pattern> = vec![];
let mut st = cs;
loop {
// Try to match a range "a-z" by looking for the dash; if no dash, add character to set
// and advance.
if st.len() >= 3 && st[1] == '-' {
ranges.push(Pattern::CharRange(st[0], st[2]));
st = st.from(3);
} else if st.len() > 0 {
chars.push(st[0]);
st = st.from(1);
} else {
break;
}
}
assert_eq!(st.len(), 0);
if chars.len() == 1 {
ranges.push(Pattern::Char(chars.pop().unwrap()));
} else if !chars.is_empty() {
ranges.push(Pattern::CharSet(chars));
}
if ranges.len() == 1 {
Ok((ranges.pop().unwrap(), rest))
} else {
let pat = Pattern::Alternate(ranges);
Ok((pat, rest))
}
} else {
s.err("unmatched [", s.len())
}
}
/// Parse a repetition spec inside curly braces: {1} | {1,} | {,1} | {1,2}
fn parse_specific_repetition<'a>(rep: ParseState<'a>, p: Pattern) -> Result<Pattern, String> {
let mut nparts = 0;
let mut parts: [Option<&[char]>; 2] = Default::default();
// Split the spec on commas into at most two parts ({min,max}).
for part in rep[..].split(|c| *c == ',') {
parts[nparts] = Some(part);
nparts += 1;
if nparts == 2 {
break;
}
}
if nparts == 0 {
// {}
return rep.err("empty {} spec", 0);
} else if nparts == 1 {
// {1}
if let Ok(n) = u32::from_str(&String::from_iter(parts[0].unwrap().iter())) {
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p, n, None,
))));
} else {
return Err(format!(
"invalid repetition '{}'",
String::from_iter(rep[..].iter())
));
}
} else if nparts == 2 {
fn errtostr(r: Result<u32, std::num::ParseIntError>) -> Result<u32, String> {
match r {
Ok(u) => Ok(u),
Err(e) => Err(format!("{}", e)),
}
}
let (p0, p1) = (parts[0].unwrap(), parts[1].unwrap());
// {2,3}
if !p0 | ParseState | identifier_name |
|
regression.py | ____',
'try dropping "type" _____,'
'try adding "type" _____, to improve score by ____'],
[[str(round(pred_val,5)), rank,
top_5_games, str(round(accuracy,2)), decrease_gain_tup,
increase_gain_tup, lang_dep_gain_tup, game_type_tup]])
else:
return (['Your game is likely to be voted for by ____ users on BoardGameGeek',
'placing you at a ____ rank among 4093 games in our dataset',
'with top 5 BGG board games being _____',
'This prediction is only ____ percent accurate.',
'try decreasing ____,'
'to improve score by (for each unit decreased) ____',
'try increasing ____,'
'to improve score by (for each unit increased) ____',
'try changing "Language dependency" to ____,'
'to improve score by ____',
'try dropping "type" _____, try adding "type" _____,'
'to improve score by ____'],
[[str(round(pred_val,0)), rank,
top_5_games,str(round(accuracy,2)), decrease_gain_tup,
increase_gain_tup, lang_dep_gain_tup, game_type_tup]])
def construct_x(input_dict, rating_bool):
'''
Construct x vector using user inputs from Django by matching Django
fields to column names in internal data, using field inputs to create
required columns and finally add a 'ones' column for constant of the
regression equation.
Input: (dict) Dictionary produced by Django UI, containing
required fields for the prediction using regression
Output: (pandas Series) Column vector
'''
x_dict = {}
type_lst =[]
for field in input_dict.keys():
if field == 'Language dependency':
for dummy, complexity in django_to_local_cols[field].items():
x_dict[dummy] = 0
if input_dict[field] == complexity:
x_dict[dummy] = 1
elif field in ['Type 1', 'Type 2', 'Type 3']:
type_lst.append(input_dict[field])
else:
col_name = django_to_local_cols[field]
value = input_dict[field]
x_dict[col_name] = value
for type_dummy in django_to_local_cols['Type']:
x_dict[type_dummy] = 0
if type_dummy in type_lst:
x_dict[type_dummy] = 1
x = pd.DataFrame(x_dict, index = ['obs'])
if rating_bool:
pred_vars = rating_lst
else:
pred_vars = popularity_lst
x = x.loc[:,pred_vars]
prepend_ones_col(x)
return x
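# A hypothetical shape for `input_dict` (the field names come from the
# branches above; the values and any remaining keys are illustrative):
#
#   input_dict = {'Language dependency': 'No necessary in-game text',
#                 'Type 1': 'Strategy Game', 'Type 2': 'Thematic',
#                 'Type 3': 'War Game', ...}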
def construct_X_y(rating_bool):
'''
Process raw data (data cleaning, data type coercion, creating dummy
variables) pulled from BoardGameGeek API and then use it to construct X
matrix and y vector to be plugged into the regress function.
Input: (bool) Indicates which regression model to run
Outputs:
X: (pandas DataFrame) X matrix containing observations of regressors
y: (pandas Series) column vector containing observations of dependent
variable
raw_df: (pandas DataFrame) processed dataframe
dep_var: (str) name of dependent variable
'''
raw_df = pd.read_csv("all_games.csv")
raw_df = raw_df.loc[:,['bgg_id', 'is_boardgame', 'name', 'name_coerced',
'minplaytime', 'maxplaytime', 'suggested_numplayers',
'suggested_language', 'num_ratings',
'Board Game_avg_rating', 'Strategy Game',
'Family Game', 'Party Game', 'Abstract Game', 'Thematic',
'War Game','Customizable', "Children's Game",
'num_categories', 'num_mechanics','averageweight']]
raw_df = raw_df[raw_df['is_boardgame'] == True]
raw_df = raw_df.dropna(subset=['suggested_language'])
create_lang_dummy(raw_df)
raw_df = raw_df.astype({'Strategy Game':'int64', 'Family Game': 'int64',
'Party Game': 'int64', 'Abstract Game': 'int64',
'Thematic': 'int64', 'War Game': 'int64',
'Customizable': 'int64', "Children's Game": 'int64',
'lang_dep2': 'int64', 'lang_dep3': 'int64',
'lang_dep4': 'int64', 'lang_dep5': 'int64'})
raw_df['suggested_numplayers'] = raw_df['suggested_numplayers']\
.astype('string').str.strip('+').astype('int64')
raw_df['avg_playtime'] = (raw_df['minplaytime'] + raw_df['maxplaytime'])/2
raw_df = raw_df[raw_df['suggested_numplayers'] != 0]
raw_df = raw_df[raw_df['avg_playtime'] != 0]
raw_df = raw_df.dropna()
if rating_bool:
pred_vars, dep_var = rating_lst, 'Board Game_avg_rating'
else:
pred_vars, dep_var = popularity_lst, 'num_ratings'
X = raw_df.loc[:,pred_vars]
prepend_ones_col(X)
y = raw_df[dep_var]
return X, y, raw_df, dep_var
def create_lang_dummy(df):
'''
Create and insert (k-1) dummy variables for k Language dependency categories
in the dataframe.
Input: (pandas DataFrame) BGG data
'''
lang_dep = {'No necessary in-game text':1,
'Some necessary text - easily memorized or small crib sheet':2,
'Moderate in-game text - needs crib sheet or paste ups':3,
'Extensive use of text - massive conversion needed to be playable':4,
'Unplayable in another language':5}
categories = pd.unique(df['suggested_language'])
for category in categories:
if lang_dep[category] != 1:
dummy_name = 'lang_dep' + str(lang_dep[category])
df[dummy_name] = df['suggested_language'] == category
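# This is standard k-1 dummy coding: the base category, 'No necessary
# in-game text' (level 1), gets no column, so a game tagged 'Unplayable in
# another language' ends up with lang_dep5 == True and lang_dep2..4 == False.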
def prepend_ones_col(X):
'''
Add a ones column to the left side of pandas DataFrame.
Input: (pandas DataFrame) X matrix
'''
X.insert(0,'ones', 1)
def regress(X, y):
'''
Regress X matrix on y vector and calculate beta vector.
Inputs:
X (pandas DataFrame): X matrix containing observations of regressors
y (pandas Series): y vector
Outputs:
coef (pandas DataFrame): beta vector containing coefficient estimates
for the regressors
'''
beta = np.linalg.lstsq(X, y, rcond=None)[0]
#Source: /home/syedajaisha/capp30121-aut-20-syedajaisha/pa5/util.py
col_names = list(X.columns)
col_names[0] = 'intercept'
coef = pd.DataFrame({'beta': beta}, index=col_names)
return coef
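# A minimal end-to-end sketch of how these pieces compose (illustrative only;
# `rating_bool=True` selects the BGG-rating model):
#
#   X, y, raw_df, dep_var = construct_X_y(rating_bool=True)
#   coef = regress(X, y)
#   fit = calculate_R2(X, y, coef['beta'])   # model fit, in percent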
def calculate_R2(X, y, beta):
'''
Calculate R_squared for a regression model
Inputs:
X (pandas DataFrame): X matrix
y (pandas Series): y vector
beta(pandas DataFrame): beta vector
Output: (float) R_squared
'''
yhat = apply_beta(beta, X)
R2 = 1 - (np.sum((y - yhat)**2) / np.sum((y - np.mean(y))**2))
#Source: /home/syedajaisha/capp30121-aut-20-syedajaisha/pa5/regression.py
return R2*100
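# The statistic above is the coefficient of determination, scaled to a
# percentage:
#   R^2 = 1 - sum((y - yhat)^2) / sum((y - mean(y))^2)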
def apply_beta(beta, X):
'''
Apply beta, the vector generated by regress, to the
specified values to calculate predicted value
Inputs:
beta (pandas Series): beta vector
X (pandas DataFrame): X matrix
Output:
yhat (numpy array): predicted value
'''
yhat = np.dot(X, beta)
return yhat
def recommend(coef, input_dict, X, rating_bool):
| '''
Suggest which parameters the user could increase, decrease, or switch
categories of to raise the predicted BGG rating or number of ratings,
and report the corresponding change in the predicted value.
Inputs:
coef (pandas DataFrame): beta vector containing coefficient estimates
input_dict (dict): Dictionary produced by Django UI, containing
required fields for the prediction using regression
X (pandas DataFrame): X matrix
rating_bool (bool): Indicates which regression model to run
Disclaimer: This function doesn't recommend changing everything to arrive at
the optimal result. For example, in case a game already has three types, it
won't suggest the user to replace them all with the ones corresponding to
the largest three coefficients among all game types; it would just ask that
the existing type that adds the least value to the regression be replaced
with the type corresponding to the highest coefficient among remaining game
types | identifier_body |
|
regression.py | for predicted BGG
rating
If False, run the regression for predicted number
of ratings
input_dict (dict): Dictionary produced by Django UI, containing
required fields for the prediction using regression
Output:
(tuple of lists) Contains a list of column names and a list of columns
output for Django UI
Warning: Predicted values may be negative due to low R2 of models
'''
x = construct_x(input_dict, rating_bool)
X, y, raw_df, dep_var = construct_X_y(rating_bool)
coef = regress(X,y)
beta = coef['beta']
pred_val = apply_beta(beta, x)[0]
accuracy = calculate_R2(X, y, beta)
sorted_df_y = raw_df.sort_values(by=dep_var, ascending = False).\
reset_index(drop=True)
rank = sorted_df_y[sorted_df_y[dep_var] >= pred_val].index[-1] + 2
top_5_games = ''
for i, game in enumerate(sorted_df_y['name'][0:5]):
top_5_games += game
if i != 4:
top_5_games += ', '
decrease_gain_tup, increase_gain_tup, lang_dep_gain_tup, game_type_tup = \
recommend(coef, input_dict, X, rating_bool)
if rating_bool:
return (['Your game is likely to get a BGG rating of ____ on BoardGameGeek',
'placing you at a rank of ____ among 4093 games in our dataset',
'with top 5 BGG board games being ____',
'This prediction is only ____ percent accurate.',
'try decreasing ____,'
'to improve score by (for each unit decreased) ____',
'try increasing ____,'
'to improve score by (for each unit increased) ____',
'try changing "Language dependency" to ____,'
'to improve score by ____',
'try dropping "type" _____,'
'try adding "type" _____, to improve score by ____'],
[[str(round(pred_val,5)), rank,
top_5_games, str(round(accuracy,2)), decrease_gain_tup,
increase_gain_tup, lang_dep_gain_tup, game_type_tup]])
else:
return (['Your game is likely to be voted for by ____ users on BoardGameGeek',
'placing you at a ____ rank among 4093 games in our dataset',
'with top 5 BGG board games being _____',
'This prediction is only ____ percent accurate.',
'try decreasing ____,'
'to improve score by (for each unit decreased) ____',
'try increasing ____,'
'to improve score by (for each unit increased) ____',
'try changing "Language dependency" to ____,'
'to improve score by ____',
'try dropping "type" _____, try adding "type" _____,'
'to improve score by ____'],
[[str(round(pred_val,0)), rank,
top_5_games,str(round(accuracy,2)), decrease_gain_tup,
increase_gain_tup, lang_dep_gain_tup, game_type_tup]])
def construct_x(input_dict, rating_bool):
'''
Construct x vector using user inputs from Django by matching Django
fields to column names in internal data, using field inputs to create
required columns and finally add a 'ones' column for constant of the
regression equation.
Input: (dict) Dictionary produced by Django UI, containing
required fields for the prediction using regression
Output: (pandas Series) Column vector
'''
x_dict = {}
type_lst =[]
for field in input_dict.keys():
if field == 'Language dependency':
for dummy, complexity in django_to_local_cols[field].items():
x_dict[dummy] = 0
if input_dict[field] == complexity:
x_dict[dummy] = 1
elif field in ['Type 1', 'Type 2', 'Type 3']:
type_lst.append(input_dict[field])
else:
col_name = django_to_local_cols[field]
value = input_dict[field]
x_dict[col_name] = value
for type_dummy in django_to_local_cols['Type']:
x_dict[type_dummy] = 0
if type_dummy in type_lst:
x_dict[type_dummy] = 1
x = pd.DataFrame(x_dict, index = ['obs'])
if rating_bool:
pred_vars = rating_lst
else:
pred_vars = popularity_lst
x = x.loc[:,pred_vars]
prepend_ones_col(x)
return x
def construct_X_y(rating_bool):
'''
Process raw data (data cleaning, data type coercion, creating dummy
variables) pulled from BoardGameGeek API and then use it to construct X
matrix and y vector to be plugged into the regress function.
Input: (bool) Indicates which regression model to run
Outputs:
X: (pandas DataFrame) X matrix containing observations of regressors
y: (pandas Series) column vector containing observations of dependent
variable
raw_df: (pandas DataFrame) processed dataframe
dep_var: (str) name of dependent variable
'''
raw_df = pd.read_csv("all_games.csv")
raw_df = raw_df.loc[:,['bgg_id', 'is_boardgame', 'name', 'name_coerced',
'minplaytime', 'maxplaytime', 'suggested_numplayers',
'suggested_language', 'num_ratings',
'Board Game_avg_rating', 'Strategy Game',
'Family Game', 'Party Game', 'Abstract Game', 'Thematic',
'War Game','Customizable', "Children's Game",
'num_categories', 'num_mechanics','averageweight']]
raw_df = raw_df[raw_df['is_boardgame'] == True]
raw_df = raw_df.dropna(subset=['suggested_language'])
create_lang_dummy(raw_df)
raw_df = raw_df.astype({'Strategy Game':'int64', 'Family Game': 'int64',
'Party Game': 'int64', 'Abstract Game': 'int64',
'Thematic': 'int64', 'War Game': 'int64',
'Customizable': 'int64', "Children's Game": 'int64',
'lang_dep2': 'int64', 'lang_dep3': 'int64',
'lang_dep4': 'int64', 'lang_dep5': 'int64'})
raw_df['suggested_numplayers'] = raw_df['suggested_numplayers']\
.astype('string').str.strip('+').astype('int64')
raw_df['avg_playtime'] = (raw_df['minplaytime'] + raw_df['maxplaytime'])/2
raw_df = raw_df[raw_df['suggested_numplayers'] != 0]
raw_df = raw_df[raw_df['avg_playtime'] != 0]
raw_df = raw_df.dropna()
if rating_bool:
pred_vars, dep_var = rating_lst, 'Board Game_avg_rating'
else:
pred_vars, dep_var = popularity_lst, 'num_ratings'
X = raw_df.loc[:,pred_vars]
prepend_ones_col(X)
y = raw_df[dep_var]
return X, y, raw_df, dep_var
def create_lang_dummy(df):
'''
Create and insert (k-1) dummy variables for k Language dependency categories
in the dataframe.
Input: (pandas DataFrame) BGG data
'''
lang_dep = {'No necessary in-game text':1,
'Some necessary text - easily memorized or small crib sheet':2,
'Moderate in-game text - needs crib sheet or paste ups':3,
'Extensive use of text - massive conversion needed to be playable':4,
'Unplayable in another language':5}
categories = pd.unique(df['suggested_language'])
for category in categories:
if lang_dep[category] != 1:
dummy_name = 'lang_dep' + str(lang_dep[category])
df[dummy_name] = df['suggested_language'] == category
def prepend_ones_col(X):
'''
Add a ones column to the left side of pandas DataFrame.
Input: (pandas DataFrame) X matrix
'''
X.insert(0,'ones', 1)
def regress(X, y):
'''
Regress X matrix on y vector and calculate beta vector.
Inputs:
X (pandas DataFrame): X matrix containing observations of regressors
y (pandas Series): y vector
Outputs:
coef (pandas DataFrame): beta vector containing coefficient estimates
for the regressors
| #Source: /home/syedajaisha/capp30121-aut-20-syedajaisha/pa5/util.py
col_names = list(X.columns)
col_names[0] = 'intercept'
coef = pd.DataFrame({'beta': beta}, index=col_names)
return coef
def calculate_R2(X, y, beta):
'''
Calculate R_squared for a regression model
Inputs:
X (pandas DataFrame): X | '''
beta = np.linalg.lstsq(X, y, rcond=None)[0] | random_line_split |
regression.py | in input_dict.keys():
if field == 'Language dependency':
for dummy, complexity in django_to_local_cols[field].items():
x_dict[dummy] = 0
if input_dict[field] == complexity:
x_dict[dummy] = 1
elif field in ['Type 1', 'Type 2', 'Type 3']:
type_lst.append(input_dict[field])
else:
col_name = django_to_local_cols[field]
value = input_dict[field]
x_dict[col_name] = value
for type_dummy in django_to_local_cols['Type']:
x_dict[type_dummy] = 0
if type_dummy in type_lst:
x_dict[type_dummy] = 1
x = pd.DataFrame(x_dict, index = ['obs'])
if rating_bool:
pred_vars = rating_lst
else:
pred_vars = popularity_lst
x = x.loc[:,pred_vars]
prepend_ones_col(x)
return x
def construct_X_y(rating_bool):
'''
Process raw data (data cleaning, data type coercion, creating dummy
variables) pulled from BoardGameGeek API and then use it to construct X
matrix and y vector to be plugged into the regress function.
Input: (bool) Indicates which regression model to run
Outputs:
X: (pandas DataFrame) X matrix containing observations of regressors
y: (pandas Series) column vector containing obsersvations of dependent
variable
raw_df: (pandas DataFrame) processed dataframe
dep_var: (str) name of depedent variable
'''
raw_df = pd.read_csv("all_games.csv")
raw_df = raw_df.loc[:,['bgg_id', 'is_boardgame', 'name', 'name_coerced',
'minplaytime', 'maxplaytime', 'suggested_numplayers',
'suggested_language', 'num_ratings',
'Board Game_avg_rating', 'Strategy Game',
'Family Game', 'Party Game', 'Abstract Game', 'Thematic',
'War Game','Customizable', "Children's Game",
'num_categories', 'num_mechanics','averageweight']]
raw_df = raw_df[raw_df['is_boardgame'] == True]
raw_df = raw_df.dropna(subset=['suggested_language'])
create_lang_dummy(raw_df)
raw_df = raw_df.astype({'Strategy Game':'int64', 'Family Game': 'int64',
'Party Game': 'int64', 'Abstract Game': 'int64',
'Thematic': 'int64', 'War Game': 'int64',
'Customizable': 'int64', "Children's Game": 'int64',
'lang_dep2': 'int64', 'lang_dep3': 'int64',
'lang_dep4': 'int64', 'lang_dep5': 'int64'})
raw_df['suggested_numplayers'] = raw_df['suggested_numplayers']\
.astype('string').str.strip('+').astype('int64')
raw_df['avg_playtime'] = (raw_df['minplaytime'] + raw_df['maxplaytime'])/2
raw_df = raw_df[raw_df['suggested_numplayers'] != 0]
raw_df = raw_df[raw_df['avg_playtime'] != 0]
raw_df = raw_df.dropna()
if rating_bool:
pred_vars, dep_var = rating_lst, 'Board Game_avg_rating'
else:
pred_vars, dep_var = popularity_lst, 'num_ratings'
X = raw_df.loc[:,pred_vars]
prepend_ones_col(X)
y = raw_df[dep_var]
return X, y, raw_df, dep_var
def create_lang_dummy(df):
'''
Create and insert (k-1) dummy variables for k Language dependency categories
in the dataframe.
Input: (pandas DataFrame) BGG data
'''
lang_dep = {'No necessary in-game text':1,
'Some necessary text - easily memorized or small crib sheet':2,
'Moderate in-game text - needs crib sheet or paste ups':3,
'Extensive use of text - massive conversion needed to be playable':4,
'Unplayable in another language':5}
categories = pd.unique(df['suggested_language'])
for category in categories:
if lang_dep[category] != 1:
dummy_name = 'lang_dep' + str(lang_dep[category])
df[dummy_name] = df['suggested_language'] == category
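# Quick illustration (toy input assumed; `pd` is the module-level pandas
# import). For rows whose language-dependency codes are {1, 3, 5}, only the
# non-baseline categories get a column; construct_X_y later casts the
# boolean columns to int64.
demo = pd.DataFrame({'suggested_language': [
    'No necessary in-game text',
    'Moderate in-game text - needs crib sheet or paste ups',
    'Unplayable in another language']})
create_lang_dummy(demo)
# demo now has lang_dep3 == [False, True, False] and lang_dep5 == [False, False, True]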
def prepend_ones_col(X):
'''
Add a ones column to the left side of pandas DataFrame.
Input: (pandas DataFrame) X matrix
'''
X.insert(0,'ones', 1)
def regress(X, y):
'''
Regress the y vector on the X matrix and calculate the beta vector.
Inputs:
X (pandas DataFrame): X matrix containing observations of regressors
y (pandas Series): y vector
Outputs:
coef (pandas DataFrame): beta vector containing coefficient estimates
for the regressors
'''
beta = np.linalg.lstsq(X, y, rcond=None)[0]
#Source: /home/syedajaisha/capp30121-aut-20-syedajaisha/pa5/util.py
col_names = list(X.columns)
col_names[0] = 'intercept'
coef = pd.DataFrame({'beta': beta}, index=col_names)
return coef
def calculate_R2(X, y, beta):
'''
Calculate R_squared for a regression model
Inputs:
X (pandas DataFrame): X matrix
y (pandas Series): y vector
beta(pandas DataFrame): beta vector
Output: (float) R_squared
'''
yhat = apply_beta(beta, X)
R2 = 1 - (np.sum((y - yhat)**2) / np.sum((y - np.mean(y))**2))
#Source: /home/syedajaisha/capp30121-aut-20-syedajaisha/pa5/regression.py
return R2*100
def apply_beta(beta, X):
'''
Apply beta, the vector generated by regress, to the
specified values to calculate predicted value
Inputs:
beta (pandas Series): beta vector
X (pandas DataFrame): X matrix
Output:
yhat (numpy array): predicted value
'''
yhat = np.dot(X, beta)
return yhat
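# Tiny worked example (values assumed): with a ones column, yhat = X @ beta
# expands to b0 + b1*x per row, e.g.
#   X = [[1, 2.0], [1, 3.0]], beta = [1.0, 0.5]  =>  yhat = [2.0, 2.5]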
def recommend(coef, input_dict, X, rating_bool):
'''
Make recommendations about which parameters the user could potentially
increase, decrease, or switch categories of to raise the predicted BGG
rating or number of ratings, and report the corresponding change in the
predicted value
Inputs:
coef (pandas DataFrame): beta vector containing coefficient estimates
input_dict (dict): Dictionary produced by Django UI, containing
required fields for the prediction using regression
X (pandas DataFrame): X matrix
rating_bool (bool): Indicates which regression model to run
Disclaimer: This function doesn't recommend changing everything to arrive at
the optimal result. For example, if a game already has three types, it
won't suggest that the user replace them all with the ones corresponding to
the three largest coefficients among all game types; it will only suggest
that the existing type adding the least value to the regression be replaced
with the type corresponding to the highest coefficient among the remaining
game types
'''
dummy_var = ['Language dependency', 'Type']
decrease_gain_tup = []
increase_gain_tup =[]
lang_dep_gain_tup = []
game_type_tup= []
if rating_bool:
beta = round(coef['beta'],4)
else:
beta = round(coef['beta'],0).astype('int64')
for field in django_to_local_cols:
if field not in dummy_var:
if field in input_dict:
if beta[django_to_local_cols[field]] < 0:
if input_dict[field] > min(X[django_to_local_cols[field]]):
decrease_gain_tup.append((field, -beta[django_to_local_cols[field]]))
else:
if input_dict[field] < max(X[django_to_local_cols[field]]):
increase_gain_tup.append((field, beta[django_to_local_cols[field]]))
elif field == 'Language dependency':
current_lang_dep = 'lang_dep' + str(input_dict['Language dependency'])
if current_lang_dep == 'lang_dep1':
for lang_dep_dummy in django_to_local_cols['Language dependency'].keys():
if beta[lang_dep_dummy] > 0:
lang_dep_gain_tup.append((django_to_local_cols['Language dependency'][lang_dep_dummy], \
beta[lang_dep_dummy]))
else:
| if beta[current_lang_dep] < 0:
lang_dep_gain_tup.append((1, -beta[current_lang_dep]))
for lang_dep_dummy in django_to_local_cols['Language dependency'].keys():
if beta[lang_dep_dummy] > beta[current_lang_dep]:
gain = -beta[current_lang_dep] + beta[lang_dep_dummy]
lang_dep_gain_tup.append((django_to_local_cols['Language dependency'][lang_dep_dummy], gain)) | conditional_block |
|
regression.py | for predicted BGG
rating
If False, run the regression for predicted number
of ratings
input_dict (dict): Dictionary produced by Django UI, containing
required fields for the prediction using regression
Output:
(tuple of lists) Contains a list of column names and a list of
column values for the Django UI output
Warning: Predicted values may be negative due to low R2 of models
'''
x = construct_x(input_dict, rating_bool)
X, y, raw_df, dep_var = construct_X_y(rating_bool)
coef = regress(X,y)
beta = coef['beta']
pred_val = apply_beta(beta, x)[0]
accuracy = calculate_R2(X, y, beta)
sorted_df_y = raw_df.sort_values(by=dep_var, ascending = False).\
reset_index(drop=True)
rank = sorted_df_y[sorted_df_y[dep_var] >= pred_val].index[-1] + 2
top_5_games = ''
for i, game in enumerate(sorted_df_y['name'][0:5]):
top_5_games += game
if i != 4:
top_5_games += ', '
decrease_gain_tup, increase_gain_tup, lang_dep_gain_tup, game_type_tup = \
recommend(coef, input_dict, X, rating_bool)
if rating_bool:
return (['Your game is likely to get a BGG rating of ____ on BoardGameGeek',
'placing you at a rank of ____ among 4093 games in our dataset',
'with top 5 BGG board games being ____',
'This prediction is only ____ percent accurate.',
'try decreasing ____,'
'to improve score by (for each unit decreased) ____',
'try increasing ____,'
'to improve score by (for each unit increased) ____',
'try changing "Language dependency" to ____,'
'to improve score by ____',
'try dropping "type" _____,'
'try adding "type" _____, to improve score by ____'],
[[str(round(pred_val,5)), rank,
top_5_games, str(round(accuracy,2)), decrease_gain_tup,
increase_gain_tup, lang_dep_gain_tup, game_type_tup]])
else:
return (['Your game is likely to be voted for by ____ users on BoardGameGeek',
'placing you at a ____ rank among 4093 games in our dataset',
'with top 5 BGG board games being _____',
'This prediction is only ____ percent accurate.',
'try decreasing ____,'
'to improve score by (for each unit decreased) ____',
'try increasing ____,'
'to improve score by (for each unit increased) ____',
'try changing "Language dependency" to ____,'
'to improve score by ____',
'try dropping "type" _____, try adding "type" _____,'
'to improve score by ____'],
[[str(round(pred_val,0)), rank,
top_5_games,str(round(accuracy,2)), decrease_gain_tup,
increase_gain_tup, lang_dep_gain_tup, game_type_tup]])
def construct_x(input_dict, rating_bool):
'''
Construct x vector using user inputs from Django by matching Django
fields to column names in internal data, using field inputs to create
required columns and finally add a 'ones' column for constant of the
regression equation.
Input: (dict) Dictionary produced by Django UI, containing
required fields for the prediction using regression
Output: (pandas Series) Column vector
'''
x_dict = {}
type_lst =[]
for field in input_dict.keys():
if field == 'Language dependency':
for dummy, complexity in django_to_local_cols[field].items():
x_dict[dummy] = 0
if input_dict[field] == complexity:
x_dict[dummy] = 1
elif field in ['Type 1', 'Type 2', 'Type 3']:
type_lst.append(input_dict[field])
else:
col_name = django_to_local_cols[field]
value = input_dict[field]
x_dict[col_name] = value
for type_dummy in django_to_local_cols['Type']:
x_dict[type_dummy] = 0
if type_dummy in type_lst:
x_dict[type_dummy] = 1
x = pd.DataFrame(x_dict, index = ['obs'])
if rating_bool:
pred_vars = rating_lst
else:
pred_vars = popularity_lst
x = x.loc[:,pred_vars]
prepend_ones_col(x)
return x
def construct_X_y(rating_bool):
'''
Process raw data (data cleaning, data type coercion, creating dummy
variables) pulled from BoardGameGeek API and then use it to construct X
matrix and y vector to be plugged into the regress function.
Input: (bool) Indicates which regression model to run
Outputs:
X: (pandas DataFrame) X matrix containing observations of regressors
y: (pandas Series) column vector containing observations of dependent
variable
raw_df: (pandas DataFrame) processed dataframe
dep_var: (str) name of dependent variable
'''
raw_df = pd.read_csv("all_games.csv")
raw_df = raw_df.loc[:,['bgg_id', 'is_boardgame', 'name', 'name_coerced',
'minplaytime', 'maxplaytime', 'suggested_numplayers',
'suggested_language', 'num_ratings',
'Board Game_avg_rating', 'Strategy Game',
'Family Game', 'Party Game', 'Abstract Game', 'Thematic',
'War Game','Customizable', "Children's Game",
'num_categories', 'num_mechanics','averageweight']]
raw_df = raw_df[raw_df['is_boardgame'] == True]
raw_df = raw_df.dropna(subset=['suggested_language'])
create_lang_dummy(raw_df)
raw_df = raw_df.astype({'Strategy Game':'int64', 'Family Game': 'int64',
'Party Game': 'int64', 'Abstract Game': 'int64',
'Thematic': 'int64', 'War Game': 'int64',
'Customizable': 'int64', "Children's Game": 'int64',
'lang_dep2': 'int64', 'lang_dep3': 'int64',
'lang_dep4': 'int64', 'lang_dep5': 'int64'})
raw_df['suggested_numplayers'] = raw_df['suggested_numplayers']\
.astype('string').str.strip('+').astype('int64')
raw_df['avg_playtime'] = (raw_df['minplaytime'] + raw_df['maxplaytime'])/2
raw_df = raw_df[raw_df['suggested_numplayers'] != 0]
raw_df = raw_df[raw_df['avg_playtime'] != 0]
raw_df = raw_df.dropna()
if rating_bool:
pred_vars, dep_var = rating_lst, 'Board Game_avg_rating'
else:
pred_vars, dep_var = popularity_lst, 'num_ratings'
X = raw_df.loc[:,pred_vars]
prepend_ones_col(X)
y = raw_df[dep_var]
return X, y, raw_df, dep_var
def create_lang_dummy(df):
'''
Create and insert (k-1) dummy variables for k Language dependency categories
in the dataframe.
Input: (pandas DataFrame) BGG data
'''
lang_dep = {'No necessary in-game text':1,
'Some necessary text - easily memorized or small crib sheet':2,
'Moderate in-game text - needs crib sheet or paste ups':3,
'Extensive use of text - massive conversion needed to be playable':4,
'Unplayable in another language':5}
categories = pd.unique(df['suggested_language'])
for category in categories:
if lang_dep[category] != 1:
dummy_name = 'lang_dep' + str(lang_dep[category])
df[dummy_name] = df['suggested_language'] == category
def prepend_ones_col(X):
'''
Add a ones column to the left side of pandas DataFrame.
Input: (pandas DataFrame) X matrix
'''
X.insert(0,'ones', 1)
def regress(X, y):
'''
Regress the y vector on the X matrix and calculate the beta vector.
Inputs:
X (pandas DataFrame): X matrix containing observations of regressors
y (pandas Series): y vector
Outputs:
coef (pandas DataFrame): beta vector containing coefficient estimates
for the regressors
'''
beta = np.linalg.lstsq(X, y, rcond=None)[0]
#Source: /home/syedajaisha/capp30121-aut-20-syedajaisha/pa5/util.py
col_names = list(X.columns)
col_names[0] = 'intercept'
coef = pd.DataFrame({'beta': beta}, index=col_names)
return coef
def | (X, y, beta):
'''
Calculate R_squared for a regression model
Inputs:
X (pandas DataFrame): | calculate_R2 | identifier_name |
iggo.go | int `json:"follower_count"`
Byline string `json:"byline"`
}
}
Tags []struct {
Tag struct {
Name string `json:"name"`
MediaCount int `json:"media_count"`
} `json:"hashtag"`
} `json:"hashtags"`
}
type Tag struct {
Name string
Posts []*Post
}
func GetPost(r *http.Request) (*Post, error) {
shortcode := strings.TrimRight(r.URL.Path[len("/post/"):], "/")
if shortcode == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/p/%s/", shortcode))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetPostFromMarkup(resp.Body)
}
func GetPostFromMarkup(body io.Reader) (*Post, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "PostPage").GetIndex(0).GetPath("graphql", "shortcode_media")
timestamp := base.Get("taken_at_timestamp").GetInt64()
likers := []*PostLiker{}
for _, edge := range base.GetPath("edge_media_preview_like", "edges").GetArray() {
n := edge.Get("node")
likers = append(likers, &PostLiker{
ProfilePic: n.Get("profile_pic_url").GetString(),
Username: n.Get("username").GetString(),
})
}
data := &Post{
Shortcode: base.Get("shortcode").GetString(),
ID: base.Get("id").GetString(),
URL: base.Get("display_url").GetString(),
Text: getText(base),
Timestamp: timestamp,
Time: humanize.Time(time.Unix(timestamp, 0)),
Likes: base.Get("edge_media_preview_like").Get("count").GetInt(),
Likers: likers,
Owner: &PostOwner{
ID: base.GetPath("owner", "id").GetString(),
ProfilePic: base.GetPath("owner", "profile_pic_url").GetString(),
Username: base.GetPath("owner", "username").GetString(),
Name: base.GetPath("owner", "full_name").GetString(),
},
}
return data, nil
}
func getText(j *simplejson.Json) string {
return j.GetPath("edge_media_to_caption", "edges").GetIndex(0).GetPath("node", "text").GetString()
}
func getPosts(j *simplejson.Json) []*Post {
var posts []*Post
for _, edge := range j.Get("edges").GetArray() {
n := edge.Get("node")
var sizes []Size
for _, s := range n.Get("thumbnail_resources").GetArray() {
sizes = append(sizes, Size{
URL: s.Get("src").GetString(),
Width: s.Get("config_width").GetInt(),
Height: s.Get("config_height").GetInt(),
})
}
timestamp := n.Get("taken_at_timestamp").GetInt64()
posts = append(posts, &Post{
ID: n.Get("id").GetString(),
Shortcode: n.Get("shortcode").GetString(),
URL: n.Get("display_url").GetString(),
Timestamp: timestamp,
Time: humanize.Time(time.Unix(timestamp, 0)),
Likes: n.GetPath("edge_liked_by", "count").GetInt(),
Sizes: sizes,
Text: getText(n),
Height: n.GetPath("dimensions", "height").GetInt(),
Width: n.GetPath("dimensions", "width").GetInt(),
Thumbnail: n.Get("thumbnail_src").GetString(),
})
}
return posts
}
func GetUserFromMarkup(body io.Reader) (*User, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "ProfilePage").GetIndex(0).GetPath("graphql", "user")
data := &User{
ID: base.Get("id").GetString(),
Name: base.Get("full_name").GetString(),
Username: base.Get("username").GetString(),
Bio: base.Get("biography").GetString(),
Followers: base.GetPath("edge_followed_by", "count").GetInt(),
Following: base.GetPath("edge_follow", "count").GetInt(),
ProfilePic: base.Get("profile_pic_url_hd").GetString(),
Posts: getPosts(base.Get("edge_owner_to_timeline_media")),
}
return data, nil
}
func GetTagFromMarkup(body io.Reader) (*Tag, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "TagPage").GetIndex(0).GetPath("graphql", "hashtag")
data := &Tag{
Name: base.Get("name").GetString(),
Posts: getPosts(base.Get("edge_hashtag_to_media")),
}
return data, nil
}
// GetUserFromUsername takes a username, makes a request
// and parses the response into a User struct, returning a pointer
func GetUserFromUsername(username string) (*User, error) {
if username == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/%s/", username))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetUserFromMarkup(resp.Body)
}
func GetUser(r *http.Request) (*User, error) {
username := strings.TrimRight(r.URL.Path[len("/user/"):], "/")
return GetUserFromUsername(username)
}
func GetTag(r *http.Request) (*Tag, error) {
slug := strings.TrimRight(r.URL.Path[len("/tag/"):], "/")
if slug == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/explore/tags/%s/", slug))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetTagFromMarkup(resp.Body)
}
func sizemax(p *Post, w int) Size {
ix := 0
for i, s := range p.Sizes {
if s.Width <= w {
ix = i
} else {
break
}
}
return p.Sizes[ix]
}
func linkify(s string) template.HTML {
t := regexp.MustCompile(`(?i)#([\p{L}\w]+)`)
s = t.ReplaceAllString(s, `<a href="/tag/$1">#$1</a>`)
u := regexp.MustCompile(`(?i)@([\p{L}\w.]+)`)
s = u.ReplaceAllString(s, `<a href="/user/$1">@$1</a>`)
return template.HTML(s)
}
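// Illustrative helper (not in the original file): what the two regexes in
// linkify produce for a sample caption.
func exampleLinkify() {
	out := linkify("Lovely #sunset by @jane.doe")
	// out == `Lovely <a href="/tag/sunset">#sunset</a> by <a href="/user/jane.doe">@jane.doe</a>`
	fmt.Println(out)
}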
func setupTemplates() {
base := template.Must(template.ParseFiles("templates/base.html")).Funcs(templateFuncs)
if _, err := base.ParseFiles("templates/custom.html"); err != nil {
base.New("custom.html").Parse("")
}
keys := []string{"index", "post", "search", "tag", "user"}
for _, key := range keys {
clone := template.Must(base.Clone())
tmpl := template.Must(clone.ParseFiles("templates/" + key + ".html"))
templateMap[key] = tmpl
}
}
func renderTemplate(w http.ResponseWriter, key string, data interface{}) *appError {
tmpl, ok := templateMap[key]
if !ok {
return &appError{"Template error", 500, fmt.Errorf(`template "%s" not found`, key)}
}
err := tmpl.ExecuteTemplate(w, "base.html", data)
if err != nil |
return nil
}
func sharedData(r io.Reader) []byte {
re := regexp.MustCompile(`window._sharedData\s?=\s?(.*);</script>`)
b, err := ioutil.ReadAll(r)
if err != nil {
return nil
}
matches := re.FindSubmatch(b)
if len(matches) < 2 {
return nil
}
return matches[1]
}
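// Sketch (sample markup assumed): sharedData pulls the embedded JSON blob out
// of the page so callers can hand it to simplejson.NewJson.
func exampleSharedData() {
	page := strings.NewReader(`<html><script>window._sharedData = {"entry_data":{}};</script></html>`)
	fmt.Printf("%s\n", sharedData(page)) // prints {"entry_data":{}}
}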
func getSearchResult(q string) (*SearchResult, error) {
sr := &SearchResult{}
qs := &url.Values{}
qs.Add("context", "blended")
qs.Add("query", q)
r, err := client.Get("https://www.instagram.com/web/search/topsearch/?" + qs.Encode())
if err != nil {
return sr, err
}
defer r.Body.Close()
err = json.NewDecoder(r.Body).Decode(sr)
return sr, err
}
func renderJSON(w http.ResponseWriter, data interface{}) *appError {
w.Header().Set("Content-Type", "application/json")
err := json.NewEncoder(w).Encode(data)
if err != nil {
return &appError{"Could not write response", 500, err}
}
return nil
}
func makeFeedHandler() http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
username := strings.TrimRight(r.URL.Path[len("/feed/"):], "/")
| {
return &appError{"Template error", 500, err}
} | conditional_block |
iggo.go | : n.Get("shortcode").GetString(),
URL: n.Get("display_url").GetString(),
Timestamp: timestamp,
Time: humanize.Time(time.Unix(timestamp, 0)),
Likes: n.GetPath("edge_liked_by", "count").GetInt(),
Sizes: sizes,
Text: getText(n),
Height: n.GetPath("dimensions", "height").GetInt(),
Width: n.GetPath("dimensions", "width").GetInt(),
Thumbnail: n.Get("thumbnail_src").GetString(),
})
}
return posts
}
func GetUserFromMarkup(body io.Reader) (*User, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "ProfilePage").GetIndex(0).GetPath("graphql", "user")
data := &User{
ID: base.Get("id").GetString(),
Name: base.Get("full_name").GetString(),
Username: base.Get("username").GetString(),
Bio: base.Get("biography").GetString(),
Followers: base.GetPath("edge_followed_by", "count").GetInt(),
Following: base.GetPath("edge_follow", "count").GetInt(),
ProfilePic: base.Get("profile_pic_url_hd").GetString(),
Posts: getPosts(base.Get("edge_owner_to_timeline_media")),
}
return data, nil
}
func GetTagFromMarkup(body io.Reader) (*Tag, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "TagPage").GetIndex(0).GetPath("graphql", "hashtag")
data := &Tag{
Name: base.Get("name").GetString(),
Posts: getPosts(base.Get("edge_hashtag_to_media")),
}
return data, nil
}
// GetUserFromUsername takes a username, makes a request
// and parses the response into a User struct, returning a pointer
func GetUserFromUsername(username string) (*User, error) {
if username == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/%s/", username))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetUserFromMarkup(resp.Body)
}
func GetUser(r *http.Request) (*User, error) {
username := strings.TrimRight(r.URL.Path[len("/user/"):], "/")
return GetUserFromUsername(username)
}
func GetTag(r *http.Request) (*Tag, error) {
slug := strings.TrimRight(r.URL.Path[len("/tag/"):], "/")
if slug == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/explore/tags/%s/", slug))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetTagFromMarkup(resp.Body)
}
func sizemax(p *Post, w int) Size {
ix := 0
for i, s := range p.Sizes {
if s.Width <= w {
ix = i
} else {
break
}
}
return p.Sizes[ix]
}
func linkify(s string) template.HTML {
t := regexp.MustCompile(`(?i)#([\p{L}\w]+)`)
s = t.ReplaceAllString(s, `<a href="/tag/$1">#$1</a>`)
u := regexp.MustCompile(`(?i)@([\p{L}\w.]+)`)
s = u.ReplaceAllString(s, `<a href="/user/$1">@$1</a>`)
return template.HTML(s)
}
func setupTemplates() {
base := template.Must(template.ParseFiles("templates/base.html")).Funcs(templateFuncs)
if _, err := base.ParseFiles("templates/custom.html"); err != nil {
base.New("custom.html").Parse("")
}
keys := []string{"index", "post", "search", "tag", "user"}
for _, key := range keys {
clone := template.Must(base.Clone())
tmpl := template.Must(clone.ParseFiles("templates/" + key + ".html"))
templateMap[key] = tmpl
}
}
func renderTemplate(w http.ResponseWriter, key string, data interface{}) *appError {
tmpl, ok := templateMap[key]
if !ok {
return &appError{"Template error", 500, fmt.Errorf(`template "%s" not found`, key)}
}
err := tmpl.ExecuteTemplate(w, "base.html", data)
if err != nil {
return &appError{"Template error", 500, err}
}
return nil
}
func sharedData(r io.Reader) []byte {
re := regexp.MustCompile(`window._sharedData\s?=\s?(.*);</script>`)
b, err := ioutil.ReadAll(r)
if err != nil {
return nil
}
matches := re.FindSubmatch(b)
if len(matches) < 2 {
return nil
}
return matches[1]
}
func getSearchResult(q string) (*SearchResult, error) {
sr := &SearchResult{}
qs := &url.Values{}
qs.Add("context", "blended")
qs.Add("query", q)
r, err := client.Get("https://www.instagram.com/web/search/topsearch/?" + qs.Encode())
if err != nil {
return sr, err
}
defer r.Body.Close()
err = json.NewDecoder(r.Body).Decode(sr)
return sr, err
}
func renderJSON(w http.ResponseWriter, data interface{}) *appError {
w.Header().Set("Content-Type", "application/json")
err := json.NewEncoder(w).Encode(data)
if err != nil {
return &appError{"Could not write response", 500, err}
}
return nil
}
func makeFeedHandler() http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
username := strings.TrimRight(r.URL.Path[len("/feed/"):], "/")
user, err := GetUserFromUsername(username)
if err != nil {
log.Printf("Error fetching user (%s) data for feed: %s", username, err)
w.Write([]byte("Error"))
return
}
now := time.Now()
feed := &feeds.Feed{
Title: fmt.Sprintf("Instagram Posts by %s", username),
Link: &feeds.Link{Href: fmt.Sprintf("https://www.instagram.com/%s", username)},
Description: fmt.Sprintf("Recent photos posted by %s on Instagram", username),
Created: now,
}
for _, post := range user.Posts {
item := feeds.Item{
Id: post.Shortcode,
Title: post.Text,
Link: &feeds.Link{Href: fmt.Sprintf("https://www.instagram.com/p/%s", post.Shortcode)},
Author: &feeds.Author{Name: username},
Created: time.Unix(post.Timestamp, 0),
Content: sizemax(post, 800).URL,
}
feed.Add(&item)
}
err = feed.WriteRss(w)
if err != nil {
log.Printf("Error writing feed: %s", err)
}
})
}
func makeIndex() appHandler {
return func(w http.ResponseWriter, r *http.Request) *appError {
q := r.FormValue("q")
if q != "" {
sr, _ := getSearchResult(q)
sr.Query = q
if r.URL.Query().Get("format") == "json" {
return renderJSON(w, &sr)
}
return renderTemplate(w, "search", sr)
}
return renderTemplate(w, "index", nil)
}
}
type appError struct {
Message string
Code int
Error error
}
type appHandler func(w http.ResponseWriter, r *http.Request) *appError
func (fn appHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Access-Control-Allow-Origin", "*")
w.Header().Set("Access-Control-Allow-Headers", "Content-Type")
if apperr := fn(w, r); apperr != nil {
http.Error(w, apperr.Message, apperr.Code)
log.Println(apperr.Error.Error())
}
}
func makeHandler(f func(*http.Request) (interface{}, error), templateKey string) appHandler {
return func(w http.ResponseWriter, r *http.Request) *appError {
data, err := f(r)
if err != nil || data == nil {
return &appError{"Could not load data", 404, err}
}
if r.URL.Query().Get("format") == "json" {
return renderJSON(w, &data)
}
return renderTemplate(w, templateKey, data)
}
}
func getListenAddr() string {
if port := os.Getenv("PORT"); port != "" {
return ":" + port
}
if addr := os.Getenv("LISTEN_ADDR"); addr != "" {
return addr
}
return "127.0.0.1:8000"
}
func userFetcher(r *http.Request) (interface{}, error) {
return GetUser(r)
}
func postFetcher(r *http.Request) (interface{}, error) | {
return GetPost(r)
} | identifier_body |
|
iggo.go | := range base.GetPath("edge_media_preview_like", "edges").GetArray() {
n := edge.Get("node")
likers = append(likers, &PostLiker{
ProfilePic: n.Get("profile_pic_url").GetString(),
Username: n.Get("username").GetString(),
})
}
data := &Post{
Shortcode: base.Get("shortcode").GetString(),
ID: base.Get("id").GetString(),
URL: base.Get("display_url").GetString(),
Text: getText(base),
Timestamp: timestamp,
Time: humanize.Time(time.Unix(timestamp, 0)),
Likes: base.Get("edge_media_preview_like").Get("count").GetInt(),
Likers: likers,
Owner: &PostOwner{
ID: base.GetPath("owner", "id").GetString(),
ProfilePic: base.GetPath("owner", "profile_pic_url").GetString(),
Username: base.GetPath("owner", "username").GetString(),
Name: base.GetPath("owner", "full_name").GetString(),
},
}
return data, nil
}
func getText(j *simplejson.Json) string {
return j.GetPath("edge_media_to_caption", "edges").GetIndex(0).GetPath("node", "text").GetString()
}
func getPosts(j *simplejson.Json) []*Post {
var posts []*Post
for _, edge := range j.Get("edges").GetArray() {
n := edge.Get("node")
var sizes []Size
for _, s := range n.Get("thumbnail_resources").GetArray() {
sizes = append(sizes, Size{
URL: s.Get("src").GetString(),
Width: s.Get("config_width").GetInt(),
Height: s.Get("config_height").GetInt(),
})
}
timestamp := n.Get("taken_at_timestamp").GetInt64()
posts = append(posts, &Post{
ID: n.Get("id").GetString(),
Shortcode: n.Get("shortcode").GetString(),
URL: n.Get("display_url").GetString(),
Timestamp: timestamp,
Time: humanize.Time(time.Unix(timestamp, 0)),
Likes: n.GetPath("edge_liked_by", "count").GetInt(),
Sizes: sizes,
Text: getText(n),
Height: n.GetPath("dimensions", "height").GetInt(),
Width: n.GetPath("dimensions", "width").GetInt(),
Thumbnail: n.Get("thumbnail_src").GetString(),
})
}
return posts
}
func GetUserFromMarkup(body io.Reader) (*User, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "ProfilePage").GetIndex(0).GetPath("graphql", "user")
data := &User{
ID: base.Get("id").GetString(),
Name: base.Get("full_name").GetString(),
Username: base.Get("username").GetString(),
Bio: base.Get("biography").GetString(),
Followers: base.GetPath("edge_followed_by", "count").GetInt(),
Following: base.GetPath("edge_follow", "count").GetInt(),
ProfilePic: base.Get("profile_pic_url_hd").GetString(),
Posts: getPosts(base.Get("edge_owner_to_timeline_media")),
}
return data, nil
}
func GetTagFromMarkup(body io.Reader) (*Tag, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "TagPage").GetIndex(0).GetPath("graphql", "hashtag")
data := &Tag{
Name: base.Get("name").GetString(),
Posts: getPosts(base.Get("edge_hashtag_to_media")),
}
return data, nil
}
// GetUserFromUsername takes a username, makes a request
// and parses the response into a User struct, returning a pointer
func GetUserFromUsername(username string) (*User, error) {
if username == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/%s/", username))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetUserFromMarkup(resp.Body)
}
func GetUser(r *http.Request) (*User, error) {
username := strings.TrimRight(r.URL.Path[len("/user/"):], "/")
return GetUserFromUsername(username)
}
func GetTag(r *http.Request) (*Tag, error) {
slug := strings.TrimRight(r.URL.Path[len("/tag/"):], "/")
if slug == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/explore/tags/%s/", slug))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetTagFromMarkup(resp.Body)
}
func sizemax(p *Post, w int) Size {
ix := 0
for i, s := range p.Sizes {
if s.Width <= w {
ix = i
} else {
break
}
}
return p.Sizes[ix]
}
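// Worked trace (widths assumed): for ascending thumbnail widths 150, 240, 320,
// 480 and w = 400, the loop keeps the last width <= w and returns the 320px
// size; if even the first size exceeds w, it falls back to Sizes[0].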
func linkify(s string) template.HTML {
t := regexp.MustCompile(`(?i)#([\p{L}\w]+)`)
s = t.ReplaceAllString(s, `<a href="/tag/$1">#$1</a>`)
u := regexp.MustCompile(`(?i)@([\p{L}\w.]+)`)
s = u.ReplaceAllString(s, `<a href="/user/$1">@$1</a>`)
return template.HTML(s)
}
func setupTemplates() {
base := template.Must(template.ParseFiles("templates/base.html")).Funcs(templateFuncs)
if _, err := base.ParseFiles("templates/custom.html"); err != nil {
base.New("custom.html").Parse("")
}
keys := []string{"index", "post", "search", "tag", "user"}
for _, key := range keys {
clone := template.Must(base.Clone())
tmpl := template.Must(clone.ParseFiles("templates/" + key + ".html"))
templateMap[key] = tmpl
}
}
func renderTemplate(w http.ResponseWriter, key string, data interface{}) *appError {
tmpl, ok := templateMap[key]
if !ok {
return &appError{"Template error", 500, fmt.Errorf(`template "%s" not found`, key)}
}
err := tmpl.ExecuteTemplate(w, "base.html", data)
if err != nil {
return &appError{"Template error", 500, err}
}
return nil
}
func sharedData(r io.Reader) []byte {
re := regexp.MustCompile(`window._sharedData\s?=\s?(.*);</script>`)
b, err := ioutil.ReadAll(r)
if err != nil {
return nil
}
matches := re.FindSubmatch(b)
if len(matches) < 2 {
return nil
}
return matches[1]
}
func getSearchResult(q string) (*SearchResult, error) {
sr := &SearchResult{}
qs := &url.Values{}
qs.Add("context", "blended")
qs.Add("query", q)
r, err := client.Get("https://www.instagram.com/web/search/topsearch/?" + qs.Encode())
if err != nil {
return sr, err
}
defer r.Body.Close()
err = json.NewDecoder(r.Body).Decode(sr)
return sr, err
}
func renderJSON(w http.ResponseWriter, data interface{}) *appError {
w.Header().Set("Content-Type", "application/json")
err := json.NewEncoder(w).Encode(data)
if err != nil {
return &appError{"Could not write response", 500, err}
}
return nil
}
func makeFeedHandler() http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
username := strings.TrimRight(r.URL.Path[len("/feed/"):], "/")
user, err := GetUserFromUsername(username)
if err != nil {
log.Printf("Error fetching user (%s) data for feed: %s", username, err)
w.Write([]byte("Error"))
return
}
now := time.Now()
feed := &feeds.Feed{
Title: fmt.Sprintf("Instagram Posts by %s", username),
Link: &feeds.Link{Href: fmt.Sprintf("https://www.instagram.com/%s", username)},
Description: fmt.Sprintf("Recent photos posted by %s on Instagram", username),
Created: now,
}
for _, post := range user.Posts {
item := feeds.Item{
Id: post.Shortcode,
Title: post.Text,
Link: &feeds.Link{Href: fmt.Sprintf("https://www.instagram.com/p/%s", post.Shortcode)},
Author: &feeds.Author{Name: username},
Created: time.Unix(post.Timestamp, 0),
Content: sizemax(post, 800).URL,
}
feed.Add(&item)
}
err = feed.WriteRss(w)
if err != nil {
log.Printf("Error writing feed: %s", err)
}
})
}
func | makeIndex | identifier_name |
|
iggo.go | := range n.Get("thumbnail_resources").GetArray() {
sizes = append(sizes, Size{
URL: s.Get("src").GetString(),
Width: s.Get("config_width").GetInt(),
Height: s.Get("config_height").GetInt(),
})
}
timestamp := n.Get("taken_at_timestamp").GetInt64()
posts = append(posts, &Post{
ID: n.Get("id").GetString(),
Shortcode: n.Get("shortcode").GetString(),
URL: n.Get("display_url").GetString(),
Timestamp: timestamp,
Time: humanize.Time(time.Unix(timestamp, 0)),
Likes: n.GetPath("edge_liked_by", "count").GetInt(),
Sizes: sizes,
Text: getText(n),
Height: n.GetPath("dimensions", "height").GetInt(),
Width: n.GetPath("dimensions", "width").GetInt(),
Thumbnail: n.Get("thumbnail_src").GetString(),
})
}
return posts
}
func GetUserFromMarkup(body io.Reader) (*User, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "ProfilePage").GetIndex(0).GetPath("graphql", "user")
data := &User{
ID: base.Get("id").GetString(),
Name: base.Get("full_name").GetString(),
Username: base.Get("username").GetString(),
Bio: base.Get("biography").GetString(),
Followers: base.GetPath("edge_followed_by", "count").GetInt(),
Following: base.GetPath("edge_follow", "count").GetInt(),
ProfilePic: base.Get("profile_pic_url_hd").GetString(),
Posts: getPosts(base.Get("edge_owner_to_timeline_media")),
}
return data, nil
}
func GetTagFromMarkup(body io.Reader) (*Tag, error) {
sd := sharedData(body)
container, err := simplejson.NewJson(sd)
if err != nil {
return nil, err
}
base := container.GetPath("entry_data", "TagPage").GetIndex(0).GetPath("graphql", "hashtag")
data := &Tag{
Name: base.Get("name").GetString(),
Posts: getPosts(base.Get("edge_hashtag_to_media")),
}
return data, nil
}
// GetUserFromUsername takes a username, makes a request
// and parses the response into a User struct, returning a pointer
func GetUserFromUsername(username string) (*User, error) {
if username == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/%s/", username))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetUserFromMarkup(resp.Body)
}
func GetUser(r *http.Request) (*User, error) {
username := strings.TrimRight(r.URL.Path[len("/user/"):], "/")
return GetUserFromUsername(username)
}
func GetTag(r *http.Request) (*Tag, error) {
slug := strings.TrimRight(r.URL.Path[len("/tag/"):], "/")
if slug == "" {
return nil, nil
}
resp, err := client.Get(fmt.Sprintf("https://www.instagram.com/explore/tags/%s/", slug))
if err != nil {
return nil, err
}
defer resp.Body.Close()
return GetTagFromMarkup(resp.Body)
}
func sizemax(p *Post, w int) Size {
ix := 0
for i, s := range p.Sizes {
if s.Width <= w {
ix = i
} else {
break
}
}
return p.Sizes[ix]
}
func linkify(s string) template.HTML {
t := regexp.MustCompile(`(?i)#([\p{L}\w]+)`)
s = t.ReplaceAllString(s, `<a href="/tag/$1">#$1</a>`)
u := regexp.MustCompile(`(?i)@([\p{L}\w.]+)`)
s = u.ReplaceAllString(s, `<a href="/user/$1">@$1</a>`)
return template.HTML(s)
}
func setupTemplates() {
base := template.Must(template.ParseFiles("templates/base.html")).Funcs(templateFuncs)
if _, err := base.ParseFiles("templates/custom.html"); err != nil {
base.New("custom.html").Parse("")
}
keys := []string{"index", "post", "search", "tag", "user"}
for _, key := range keys {
clone := template.Must(base.Clone())
tmpl := template.Must(clone.ParseFiles("templates/" + key + ".html"))
templateMap[key] = tmpl
}
}
func renderTemplate(w http.ResponseWriter, key string, data interface{}) *appError {
tmpl, ok := templateMap[key]
if !ok {
return &appError{"Template error", 500, fmt.Errorf(`template "%s" not found`, key)}
}
err := tmpl.ExecuteTemplate(w, "base.html", data)
if err != nil {
return &appError{"Template error", 500, err}
}
return nil
}
func sharedData(r io.Reader) []byte {
re := regexp.MustCompile(`window._sharedData\s?=\s?(.*);</script>`)
b, err := ioutil.ReadAll(r)
if err != nil {
return nil
}
matches := re.FindSubmatch(b)
if len(matches) < 2 {
return nil
}
return matches[1]
}
func getSearchResult(q string) (*SearchResult, error) {
sr := &SearchResult{}
qs := &url.Values{}
qs.Add("context", "blended")
qs.Add("query", q)
r, err := client.Get("https://www.instagram.com/web/search/topsearch/?" + qs.Encode())
if err != nil {
return sr, err
}
defer r.Body.Close()
err = json.NewDecoder(r.Body).Decode(sr)
return sr, err
}
func renderJSON(w http.ResponseWriter, data interface{}) *appError {
w.Header().Set("Content-Type", "application/json")
err := json.NewEncoder(w).Encode(data)
if err != nil {
return &appError{"Could not write response", 500, err}
}
return nil
}
func makeFeedHandler() http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
username := strings.TrimRight(r.URL.Path[len("/feed/"):], "/")
user, err := GetUserFromUsername(username)
if err != nil {
log.Printf("Error fetching user (%s) data for feed: %s", username, err)
w.Write([]byte("Error"))
return
}
now := time.Now()
feed := &feeds.Feed{
Title: fmt.Sprintf("Instagram Posts by %s", username),
Link: &feeds.Link{Href: fmt.Sprintf("https://www.instagram.com/%s", username)},
Description: fmt.Sprintf("Recent photos posted by %s on Instagram", username),
Created: now,
}
for _, post := range user.Posts {
item := feeds.Item{
Id: post.Shortcode,
Title: post.Text,
Link: &feeds.Link{Href: fmt.Sprintf("https://www.instagram.com/p/%s", post.Shortcode)},
Author: &feeds.Author{Name: username},
Created: time.Unix(post.Timestamp, 0),
Content: sizemax(post, 800).URL,
}
feed.Add(&item)
}
err = feed.WriteRss(w)
if err != nil {
log.Printf("Error writing feed: %s", err)
}
})
}
func makeIndex() appHandler {
return func(w http.ResponseWriter, r *http.Request) *appError {
q := r.FormValue("q")
if q != "" {
sr, _ := getSearchResult(q)
sr.Query = q
if r.URL.Query().Get("format") == "json" {
return renderJSON(w, &sr)
}
return renderTemplate(w, "search", sr)
}
return renderTemplate(w, "index", nil)
}
}
type appError struct {
Message string
Code int
Error error
}
type appHandler func(w http.ResponseWriter, r *http.Request) *appError
func (fn appHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Access-Control-Allow-Origin", "*")
w.Header().Set("Access-Control-Allow-Headers", "Content-Type")
if apperr := fn(w, r); apperr != nil {
http.Error(w, apperr.Message, apperr.Code)
log.Println(apperr.Error.Error())
}
}
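// Minimal sketch (endpoint name and message are assumptions): any route can be
// written as an appHandler and inherit the CORS headers and error rendering
// from ServeHTTP above.
func examplePing() appHandler {
	return func(w http.ResponseWriter, r *http.Request) *appError {
		if r.URL.Query().Get("fail") != "" {
			return &appError{"forced failure", 400, fmt.Errorf("fail requested")}
		}
		w.Write([]byte("pong"))
		return nil
	}
}
// usage: http.Handle("/ping", examplePing())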
func makeHandler(f func(*http.Request) (interface{}, error), templateKey string) appHandler {
return func(w http.ResponseWriter, r *http.Request) *appError {
data, err := f(r)
if err != nil || data == nil {
return &appError{"Could not load data", 404, err}
}
if r.URL.Query().Get("format") == "json" {
return renderJSON(w, &data)
} |
return renderTemplate(w, templateKey, data)
}
} | random_line_split |
|
fib.rs | {
assert!(self.is_some());
let _self_idx = idx!(self);
/* validate right sibling */
let rh = right!(self);
if rh.is_some() {
let _rh_idx = idx!(rh);
let rhlf = left!(rh).upgrade();
assert!(rhlf.rc_eq(self));
assert!(rhlf.is_some());
rh.validate_ref();
}
/* validate children */
let child = child!(self);
if child.is_some() {
let _child_idx = idx!(child);
let cpw = paren!(child);
assert!(!cpw.is_none());
let cp = cpw.upgrade();
assert!(cp.rc_eq(self));
assert!(cp.is_some());
child.validate_ref();
}
}
}
impl<I: Debug, T: Debug> Debug for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.is_none() {
write!(f, "None")
} else {
write!(f, "{:?}", self.0.as_ref().unwrap().as_ref().borrow())
}
}
}
impl<I: Debug, T: Debug> Display for Node<I, T> {
fn | (&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "R({:?}) ", self)?;
let mut curq = vec![(self.clone(), self.children())];
loop {
let mut nxtq = vec![];
for (p, children) in curq {
if children.is_empty() {
break;
}
write!(f, "P({:?}) ", p)?;
let childlen = children.len();
for (i, child) in children.into_iter().enumerate() {
write!(f, "{:?}", child)?;
if i < childlen - 1 {
write!(f, ", ")?;
}
nxtq.push((child.clone(), child.children()));
}
write!(f, "; ")?;
}
if !nxtq.is_empty() {
writeln!(f)?;
curq = nxtq;
} else {
break;
}
}
Ok(())
}
}
impl<I, T> FibHeap<I, T>
where
I: Eq + Hash + Clone + Debug,
T: Ord + Debug
{
////////////////////////////////////////////////////////////////////////////
//// Public method
pub fn new() -> Self {
Self {
len: 0,
rcnt: 0,
min: Node::none(),
nodes: HashMap::new(),
}
}
pub fn len(&self) -> usize {
self.len
}
/// A node with the same index would be overridden
pub fn push(&mut self, i: I, v: T)
{
let node = node!(i.clone(), v);
self.nodes.insert(i, node.clone());
self.push_into_roots(node.clone());
if val!(node) < val!(self.min) {
self.min = node;
}
self.len += 1;
}
/// Amortized cost O(rank(H))
///
/// trees(H') <= rank(H) + 1 # since no two trees have same rank.
///
/// delete-min
pub fn pop_item(&mut self) -> Option<(I, T)>
{
if self.min.is_none() {
return None;
}
self.len -= 1;
/* push children of oldmin into roots */
for child in self.min.children() {
self.push_into_roots(child.clone());
}
/* update min */
let newmin = self.roots()[1..]
.into_iter()
.min_by_key(|&sib| val!(sib))
.cloned()
.unwrap_or_default();
/* just del old min */
self.remove_from_roots(self.min.clone());
let oldmin = self.min.replace(newmin);
self.consolidate();
Some((
self.remove_from_index(&oldmin),
unwrap_into!(oldmin).val
))
}
/// Merge same-rank trees recursively
pub fn consolidate(&mut self) {
let mut rank: HashMap<usize, Node<I, T>> = hashmap!();
for mut sib in self.roots() {
while let Some(x) = rank.remove(&rank!(sib)) {
sib = self.merge_same_rank_root(x, sib);
}
rank.insert(rank!(sib), sib);
}
}
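// Worked trace (root ranks assumed for illustration): starting from roots with
// ranks [0, 0, 1, 2], the two rank-0 trees merge into a rank-1 tree, which
// collides with the existing rank-1 tree and merges into rank 2, which
// collides once more and merges into rank 3; afterwards the surviving roots
// have pairwise-distinct ranks, which is what bounds their number by rank(H)+1.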
/// Return oldval, alias of ReplaceOrPush
///
/// Exec push if the val doesn't exist.
///
pub fn insert(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Ord + Debug
{
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
let x = ent.get().clone();
let oldv = x.replace_key(v);
match val!(x).cmp(&oldv) {
Less => self.decrease_key_(x),
Equal => (),
Greater => self.increase_key_(x),
}
Some(oldv)
}
Vacant(_ent) => {
self.push(i, v);
None
}
}
}
pub fn union(&mut self, _other: Self) {
unimplemented!("link roots, but not O(1) for link index reference")
}
pub fn delete<Q: AsRef<I>>(&mut self, _i: Q) -> Option<T> {
unimplemented!("1. decrease-val to -infi, 2. pop");
}
////////////////////////////////////////////////////////////////////////////
//// Extra functional method
/// Return oldval
///
pub fn decrease_key(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Debug
{
let x;
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
x = ent.get().clone();
let oldv = x.replace_key(v);
self.decrease_key_(x);
Some(oldv)
}
Vacant(_ent) => None,
}
}
pub fn top_item(&self) -> Option<(I, &T)>
where
I: Eq + Clone
{
if self.min.is_some() {
Some((idx!(self.min), val!(self.min)))
} else {
None
}
}
pub fn top(&self) -> Option<&T> {
self.top_item().map(|x| x.1)
}
pub fn pop(&mut self) -> Option<T> {
self.pop_item().map(|x| x.1)
}
pub fn get<Q>(&self, i: &Q) -> Option<&T>
where
I: Borrow<Q>,
Q: Ord + Hash + ?Sized,
{
self.nodes.get(i).map(|node| val!(node))
}
pub fn indexes(&self) -> impl Iterator<Item = &I> {
self.nodes.keys()
}
////////////////////////////////////////////////////////////////////////////
//// Assistant method
fn decrease_key_(&mut self, x: Node<I, T>) {
let ent;
let p = paren!(x);
if !p.is_none() && val!(x) < val!(p.upgrade()) {
// Pretend that x itself is a parent node satisfying the cut condition, so the cascading cut starts from x
marked!(x, true);
ent = x.downgrade();
} else {
ent = WeakNode::none();
}
self.cut_meld_unmark_to_roots(ent);
if val!(x) < val!(self.min) {
debug_assert!(paren!(x).is_none());
self.min = x;
}
}
/// WARNING: O(rank) = O(n)
fn increase_key_(&mut self, x: Node<I, T>) {
let ent;
let mut children_lost = if marked!(x) { 1 } else { 0 };
for child in x.children() {
if val!(child) < val!(x) {
x.cut_child(child.clone());
self.push_into_roots(child.clone());
marked!(child, false);
children_lost += 1;
}
}
match children_lost.cmp(&1) {
Less => ent = WeakNode::none(),
Equal => {
marked!(x, true);
ent = paren!(x);
}
Greater => {
marked!(x, true);
ent = x.downgrade();
}
}
self.cut_meld_unmark_to_roots(ent);
// WARNING: O(rank), update self.min
if x.rc_eq(&self.min) {
let min_node =
self.roots().into_iter().min_by_key(|x| val!(x)).unwrap();
self.min = min_node;
}
}
fn cut_meld_unmark_to_roots(&mut self, ent: WeakNode<I, T>) {
if ent.is_none() {
return;
}
let mut x = ent.upgrade();
let mut p = paren!(x);
while marked!(x) && !p.is_none() {
let strongp = p.upgrade();
strongp.cut_child(x.clone());
self.push_into_roots(x.clone());
marked!(x, false);
x = strongp;
p = paren!(x);
| fmt | identifier_name |
fib.rs |
/// replace with new val, return old val
fn replace_key(&self, val: T) -> T
where
I: Debug,
T: Debug
{
replace(val_mut!(self), val)
}
fn replace(&mut self, x: Self) -> Self {
let old = Self(self.0.clone());
self.0 = x.0;
old
}
#[cfg(test)]
#[allow(unused)]
fn validate_ref(&self)
where
I: Clone,
{
assert!(self.is_some());
let _self_idx = idx!(self);
/* validate right sibling */
let rh = right!(self);
if rh.is_some() {
let _rh_idx = idx!(rh);
let rhlf = left!(rh).upgrade();
assert!(rhlf.rc_eq(self));
assert!(rhlf.is_some());
rh.validate_ref();
}
/* validate children */
let child = child!(self);
if child.is_some() {
let _child_idx = idx!(child);
let cpw = paren!(child);
assert!(!cpw.is_none());
let cp = cpw.upgrade();
assert!(cp.rc_eq(self));
assert!(cp.is_some());
child.validate_ref();
}
}
}
impl<I: Debug, T: Debug> Debug for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.is_none() {
write!(f, "None")
} else {
write!(f, "{:?}", self.0.as_ref().unwrap().as_ref().borrow())
}
}
}
impl<I: Debug, T: Debug> Display for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "R({:?}) ", self)?;
let mut curq = vec![(self.clone(), self.children())];
loop {
let mut nxtq = vec![];
for (p, children) in curq {
if children.is_empty() {
break;
}
write!(f, "P({:?}) ", p)?;
let childlen = children.len();
for (i, child) in children.into_iter().enumerate() {
write!(f, "{:?}", child)?;
if i < childlen - 1 {
write!(f, ", ")?;
}
nxtq.push((child.clone(), child.children()));
}
write!(f, "; ")?;
}
if !nxtq.is_empty() {
writeln!(f)?;
curq = nxtq;
} else {
break;
}
}
Ok(())
}
}
impl<I, T> FibHeap<I, T>
where
I: Eq + Hash + Clone + Debug,
T: Ord + Debug
{
////////////////////////////////////////////////////////////////////////////
//// Public method
pub fn new() -> Self {
Self {
len: 0,
rcnt: 0,
min: Node::none(),
nodes: HashMap::new(),
}
}
pub fn len(&self) -> usize {
self.len
}
/// A node with the same index would be overridden
pub fn push(&mut self, i: I, v: T)
{
let node = node!(i.clone(), v);
self.nodes.insert(i, node.clone());
self.push_into_roots(node.clone());
if val!(node) < val!(self.min) {
self.min = node;
}
self.len += 1;
}
/// Amortized cost O(rank(H))
///
/// trees(H') <= rank(H) + 1 # since no two trees have same rank.
///
/// delete-min
pub fn pop_item(&mut self) -> Option<(I, T)>
{
if self.min.is_none() {
return None;
}
self.len -= 1;
/* push children of oldmin into roots */
for child in self.min.children() {
self.push_into_roots(child.clone());
}
/* update min */
let newmin = self.roots()[1..]
.into_iter()
.min_by_key(|&sib| val!(sib))
.cloned()
.unwrap_or_default();
/* just del old min */
self.remove_from_roots(self.min.clone());
let oldmin = self.min.replace(newmin);
self.consolidate();
Some((
self.remove_from_index(&oldmin),
unwrap_into!(oldmin).val
))
}
/// Merge same-rank trees recursively
pub fn consolidate(&mut self) {
let mut rank: HashMap<usize, Node<I, T>> = hashmap!();
for mut sib in self.roots() {
while let Some(x) = rank.remove(&rank!(sib)) {
sib = self.merge_same_rank_root(x, sib);
}
rank.insert(rank!(sib), sib);
}
}
/// Return oldval, alias of ReplaceOrPush
///
/// Exec push if the val doesn't exist.
///
pub fn insert(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Ord + Debug
{
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
let x = ent.get().clone();
let oldv = x.replace_key(v);
match val!(x).cmp(&oldv) {
Less => self.decrease_key_(x),
Equal => (),
Greater => self.increase_key_(x),
}
Some(oldv)
}
Vacant(_ent) => {
self.push(i, v);
None
}
}
}
pub fn union(&mut self, _other: Self) {
unimplemented!("link roots, but not O(1) for link index reference")
}
pub fn delete<Q: AsRef<I>>(&mut self, _i: Q) -> Option<T> {
unimplemented!("1. decrease-val to -infi, 2. pop");
}
////////////////////////////////////////////////////////////////////////////
//// Extra functional method
/// Return oldval
///
pub fn decrease_key(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Debug
{
let x;
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
x = ent.get().clone();
let oldv = x.replace_key(v);
self.decrease_key_(x);
Some(oldv)
}
Vacant(_ent) => None,
}
}
pub fn top_item(&self) -> Option<(I, &T)>
where
I: Eq + Clone
{
if self.min.is_some() {
Some((idx!(self.min), val!(self.min)))
} else {
None
}
}
pub fn top(&self) -> Option<&T> {
self.top_item().map(|x| x.1)
}
pub fn pop(&mut self) -> Option<T> {
self.pop_item().map(|x| x.1)
}
pub fn get<Q>(&self, i: &Q) -> Option<&T>
where
I: Borrow<Q>,
Q: Ord + Hash + ?Sized,
{
self.nodes.get(i).map(|node| val!(node))
}
pub fn indexes(&self) -> impl Iterator<Item = &I> {
self.nodes.keys()
}
////////////////////////////////////////////////////////////////////////////
//// Assistant method
fn decrease_key_(&mut self, x: Node<I, T>) {
let ent;
let p = paren!(x);
if !p.is_none() && val!(x) < val!(p.upgrade()) {
// Pretend that x itself is a parent node satisfying the cut condition, so the cascading cut starts from x
marked!(x, true);
ent = x.downgrade();
} else {
ent = WeakNode::none();
}
self.cut_meld_unmark_to_roots(ent);
if val!(x) < val!(self.min) {
debug_assert!(paren!(x).is_none());
self.min = x;
}
}
/// WARNING: O(rank) = O(n)
fn increase_key_(&mut self, x: Node<I, T>) {
let ent;
let mut children_lost = if marked!(x) { 1 } else { 0 };
for child in x.children() {
if val!(child) < val!(x) {
x.cut_child(child.clone());
self.push_into_roots(child.clone());
marked!(child, false);
children_lost += 1;
}
}
match children_lost.cmp(&1) {
Less => ent = WeakNode::none(),
Equal => {
marked!(x, true | {
if !left!(x).is_none() {
right!(left!(x).upgrade(), right!(x));
} else {
debug_assert!(child!(self).rc_eq(&x));
child!(self, right!(x));
}
if !right!(x).is_none() {
left!(right!(x), left!(x));
}
rank!(self, rank!(self) - 1);
x.purge_as_root();
} | identifier_body |
|
fib.rs | : T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Debug
{
let x;
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
x = ent.get().clone();
let oldv = x.replace_key(v);
self.decrease_key_(x);
Some(oldv)
}
Vacant(_ent) => None,
}
}
pub fn top_item(&self) -> Option<(I, &T)>
where
I: Eq + Clone
{
if self.min.is_some() {
Some((idx!(self.min), val!(self.min)))
} else {
None
}
}
pub fn top(&self) -> Option<&T> {
self.top_item().map(|x| x.1)
}
pub fn pop(&mut self) -> Option<T> {
self.pop_item().map(|x| x.1)
}
pub fn get<Q>(&self, i: &Q) -> Option<&T>
where
I: Borrow<Q>,
Q: Ord + Hash + ?Sized,
{
self.nodes.get(i).map(|node| val!(node))
}
pub fn indexes(&self) -> impl Iterator<Item = &I> {
self.nodes.keys()
}
////////////////////////////////////////////////////////////////////////////
//// Assistant method
fn decrease_key_(&mut self, x: Node<I, T>) {
let ent;
let p = paren!(x);
if !p.is_none() && val!(x) < val!(p.upgrade()) {
// Pretend that x itself is a parent node satisfying the cut condition, so the cascading cut starts from x
marked!(x, true);
ent = x.downgrade();
} else {
ent = WeakNode::none();
}
self.cut_meld_unmark_to_roots(ent);
if val!(x) < val!(self.min) {
debug_assert!(paren!(x).is_none());
self.min = x;
}
}
/// WARNING: O(rank) = O(n)
fn increase_key_(&mut self, x: Node<I, T>) {
let ent;
let mut children_lost = if marked!(x) { 1 } else { 0 };
for child in x.children() {
if val!(child) < val!(x) {
x.cut_child(child.clone());
self.push_into_roots(child.clone());
marked!(child, false);
children_lost += 1;
}
}
match children_lost.cmp(&1) {
Less => ent = WeakNode::none(),
Equal => {
marked!(x, true);
ent = paren!(x);
}
Greater => {
marked!(x, true);
ent = x.downgrade();
}
}
self.cut_meld_unmark_to_roots(ent);
// WARNING: O(rank), update self.min
if x.rc_eq(&self.min) {
let min_node =
self.roots().into_iter().min_by_key(|x| val!(x)).unwrap();
self.min = min_node;
}
}
fn cut_meld_unmark_to_roots(&mut self, ent: WeakNode<I, T>) {
if ent.is_none() {
return;
}
let mut x = ent.upgrade();
let mut p = paren!(x);
while marked!(x) && !p.is_none() {
let strongp = p.upgrade();
strongp.cut_child(x.clone());
self.push_into_roots(x.clone());
marked!(x, false);
x = strongp;
p = paren!(x);
}
// By definition roots are not marked, but that should not matter here: the mark more precisely reflects the tree shape after a later pop triggers consolidation
marked!(x, true);
}
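// Worked trace (shape assumed): if x is marked and sits under a marked parent
// p1 which sits under an unmarked p2, the loop cuts x to the roots and unmarks
// it, then does the same for p1, and stops at p2, which is merely marked; this
// is the standard cascading cut, except that here the final node keeps its
// mark even when it happens to be a root (see the comment above).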
fn remove_from_index(&mut self, x: &Node<I, T>) -> I
where
I: Eq + Hash + Clone
{
let k = idx!(x);
self.nodes.remove(&k);
k
}
/// insert at sib of self.min, with purge
fn push_into_roots(&mut self, x: Node<I, T>) {
debug_assert!(!self.min.rc_eq(&x));
self.rcnt += 1;
x.purge_as_root();
if self.min.is_none() {
self.min = x;
left!(self.min, self.min.downgrade());
right!(self.min, self.min.clone());
} else {
debug_assert!(right!(self.min).is_some());
right!(x, right!(self.min));
left!(x, self.min.downgrade());
right!(self.min, x.clone());
left!(right!(x), x.downgrade());
}
}
/// from self.min go through all roots
fn roots(&self) -> Vec<Node<I, T>> {
let mut sibs = vec![];
if self.min.is_none() {
return sibs;
} else {
sibs.push(self.min.clone());
}
let mut sib = right!(self.min);
while !sib.rc_eq(&self.min) {
sibs.push(sib.clone());
sib = right!(sib);
}
sibs
}
fn remove_from_roots(&mut self, x: Node<I, T>) {
self.rcnt -= 1;
if self.rcnt > 0 {
right!(left!(x).upgrade(), right!(x));
left!(right!(x), left!(x));
}
x.purge_as_root();
}
/// update self.rcnt
fn merge_same_rank_root(
&mut self,
mut x: Node<I, T>,
mut y: Node<I, T>,
) -> Node<I, T> {
debug_assert_eq!(rank!(x), rank!(y));
// let x be parent
if val!(y) < val!(x) || val!(y) == val!(x) && y.rc_eq(&self.min) {
(x, y) = (y, x);
}
// remove y from roots
self.remove_from_roots(y.clone());
// link y to x child
right!(y, child!(x));
if child!(x).is_some() {
left!(child!(x), y.downgrade());
}
// link y to x
paren!(y, x.downgrade());
child!(x, y.clone());
rank!(x, rank!(x) + 1);
x
}
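// Linking sketch: after picking the smaller key as parent x, the former root y
// is spliced in as the new head of x's child list (y.right = old child,
// y.paren = x, x.child = y) and rank(x) grows by one; linking equal-rank trees
// is the only way trees get taller, which keeps root ranks at O(log n).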
////////////////////////////////////////////////////////////////////////////
//// Validation method
/// Validate nodes are not None or Failed to upgrade to Rc
#[cfg(test)]
#[allow(unused)]
pub(crate) fn validate_ref(&self) {
if self.len() == 0 {
return;
}
/* validate roots */
for root in self.roots() {
assert!(root.is_some());
let rh = right!(root);
assert!(rh.is_some());
let wlf = left!(root);
assert!(!wlf.is_none());
let left = wlf.upgrade();
assert!(left.is_some());
let child = child!(root);
if child.is_some() {
child.validate_ref();
}
}
}
}
impl<I: Eq + Hash + Clone, T: Clone> FibHeap<I, T> {
fn overall_clone(
&self,
nodes: &mut HashMap<I, Node<I, T>>,
x: Node<I, T>,
) -> Node<I, T> {
if x.is_none() {
return Node::none();
}
// overall clone node body
let newx = node!(idx!(x), val!(x).clone(), rank!(x), marked!(x));
// update index reference
nodes.insert(idx!(x), newx.clone());
// recursive call it
let mut childen_iter = x.children().into_iter();
if let Some(child) = childen_iter.next() {
let newchild = self.overall_clone(nodes, child);
child!(newx, newchild.clone());
paren!(newchild, newx.downgrade());
let mut cur = newchild;
for child in childen_iter {
let newchild = self.overall_clone(nodes, child);
right!(cur, newchild.clone());
left!(newchild, cur.downgrade());
cur = newchild;
}
}
newx
}
}
impl<I, T> Drop for FibHeap<I, T> {
fn drop(&mut self) {
if self.len > 0 {
// break circle dependency to enable drop
let tail = left!(self.min).upgrade();
right!(tail, Node::none());
self.nodes.clear();
}
}
}
impl<T: Debug, K: Debug> Display for FibHeap<T, K> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut sib = self.min.clone();
for i in 1..=self.rcnt {
writeln!(f, "{} ({i:03}) {}", "-".repeat(28), "-".repeat(28))?;
// writeln!(f)?;
if sib.rc_eq(&self.min) {
write!(f, "M=>")?;
}
writeln!(f, "{}", sib)?;
debug_assert! | (sib.is_some());
sib = right!(sib);
| conditional_block |
|
fib.rs | where
I: Borrow<Q>,
Q: Ord + Hash + ?Sized,
{
self.nodes.get(i).map(|node| val!(node))
}
pub fn indexes(&self) -> impl Iterator<Item = &I> {
self.nodes.keys()
}
////////////////////////////////////////////////////////////////////////////
//// Assistant method
fn decrease_key_(&mut self, x: Node<I, T>) {
let ent;
let p = paren!(x);
if !p.is_none() && val!(x) < val!(p.upgrade()) {
// Pretend x itself is a qualifying (marked) parent node
marked!(x, true);
ent = x.downgrade();
} else {
ent = WeakNode::none();
}
self.cut_meld_unmark_to_roots(ent);
if val!(x) < val!(self.min) {
debug_assert!(paren!(x).is_none());
self.min = x;
}
}
/// WARNING: O(rank) = O(n)
fn increase_key_(&mut self, x: Node<I, T>) {
let ent;
let mut children_lost = if marked!(x) { 1 } else { 0 };
for child in x.children() {
if val!(child) < val!(x) {
x.cut_child(child.clone());
self.push_into_roots(child.clone());
marked!(child, false);
children_lost += 1;
}
}
match children_lost.cmp(&1) {
Less => ent = WeakNode::none(),
Equal => {
marked!(x, true);
ent = paren!(x);
}
Greater => {
marked!(x, true);
ent = x.downgrade();
}
}
self.cut_meld_unmark_to_roots(ent);
// WARNING: O(rank), update self.min
if x.rc_eq(&self.min) {
let min_node =
self.roots().into_iter().min_by_key(|x| val!(x)).unwrap();
self.min = min_node;
}
}
fn cut_meld_unmark_to_roots(&mut self, ent: WeakNode<I, T>) {
if ent.is_none() {
return;
}
let mut x = ent.upgrade();
let mut p = paren!(x);
while marked!(x) && !p.is_none() {
let strongp = p.upgrade();
strongp.cut_child(x.clone());
self.push_into_roots(x.clone());
marked!(x, false);
x = strongp;
p = paren!(x);
}
// By definition roots are not marked, but that should not matter here; the mark is
// more precise for the tree shape left after a possible pop-triggered consolidation.
marked!(x, true);
}
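// Illustrative note (not in the original file): the walk above performs the
// classic cascading cut: each marked ancestor is cut from its parent and
// pushed into the root list, which is what keeps node ranks logarithmic
// after repeated decrease-key operations.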
fn remove_from_index(&mut self, x: &Node<I, T>) -> I
where
I: Eq + Hash + Clone
{
let k = idx!(x);
self.nodes.remove(&k);
k
}
/// insert at sib of self.min, with purge
fn push_into_roots(&mut self, x: Node<I, T>) {
debug_assert!(!self.min.rc_eq(&x));
self.rcnt += 1;
x.purge_as_root();
if self.min.is_none() {
self.min = x;
left!(self.min, self.min.downgrade());
right!(self.min, self.min.clone());
} else {
debug_assert!(right!(self.min).is_some());
right!(x, right!(self.min));
left!(x, self.min.downgrade());
right!(self.min, x.clone());
left!(right!(x), x.downgrade());
}
}
/// from self.min go through all roots
fn roots(&self) -> Vec<Node<I, T>> {
let mut sibs = vec![];
if self.min.is_none() {
return sibs;
} else {
sibs.push(self.min.clone());
}
let mut sib = right!(self.min);
while !sib.rc_eq(&self.min) {
sibs.push(sib.clone());
sib = right!(sib);
}
sibs
}
fn remove_from_roots(&mut self, x: Node<I, T>) {
self.rcnt -= 1;
if self.rcnt > 0 {
right!(left!(x).upgrade(), right!(x));
left!(right!(x), left!(x));
}
x.purge_as_root();
}
/// update self.rcnt
fn merge_same_rank_root(
&mut self,
mut x: Node<I, T>,
mut y: Node<I, T>,
) -> Node<I, T> {
debug_assert_eq!(rank!(x), rank!(y));
// let x be parent
if val!(y) < val!(x) || val!(y) == val!(x) && y.rc_eq(&self.min) {
(x, y) = (y, x);
}
// remove y from roots
self.remove_from_roots(y.clone());
// link y to x child
right!(y, child!(x));
if child!(x).is_some() {
left!(child!(x), y.downgrade());
}
// link y to x
paren!(y, x.downgrade());
child!(x, y.clone());
rank!(x, rank!(x) + 1);
x
}
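// Hedged usage sketch: during consolidation after a pop, two roots of equal
// rank are linked with `let parent = self.merge_same_rank_root(a, b);`,
// repeatedly, until every remaining root has a distinct rank.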
////////////////////////////////////////////////////////////////////////////
//// Validation method
/// Validate nodes are not None or Failed to upgrade to Rc
#[cfg(test)]
#[allow(unused)]
pub(crate) fn validate_ref(&self) {
if self.len() == 0 {
return;
}
/* validate roots */
for root in self.roots() {
assert!(root.is_some());
let rh = right!(root);
assert!(rh.is_some());
let wlf = left!(root);
assert!(!wlf.is_none());
let left = wlf.upgrade();
assert!(left.is_some());
let child = child!(root);
if child.is_some() {
child.validate_ref();
}
}
}
}
impl<I: Eq + Hash + Clone, T: Clone> FibHeap<I, T> {
fn overall_clone(
&self,
nodes: &mut HashMap<I, Node<I, T>>,
x: Node<I, T>,
) -> Node<I, T> {
if x.is_none() {
return Node::none();
}
// overall clone node body
let newx = node!(idx!(x), val!(x).clone(), rank!(x), marked!(x));
// update index reference
nodes.insert(idx!(x), newx.clone());
// recursively clone the children
let mut children_iter = x.children().into_iter();
if let Some(child) = children_iter.next() {
let newchild = self.overall_clone(nodes, child);
child!(newx, newchild.clone());
paren!(newchild, newx.downgrade());
let mut cur = newchild;
for child in children_iter {
let newchild = self.overall_clone(nodes, child);
right!(cur, newchild.clone());
left!(newchild, cur.downgrade());
cur = newchild;
}
}
newx
}
}
impl<I, T> Drop for FibHeap<I, T> {
fn drop(&mut self) {
if self.len > 0 {
// break the circular dependency so the nodes can be dropped
let tail = left!(self.min).upgrade();
right!(tail, Node::none());
self.nodes.clear();
}
}
}
impl<T: Debug, K: Debug> Display for FibHeap<T, K> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut sib = self.min.clone();
for i in 1..=self.rcnt {
writeln!(f, "{} ({i:03}) {}", "-".repeat(28), "-".repeat(28))?;
// writeln!(f)?;
if sib.rc_eq(&self.min) {
write!(f, "M=>")?;
}
writeln!(f, "{}", sib)?;
debug_assert!(sib.is_some());
sib = right!(sib);
}
writeln!(f, "{}>> end <<{}", "-".repeat(28), "-".repeat(28))?;
Ok(())
}
}
impl<I: Ord + Hash + Clone + Debug, T: Ord + Clone + Debug> Clone for FibHeap<I, T> {
fn clone(&self) -> Self {
let len = self.len;
let rcnt = self.rcnt;
let mut nodes = HashMap::new();
let min;
let mut roots_iter = self.roots().into_iter();
if let Some(_min) = roots_iter.next() {
min = self.overall_clone(&mut nodes, _min.clone());
let mut cur = min.clone();
for root in roots_iter {
let newroot = self.overall_clone(&mut nodes, root);
right!(cur, newroot.clone());
left!(newroot, cur.downgrade());
cur = newroot;
}
right!(cur, min.clone());
left!(min, cur.downgrade());
} else {
min = Node::none();
}
// Assemble the cloned heap; the field set (len, rcnt, min, nodes) is
// inferred from the bindings built above.
Self {
len,
rcnt,
min,
nodes,
}
}
}
// main.rs (excerpt begins inside the requirements-processing loop of the ore calculation)
if let Some(spare) = spare_chemicals.get_mut(&req_chem) {
if *spare >= req_amount {
// We have enough spare to completely fulfill this
// requirement, no need to go further.
*spare -= req_amount;
continue;
} else {
// Reduce the required amount by the amount we have spare.
adj_req_amount = req_amount - *spare;
*spare = 0;
}
}
// Find the reaction that produces this ingredient.
let reaction = reactions
.get(&req_chem)
.expect(format!("Couldn't find reaction for {}", req_chem).as_ref());
// Find out how many times we need to run this reaction,
// and how much will be spare.
let output_amount = reaction.output.1;
let reaction_count = (adj_req_amount - 1) / output_amount + 1;
let spare = output_amount * reaction_count - adj_req_amount;
// Update the spare count for this ingredient.
if let Some(existing_spare) = spare_chemicals.get_mut(&req_chem) {
*existing_spare += spare;
} else {
spare_chemicals.insert(req_chem, spare);
}
// Update the required ingredients list with the ingredients
// needed to make this chemical.
for ingredient in reaction.ingredients.clone() {
let ingredient_name = ingredient.0;
let ingredient_count = reaction_count * ingredient.1;
if ingredient_name == ore_name {
ore += ingredient_count;
} else {
requirements.push((ingredient_name, ingredient_count));
}
}
}
}
ore
}
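// Worked example of the ceiling division used above (illustrative numbers):
// needing adj_req_amount = 7 units from a reaction with output_amount = 3
// gives reaction_count = (7 - 1) / 3 + 1 = 3 runs, i.e. 9 units with 2 spare.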
fn calc_fuel_for_ore(ore: u64, reactions: &ReactionMap) -> u64 {
let mut lower = 1;
let mut current;
let mut upper = 1;
// Find an upper bound to use for binary search.
loop {
let used_ore = calc_ore_for_fuel(upper, reactions);
if used_ore < ore {
upper *= 2;
} else {
break;
}
}
// Binary search to find the highest amount of fuel we can
// produce without using all the ore.
loop {
current = (upper - lower) / 2 + lower;
let used_ore = calc_ore_for_fuel(current, reactions);
if used_ore < ore {
lower = current;
} else {
upper = current;
}
if upper - 1 == lower {
return lower;
}
}
}
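// Search invariant (informal): `lower` always needs no more than the
// available ore while `upper` always needs too much, so when the two become
// adjacent, `lower` is the largest affordable fuel amount.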
fn parse_chemical(chemical: &str) -> (String, u64) {
let mut iter = chemical.split_whitespace();
let count = iter.next().unwrap().parse::<u64>().unwrap();
let chem = iter.next().unwrap();
(String::from(chem), count)
}
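// Example (derived from the parsing above): parse_chemical("7 A") returns
// (String::from("A"), 7), matching the "<count> <name>" layout of the
// reaction strings.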
fn parse_reactions(strs: &[String]) -> ReactionMap {
let mut reactions = HashMap::new();
for reaction in strs {
let mut iter = reaction.split(" => ");
let ingredients_str = iter.next().unwrap();
let output_str = iter.next().unwrap();
let mut ingredients = Vec::new();
for ingredient in ingredients_str.split(", ") {
ingredients.push(parse_chemical(ingredient));
}
let output = parse_chemical(output_str);
reactions.insert(
output.0.clone(),
Reaction {
output: output,
ingredients: ingredients,
},
);
}
reactions
}
fn parse_input(filename: &str) -> ReactionMap {
let file = File::open(filename).expect("Failed to open file");
let reader = BufReader::new(file);
let reactions: Vec<String> = reader
.lines()
.map(|l| l.expect("Failed to read line"))
.map(|l| String::from(l.trim()))
.collect();
parse_reactions(reactions.as_slice())
}
fn main() {
let reactions = parse_input("input");
// Part 1
let ore = calc_ore(&reactions);
println!("Require {} ore for 1 fuel", ore);
// Part 2
let fuel = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
println!("Produce {} fuel from {} ore", fuel, COLLECTED_ORE);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse() {
let input = vec![String::from("7 A, 1 E => 1 FUEL")];
let reactions = parse_reactions(input.as_slice());
let result = reactions.get(&String::from("FUEL"));
assert!(result.is_some());
let reaction = result.unwrap();
assert_eq!(
*reaction,
Reaction {
output: (String::from("FUEL"), 1),
ingredients: vec![(String::from("A"), 7), (String::from("E"), 1),],
},
);
}
#[test]
fn example1() {
let input = vec![
String::from("10 ORE => 10 A"),
String::from("1 ORE => 1 B"),
String::from("7 A, 1 B => 1 C"),
String::from("7 A, 1 C => 1 D"),
String::from("7 A, 1 D => 1 E"),
String::from("7 A, 1 E => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 31);
}
#[test]
fn example2() {
let input = vec![
String::from("9 ORE => 2 A"),
String::from("8 ORE => 3 B"),
String::from("7 ORE => 5 C"),
String::from("3 A, 4 B => 1 AB"),
String::from("5 B, 7 C => 1 BC"),
String::from("4 C, 1 A => 1 CA"),
String::from("2 AB, 3 BC, 4 CA => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 165);
}
#[test]
fn example3() {
let input = vec![
String::from("157 ORE => 5 NZVS"),
String::from("165 ORE => 6 DCFZ"),
String::from("44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL"),
String::from("12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ"),
String::from("179 ORE => 7 PSHF"),
String::from("177 ORE => 5 HKGWZ"),
String::from("7 DCFZ, 7 PSHF => 2 XJWVT"),
String::from("165 ORE => 2 GPVTF"),
String::from("3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 13312);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 82892753);
}
#[test]
fn example4() {
let input = vec![
String::from("2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG"),
String::from("17 NVRVD, 3 JNWZP => 8 VPVL"),
String::from("53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL"),
String::from("22 VJHF, 37 MNCFX => 5 FWMGM"),
String::from("139 ORE => 4 NVRVD"),
String::from("144 ORE => 7 JNWZP"),
String::from("5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC"),
String::from("5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV"),
String::from("145 ORE => 6 MNCFX"),
String::from("1 NVRVD => 8 CXFTF"),
String::from("1 VJHF, 6 MNCFX => 4 RFSQX"),
String::from("176 ORE => 6 VJHF"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
// (the assertion on `result` is truncated in this excerpt)
}
}
// $time.js
/*
hh Hours; leading zero for single-digit hours (12-hour clock).
H Hours; no leading zero for single-digit hours (24-hour clock).
HH Hours; leading zero for single-digit hours (24-hour clock).
M Minutes; no leading zero for single-digit minutes.
MM Minutes; leading zero for single-digit minutes.
s Seconds; no leading zero for single-digit seconds.
ss Seconds; leading zero for single-digit seconds.
l or L Milliseconds. l gives 3 digits. L gives 2 digits.
t Lowercase, single-character time marker string: a or p.
tt Lowercase, two-character time marker string: am or pm.
T Uppercase, single-character time marker string: A or P.
TT Uppercase, two-character time marker string: AM or PM.
Z US timezone abbreviation, e.g. EST or MDT. With non-US timezones or in the Opera browser, the GMT/UTC offset is returned, e.g. GMT-0500
o GMT/UTC timezone offset, e.g. -0500 or +0230.
S The date's ordinal suffix (st, nd, rd, or th). Works well with d.
'…' or "…" Literal character sequence. Surrounding quotes are removed.
UTC: Must be the first four characters of the mask. Converts the date from local time to UTC/GMT/Zulu time before applying the mask. The "UTC:" prefix is removed.
*/
var $dateFormat = (function () {
var token = /d{1,4}|m{1,4}|yy(?:yy)?|([HhMsTt])\1?|[LloSZ]|"[^"]*"|'[^']*'/g,
timezone = /\b(?:[PMCEA][SDP]T|(?:Pacific|Mountain|Central|Eastern|Atlantic) (?:Standard|Daylight|Prevailing) Time|(?:GMT|UTC)(?:[-+]\d{4})?)\b/g,
timezoneClip = /[^-+\dA-Z]/g,
pad = function(val, len) {
val = String(val);
len = len || 2;
while (val.length < len) val = "0" + val;
return val;
};
// Regexes and supporting functions are cached through closure
return function(date, mask, utc, langCode) {
if (!date) {
return date + "";
}
var dF = $dateFormat;
langCode = langCode || dF.defaultLang;
var lang = dF.lang[langCode];
// You can't provide utc if you skip other args (use the "UTC:" mask prefix)
if (arguments.length == 1 && Object.prototype.toString.call(date) == "[object String]" && !/\d/.test(date)) {
mask = date;
date = undefined;
}
// Passing date through Date applies Date.parse, if necessary
date = date ? new Date(date) : new Date;
if (!$isDate(date)) throw SyntaxError("invalid date");
mask = String(dF.masks[mask] || mask || dF.masks["default"]);
// Allow setting the utc argument via the mask
if (mask.slice(0, 4) == "UTC:") {
mask = mask.slice(4);
utc = true;
}
var _ = utc ? "getUTC" : "get",
d = date[_ + "Date"](),
D = date[_ + "Day"](),
m = date[_ + "Month"](),
y = date[_ + "FullYear"](),
H = date[_ + "Hours"](),
M = date[_ + "Minutes"](),
s = date[_ + "Seconds"](),
L = date[_ + "Milliseconds"](),
o = utc ? 0 : date.getTimezoneOffset(),
flags = {
d: d,
dd: pad(d),
ddd: lang.dayNames[D],
dddd: lang.dayNames[D + 7],
m: m + 1,
mm: pad(m + 1),
mmm: lang.monthNames[m],
mmmm: lang.monthNames[m + 12],
yy: String(y).slice(2),
yyyy: y,
h: H % 12 || 12,
hh: pad(H % 12 || 12),
H: H,
HH: pad(H),
M: M,
MM: pad(M),
s: s,
ss: pad(s),
l: pad(L, 3),
L: pad(L > 99 ? Math.round(L / 10) : L),
t: H < 12 ? "a" : "p",
tt: H < 12 ? "am" : "pm",
T: H < 12 ? "A" : "P",
TT: H < 12 ? "AM" : "PM",
Z: utc ? "UTC" : (String(date).match(timezone) || [""]).pop().replace(timezoneClip, ""),
o: (o > 0 ? "-" : "+") + pad(Math.floor(Math.abs(o) / 60) * 100 + Math.abs(o) % 60, 4),
S: ["th", "st", "nd", "rd"][d % 10 > 3 ? 0 : (d % 100 - d % 10 != 10) * d % 10]
};
return mask.replace(token, function ($0) {
return $0 in flags ? flags[$0] : $0.slice(1, $0.length - 1);
});
};
}());
// Some common format strings
$dateFormat.masks = {
"default": "ddd mmm dd yyyy HH:MM:ss",
shortDate: "m/d/yy",
mediumDate: "mmm d, yyyy",
longDate: "mmmm d, yyyy",
fullDate: "dddd, mmmm d, yyyy",
shortTime: "h:MM TT",
mediumTime: "h:MM:ss TT",
longTime: "h:MM:ss TT Z",
isoDate: "yyyy-mm-dd",
isoTime: "HH:MM:ss",
isoDateTime: "yyyy-mm-dd'T'HH:MM:ss",
isoUtcDateTime: "UTC:yyyy-mm-dd'T'HH:MM:ss'Z'"
};
// Internationalization strings
$dateFormat.defaultLang = "en";
$dateFormat.lang = {
en: {
dayNames: [
"Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat",
"Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"
],
monthNames: [
"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"
]
}
};
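// Minimal usage sketch; exact output strings assume the "en" tables above:
// $dateFormat(new Date(2009, 6, 17, 20, 30), "fullDate"); // "Friday, July 17, 2009"
// $dateFormat(new Date(2009, 6, 17, 20, 30), "h:MM TT"); // "8:30 PM"
// $dateFormat(new Date(), "UTC:yyyy-mm-dd'T'HH:MM:ss'Z'"); // ISO-style UTC string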
function $secondsToTime(_s) {
var h, m, s, ms, pad, f1, f2, f3;
ms = Math.round((parseFloat(_s) % 1)*1000);
s = parseInt(_s, 10);
h = Math.floor( s / ( 60 * 60 ) );
s -= h * ( 60 * 60 );
m = Math.floor( s / 60 );
s -= m * 60;
pad = function(v) {return (v > 9) ? v : "0"+v;};
f1 = $map([h, m], pad).join(":");
f2 = $map([h, m, s], pad).join(":");
// create x hours x minutes string
// if no hours it will be x minutes
// if no hours or minutes will be x seconds
// plurality of units is handled
var hStr = h ? h + " hour" + (h>1 ? "s " : " ") : "",
mStr = (h || m) ? m + " minute" + (m>1 ? "s" : "") : "",
sStr = (!m && s) ? s + " second" + (s>1 ? "s" : "") : "";
f3 = hStr + mStr + sStr;
return {h: h, m: m, s: s, ms: ms, "hh:mm": f1, "hh:mm:ss": f2, formatted: f3};
}
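// Example (derived from the code above): $secondsToTime(3661.5) yields
// {h: 1, m: 1, s: 1, ms: 500, "hh:mm": "01:01", "hh:mm:ss": "01:01:01",
// formatted: "1 hour 1 minute"}.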
function $millisToTime(ms) {
return $secondsToTime(parseInt(ms, 10)/1000);
}
// $time.js
/*
* X Years
*
* Single units are +10%. 1 Year shows first at 1 Year + 10%
*/
function normalize(val, single) {
if(val >= single && val <= single * (1+margin)) {
return single;
}
return val;
}
function normalizeDateInput(date) {
switch (typeof date) {
case "string":
date = new Date(('' + date).replace(minusRe, "/").replace(tzRe, " "));
break;
case "number":
date = new Date(date);
break;
}
return date;
}
var timeAgo = function(date, compareTo, langCode) {
date = normalizeDateInput(date || $now());
compareTo = normalizeDateInput(compareTo || new Date);
langCode = langCode || this.defaultLang;
var lang = this.formats[langCode];
var token,
isString = (typeof date === "string"),
seconds = (compareTo - date +
(compareTo.getTimezoneOffset() -
// if we received a GMT time from a string, doesn't include time zone bias
// if we got a date object, the time zone is built in, we need to remove it.
(isString ? 0 : date.getTimezoneOffset())
) * 60000
) / 1000;
if (seconds < 0) {
seconds = Math.abs(seconds);
token = '';
} else {
token = ' ' + lang.ago;
}
for(var i = 0, format = formats[0]; formats[i]; format = formats[++i]) {
if(seconds < format[0]) {
if(i === 0) {
// Now
return format[1];
}
var val = Math.ceil(normalize(seconds, format[3]) / (format[3]));
return val +
' ' +
(val != 1 ? format[2] : format[1]) +
(i > 0 ? token : '');
}
}
};
timeAgo.lang = {};
timeAgo.formats = {};
timeAgo.setLang = function(code, newLang) {
this.defaultLang = code;
this.lang[code] = newLang;
this.formats[code] = getFormats(newLang);
};
timeAgo.setLang("en", {
ago: 'Ago',
now: 'Just Now',
minute: 'Minute',
minutes: 'Minutes',
hour: 'Hour',
hours: 'Hours',
day: 'Day',
days: 'Days',
week: 'Week',
weeks: 'Weeks',
month: 'Month',
months: 'Months',
year: 'Year',
years: 'Years'
});
return timeAgo;
}());
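// Hedged usage sketch (output strings come from the "en" table above):
// timeAgo(new Date(Date.now() - 5 * 60 * 1000)); // e.g. "5 Minutes Ago"
// timeAgo(new Date(Date.now() + 60 * 60 * 1000)); // e.g. "1 Hour" (future dates drop "Ago")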
var $timer = (function() {
var epoch = new Date(1970, 1, 1, 0, 0, 0, 0).valueOf();
var timerApi = {
parent: null,
interval: null,
started: 0,
elapsed: 0,
start: function() {
var that = this;
this.started = $now();
this.interval = setInterval(function() {
that.update();
}, 1000);
},
stop: function() {
clearInterval(this.interval);
this.reset();
},
pause: function() {
clearInterval(this.interval);
},
reset: function() {
this.started = $now();
this.update();
},
update: function() {
this.elapsed = $now() - this.started;
this.parent.innerHTML = this.format(this.elapsed + $now() - this.started);
},
format: function(ms) {
// console.log(ms, $now() - ms, new Date(ms - $now()).toString());
var d = new Date(ms + epoch).toString().replace(/.*(\d{2}:\d{2}:\d{2}).*/, '$1');
var x = (ms % 1000) + "";
while (x.length < 3) {
x = "0" + x;
}
d += "." + x;
return d.substr(0, d.length - 4);
}
};
return function(parent) {
var timer = $new(timerApi);
timer.parent = parent;
return timer;
}
}());
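// Usage sketch; `$new` and `$now` are helpers assumed to be defined elsewhere
// in this library:
// var stopwatch = $timer(document.getElementById("clock"));
// stopwatch.start(); // writes an "HH:MM:SS" elapsed string into the element each second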
/*
* Date Format 1.2.3
* (c) 2007-2009 Steven Levithan <stevenlevithan.com>
* MIT license
*
* Includes enhancements by Scott Trenda <scott.trenda.net>
* and Kris Kowal <cixar.com/~kris.kowal/>
*
* Accepts a date, a mask, or a date and a mask.
* Returns a formatted version of the given date.
* The date defaults to the current date/time.
* The mask defaults to dateFormat.masks.default.
* see http://blog.stevenlevithan.com/archives/date-time-format
*/
/* Mask Description
d Day of the month as digits; no leading zero for single-digit days.
dd Day of the month as digits; leading zero for single-digit days.
ddd Day of the week as a three-letter abbreviation.
dddd Day of the week as its full name.
m Month as digits; no leading zero for single-digit months.
mm Month as digits; leading zero for single-digit months.
mmm Month as a three-letter abbreviation.
mmmm Month as its full name.
yy Year as last two digits; leading zero for years less than 10.
yyyy Year represented by four digits.
h Hours; no leading zero for single-digit hours (12-hour clock).
hh Hours; leading zero for single-digit hours (12-hour clock).
H Hours; no leading zero for single-digit hours (24-hour clock).
HH Hours; leading zero for single-digit hours (24-hour clock).
M Minutes; no leading zero for single-digit minutes.
MM Minutes; leading zero for single-digit minutes.
s Seconds; no leading zero for single-digit seconds.
ss Seconds; leading zero for single-digit seconds.
l or L Milliseconds. l gives 3 digits. L gives 2 digits.
t Lowercase, single-character time marker string: a or p.
tt Lowercase, two-character time marker string: am or pm.
T Uppercase, single-character time marker string: A or P.
TT Uppercase, two-character time marker string: AM or PM.
Z US timezone abbreviation, e.g. EST or MDT. With non-US timezones or in the Opera browser, the GMT/UTC offset is returned, e.g. GMT-0500
o GMT/UTC timezone offset, e.g. -0500 or +0230.
S The date's ordinal suffix (st, nd, rd, or th). Works well with d.
'…' or "…" Literal character sequence. Surrounding quotes are removed.
UTC: Must be the first four characters of the mask. Converts the date from local time to UTC/GMT/Zulu time before applying the mask. The "UTC:" prefix is removed.
*/
var $dateFormat = (function () {
var token = /d{1,4}|m{1,4}|yy(?:yy)?|([HhMsTt])\1?|[LloSZ]|"[^"]*"|'[^']*'/g,
timezone = /\b(?:[PMCEA][SDP]T|(?:Pacific|Mountain|Central|Eastern|Atlantic) (?:Standard|Daylight|Prevailing) Time|(?:GMT|UTC)(?:[-+]\d{4})?)\b/g,
timezoneClip = /[^-+\dA-Z]/g,
pad = function(val, len) {
val = String(val);
len = len || 2;
while (val.length < len) val = "0" + val;
return val;
};
// Regexes and supporting functions are cached through closure
return function(date, mask, utc, langCode) {
if (!date) {
return date + "";
}
var dF = $dateFormat;
langCode = langCode || dF.defaultLang;
var lang = dF.lang[langCode];
// You can't provide utc if you skip other args (use the "UTC:" mask prefix)
if (arguments.length == 1 && Object.prototype.toString.call(date) == "[object String]" && !/\d/.test(date)) {
mask = date;
date = undefined;
}
// Passing date through Date applies Date.parse, if necessary
date = date ? new Date(date) : new Date;
if (!$isDate(date)) throw SyntaxError("invalid date");
// ecvrf.rs
//! # use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! let proof = private_key.prove(message);
//! assert!(public_key.verify(&proof, message).is_ok());
//! ```
//!
//! Produce a pseudorandom output from a `Proof`:
//!
//! ```
//! # use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! # let proof = private_key.prove(message);
//! let output: Output = (&proof).into();
//! ```
use crate::traits::*;
use core::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
edwards::{CompressedEdwardsY, EdwardsPoint},
scalar::Scalar as ed25519_Scalar,
};
use derive_deref::Deref;
use ed25519_dalek::{
self, Digest, PublicKey as ed25519_PublicKey, SecretKey as ed25519_PrivateKey, Sha512,
};
use failure::prelude::*;
use serde::{Deserialize, Serialize};
const SUITE: u8 = 0x03;
const ONE: u8 = 0x01;
const TWO: u8 = 0x02;
const THREE: u8 = 0x03;
/// The number of bytes of [`Output`]
pub const OUTPUT_LENGTH: usize = 64;
/// The number of bytes of [`Proof`]
pub const PROOF_LENGTH: usize = 80;
/// An ECVRF private key
#[derive(Serialize, Deserialize, Deref, Debug)]
pub struct VRFPrivateKey(ed25519_PrivateKey);
/// An ECVRF public key
#[derive(Serialize, Deserialize, Deref, Debug, PartialEq, Eq)]
pub struct VRFPublicKey(ed25519_PublicKey);
/// A longer private key which is slightly optimized for proof generation.
///
/// This is similar in structure to ed25519_dalek::ExpandedSecretKey. It can be produced from
/// a VRFPrivateKey.
pub struct VRFExpandedPrivateKey {
pub(super) key: ed25519_Scalar,
pub(super) nonce: [u8; 32],
}
impl VRFPrivateKey {
/// Produces a proof for an input (using the private key)
pub fn prove(&self, alpha: &[u8]) -> Proof {
VRFExpandedPrivateKey::from(self).prove(&VRFPublicKey((&self.0).into()), alpha)
}
}
impl VRFExpandedPrivateKey {
/// Produces a proof for an input (using the expanded private key)
pub fn prove(&self, pk: &VRFPublicKey, alpha: &[u8]) -> Proof {
let h_point = pk.hash_to_curve(alpha);
let k_scalar =
ed25519_Scalar::from_bytes_mod_order_wide(&nonce_generation_bytes(self.nonce, h_point));
let gamma = h_point * self.key;
let c_scalar = hash_points(&[
h_point,
gamma,
ED25519_BASEPOINT_POINT * k_scalar,
h_point * k_scalar,
]);
Proof {
gamma,
c: c_scalar,
s: k_scalar + c_scalar * self.key,
}
}
}
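// The proof above follows the usual ECVRF sigma-protocol shape:
// gamma = x*H, c = Hash(H, gamma, k*B, k*H), s = k + c*x (mod l),
// where x is the secret scalar, B the Ed25519 basepoint, and H the point
// obtained by hashing the input to the curve.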
impl Uniform for VRFPrivateKey {
fn generate_for_testing<R>(rng: &mut R) -> Self
where
R: SeedableCryptoRng,
{
VRFPrivateKey(ed25519_PrivateKey::generate(rng))
}
}
impl TryFrom<&[u8]> for VRFPrivateKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPrivateKey, CryptoMaterialError> {
Ok(VRFPrivateKey(
ed25519_PrivateKey::from_bytes(bytes).unwrap(),
))
}
}
impl TryFrom<&[u8]> for VRFPublicKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPublicKey, CryptoMaterialError> {
if bytes.len() != ed25519_dalek::PUBLIC_KEY_LENGTH {
return Err(CryptoMaterialError::WrongLengthError);
}
let mut bits: [u8; 32] = [0u8; 32];
bits.copy_from_slice(&bytes[..32]);
let compressed = curve25519_dalek::edwards::CompressedEdwardsY(bits);
let point = compressed
.decompress()
.ok_or(CryptoMaterialError::DeserializationError)?;
// Check if the point lies on a small subgroup. This is required
// when using curves with a small cofactor (in ed25519, cofactor = 8).
if point.is_small_order() {
return Err(CryptoMaterialError::SmallSubgroupError);
}
Ok(VRFPublicKey(ed25519_PublicKey::from_bytes(bytes).unwrap()))
}
}
impl VRFPublicKey {
/// Given a [`Proof`] and an input, returns whether or not the proof is valid for the input
/// and public key
pub fn verify(&self, proof: &Proof, alpha: &[u8]) -> Result<()> {
let h_point = self.hash_to_curve(alpha);
let pk_point = CompressedEdwardsY::from_slice(self.as_bytes())
.decompress()
.unwrap();
let cprime = hash_points(&[
h_point,
proof.gamma,
ED25519_BASEPOINT_POINT * proof.s - pk_point * proof.c,
h_point * proof.s - proof.gamma * proof.c,
]);
if proof.c == cprime {
Ok(())
} else {
bail!("The proof failed to verify for this public key")
}
}
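// Why verification works (informal): for an honest proof,
// s*B - c*PK = (k + c*x)*B - c*(x*B) = k*B and s*H - c*gamma = k*H,
// so the recomputed challenge `cprime` equals `c`.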
pub(super) fn hash_to_curve(&self, alpha: &[u8]) -> EdwardsPoint {
let mut result = [0u8; 32];
let mut counter = 0;
let mut wrapped_point: Option<EdwardsPoint> = None;
while wrapped_point.is_none() {
result.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, ONE])
.chain(self.as_bytes())
.chain(&alpha)
.chain(&[counter])
.result()[..32],
);
wrapped_point = CompressedEdwardsY::from_slice(&result).decompress();
counter += 1;
}
wrapped_point.unwrap().mul_by_cofactor()
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFPublicKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let secret: &ed25519_PrivateKey = private_key;
let public: ed25519_PublicKey = secret.into();
VRFPublicKey(public)
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFExpandedPrivateKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let mut h: Sha512 = Sha512::default();
let mut hash: [u8; 64] = [0u8; 64];
let mut lower: [u8; 32] = [0u8; 32];
let mut upper: [u8; 32] = [0u8; 32];
h.input(private_key.to_bytes());
hash.copy_from_slice(h.result().as_slice());
lower.copy_from_slice(&hash[00..32]);
upper.copy_from_slice(&hash[32..64]);
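// Standard ed25519 scalar clamping: clear the low three bits, force the top
// bit to zero and set the second-highest bit before using the lower half as
// the secret scalar.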
lower[0] &= 248;
lower[31] &= 63;
lower[31] |= 64;
VRFExpandedPrivateKey {
key: ed25519_Scalar::from_bits(lower),
nonce: upper,
}
}
}
/// A VRF proof that can be used to validate an input with a public key
pub struct Proof {
gamma: EdwardsPoint,
c: ed25519_Scalar,
s: ed25519_Scalar,
}
impl Proof {
/// Produces a new Proof struct from its fields
pub fn new(gamma: EdwardsPoint, c: ed25519_Scalar, s: ed25519_Scalar) -> Proof {
Proof { gamma, c, s }
}
/// Converts a Proof into bytes
pub fn to_bytes(&self) -> [u8; PROOF_LENGTH] | {
VRFPrivateKey(ed25519_PrivateKey::generate(rng))
} | identifier_body |
ecvrf.rs | ECVRF private key
#[derive(Serialize, Deserialize, Deref, Debug)]
pub struct VRFPrivateKey(ed25519_PrivateKey);
/// An ECVRF public key
#[derive(Serialize, Deserialize, Deref, Debug, PartialEq, Eq)]
pub struct VRFPublicKey(ed25519_PublicKey);
/// A longer private key which is slightly optimized for proof generation.
///
/// This is similar in structure to ed25519_dalek::ExpandedSecretKey. It can be produced from
/// a VRFPrivateKey.
pub struct VRFExpandedPrivateKey {
pub(super) key: ed25519_Scalar,
pub(super) nonce: [u8; 32],
}
impl VRFPrivateKey {
/// Produces a proof for an input (using the private key)
pub fn prove(&self, alpha: &[u8]) -> Proof {
VRFExpandedPrivateKey::from(self).prove(&VRFPublicKey((&self.0).into()), alpha)
}
}
impl VRFExpandedPrivateKey {
/// Produces a proof for an input (using the expanded private key)
pub fn prove(&self, pk: &VRFPublicKey, alpha: &[u8]) -> Proof {
let h_point = pk.hash_to_curve(alpha);
let k_scalar =
ed25519_Scalar::from_bytes_mod_order_wide(&nonce_generation_bytes(self.nonce, h_point));
let gamma = h_point * self.key;
let c_scalar = hash_points(&[
h_point,
gamma,
ED25519_BASEPOINT_POINT * k_scalar,
h_point * k_scalar,
]);
Proof {
gamma,
c: c_scalar,
s: k_scalar + c_scalar * self.key,
}
}
}
impl Uniform for VRFPrivateKey {
fn generate_for_testing<R>(rng: &mut R) -> Self
where
R: SeedableCryptoRng,
{
VRFPrivateKey(ed25519_PrivateKey::generate(rng))
}
}
impl TryFrom<&[u8]> for VRFPrivateKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPrivateKey, CryptoMaterialError> {
Ok(VRFPrivateKey(
ed25519_PrivateKey::from_bytes(bytes).unwrap(),
))
}
}
impl TryFrom<&[u8]> for VRFPublicKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPublicKey, CryptoMaterialError> {
if bytes.len() != ed25519_dalek::PUBLIC_KEY_LENGTH {
return Err(CryptoMaterialError::WrongLengthError);
}
let mut bits: [u8; 32] = [0u8; 32];
bits.copy_from_slice(&bytes[..32]);
let compressed = curve25519_dalek::edwards::CompressedEdwardsY(bits);
let point = compressed
.decompress()
.ok_or(CryptoMaterialError::DeserializationError)?;
// Check if the point lies on a small subgroup. This is required
// when using curves with a small cofactor (in ed25519, cofactor = 8).
if point.is_small_order() {
return Err(CryptoMaterialError::SmallSubgroupError);
}
Ok(VRFPublicKey(ed25519_PublicKey::from_bytes(bytes).unwrap()))
}
}
impl VRFPublicKey {
/// Given a [`Proof`] and an input, returns whether or not the proof is valid for the input
/// and public key
pub fn verify(&self, proof: &Proof, alpha: &[u8]) -> Result<()> {
let h_point = self.hash_to_curve(alpha);
let pk_point = CompressedEdwardsY::from_slice(self.as_bytes())
.decompress()
.unwrap();
let cprime = hash_points(&[
h_point,
proof.gamma,
ED25519_BASEPOINT_POINT * proof.s - pk_point * proof.c,
h_point * proof.s - proof.gamma * proof.c,
]);
if proof.c == cprime {
Ok(())
} else {
bail!("The proof failed to verify for this public key")
}
}
pub(super) fn hash_to_curve(&self, alpha: &[u8]) -> EdwardsPoint {
let mut result = [0u8; 32];
let mut counter = 0;
let mut wrapped_point: Option<EdwardsPoint> = None;
while wrapped_point.is_none() {
result.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, ONE])
.chain(self.as_bytes())
.chain(&alpha)
.chain(&[counter])
.result()[..32],
);
wrapped_point = CompressedEdwardsY::from_slice(&result).decompress();
counter += 1;
}
wrapped_point.unwrap().mul_by_cofactor()
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFPublicKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let secret: &ed25519_PrivateKey = private_key;
let public: ed25519_PublicKey = secret.into();
VRFPublicKey(public)
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFExpandedPrivateKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let mut h: Sha512 = Sha512::default();
let mut hash: [u8; 64] = [0u8; 64];
let mut lower: [u8; 32] = [0u8; 32];
let mut upper: [u8; 32] = [0u8; 32];
h.input(private_key.to_bytes());
hash.copy_from_slice(h.result().as_slice());
lower.copy_from_slice(&hash[00..32]);
upper.copy_from_slice(&hash[32..64]);
lower[0] &= 248;
lower[31] &= 63;
lower[31] |= 64;
VRFExpandedPrivateKey {
key: ed25519_Scalar::from_bits(lower),
nonce: upper,
}
}
}
/// A VRF proof that can be used to validate an input with a public key
pub struct Proof {
gamma: EdwardsPoint,
c: ed25519_Scalar,
s: ed25519_Scalar,
}
impl Proof {
/// Produces a new Proof struct from its fields
pub fn new(gamma: EdwardsPoint, c: ed25519_Scalar, s: ed25519_Scalar) -> Proof {
Proof { gamma, c, s }
}
/// Converts a Proof into bytes
pub fn to_bytes(&self) -> [u8; PROOF_LENGTH] {
let mut ret = [0u8; PROOF_LENGTH];
ret[..32].copy_from_slice(&self.gamma.compress().to_bytes()[..]);
ret[32..48].copy_from_slice(&self.c.to_bytes()[..16]);
ret[48..].copy_from_slice(&self.s.to_bytes()[..]);
ret
}
}
impl TryFrom<&[u8]> for Proof {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<Proof, CryptoMaterialError> {
let mut c_buf = [0u8; 32];
c_buf[..16].copy_from_slice(&bytes[32..48]);
let mut s_buf = [0u8; 32];
s_buf.copy_from_slice(&bytes[48..]);
Ok(Proof {
gamma: CompressedEdwardsY::from_slice(&bytes[..32])
.decompress()
.unwrap(),
c: ed25519_Scalar::from_bits(c_buf),
s: ed25519_Scalar::from_bits(s_buf),
})
}
}
/// The ECVRF output produced from the proof
pub struct Output([u8; OUTPUT_LENGTH]);
impl Output {
/// Converts an Output into bytes
#[inline]
pub fn to_bytes(&self) -> [u8; OUTPUT_LENGTH] {
self.0
}
}
impl<'a> From<&'a Proof> for Output {
fn from(proof: &'a Proof) -> Output {
let mut output = [0u8; OUTPUT_LENGTH];
output.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, THREE])
.chain(&proof.gamma.mul_by_cofactor().compress().to_bytes()[..])
.result()[..],
);
Output(output)
}
}
pub(super) fn nonce_generation_bytes(nonce: [u8; 32], h_point: EdwardsPoint) -> [u8; 64] {
let mut k_buf = [0u8; 64];
k_buf.copy_from_slice(
&Sha512::new()
.chain(nonce)
.chain(h_point.compress().as_bytes()) | random_line_split |
||
ecvrf.rs | traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! let proof = private_key.prove(message);
//! assert!(public_key.verify(&proof, message).is_ok());
//! ```
//!
//! Produce a pseudorandom output from a `Proof`:
//!
//! ```
//! # use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! # let proof = private_key.prove(message);
//! let output: Output = (&proof).into();
//! ```
use crate::traits::*;
use core::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
edwards::{CompressedEdwardsY, EdwardsPoint},
scalar::Scalar as ed25519_Scalar,
};
use derive_deref::Deref;
use ed25519_dalek::{
self, Digest, PublicKey as ed25519_PublicKey, SecretKey as ed25519_PrivateKey, Sha512,
};
use failure::prelude::*;
use serde::{Deserialize, Serialize};
const SUITE: u8 = 0x03;
const ONE: u8 = 0x01;
const TWO: u8 = 0x02;
const THREE: u8 = 0x03;
/// The number of bytes of [`Output`]
pub const OUTPUT_LENGTH: usize = 64;
/// The number of bytes of [`Proof`]
pub const PROOF_LENGTH: usize = 80;
/// An ECVRF private key
#[derive(Serialize, Deserialize, Deref, Debug)]
pub struct VRFPrivateKey(ed25519_PrivateKey);
/// An ECVRF public key
#[derive(Serialize, Deserialize, Deref, Debug, PartialEq, Eq)]
pub struct VRFPublicKey(ed25519_PublicKey);
/// A longer private key which is slightly optimized for proof generation.
///
/// This is similar in structure to ed25519_dalek::ExpandedSecretKey. It can be produced from
/// a VRFPrivateKey.
pub struct VRFExpandedPrivateKey {
pub(super) key: ed25519_Scalar,
pub(super) nonce: [u8; 32],
}
impl VRFPrivateKey {
/// Produces a proof for an input (using the private key)
pub fn prove(&self, alpha: &[u8]) -> Proof {
VRFExpandedPrivateKey::from(self).prove(&VRFPublicKey((&self.0).into()), alpha)
}
}
impl VRFExpandedPrivateKey {
/// Produces a proof for an input (using the expanded private key)
pub fn prove(&self, pk: &VRFPublicKey, alpha: &[u8]) -> Proof {
let h_point = pk.hash_to_curve(alpha);
let k_scalar =
ed25519_Scalar::from_bytes_mod_order_wide(&nonce_generation_bytes(self.nonce, h_point));
let gamma = h_point * self.key;
let c_scalar = hash_points(&[
h_point,
gamma,
ED25519_BASEPOINT_POINT * k_scalar,
h_point * k_scalar,
]);
Proof {
gamma,
c: c_scalar,
s: k_scalar + c_scalar * self.key,
}
}
}
impl Uniform for VRFPrivateKey {
fn generate_for_testing<R>(rng: &mut R) -> Self
where
R: SeedableCryptoRng,
{
VRFPrivateKey(ed25519_PrivateKey::generate(rng))
}
}
impl TryFrom<&[u8]> for VRFPrivateKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPrivateKey, CryptoMaterialError> {
Ok(VRFPrivateKey(
ed25519_PrivateKey::from_bytes(bytes).unwrap(),
))
}
}
impl TryFrom<&[u8]> for VRFPublicKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPublicKey, CryptoMaterialError> {
if bytes.len() != ed25519_dalek::PUBLIC_KEY_LENGTH {
return Err(CryptoMaterialError::WrongLengthError);
}
let mut bits: [u8; 32] = [0u8; 32];
bits.copy_from_slice(&bytes[..32]);
let compressed = curve25519_dalek::edwards::CompressedEdwardsY(bits);
let point = compressed
.decompress()
.ok_or(CryptoMaterialError::DeserializationError)?;
// Check if the point lies on a small subgroup. This is required
// when using curves with a small cofactor (in ed25519, cofactor = 8).
if point.is_small_order() {
return Err(CryptoMaterialError::SmallSubgroupError);
}
Ok(VRFPublicKey(ed25519_PublicKey::from_bytes(bytes).unwrap()))
}
}
impl VRFPublicKey {
/// Given a [`Proof`] and an input, returns whether or not the proof is valid for the input
/// and public key
pub fn verify(&self, proof: &Proof, alpha: &[u8]) -> Result<()> {
let h_point = self.hash_to_curve(alpha);
let pk_point = CompressedEdwardsY::from_slice(self.as_bytes())
.decompress()
.unwrap();
let cprime = hash_points(&[
h_point,
proof.gamma,
ED25519_BASEPOINT_POINT * proof.s - pk_point * proof.c,
h_point * proof.s - proof.gamma * proof.c,
]);
if proof.c == cprime | else {
bail!("The proof failed to verify for this public key")
}
}
pub(super) fn hash_to_curve(&self, alpha: &[u8]) -> EdwardsPoint {
let mut result = [0u8; 32];
let mut counter = 0;
let mut wrapped_point: Option<EdwardsPoint> = None;
while wrapped_point.is_none() {
result.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, ONE])
.chain(self.as_bytes())
.chain(&alpha)
.chain(&[counter])
.result()[..32],
);
wrapped_point = CompressedEdwardsY::from_slice(&result).decompress();
counter += 1;
}
wrapped_point.unwrap().mul_by_cofactor()
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFPublicKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let secret: &ed25519_PrivateKey = private_key;
let public: ed25519_PublicKey = secret.into();
VRFPublicKey(public)
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFExpandedPrivateKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let mut h: Sha512 = Sha512::default();
let mut hash: [u8; 64] = [0u8; 64];
let mut lower: [u8; 32] = [0u8; 32];
let mut upper: [u8; 32] = [0u8; 32];
h.input(private_key.to_bytes());
hash.copy_from_slice(h.result().as_slice());
lower.copy_from_slice(&hash[00..32]);
upper.copy_from_slice(&hash[32..64]);
lower[0] &= 248;
lower[31] &= 63;
lower[31] |= 64;
VRFExpandedPrivateKey {
key: ed25519_Scalar::from_bits(lower),
nonce: upper,
}
}
}
/// A VRF proof that can be used to validate an input with a public key
pub struct Proof {
gamma: EdwardsPoint,
c: ed25519_Scalar,
s: ed25519_Scalar,
}
impl Proof {
/// Produces a new Proof struct from its fields
pub fn new(gamma: EdwardsPoint, c: ed25519_Scalar, s: ed25519_Scalar) -> Proof {
Proof { gamma, c, s }
}
/// Converts a Proof into bytes
pub fn to_bytes(&self) -> [u8; PROOF_LENGTH] | {
Ok(())
} | conditional_block |
ecvrf.rs | traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! let proof = private_key.prove(message);
//! assert!(public_key.verify(&proof, message).is_ok());
//! ```
//!
//! Produce a pseudorandom output from a `Proof`:
//!
//! ```
//! # use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! # let proof = private_key.prove(message);
//! let output: Output = (&proof).into();
//! ```
use crate::traits::*;
use core::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
edwards::{CompressedEdwardsY, EdwardsPoint},
scalar::Scalar as ed25519_Scalar,
};
use derive_deref::Deref;
use ed25519_dalek::{
self, Digest, PublicKey as ed25519_PublicKey, SecretKey as ed25519_PrivateKey, Sha512,
};
use failure::prelude::*;
use serde::{Deserialize, Serialize};
const SUITE: u8 = 0x03;
const ONE: u8 = 0x01;
const TWO: u8 = 0x02;
const THREE: u8 = 0x03;
/// The number of bytes of [`Output`]
pub const OUTPUT_LENGTH: usize = 64;
/// The number of bytes of [`Proof`]
pub const PROOF_LENGTH: usize = 80;
/// An ECVRF private key
#[derive(Serialize, Deserialize, Deref, Debug)]
pub struct VRFPrivateKey(ed25519_PrivateKey);
/// An ECVRF public key
#[derive(Serialize, Deserialize, Deref, Debug, PartialEq, Eq)]
pub struct VRFPublicKey(ed25519_PublicKey);
/// A longer private key which is slightly optimized for proof generation.
///
/// This is similar in structure to ed25519_dalek::ExpandedSecretKey. It can be produced from
/// a VRFPrivateKey.
pub struct VRFExpandedPrivateKey {
pub(super) key: ed25519_Scalar,
pub(super) nonce: [u8; 32],
}
impl VRFPrivateKey {
/// Produces a proof for an input (using the private key)
pub fn prove(&self, alpha: &[u8]) -> Proof {
VRFExpandedPrivateKey::from(self).prove(&VRFPublicKey((&self.0).into()), alpha)
}
}
impl VRFExpandedPrivateKey {
/// Produces a proof for an input (using the expanded private key)
pub fn prove(&self, pk: &VRFPublicKey, alpha: &[u8]) -> Proof {
let h_point = pk.hash_to_curve(alpha);
let k_scalar =
ed25519_Scalar::from_bytes_mod_order_wide(&nonce_generation_bytes(self.nonce, h_point));
let gamma = h_point * self.key;
let c_scalar = hash_points(&[
h_point,
gamma,
ED25519_BASEPOINT_POINT * k_scalar,
h_point * k_scalar,
]);
Proof {
gamma,
c: c_scalar,
s: k_scalar + c_scalar * self.key,
}
}
}
impl Uniform for VRFPrivateKey {
fn generate_for_testing<R>(rng: &mut R) -> Self
where
R: SeedableCryptoRng,
{
VRFPrivateKey(ed25519_PrivateKey::generate(rng))
}
}
impl TryFrom<&[u8]> for VRFPrivateKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPrivateKey, CryptoMaterialError> {
Ok(VRFPrivateKey(
ed25519_PrivateKey::from_bytes(bytes).unwrap(),
))
}
}
impl TryFrom<&[u8]> for VRFPublicKey {
type Error = CryptoMaterialError;
fn | (bytes: &[u8]) -> std::result::Result<VRFPublicKey, CryptoMaterialError> {
if bytes.len() != ed25519_dalek::PUBLIC_KEY_LENGTH {
return Err(CryptoMaterialError::WrongLengthError);
}
let mut bits: [u8; 32] = [0u8; 32];
bits.copy_from_slice(&bytes[..32]);
let compressed = curve25519_dalek::edwards::CompressedEdwardsY(bits);
let point = compressed
.decompress()
.ok_or(CryptoMaterialError::DeserializationError)?;
// Check if the point lies on a small subgroup. This is required
// when using curves with a small cofactor (in ed25519, cofactor = 8).
if point.is_small_order() {
return Err(CryptoMaterialError::SmallSubgroupError);
}
Ok(VRFPublicKey(ed25519_PublicKey::from_bytes(bytes).unwrap()))
}
}
impl VRFPublicKey {
/// Given a [`Proof`] and an input, returns whether or not the proof is valid for the input
/// and public key
pub fn verify(&self, proof: &Proof, alpha: &[u8]) -> Result<()> {
let h_point = self.hash_to_curve(alpha);
let pk_point = CompressedEdwardsY::from_slice(self.as_bytes())
.decompress()
.unwrap();
let cprime = hash_points(&[
h_point,
proof.gamma,
ED25519_BASEPOINT_POINT * proof.s - pk_point * proof.c,
h_point * proof.s - proof.gamma * proof.c,
]);
if proof.c == cprime {
Ok(())
} else {
bail!("The proof failed to verify for this public key")
}
}
pub(super) fn hash_to_curve(&self, alpha: &[u8]) -> EdwardsPoint {
let mut result = [0u8; 32];
let mut counter = 0;
let mut wrapped_point: Option<EdwardsPoint> = None;
while wrapped_point.is_none() {
result.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, ONE])
.chain(self.as_bytes())
.chain(&alpha)
.chain(&[counter])
.result()[..32],
);
wrapped_point = CompressedEdwardsY::from_slice(&result).decompress();
counter += 1;
}
wrapped_point.unwrap().mul_by_cofactor()
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFPublicKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let secret: &ed25519_PrivateKey = private_key;
let public: ed25519_PublicKey = secret.into();
VRFPublicKey(public)
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFExpandedPrivateKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let mut h: Sha512 = Sha512::default();
let mut hash: [u8; 64] = [0u8; 64];
let mut lower: [u8; 32] = [0u8; 32];
let mut upper: [u8; 32] = [0u8; 32];
h.input(private_key.to_bytes());
hash.copy_from_slice(h.result().as_slice());
lower.copy_from_slice(&hash[00..32]);
upper.copy_from_slice(&hash[32..64]);
lower[0] &= 248;
lower[31] &= 63;
lower[31] |= 64;
VRFExpandedPrivateKey {
key: ed25519_Scalar::from_bits(lower),
nonce: upper,
}
}
}
/// A VRF proof that can be used to validate an input with a public key
pub struct Proof {
gamma: EdwardsPoint,
c: ed25519_Scalar,
s: ed25519_Scalar,
}
impl Proof {
/// Produces a new Proof struct from its fields
pub fn new(gamma: EdwardsPoint, c: ed25519_Scalar, s: ed25519_Scalar) -> Proof {
Proof { gamma, c, s }
}
/// Converts a Proof into bytes
pub fn to_bytes(&self) -> [u8; PROOF_LENGTH] | try_from | identifier_name |
interactor.go | }
// For sparse checkouts, we have to do some additional housekeeping after
// the clone is completed. We use Git's global "-C <directory>" flag to
// switch to that directory before running the "sparse-checkout" command,
// because otherwise the command will fail (because it will try to run the
// command in the $PWD, which is not the same as the just-created clone
// directory (i.dir)).
if repoOpts.SparseCheckoutDirs != nil {
if len(repoOpts.SparseCheckoutDirs) == 0 {
return nil
}
sparseCheckoutArgs := []string{"-C", i.dir, "sparse-checkout", "set"}
sparseCheckoutArgs = append(sparseCheckoutArgs, repoOpts.SparseCheckoutDirs...)
if out, err := i.executor.Run(sparseCheckoutArgs...); err != nil {
return fmt.Errorf("error setting it to a sparse checkout: %w %v", err, string(out))
}
}
return nil
}
// MirrorClone sets up a mirror of the source repository.
func (i *interactor) MirrorClone() error {
i.logger.Infof("Creating a mirror of the repo at %s", i.dir)
remote, err := i.remote()
if err != nil {
return fmt.Errorf("could not resolve remote for cloning: %w", err)
}
if out, err := i.executor.Run("clone", "--mirror", remote, i.dir); err != nil {
return fmt.Errorf("error creating a mirror clone: %w %v", err, string(out))
}
return nil
}
// Checkout runs git checkout.
func (i *interactor) Checkout(commitlike string) error {
i.logger.Infof("Checking out %q", commitlike)
if out, err := i.executor.Run("checkout", commitlike); err != nil {
return fmt.Errorf("error checking out %q: %w %v", commitlike, err, string(out))
}
return nil
}
// RevParse runs git rev-parse.
func (i *interactor) RevParse(commitlike string) (string, error) {
i.logger.Infof("Parsing revision %q", commitlike)
out, err := i.executor.Run("rev-parse", commitlike)
if err != nil {
return "", fmt.Errorf("error parsing %q: %w %v", commitlike, err, string(out))
}
return string(out), nil
}
// BranchExists returns true if branch exists in heads.
func (i *interactor) BranchExists(branch string) bool {
i.logger.Infof("Checking if branch %q exists", branch)
_, err := i.executor.Run("ls-remote", "--exit-code", "--heads", "origin", branch)
return err == nil
}
func (i *interactor) ObjectExists(sha string) (bool, error) {
i.logger.WithField("SHA", sha).Info("Checking if Git object exists")
output, err := i.executor.Run("cat-file", "-e", sha)
// If the object does not exist, cat-file will exit with a non-zero exit
// code. This will make err non-nil. However this is a known behavior, so
// we just log it.
//
// We still have the error type as a return value because the v1 git client
// adapter needs to know that this operation is not supported there.
if err != nil {
i.logger.WithError(err).WithField("SHA", sha).Debugf("error from 'git cat-file -e': %s", string(output))
return false, nil
}
return true, nil
}
// CheckoutNewBranch creates a new branch and checks it out.
func (i *interactor) CheckoutNewBranch(branch string) error {
i.logger.Infof("Checking out new branch %q", branch)
if out, err := i.executor.Run("checkout", "-b", branch); err != nil {
return fmt.Errorf("error checking out new branch %q: %w %v", branch, err, string(out))
}
return nil
}
// Merge attempts to merge commitlike into the current branch. It returns true
// if the merge completes. It returns an error if the abort fails.
func (i *interactor) Merge(commitlike string) (bool, error) {
return i.MergeWithStrategy(commitlike, "merge")
}
// MergeWithStrategy attempts to merge commitlike into the current branch given the merge strategy.
// It returns true if the merge completes. If the merge does not complete successfully, we try to
// abort it and return an error if the abort fails.
func (i *interactor) MergeWithStrategy(commitlike, mergeStrategy string, opts ...MergeOpt) (bool, error) {
i.logger.Infof("Merging %q using the %q strategy", commitlike, mergeStrategy)
switch mergeStrategy {
case "merge":
return i.mergeMerge(commitlike, opts...)
case "squash":
return i.squashMerge(commitlike)
case "rebase":
return i.mergeRebase(commitlike)
case "ifNecessary":
return i.mergeIfNecessary(commitlike, opts...)
default:
return false, fmt.Errorf("merge strategy %q is not supported", mergeStrategy)
}
}
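// mergeHelper runs the given merge invocation, attaching one "-m" message per
// supplied option (or a default "merge" message), and tries to abort the
// in-progress merge on failure so the worktree is left clean.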
func (i *interactor) mergeHelper(args []string, commitlike string, opts ...MergeOpt) (bool, error) {
if len(opts) == 0 {
args = append(args, []string{"-m", "merge"}...)
} else {
for _, opt := range opts {
args = append(args, []string{"-m", opt.CommitMessage}...)
}
}
args = append(args, commitlike)
out, err := i.executor.Run(args...)
if err == nil {
return true, nil
}
i.logger.WithError(err).Infof("Error merging %q: %s", commitlike, string(out))
if out, err := i.executor.Run("merge", "--abort"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
func (i *interactor) mergeMerge(commitlike string, opts ...MergeOpt) (bool, error) {
args := []string{"merge", "--no-ff", "--no-stat"}
return i.mergeHelper(args, commitlike, opts...)
}
func (i *interactor) mergeIfNecessary(commitlike string, opts ...MergeOpt) (bool, error) {
args := []string{"merge", "--ff", "--no-stat"}
return i.mergeHelper(args, commitlike, opts...)
}
func (i *interactor) squashMerge(commitlike string) (bool, error) {
out, err := i.executor.Run("merge", "--squash", "--no-stat", commitlike)
if err != nil {
i.logger.WithError(err).Warnf("Error staging merge for %q: %s", commitlike, string(out))
if out, err := i.executor.Run("reset", "--hard", "HEAD"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
out, err = i.executor.Run("commit", "--no-stat", "-m", "merge")
if err != nil {
i.logger.WithError(err).Warnf("Error committing merge for %q: %s", commitlike, string(out))
if out, err := i.executor.Run("reset", "--hard", "HEAD"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
return true, nil
}
func (i *interactor) mergeRebase(commitlike string) (bool, error) {
if commitlike == "" {
return false, errors.New("branch must be set")
}
headRev, err := i.revParse("HEAD")
if err != nil {
i.logger.WithError(err).Infof("Failed to parse HEAD revision")
return false, err
}
headRev = strings.TrimSuffix(headRev, "\n")
b, err := i.executor.Run("rebase", "--no-stat", headRev, commitlike)
if err != nil {
i.logger.WithField("out", string(b)).WithError(err).Infof("Rebase failed.")
if b, err := i.executor.Run("rebase", "--abort"); err != nil {
return false, fmt.Errorf("error aborting after failed rebase for commitlike %s: %v. output: %s", commitlike, err, string(b))
}
return false, nil
}
return true, nil
}
func (i *interactor) revParse(args ...string) (string, error) {
fullArgs := append([]string{"rev-parse"}, args...)
b, err := i.executor.Run(fullArgs...)
if err != nil {
return "", errors.New(string(b))
}
return string(b), nil
}
// Only the `merge` and `squash` strategies are supported.
func (i *interactor) MergeAndCheckout(baseSHA string, mergeStrategy string, headSHAs ...string) error {
if baseSHA == "" {
return errors.New("baseSHA must be set")
}
if err := i.Checkout(baseSHA); err != nil {
return err
}
for _, headSHA := range headSHAs {
ok, err := i.MergeWithStrategy(headSHA, mergeStrategy)
if err != nil | {
return err
} | conditional_block |
|
interactor.go | q: %w %v", commitlike, err, string(out))
}
return nil
}
// RevParse runs git rev-parse.
func (i *interactor) RevParse(commitlike string) (string, error) {
i.logger.Infof("Parsing revision %q", commitlike)
out, err := i.executor.Run("rev-parse", commitlike)
if err != nil {
return "", fmt.Errorf("error parsing %q: %w %v", commitlike, err, string(out))
}
return string(out), nil
}
// BranchExists returns true if branch exists in heads.
func (i *interactor) BranchExists(branch string) bool {
i.logger.Infof("Checking if branch %q exists", branch)
_, err := i.executor.Run("ls-remote", "--exit-code", "--heads", "origin", branch)
return err == nil
}
func (i *interactor) ObjectExists(sha string) (bool, error) {
i.logger.WithField("SHA", sha).Info("Checking if Git object exists")
output, err := i.executor.Run("cat-file", "-e", sha)
// If the object does not exist, cat-file will exit with a non-zero exit
// code. This will make err non-nil. However this is a known behavior, so
// we just log it.
//
// We still have the error type as a return value because the v1 git client
// adapter needs to know that this operation is not supported there.
if err != nil {
i.logger.WithError(err).WithField("SHA", sha).Debugf("error from 'git cat-file -e': %s", string(output))
return false, nil
}
return true, nil
}
// CheckoutNewBranch creates a new branch and checks it out.
func (i *interactor) CheckoutNewBranch(branch string) error {
i.logger.Infof("Checking out new branch %q", branch)
if out, err := i.executor.Run("checkout", "-b", branch); err != nil {
return fmt.Errorf("error checking out new branch %q: %w %v", branch, err, string(out))
}
return nil
}
// Merge attempts to merge commitlike into the current branch. It returns true
// if the merge completes. It returns an error if the abort fails.
func (i *interactor) Merge(commitlike string) (bool, error) {
return i.MergeWithStrategy(commitlike, "merge")
}
// MergeWithStrategy attempts to merge commitlike into the current branch given the merge strategy.
// It returns true if the merge completes. If the merge does not complete successfully, we try to
// abort it and return an error if the abort fails.
func (i *interactor) MergeWithStrategy(commitlike, mergeStrategy string, opts ...MergeOpt) (bool, error) {
i.logger.Infof("Merging %q using the %q strategy", commitlike, mergeStrategy)
switch mergeStrategy {
case "merge":
return i.mergeMerge(commitlike, opts...)
case "squash":
return i.squashMerge(commitlike)
case "rebase":
return i.mergeRebase(commitlike)
case "ifNecessary":
return i.mergeIfNecessary(commitlike, opts...)
default:
return false, fmt.Errorf("merge strategy %q is not supported", mergeStrategy)
}
}
func (i *interactor) mergeHelper(args []string, commitlike string, opts ...MergeOpt) (bool, error) {
if len(opts) == 0 {
args = append(args, []string{"-m", "merge"}...)
} else {
for _, opt := range opts {
args = append(args, []string{"-m", opt.CommitMessage}...)
}
}
args = append(args, commitlike)
out, err := i.executor.Run(args...)
if err == nil {
return true, nil
}
i.logger.WithError(err).Infof("Error merging %q: %s", commitlike, string(out))
if out, err := i.executor.Run("merge", "--abort"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
func (i *interactor) mergeMerge(commitlike string, opts ...MergeOpt) (bool, error) {
args := []string{"merge", "--no-ff", "--no-stat"}
return i.mergeHelper(args, commitlike, opts...)
}
func (i *interactor) mergeIfNecessary(commitlike string, opts ...MergeOpt) (bool, error) {
args := []string{"merge", "--ff", "--no-stat"}
return i.mergeHelper(args, commitlike, opts...)
}
func (i *interactor) squashMerge(commitlike string) (bool, error) {
out, err := i.executor.Run("merge", "--squash", "--no-stat", commitlike)
if err != nil {
i.logger.WithError(err).Warnf("Error staging merge for %q: %s", commitlike, string(out))
if out, err := i.executor.Run("reset", "--hard", "HEAD"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
out, err = i.executor.Run("commit", "--no-stat", "-m", "merge")
if err != nil {
i.logger.WithError(err).Warnf("Error committing merge for %q: %s", commitlike, string(out))
if out, err := i.executor.Run("reset", "--hard", "HEAD"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
return true, nil
}
func (i *interactor) mergeRebase(commitlike string) (bool, error) {
if commitlike == "" {
return false, errors.New("branch must be set")
}
headRev, err := i.revParse("HEAD")
if err != nil {
i.logger.WithError(err).Infof("Failed to parse HEAD revision")
return false, err
}
headRev = strings.TrimSuffix(headRev, "\n")
b, err := i.executor.Run("rebase", "--no-stat", headRev, commitlike)
if err != nil {
i.logger.WithField("out", string(b)).WithError(err).Infof("Rebase failed.")
if b, err := i.executor.Run("rebase", "--abort"); err != nil {
return false, fmt.Errorf("error aborting after failed rebase for commitlike %s: %v. output: %s", commitlike, err, string(b))
}
return false, nil
}
return true, nil
}
func (i *interactor) revParse(args ...string) (string, error) {
fullArgs := append([]string{"rev-parse"}, args...)
b, err := i.executor.Run(fullArgs...)
if err != nil {
return "", errors.New(string(b))
}
return string(b), nil
}
// Only the `merge` and `squash` strategies are supported.
func (i *interactor) MergeAndCheckout(baseSHA string, mergeStrategy string, headSHAs ...string) error {
if baseSHA == "" {
return errors.New("baseSHA must be set")
}
if err := i.Checkout(baseSHA); err != nil {
return err
}
for _, headSHA := range headSHAs {
ok, err := i.MergeWithStrategy(headSHA, mergeStrategy)
if err != nil {
return err
} else if !ok {
return fmt.Errorf("failed to merge %q", headSHA)
}
}
return nil
}
// Am tries to apply the patch in the given path into the current branch
// by performing a three-way merge (similar to git cherry-pick). It returns
// an error if the patch cannot be applied.
func (i *interactor) Am(path string) error {
i.logger.Infof("Applying patch at %s", path)
out, err := i.executor.Run("am", "--3way", path)
if err == nil {
return nil
}
i.logger.WithError(err).Infof("Patch apply failed with output: %s", string(out))
if abortOut, abortErr := i.executor.Run("am", "--abort"); abortErr != nil {
i.logger.WithError(abortErr).Warningf("Aborting patch apply failed with output: %s", string(abortOut))
}
return errors.New(string(bytes.TrimPrefix(out, []byte("The copy of the patch that failed is found in: .git/rebase-apply/patch"))))
}
// FetchCommits only fetches those commits which we want, and only if they are
// missing.
func (i *interactor) FetchCommits(noFetchTags bool, commitSHAs []string) error | {
fetchArgs := []string{"--no-write-fetch-head"}
if noFetchTags {
fetchArgs = append(fetchArgs, "--no-tags")
}
// For each commit SHA, check if it already exists. If so, don't bother
// fetching it.
var missingCommits bool
for _, commitSHA := range commitSHAs {
if exists, _ := i.ObjectExists(commitSHA); exists {
continue
}
fetchArgs = append(fetchArgs, commitSHA)
missingCommits = true
}
// Skip the fetch operation altogether if nothing is missing (we already | identifier_body |
|
interactor.go | nil {
i.logger.WithError(err).Warnf("Error staging merge for %q: %s", commitlike, string(out))
if out, err := i.executor.Run("reset", "--hard", "HEAD"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
out, err = i.executor.Run("commit", "--no-stat", "-m", "merge")
if err != nil {
i.logger.WithError(err).Warnf("Error committing merge for %q: %s", commitlike, string(out))
if out, err := i.executor.Run("reset", "--hard", "HEAD"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
return true, nil
}
func (i *interactor) mergeRebase(commitlike string) (bool, error) {
if commitlike == "" {
return false, errors.New("branch must be set")
}
headRev, err := i.revParse("HEAD")
if err != nil {
i.logger.WithError(err).Infof("Failed to parse HEAD revision")
return false, err
}
headRev = strings.TrimSuffix(headRev, "\n")
b, err := i.executor.Run("rebase", "--no-stat", headRev, commitlike)
if err != nil {
i.logger.WithField("out", string(b)).WithError(err).Infof("Rebase failed.")
if b, err := i.executor.Run("rebase", "--abort"); err != nil {
return false, fmt.Errorf("error aborting after failed rebase for commitlike %s: %v. output: %s", commitlike, err, string(b))
}
return false, nil
}
return true, nil
}
func (i *interactor) revParse(args ...string) (string, error) {
fullArgs := append([]string{"rev-parse"}, args...)
b, err := i.executor.Run(fullArgs...)
if err != nil {
return "", errors.New(string(b))
}
return string(b), nil
}
// Only the `merge` and `squash` strategies are supported.
func (i *interactor) MergeAndCheckout(baseSHA string, mergeStrategy string, headSHAs ...string) error {
if baseSHA == "" {
return errors.New("baseSHA must be set")
}
if err := i.Checkout(baseSHA); err != nil {
return err
}
for _, headSHA := range headSHAs {
ok, err := i.MergeWithStrategy(headSHA, mergeStrategy)
if err != nil {
return err
} else if !ok {
return fmt.Errorf("failed to merge %q", headSHA)
}
}
return nil
}
// Am tries to apply the patch in the given path into the current branch
// by performing a three-way merge (similar to git cherry-pick). It returns
// an error if the patch cannot be applied.
func (i *interactor) Am(path string) error {
i.logger.Infof("Applying patch at %s", path)
out, err := i.executor.Run("am", "--3way", path)
if err == nil {
return nil
}
i.logger.WithError(err).Infof("Patch apply failed with output: %s", string(out))
if abortOut, abortErr := i.executor.Run("am", "--abort"); abortErr != nil {
i.logger.WithError(abortErr).Warningf("Aborting patch apply failed with output: %s", string(abortOut))
}
return errors.New(string(bytes.TrimPrefix(out, []byte("The copy of the patch that failed is found in: .git/rebase-apply/patch"))))
}
// FetchCommits only fetches those commits which we want, and only if they are
// missing.
func (i *interactor) FetchCommits(noFetchTags bool, commitSHAs []string) error {
fetchArgs := []string{"--no-write-fetch-head"}
if noFetchTags {
fetchArgs = append(fetchArgs, "--no-tags")
}
// For each commit SHA, check if it already exists. If so, don't bother
// fetching it.
var missingCommits bool
for _, commitSHA := range commitSHAs {
if exists, _ := i.ObjectExists(commitSHA); exists {
continue
}
fetchArgs = append(fetchArgs, commitSHA)
missingCommits = true
}
// Skip the fetch operation altogether if nothing is missing (we already
// fetched everything previously at some point).
if !missingCommits {
return nil
}
if err := i.Fetch(fetchArgs...); err != nil {
return fmt.Errorf("failed to fetch %s: %v", fetchArgs, err)
}
return nil
}
// RemoteUpdate fetches all updates from the remote.
func (i *interactor) RemoteUpdate() error {
i.logger.Info("Updating from remote")
if out, err := i.executor.Run("remote", "update", "--prune"); err != nil {
return fmt.Errorf("error updating: %w %v", err, string(out))
}
return nil
}
// Fetch fetches all updates from the remote.
func (i *interactor) Fetch(arg ...string) error {
remote, err := i.remote()
if err != nil {
return fmt.Errorf("could not resolve remote for fetching: %w", err)
}
arg = append([]string{"fetch", remote}, arg...)
i.logger.Infof("Fetching from %s", remote)
if out, err := i.executor.Run(arg...); err != nil {
return fmt.Errorf("error fetching: %w %v", err, string(out))
}
return nil
}
// FetchRef fetches a refspec from the remote and leaves it as FETCH_HEAD.
func (i *interactor) FetchRef(refspec string) error {
remote, err := i.remote()
if err != nil {
return fmt.Errorf("could not resolve remote for fetching: %w", err)
}
i.logger.Infof("Fetching %q from %s", refspec, remote)
if out, err := i.executor.Run("fetch", remote, refspec); err != nil {
return fmt.Errorf("error fetching %q: %w %v", refspec, err, string(out))
}
return nil
}
// FetchFromRemote fetches all update from a specific remote and branch and leaves it as FETCH_HEAD.
func (i *interactor) FetchFromRemote(remote RemoteResolver, branch string) error {
r, err := remote()
if err != nil {
return fmt.Errorf("couldn't get remote: %w", err)
}
i.logger.Infof("Fetching %s from %s", branch, r)
if out, err := i.executor.Run("fetch", r, branch); err != nil {
return fmt.Errorf("error fetching %s from %s: %w %v", branch, r, err, string(out))
}
return nil
}
// CheckoutPullRequest fetches the HEAD of a pull request using a synthetic refspec
// available on GitHub remotes and creates a branch at that commit.
func (i *interactor) CheckoutPullRequest(number int) error {
i.logger.Infof("Checking out pull request %d", number)
if err := i.FetchRef(fmt.Sprintf("pull/%d/head", number)); err != nil {
return err
}
if err := i.Checkout("FETCH_HEAD"); err != nil {
return err
}
if err := i.CheckoutNewBranch(fmt.Sprintf("pull%d", number)); err != nil {
return err
}
return nil
}
// Config runs git config.
func (i *interactor) Config(args ...string) error {
i.logger.WithField("args", args).Info("Configuring.")
if out, err := i.executor.Run(append([]string{"config"}, args...)...); err != nil {
return fmt.Errorf("error configuring %v: %w %v", args, err, string(out))
}
return nil
}
// Diff lists the difference between the two references, returning the output
// line by line.
func (i *interactor) Diff(head, sha string) ([]string, error) {
i.logger.Infof("Finding the differences between %q and %q", head, sha)
out, err := i.executor.Run("diff", head, sha, "--name-only")
if err != nil {
return nil, err
}
var changes []string
scan := bufio.NewScanner(bytes.NewReader(out))
scan.Split(bufio.ScanLines)
for scan.Scan() {
changes = append(changes, scan.Text())
}
return changes, nil
}
// MergeCommitsExistBetween runs 'git log <target>..<head> --merged' to verify
// if merge commits exist between "target" and "head".
func (i *interactor) MergeCommitsExistBetween(target, head string) (bool, error) {
i.logger.Infof("Determining if merge commits exist between %q and %q", target, head)
out, err := i.executor.Run("log", fmt.Sprintf("%s..%s", target, head), "--oneline", "--merges")
if err != nil {
return false, fmt.Errorf("error verifying if merge commits exist between %q and %q: %v %s", target, head, err, string(out))
}
return len(out) != 0, nil
}
func (i *interactor) | ShowRef | identifier_name |
|
interactor.go | the clone is completed. We use Git's global "-C <directory>" flag to
// switch to that directory before running the "sparse-checkout" command,
// because otherwise the command will fail (because it will try to run the
// command in the $PWD, which is not the same as the just-created clone
// directory (i.dir)).
if repoOpts.SparseCheckoutDirs != nil {
if len(repoOpts.SparseCheckoutDirs) == 0 {
return nil
}
sparseCheckoutArgs := []string{"-C", i.dir, "sparse-checkout", "set"}
sparseCheckoutArgs = append(sparseCheckoutArgs, repoOpts.SparseCheckoutDirs...)
if out, err := i.executor.Run(sparseCheckoutArgs...); err != nil {
return fmt.Errorf("error setting it to a sparse checkout: %w %v", err, string(out))
}
}
return nil
}
// MirrorClone sets up a mirror of the source repository.
func (i *interactor) MirrorClone() error {
i.logger.Infof("Creating a mirror of the repo at %s", i.dir)
remote, err := i.remote()
if err != nil {
return fmt.Errorf("could not resolve remote for cloning: %w", err)
}
if out, err := i.executor.Run("clone", "--mirror", remote, i.dir); err != nil {
return fmt.Errorf("error creating a mirror clone: %w %v", err, string(out))
}
return nil
}
// Checkout runs git checkout.
func (i *interactor) Checkout(commitlike string) error {
i.logger.Infof("Checking out %q", commitlike)
if out, err := i.executor.Run("checkout", commitlike); err != nil {
return fmt.Errorf("error checking out %q: %w %v", commitlike, err, string(out))
}
return nil
}
// RevParse runs git rev-parse.
func (i *interactor) RevParse(commitlike string) (string, error) {
i.logger.Infof("Parsing revision %q", commitlike)
out, err := i.executor.Run("rev-parse", commitlike)
if err != nil {
return "", fmt.Errorf("error parsing %q: %w %v", commitlike, err, string(out))
}
return string(out), nil
}
// BranchExists returns true if branch exists in heads.
func (i *interactor) BranchExists(branch string) bool {
i.logger.Infof("Checking if branch %q exists", branch)
_, err := i.executor.Run("ls-remote", "--exit-code", "--heads", "origin", branch)
return err == nil
}
func (i *interactor) ObjectExists(sha string) (bool, error) {
i.logger.WithField("SHA", sha).Info("Checking if Git object exists")
output, err := i.executor.Run("cat-file", "-e", sha)
// If the object does not exist, cat-file will exit with a non-zero exit
// code. This will make err non-nil. However this is a known behavior, so
// we just log it.
//
// We still have the error type as a return value because the v1 git client
// adapter needs to know that this operation is not supported there.
if err != nil {
i.logger.WithError(err).WithField("SHA", sha).Debugf("error from 'git cat-file -e': %s", string(output))
return false, nil
}
return true, nil
}
// CheckoutNewBranch creates a new branch and checks it out.
func (i *interactor) CheckoutNewBranch(branch string) error {
i.logger.Infof("Checking out new branch %q", branch)
if out, err := i.executor.Run("checkout", "-b", branch); err != nil {
return fmt.Errorf("error checking out new branch %q: %w %v", branch, err, string(out))
}
return nil
}
// Merge attempts to merge commitlike into the current branch. It returns true
// if the merge completes. It returns an error if the abort fails.
func (i *interactor) Merge(commitlike string) (bool, error) {
return i.MergeWithStrategy(commitlike, "merge")
}
// MergeWithStrategy attempts to merge commitlike into the current branch given the merge strategy.
// It returns true if the merge completes. If the merge does not complete successfully, we try to
// abort it and return an error if the abort fails.
func (i *interactor) MergeWithStrategy(commitlike, mergeStrategy string, opts ...MergeOpt) (bool, error) {
i.logger.Infof("Merging %q using the %q strategy", commitlike, mergeStrategy)
switch mergeStrategy {
case "merge":
return i.mergeMerge(commitlike, opts...)
case "squash":
return i.squashMerge(commitlike)
case "rebase":
return i.mergeRebase(commitlike)
case "ifNecessary":
return i.mergeIfNecessary(commitlike, opts...)
default:
return false, fmt.Errorf("merge strategy %q is not supported", mergeStrategy)
}
}
func (i *interactor) mergeHelper(args []string, commitlike string, opts ...MergeOpt) (bool, error) {
if len(opts) == 0 {
args = append(args, []string{"-m", "merge"}...)
} else {
for _, opt := range opts {
args = append(args, []string{"-m", opt.CommitMessage}...)
}
}
args = append(args, commitlike)
out, err := i.executor.Run(args...)
if err == nil {
return true, nil
}
i.logger.WithError(err).Infof("Error merging %q: %s", commitlike, string(out))
if out, err := i.executor.Run("merge", "--abort"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
func (i *interactor) mergeMerge(commitlike string, opts ...MergeOpt) (bool, error) {
args := []string{"merge", "--no-ff", "--no-stat"}
return i.mergeHelper(args, commitlike, opts...)
}
func (i *interactor) mergeIfNecessary(commitlike string, opts ...MergeOpt) (bool, error) {
args := []string{"merge", "--ff", "--no-stat"}
return i.mergeHelper(args, commitlike, opts...)
}
func (i *interactor) squashMerge(commitlike string) (bool, error) {
out, err := i.executor.Run("merge", "--squash", "--no-stat", commitlike)
if err != nil {
i.logger.WithError(err).Warnf("Error staging merge for %q: %s", commitlike, string(out))
if out, err := i.executor.Run("reset", "--hard", "HEAD"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
out, err = i.executor.Run("commit", "--no-stat", "-m", "merge")
if err != nil {
i.logger.WithError(err).Warnf("Error committing merge for %q: %s", commitlike, string(out))
if out, err := i.executor.Run("reset", "--hard", "HEAD"); err != nil {
return false, fmt.Errorf("error aborting merge of %q: %w %v", commitlike, err, string(out))
}
return false, nil
}
return true, nil
}
func (i *interactor) mergeRebase(commitlike string) (bool, error) {
if commitlike == "" {
return false, errors.New("branch must be set")
}
headRev, err := i.revParse("HEAD")
if err != nil {
i.logger.WithError(err).Infof("Failed to parse HEAD revision")
return false, err
}
headRev = strings.TrimSuffix(headRev, "\n")
b, err := i.executor.Run("rebase", "--no-stat", headRev, commitlike)
if err != nil {
i.logger.WithField("out", string(b)).WithError(err).Infof("Rebase failed.")
if b, err := i.executor.Run("rebase", "--abort"); err != nil {
return false, fmt.Errorf("error aborting after failed rebase for commitlike %s: %v. output: %s", commitlike, err, string(b))
}
return false, nil
}
return true, nil
}
func (i *interactor) revParse(args ...string) (string, error) {
fullArgs := append([]string{"rev-parse"}, args...)
b, err := i.executor.Run(fullArgs...)
if err != nil {
return "", errors.New(string(b))
}
return string(b), nil
}
// Only the `merge` and `squash` strategies are supported.
func (i *interactor) MergeAndCheckout(baseSHA string, mergeStrategy string, headSHAs ...string) error {
if baseSHA == "" {
return errors.New("baseSHA must be set")
}
if err := i.Checkout(baseSHA); err != nil {
return err
}
for _, headSHA := range headSHAs {
ok, err := i.MergeWithStrategy(headSHA, mergeStrategy)
if err != nil {
return err
} else if !ok { | return fmt.Errorf("failed to merge %q", headSHA)
} | random_line_split |
|
eulerlib.py | )
    tn = i = 1
    while tn < n:
        triangle_numbers[triangle_number(i)] = True
        i += 1
        tn = triangle_number(i)
    return triangle_numbers
def hexagonal_number(n):
    """
    Calculate the nth hexagonal number.
    :param n: Hn
    :return: Hexagonal number
    """
    return n * (2 * n - 1)
def is_hexagonal_number(n):
    """
    Determines if n is a hexagonal number.
    :param n: number to test.
    :return: True if n is hexagonal.
    """
    _, x = quadratic.solve(2, -1, -n)
    return is_number(x) and x.is_integer()
def pentagonal_number(n):
    return n * (3 * n - 1) / 2
def is_pentagonal_number(n):
    """
    Determines if n is a pentagonal number.
    :param n:
    :return: True if pentagonal.
    """
    _, x = quadratic.solve(3, -1, -2 * n)
    return is_number(x) and x.is_integer()
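# Both tests above invert the defining quadratic: a pentagonal number satisfies
# n = x * (3x - 1) / 2, so x solves 3x^2 - x - 2n = 0 and n is pentagonal
# exactly when the positive root is an integer (e.g. n = 22 gives x = 4).
# quadratic.solve and is_number are helpers assumed to be defined or imported
# outside this excerpt.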
def proper_divisors(x):
    """
    Returns all the proper divisors for a number x, excluding x.
    e.g. divisors(1001) = [1, 7, 11, 13, 77, 91, 143]
    :param x: number >= 1.
    :return: the divisors excluding itself.
    """
    return divisors(x)[:-1]
def restricted_divisors(x):
    """
    Returns all the restricted divisors for a number x, excluding 1 and x.
    e.g. divisors(1001) = [7, 11, 13, 77, 91, 143]
    :param x: number >= 1.
    :return: the divisors excluding 1 and itself.
    """
    return divisors(x)[1:-1]
def is_perfect_number(x):
    """
    Test if a number is a perfect number. A number is perfect
    if the sum of the proper divisors is equal to itself.
    :param x: number to test.
    :return: True if it is a perfect number.
    """
    return sum(proper_divisors(x)) == x
def is_abundant_number(x):
    """
    Test if a number is an abundant number. A number is abundant
    if the sum of the proper divisors is greater than the number
    itself.
    :param x: number to test.
    :return: True if it is an abundant number.
    """
    return sum(proper_divisors(x)) > x
def is_deficient_number(x):
    """
    Test if a number is a deficient number. A number is deficient
    if the sum of the proper divisors is less than the number
    itself.
    :param x: number to test.
    :return: True if it is a deficient number.
    """
    return sum(proper_divisors(x)) < x
def digits(x):
    """
    Returns the digits of a number in a list.
    :param x: The number to split into digits.
    :return: List of the digits of x.
    """
    return [int(d) for d in str(x)]
def digits_to_int(x):
    """
    Concatenate a list of digits to an integer.
    :param x:
    :return:
    """
    if x is None:
        return ""
    return int(''.join([str(i) for i in x]))
def is_fibonacci_number(x):
    """
    Test if x is a Fibonacci number.
    :param x: Number to test.
    :return: True if it is a Fibonacci number.
    """
    a = math.sqrt(5 * x ** 2 + 4)
    b = math.sqrt(5 * x ** 2 - 4)
    return a.is_integer() or b.is_integer()
def fibonacci_n(n):
    """
    Calculate the nth Fibonacci number (Fn).
    :param n: which number to calculate.
    :return: The nth Fibonacci number.
    """
    sqrt5 = math.sqrt(5)
    phi = (1 + sqrt5) / 2
    psi = (1 - sqrt5) / 2
    return (phi**n - psi**n) // sqrt5
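# Binet's closed form: F(n) = (phi**n - psi**n) / sqrt(5); the floor division
# above absorbs the floating-point error for moderate n, e.g. fibonacci_n(10)
# yields 55.0. Precision degrades for large n (roughly n > 70), where an
# iterative method would be safer.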
def fibonacci_n_inv(x):
    """
    Calculate the n for Fn for a Fibonacci number.
    :param x: Fibonacci number.
    :return: The position of the Fibonacci number (Fn)
    """
    if x < 2:
        raise ValueError('Function approximation is wrong when x < 2.')
    sqrt5 = math.sqrt(5)
    phi = (1 + sqrt5) / 2
    rad = 5 * x**2
    p = math.sqrt(5*x**2 + 4)
    n = math.log((x * sqrt5 + math.sqrt(rad + 4)) / 2, phi) \
        if p.is_integer() \
        else math.log((x * sqrt5 + math.sqrt(rad - 4)) / 2, phi)
    return round(n)
def gcd(a, b):
    """
    Determines the greatest common divisor for a and b
    with the Euclidean Algorithm.
    :param a: First number.
    :param b: Second number.
    :return: Greatest common divisor for a and b.
    """
    a = abs(a)
    b = abs(b)
    if a == b:
        return a
    if b > a:
        a, b = b, a
    q = a // b
    r = a - b * q
    while r != 0:
        a = b
        b = r
        q = a // b
        r = a - b * q
    return b
def lcm(a, b):
    """
    Calculate the least common multiple (LCM) with the GCD
    algorithm using: LCM(a,b) = (a*b)/GCD(a,b).
    :param a:
    :param b:
    :return:
    """
    return a * b // gcd(a, b)
def lcm3(a, b, c):
    """
    Calculating the LCM for multiple digits is done with
    LCM(a,b,c) = LCM(LCM(a,b),c)
    :param a:
    :param b:
    :param c:
    :return:
    """
    return lcm(lcm(a, b), c)
def primitive_pythagorean_triplet_generator(n=math.inf):
    """
    Generates n primitive pythagorean triplets.
    :param n:
    :return:
    """
    v = 2
    u = 1
    while n > 0:
        if not(is_odd(v) and is_odd(u)) and gcd(u, v) == 1:
            a = v*v - u*u
            b = 2*v*u
            c = u*u + v*v
            if a > b:
                a, b = b, a
            n -= 1
            yield (a, b, c)
        u += 1
        if u >= v:
            v += 1
            u = 1
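# Euclid's parametrisation: for integers v > u >= 1 with gcd(u, v) == 1 and
# u, v not both odd, (v*v - u*u, 2*u*v, u*u + v*v) is a primitive Pythagorean
# triplet; the first value yielded is (3, 4, 5).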
def prime_counting_function(n):
    """
    Return the number of primes below a given number.
    This is calculated with the proportionality which
    states that π(n) ~ n / log(n).
    :param n: Upper bound.
    :return: Estimate of the number of primes below the
        bound.
    """
    return n / math.log(n)
def lambertw(x):
    """
    Lambert W function with Newton's Method.
    :param x:
    :return:
    """
    eps = 1e-8
    w = x
    while True:
        ew = math.exp(w)
        w_new = w - (w * ew - x) / (w * ew + ew)
        if abs(w - w_new) <= eps:
            break
        w = w_new
    return w
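# Newton's method on f(w) = w * exp(w) - x: the update above is
# w_new = w - (w * e^w - x) / (e^w * (w + 1)), since the denominator
# w * e^w + e^w factors as e^w * (w + 1).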
def prime_counting_function_inv(y):
    """
    Returns the upper bound for a given number of primes.
    :param y: How many primes you want.
    :return: Upper bound.
    """
    x = 2
    while x / math.log(x) < y:
        x += 1
    return x
def p | numbers):
"""
Returns the product of a list of numbers.
:param numbers:
:return:
"""
p = 1
for x in numbers:
p *= x
return p
def factorial(n):
"""
Returns the factorial n! of a number.
:param n:
:return:
"""
return product(range(1, n + 1))
def is_even(n):
"""
Returns true if a number is even.
:param n:
:return:
"""
return not n & 1
def is_odd(n):
"""
Returns true if a number is odd.
:param n:
:return:
"""
return n & 1
def permutations(a):
"""
Generates all the permutations for a set.
:param a:
:return:
"""
n = len(a)
return _heap_perm_(n, a)
def _heap_perm_(n, a):
"""
Heap's permutation algorithm.
https://stackoverflow.com/a/29044942
:param n:
:param a:
:return:
"""
if | roduct( | identifier_name |
eulerlib.py | :param b:
    :return:
    """
    return a * b // gcd(a, b)
def lcm3(a, b, c):
    """
    Calculating the LCM for multiple digits is done with
    LCM(a,b,c) = LCM(LCM(a,b),c)
    :param a:
    :param b:
    :param c:
    :return:
    """
    return lcm(lcm(a, b), c)
def primitive_pythagorean_triplet_generator(n=math.inf):
    """
    Generates n primitive pythagorean triplets.
    :param n:
    :return:
    """
    v = 2
    u = 1
    while n > 0:
        if not(is_odd(v) and is_odd(u)) and gcd(u, v) == 1:
            a = v*v - u*u
            b = 2*v*u
            c = u*u + v*v
            if a > b:
                a, b = b, a
            n -= 1
            yield (a, b, c)
        u += 1
        if u >= v:
            v += 1
            u = 1
def prime_counting_function(n):
    """
    Return the number of primes below a given number.
    This is calculated with the proportionality which
    states that π(n) ~ n / log(n).
    :param n: Upper bound.
    :return: Estimate of the number of primes below the
        bound.
    """
    return n / math.log(n)
def lambertw(x):
    """
    Lambert W function with Newton's Method.
    :param x:
    :return:
    """
    eps = 1e-8
    w = x
    while True:
        ew = math.exp(w)
        w_new = w - (w * ew - x) / (w * ew + ew)
        if abs(w - w_new) <= eps:
            break
        w = w_new
    return w
def prime_counting_function_inv(y):
    """
    Returns the upper bound for a given number of primes.
    :param y: How many primes you want.
    :return: Upper bound.
    """
    x = 2
    while x / math.log(x) < y:
        x += 1
    return x
def product(numbers):
    """
    Returns the product of a list of numbers.
    :param numbers:
    :return:
    """
    p = 1
    for x in numbers:
        p *= x
    return p
def factorial(n):
    """
    Returns the factorial n! of a number.
    :param n:
    :return:
    """
    return product(range(1, n + 1))
def is_even(n):
    """
    Returns true if a number is even.
    :param n:
    :return:
    """
    return not n & 1
def is_odd(n):
    """
    Returns true if a number is odd.
    :param n:
    :return:
    """
    return n & 1
def permutations(a):
    """
    Generates all the permutations for a set.
    :param a:
    :return:
    """
    n = len(a)
    return _heap_perm_(n, a)
def _heap_perm_(n, a):
    """
    Heap's permutation algorithm.
    https://stackoverflow.com/a/29044942
    :param n:
    :param a:
    :return:
    """
    if n == 1:
        yield a
    else:
        for i in range(n-1):
            for hp in _heap_perm_(n-1, a):
                yield list(hp)
            j = 0 if (n % 2) == 1 else i
            a[j], a[n - 1] = a[n - 1], a[j]
        for hp in _heap_perm_(n-1, a):
            yield list(hp)
def shift(a, n=1):
    """
    Shift all the elements in the list by n.
    :param a:
    :param n:
    :return:
    """
    return a[n:] + a[:n]
def is_palindrome(x):
    """
    Returns true if a number or a string is a palindrome.
    :param x:
    :return:
    """
    strx = str(x)
    return strx == strx[::-1]
    # chars = [c for c in x] if not is_number(x) else digits(x)
    # for i in range(len(chars) // 2):
    #     if chars[i] != chars[len(chars) - i - 1]:
    #         return False
    # return True
def is_pandigital_to_n(x, n, zero_based=False):
    """
    Returns true if a list of numbers is pandigital from 1 up to n.
    :param x:
    :param n:
    :param zero_based:
    :return:
    """
    return set(x) == set(range(0 if zero_based else 1, n + 1))
def to_binary_string(x):
    """
    Useful to convert a number into a binary number.
    :param x:
    :return:
    """
    return "{0:b}".format(x)
def _palindrome_number_generator():
    """
    https://stackoverflow.com/a/16344628
    :return:
    """
    yield 0
    lower = 1
    while True:
        higher = lower*10
        for i in range(lower, higher):
            s = str(i)
            yield int(s+s[-2::-1])
        for i in range(lower, higher):
            s = str(i)
            yield int(s+s[::-1])
        lower = higher
def palindrome_generator(lower, upper):
"""
Generates all palindromes between [lower, upper].
https://stackoverflow.com/a/16344628
:param lower:
:param upper:
:return:
"""
all_palindrome_numbers = _palindrome_number_generator()
for p in all_palindrome_numbers:
if p >= lower:
break
palindrome_list = [p]
for p in all_palindrome_numbers:
# Because we use the same generator object,
# p continues where the previous loop halted.
if p >= upper:
break
palindrome_list.append(p)
return palindrome_list
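# Illustrative sketch (added, not from the original source):
# >>> palindrome_generator(10, 100)
# [11, 22, 33, 44, 55, 66, 77, 88, 99]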
def string_split_2d(data, field_delimiter=',', line_delimiter='\n'):
"""
Split a string of 2D data into lists. Example of the data
1,2
3,4
5,6
to:
[['1','2'],['3','4'],['5','6']] (fields are kept as strings)
:param data:
:param field_delimiter: delimiter used between separate fields, default: ,
:param line_delimiter: delimiter used between lines, default: \n
:return: 2D list
"""
return [line.split(field_delimiter) for line in data.split(line_delimiter)]
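# Illustrative sketch (added, not from the original source); note the fields
# come back as strings, not ints:
# >>> string_split_2d("1,2\n3,4")
# [['1', '2'], ['3', '4']]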
def simplify_fraction(a, b):
"""
Simplifies a fraction to the lowest common form.
:param a:
:param b:
:return:
"""
c = gcd(a, b)
return a // c, b // c
def modpow(a, n, p):
"""
Binary exponentiation (square-and-multiply) to calculate a^n mod p,
which can handle very large exponents. Runs in O(log n) time.
:param a: base
:param n: exponent
:param p: mod
:return: (a^n) mod p
"""
res = 1
a = a % p
while n > 0:
# if n is odd
if n & 1:
res = (res * a) % p
n = n >> 1 # n = n / 2
a = (a*a) % p
return res
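# Illustrative sketch (added, not from the original source): square-and-
# multiply keeps every intermediate value below p, e.g.
# >>> modpow(2, 10, 1000)   # 2**10 = 1024, and 1024 % 1000 = 24
# 24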
def is_prime(n, k):
"""
Probabilistic Fermat primality test: run k rounds on n.
:param n: The number to be tested.
:param k: The number of rounds.
:return:
"""
if n <= 1 or n == 4:
return False
if n <= 3:
return True
if is_even(n):
return False
while k > 0:
# Take a random int in [2, n-1] (randint is inclusive)
a = random.randint(2, n-1)
# Check if a and n are co-prime.
if gcd(n, a) != 1:
return False
# Fermat's little theorem
if modpow(a, n-1, n) != 1:
return False
k -= 1
return True
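# Illustrative sketch (added, not from the original source): this is a
# probabilistic test, so composites can occasionally pass a round; more
# rounds lower the error probability.
# >>> is_prime(97, 10)
# True
# >>> is_prime(100, 10)
# False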
def _first_index_with_bigger_neighbour(P):
"""
Find the first index from the right whose element is larger
than its neighbour.
:param P:
:return:
"""
i = len(P) - 1
while i > 0 and P[i-1] >= P[i]:
i -= 1
return i
def _first_index_with_smaller_neighbour(P):
"""
Find the first index from the right whose element is smaller
than its neighbour.
:param P:
:return:
"""
i = len(P) - 1
while i > 0 and P[i-1] <= P[i]:
i | -= 1
| conditional_block |
|
eulerlib.py | ):
yield list(hp)
def shift(a, n=1):
"""
Shift all the elements in the list by n.
:param a:
:param n:
:return:
"""
return a[n:] + a[:n]
def is_palindrome(x):
"""
Returns true if a number or a string is a palindrome.
:param x:
:return:
"""
strx = str(x)
return strx == strx[::-1]
# chars = [c for c in x] if not is_number(x) else digits(x)
# for i in range(len(chars) // 2):
# if chars[i] != chars[len(chars) - i - 1]:
# return False
# return True
def is_pandigital_to_n(x, n, zero_based=False):
"""
Returns true if a list of numbers is pandigital from 1 up to n.
:param x:
:param n:
:param zero_based:
:return:
"""
return set(x) == set(range(0 if zero_based else 1, n + 1))
def to_binary_string(x):
"""
Useful to convert a number into a binary number.
:param x:
:return:
"""
return "{0:b}".format(x)
def _palindrome_number_generator():
"""
https://stackoverflow.com/a/16344628
:return:
"""
yield 0
lower = 1
while True:
higher = lower*10
for i in range(lower, higher):
s = str(i)
yield int(s+s[-2::-1])
for i in range(lower, higher):
s = str(i)
yield int(s+s[::-1])
lower = higher
def palindrome_generator(lower, upper):
"""
Generates all palindromes between [lower, upper].
https://stackoverflow.com/a/16344628
:param lower:
:param upper:
:return:
"""
all_palindrome_numbers = _palindrome_number_generator()
for p in all_palindrome_numbers:
if p >= lower:
break
palindrome_list = [p]
for p in all_palindrome_numbers:
# Because we use the same generator object,
# p continues where the previous loop halted.
if p >= upper:
break
palindrome_list.append(p)
return palindrome_list
def string_split_2d(data, field_delimiter=',', line_delimiter='\n'):
"""
Split a string of 2D data into lists. Example of the data
1,2
3,4
5,6
to:
[['1','2'],['3','4'],['5','6']] (fields are kept as strings)
:param data:
:param field_delimiter: delimiter used between separate fields, default: ,
:param line_delimiter: delimiter used between lines, default: \n
:return: 2D list
"""
return [line.split(field_delimiter) for line in data.split(line_delimiter)]
def simplify_fraction(a, b):
"""
Simplifies a fraction to the lowest common form.
:param a:
:param b:
:return:
"""
c = gcd(a, b)
return a // c, b // c
def modpow(a, n, p):
"""
Binary exponentiation (square-and-multiply) to calculate a^n mod p,
which can handle very large exponents. Runs in O(log n) time.
:param a: base
:param n: exponent
:param p: mod
:return: (a^n) mod p
"""
res = 1
a = a % p
while n > 0:
# if n is odd
if n & 1:
res = (res * a) % p
n = n >> 1 # n = n / 2
a = (a*a) % p
return res
def is_prime(n, k):
"""
Probabilistic Fermat primality test: run k rounds on n.
:param n: The number to be tested.
:param k: The number of rounds.
:return:
"""
if n <= 1 or n == 4:
return False
if n <= 3:
return True
if is_even(n):
return False
while k > 0:
# Take a random int in [2, n-1] (randint is inclusive)
a = random.randint(2, n-1)
# Check if a and n are co-prime.
if gcd(n, a) != 1:
return False
# Fermat's little theorem
if modpow(a, n-1, n) != 1:
return False
k -= 1
return True
def _first_index_with_bigger_neighbour(P):
"""
Find the first index from the right whose element is larger
than its neighbour.
:param P:
:return:
"""
i = len(P) - 1
while i > 0 and P[i-1] >= P[i]:
i -= 1
return i
def _first_index_with_smaller_neighbour(P):
"""
Find the first index from the right whose element is smaller
than its neighbour.
:param P:
:return:
"""
i = len(P) - 1
while i > 0 and P[i-1] <= P[i]:
i -= 1
return i
def next_permutation(P):
"""
For any given permutation P, give the next permutation.
If there is no next permutation, P will be returned.
:param P:
:return:
"""
n = len(P)
# Find the first index with the bigger neighbour.
i = _first_index_with_bigger_neighbour(P)
# If this is the first, where i=0, then there is no next permutation.
if i == 0:
return P
# From the right, find a value in P that is smaller than
# the previous found value.
j = n - 1
while P[j] <= P[i-1]:
j -= 1
# Swap the values
P[i-1], P[j] = P[j], P[i-1]
# Restore the tail of the permutation.
j = n - 1
while i < j:
P[i], P[j] = P[j], P[i]
i += 1
j -= 1
return P
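# Illustrative sketch (added, not from the original source); the list is
# permuted in place and also returned:
# >>> next_permutation([1, 2, 3])
# [1, 3, 2]
# >>> next_permutation([3, 2, 1])   # already the last permutation
# [3, 2, 1]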
def previous_permutation(P):
"""
For any given permutation P, give the previous permutation.
If there is no previous permutation, P will be returned.
:param P:
:return:
"""
n = len(P)
# Find the first index with the smaller neighbour.
i = _first_index_with_smaller_neighbour(P)
# If this is the first, where i=0, then there is no next permutation.
if i == 0:
return P
# From the right, find a value in P that is bigger than
# the previous found value.
j = n - 1
while P[j] >= P[i-1]:
j -= 1
# Swap the values
P[i-1], P[j] = P[j], P[i-1]
# Restore the tail of the permutation.
j = n - 1
while i < j:
P[i], P[j] = P[j], P[i]
i += 1
j -= 1
return P
def prime_factorization(x, sieve=None):
"""
Factorizes a number into the prime factorization.
Requires a sieve to be quick, if sieve is not specified
it will generate one itself.
:param x:
:param sieve:
:return:
"""
if x == 0:
return []
if x in [1, 2]:
return [x]
if sieve is None:
sieve = prime_sieve(x + 1)
factors = []
if sieve[x]:
return [x]
for i in range(2, int(math.sqrt(x) + 1)):
if sieve[x]:
break
if not sieve[i]:
continue
if x % i == 0:
factors.append(i)
x //= i
return factors + prime_factorization(x, sieve)
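# Illustrative sketch (added, not from the original source); assumes a
# prime_sieve(n) helper returning a boolean primality table, as used above.
# The factors form the right multiset but are not guaranteed to be sorted:
# >>> sorted(prime_factorization(84))
# [2, 2, 3, 7]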
def is_permutation(A, B):
"""
Returns true if A and B are permutations of each other.
:param A:
:param B:
:return:
"""
return sorted(A) == sorted(B)
def is_permutation3(A, B, C):
"""
Returns true if A, B and C are permutations of each other.
:param A:
:param B:
:param C:
:return:
"""
return sorted(A) == sorted(B) == sorted(C)
def equal_sets(S):
"""
Returns true if all the sets s in S are equal
to each other.
:param S:
:return:
"""
s0 = S[0]
return all(s0 == s for s in S[1:])
| def union_sets(S):
""" | random_line_split |
|
eulerlib.py | 1)
tn = i = 1
while tn < n:
triangle_numbers[triangle_number(i)] = True
i += 1
tn = triangle_number(i)
return triangle_numbers
def hexagonal_number(n):
|
def is_hexagonal_number(n):
"""
Determines if n is a hexagonal number.
:param n: Number to test.
:return: True if n is a hexagonal number.
"""
_, x = quadratic.solve(2, -1, -n)
return is_number(x) and x.is_integer()
def pentagonal_number(n):
return n * (3 * n - 1) / 2
def is_pentagonal_number(n):
"""
Determines if n is a pentagonal number.
:param n:
:return: True if pentagonal.
"""
_, x = quadratic.solve(3, -1, -2 * n)
return is_number(x) and x.is_integer()
def proper_divisors(x):
"""
Returns all the proper divisors for a number x, excluding x.
e.g divisors(1001) = [1, 7, 11, 13, 77, 91, 143]
:param x: number >= 1.
:return: the divisors excluding itself.
"""
return divisors(x)[:-1]
def restricted_divisors(x):
"""
Returns all the restricted divisors for a number x, excluding 1 and x.
e.g divisors(1001) = [7, 11, 13, 77, 91, 143]
:param x: number >= 1.
:return: the divisors excluding 1 and itself.
"""
return divisors(x)[1:-1]
def is_perfect_number(x):
"""
Test if a number is a perfect number. A number is perfect
if the sum of the proper divisors is equal to itself.
:param x: number to test.
:return: True if it is a perfect number.
"""
return sum(proper_divisors(x)) == x
def is_abundant_number(x):
"""
Test if a number is an abundant number. A number is abundant
if the sum of the proper divisors is greater than the number
itself.
:param x: number to test.
:return: True if it is an abundant number.
"""
return sum(proper_divisors(x)) > x
def is_deficient_number(x):
"""
Test if a number is a deficient number. A number is deficient
if the sum of the proper divisors is less than the number
itself.
:param x: number to test.
:return: True if it is a deficient number.
"""
return sum(proper_divisors(x)) < x
def digits(x):
"""
Returns the digits of a number in a list.
:param x: The number to sum the digits of.
:return: Sum of the number x.
"""
return [int(d) for d in str(x)]
def digits_to_int(x):
"""
Concatenate a list of digits to an integer.
:param x:
:return:
"""
if x is None:
return ""
return int(''.join([str(i) for i in x]))
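# Illustrative sketch (added, not from the original source): digits() and
# digits_to_int() round-trip through the decimal representation:
# >>> digits(1234)
# [1, 2, 3, 4]
# >>> digits_to_int([1, 2, 3, 4])
# 1234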
def is_fibonacci_number(x):
"""
Test if x is a Fibonacci number.
:param x: Number to test.
:return: True if it is a Fibonacci number.
"""
a = math.sqrt(5 * x ** 2 + 4)
b = math.sqrt(5 * x ** 2 - 4)
return a.is_integer() or b.is_integer()
def fibonacci_n(n):
"""
Calculate the nth Fibonacci number (Fn).
:param n: which number to calculate.
:return: The nth Fibonacci number.
"""
sqrt5 = math.sqrt(5)
phi = (1 + sqrt5) / 2
psi = (1 - sqrt5) / 2
return (phi**n - psi**n) // sqrt5
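# Illustrative sketch (added, not from the original source): Binet's formula
# with floor division, so the result is a float:
# >>> fibonacci_n(10)
# 55.0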
def fibonacci_n_inv(x):
"""
Calculate the n for Fn for a Fibonacci number.
:param x: Fibonacci number.
:return: The position of the Fibonacci number (Fn)
"""
if x < 2:
raise ValueError('Function approximation is wrong when x < 2.')
sqrt5 = math.sqrt(5)
phi = (1 + sqrt5) / 2
rad = 5 * x**2
p = math.sqrt(5*x**2 + 4)
n = math.log((x * sqrt5 + math.sqrt(rad + 4)) / 2, phi) \
if p.is_integer() \
else math.log((x * sqrt5 + math.sqrt(rad - 4)) / 2, phi)
return round(n)
def gcd(a, b):
"""
Determines the greatest common divisor for a and b
with the Euclidean Algorithm.
:param a: First number.
:param b: Second number.
:return: Greatest common divisor for a and b.
"""
a = abs(a)
b = abs(b)
if a == b:
return a
if b > a:
a, b = b, a
q = a // b
r = a - b * q
while r != 0:
a = b
b = r
q = a // b
r = a - b * q
return b
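# Illustrative sketch (added, not from the original source): the Euclidean
# remainder chain for (48, 36) is 48 = 1*36 + 12, then 36 = 3*12 + 0:
# >>> gcd(48, 36)
# 12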
def lcm(a, b):
"""
Calculate the least common multiple (LCM) with the GCD
algorithm using: LCM(a,b) = (a*b)/GCD(a,b).
:param a:
:param b:
:return:
"""
return a * b // gcd(a, b)
def lcm3(a, b, c):
"""
Calculating the LCM for multiple digits is done with
LCM(a,b,c) = LCM(LCM(a,b),c)
:param a:
:param b:
:param c:
:return:
"""
return lcm(lcm(a, b), c)
def primitive_pythagorean_triplet_generator(n=math.inf):
"""
Generates n primitive pythagorean triplets.
:param n:
:return:
"""
v = 2
u = 1
while n > 0:
if not(is_odd(v) and is_odd(u)) and gcd(u, v) == 1:
a = v*v - u*u
b = 2*v*u
c = u*u + v*v
if a > b:
a, b = b, a
n -= 1
yield (a, b, c)
u += 1
if u >= v:
v += 1
u = 1
def prime_counting_function(n):
"""
Return the number of primes below a given number.
This is estimated with the prime number theorem, which
states that π(n) ~ n / log(n).
:param n: Upper bound.
:return: Estimate of the number of primes below the
bound.
"""
return n / math.log(n)
def lambertw(x):
"""
Lambert W function with Newton's Method.
:param x: Argument of W.
:return: Approximation of W(x) computed by Newton iteration.
"""
eps = 1e-8
w = x
while True:
ew = math.exp(w)
w_new = w - (w * ew - x) / (w * ew + ew)
if abs(w - w_new) <= eps:
break
w = w_new
return w
def prime_counting_function_inv(y):
"""
Returns the upper bound for a given number of primes.
:param y: How many primes you want.
:return: Upper bound.
"""
x = 2
while x / math.log(x) < y:
x += 1
return x
def product(numbers):
"""
Returns the product of a list of numbers.
:param numbers:
:return:
"""
p = 1
for x in numbers:
p *= x
return p
def factorial(n):
"""
Returns the factorial n! of a number.
:param n:
:return:
"""
return product(range(1, n + 1))
def is_even(n):
"""
Returns true if a number is even.
:param n:
:return:
"""
return not n & 1
def is_odd(n):
"""
Returns true if a number is odd.
:param n:
:return:
"""
return n & 1
def permutations(a):
"""
Generates all the permutations for a set.
:param a:
:return:
"""
n = len(a)
return _heap_perm_(n, a)
def _heap_perm_(n, a):
"""
Heap's permutation algorithm.
https://stackoverflow.com/a/29044942
:param n:
:param a:
:return:
"""
if n | """
Calculate the nth hexagonal number.
:param n: Hn
:return: Hexagonal number
"""
return n * (2 * n - 1) | identifier_body |
constant_folding.py | (other_et))
value = lst.value + list(other.value)
elements = lst.elements + other_elts
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.MAP_ADD):
elements = stack.fold_args(3, op)
if elements:
map_, key, val = elements.elements
tag, (kt, vt) = map_.typ
assert tag == 'map'
typ = (tag, (kt | {key.typ}, vt | {val.typ}))
value = {**map_.value, **{key.value: val.value}}
elements = {**map_.elements, **{key.value: val}}
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.DICT_UPDATE):
elements = stack.fold_args(2, op)
if elements:
map1, map2 = elements.elements
tag1, (kt1, vt1) = map1.typ
tag2, (kt2, vt2) = map2.typ
assert tag1 == tag2 == 'map'
typ = (tag1, (kt1 | kt2, vt1 | vt2))
value = {**map1.value, **map2.value}
elements = {**map1.elements, **map2.elements}
stack.push(_Constant(typ, value, elements, op))
else:
# If we hit any other bytecode, we are no longer building a literal
# constant. Insert a None as a sentinel to the next BUILD op to
# not fold itself.
stack.push(None)
# Clear the stack to save any folded constants before exiting the block
stack.clear()
# Now rewrite the block to replace folded opcodes with a single
# LOAD_FOLDED_CONSTANT opcode.
out = []
for op in block:
if id(op) in stack.consts:
t = stack.consts[id(op)]
arg = t
pretty_arg = t
o = opcodes.LOAD_FOLDED_CONST(op.index, op.line, arg, pretty_arg)
o.next = op.next
o.target = op.target
o.block_target = op.block_target
o.code = op.code
op.folded = o
folds.add(op)
out.append(o)
elif op.folded:
folds.add(op)
else:
out.append(op)
block.code = out
# Adjust 'next' and 'target' pointers to account for folding.
for op in code.code_iter:
if op.next:
op.next = folds.resolve(op.next)
if op.target:
op.target = folds.resolve(op.target)
return code
def to_literal(typ, always_tuple=False):
"""Convert a typestruct item to a simplified form for ease of use."""
def expand(params):
return (to_literal(x) for x in params)
def union(params):
ret = tuple(sorted(expand(params), key=str))
if len(ret) == 1 and not always_tuple:
ret, = ret # pylint: disable=self-assigning-variable
return ret
tag, params = typ
if tag == 'prim':
return params
elif tag == 'tuple':
vals = tuple(expand(params))
return (tag, *vals)
elif tag == 'map':
k, v = params
return (tag, union(k), union(v))
else:
return (tag, union(params))
def from_literal(tup):
"""Convert from simple literal form to the more uniform typestruct."""
def expand(vals):
return [from_literal(x) for x in vals]
def union(vals):
if not isinstance(vals, tuple):
vals = (vals,)
v = expand(vals)
return frozenset(v)
if not isinstance(tup, tuple):
return ('prim', tup)
elif isinstance(tup[0], str):
tag, *vals = tup
if tag == 'prim':
return tup
elif tag == 'tuple':
params = tuple(expand(vals))
return (tag, params)
elif tag == 'map':
k, v = vals
return (tag, (union(k), union(v)))
else:
vals, = vals # pylint: disable=self-assigning-variable
return (tag, union(vals))
else:
return tuple(expand(tup))
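# Illustrative sketch (added, not from the original source): the two helpers
# above are inverses; a map typestruct round-trips through the literal form.
# >>> t = ('map', (frozenset({('prim', str)}), frozenset({('prim', int)})))
# >>> to_literal(t)
# ('map', <class 'str'>, <class 'int'>)
# >>> from_literal(to_literal(t)) == t
# True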
def optimize(code):
"""Fold all constant literals in the bytecode into LOAD_FOLDED_CONST ops."""
return pyc.visit(code, _FoldConstants())
def build_folded_type(ctx, state, const):
"""Convert a typestruct to a vm type."""
def typeconst(t):
"""Create a constant purely to hold types for a recursive call."""
return _Constant(t, None, None, const.op)
def build_pyval(state, const):
if const.value is not None and const.tag in ('prim', 'tuple'):
return state, ctx.convert.constant_to_var(const.value)
else:
return build_folded_type(ctx, state, const)
def expand(state, elements):
vs = []
for e in elements:
state, v = build_pyval(state, e)
vs.append(v)
return state, vs
def join_types(state, ts):
xs = [typeconst(t) for t in ts]
state, vs = expand(state, xs)
val = ctx.convert.build_content(vs)
return state, val
def collect(state, convert_type, params):
state, t = join_types(state, params)
ret = ctx.convert.build_collection_of_type(state.node, convert_type, t)
return state, ret
def collect_tuple(state, elements):
state, vs = expand(state, elements)
return state, ctx.convert.build_tuple(state.node, vs)
def collect_list(state, params, elements):
if elements is None:
return collect(state, ctx.convert.list_type, params)
elif len(elements) < MAX_VAR_SIZE:
state, vs = expand(state, elements)
return state, ctx.convert.build_list(state.node, vs)
else:
# Without constant folding we construct a variable wrapping every element
# in the list and store it; however, we cannot retrieve them all. So as an
# optimisation, we will add the first few elements as pyvals, then add one
# element for every contained type, and rely on the fact that the tail
# elements will contribute to the overall list type, but will not be
# retrievable as pyvals.
# TODO(b/175443170): We should use a smaller MAX_SUBSCRIPT cutoff; this
# behaviour is unrelated to MAX_VAR_SIZE (which limits the number of
# distinct bindings for the overall typevar).
n = MAX_VAR_SIZE - len(params) - 1
elts = elements[:n] + tuple(typeconst(t) for t in params)
state, vs = expand(state, elts)
return state, ctx.convert.build_list(state.node, vs)
def collect_map(state, params, elements):
m_var = ctx.convert.build_map(state.node)
m = m_var.data[0]
# Do not forward the state while creating dict literals.
node = state.node
# We want a single string type to store in the Dict.K type param.
# Calling set_str_item on every k/v pair will lead to a type param with a
# lot of literal strings as bindings, causing potentially severe performance
# issues down the line.
str_key = ctx.convert.str_type.instantiate(node)
if elements is not None and len(elements) < MAX_VAR_SIZE:
for (k, v) in elements.items():
_, v = build_pyval(state, v)
k_var = ctx.convert.constant_to_var(k)
m.setitem(node, k_var, v)
if isinstance(k, str):
m.merge_instance_type_params(node, str_key, v)
else:
m.merge_instance_type_params(node, k_var, v)
else:
# Treat a too-large dictionary as {Union[keys] : Union[vals]}. We could
# store a subset of the k/v pairs, as with collect_list, but for
# dictionaries it is less obvious which subset we should be storing.
# Perhaps we could create one variable per unique value type, and then
# store every key in the pyval but reuse the value variables.
k_types, v_types = params
_, v = join_types(state, v_types)
for t in k_types:
_, k = build_folded_type(ctx, state, typeconst(t))
m.setitem(node, k, v)
m.merge_instance_type_params(node, k, v)
return state, m_var
tag, params = const.typ
if tag == 'prim':
if const.value:
return state, ctx.convert.constant_to_var(const.value)
else:
val = ctx.convert.primitive_class_instances[params]
return state, val.to_variable(state.node)
elif tag == 'list':
return collect_list(state, params, const.elements)
elif tag == 'set':
return collect(state, ctx.convert.set_type, params)
elif tag == 'tuple':
# If we get a tuple without const.elements, construct it from the type.
# (e.g. this happens with a large dict with tuple keys)
if not const.elements: | elts = tuple(typeconst(t) for t in params)
else: | random_line_split |
|
constant_folding.py | elements: Tuple[Any, ...]
@attrs.define
class _Map:
"""A dictionary."""
key_types: FrozenSet[Any]
keys: Tuple[Any, ...]
value_types: FrozenSet[Any]
values: Tuple[Any, ...]
elements: Dict[Any, Any]
class _CollectionBuilder:
"""Build up a collection of constants."""
def __init__(self):
self.types = set()
self.values = []
self.elements = []
def add(self, constant):
self.types.add(constant.typ)
self.elements.append(constant)
self.values.append(constant.value)
def build(self):
return _Collection(
types=frozenset(self.types),
values=tuple(reversed(self.values)),
elements=tuple(reversed(self.elements)))
class _MapBuilder:
"""Build up a map of constants."""
def __init__(self):
self.key_types = set()
self.value_types = set()
self.keys = []
self.values = []
self.elements = {}
def add(self, key, value):
self.key_types.add(key.typ)
self.value_types.add(value.typ)
self.keys.append(key.value)
self.values.append(value.value)
self.elements[key.value] = value
def build(self):
return _Map(
key_types=frozenset(self.key_types),
keys=tuple(reversed(self.keys)),
value_types=frozenset(self.value_types),
values=tuple(reversed(self.values)),
elements=self.elements)
class _Stack:
"""A simple opcode stack."""
def __init__(self):
self.stack = []
self.consts = {}
def __iter__(self):
return self.stack.__iter__()
def push(self, val):
self.stack.append(val)
def pop(self):
|
def _preserve_constant(self, c):
if c and (
not isinstance(c.op, opcodes.LOAD_CONST) or
isinstance(c.op, opcodes.BUILD_STRING)):
self.consts[id(c.op)] = c
def clear(self):
# Preserve any constants in the stack before clearing it.
for c in self.stack:
self._preserve_constant(c)
self.stack = []
def _pop_args(self, n):
"""Try to get n args off the stack for a BUILD call."""
if len(self.stack) < n:
# We have started a new block in the middle of constructing a literal
# (e.g. due to an inline function call). Clear the stack, since the
# literal is not constant.
self.clear()
return None
elif n and any(x is None for x in self.stack[-n:]):
# We have something other than constants in the arg list. Pop all the args
# for this op off the stack, preserving constants.
for _ in range(n):
self._preserve_constant(self.pop())
return None
else:
return [self.pop() for _ in range(n)]
def fold_args(self, n, op):
"""Collect the arguments to a build call."""
ret = _CollectionBuilder()
args = self._pop_args(n)
if args is None:
self.push(None)
return None
for elt in args:
ret.add(elt)
elt.op.folded = op
return ret.build()
def fold_map_args(self, n, op):
"""Collect the arguments to a BUILD_MAP call."""
ret = _MapBuilder()
args = self._pop_args(2 * n)
if args is None:
self.push(None)
return None
for i in range(0, 2 * n, 2):
v_elt, k_elt = args[i], args[i + 1]
ret.add(k_elt, v_elt)
k_elt.op.folded = op
v_elt.op.folded = op
return ret.build()
def build_str(self, n, op):
ret = self.fold_args(n, op)
if ret:
self.push(_Constant(('prim', str), '', None, op))
else:
self.push(None)
return ret
def build(self, python_type, op):
"""Build a folded type."""
collection = self.fold_args(op.arg, op)
if collection:
typename = python_type.__name__
typ = (typename, collection.types)
try:
value = python_type(collection.values)
except TypeError as e:
raise ConstantError(f'TypeError: {e.args[0]}', op) from e
elements = collection.elements
self.push(_Constant(typ, value, elements, op))
class _FoldedOps:
"""Mapping from a folded opcode to the top level constant that replaces it."""
def __init__(self):
self.folds = {}
def add(self, op):
self.folds[id(op)] = op.folded
def resolve(self, op):
f = op
while id(f) in self.folds:
f = self.folds[id(f)]
return f
class _FoldConstants:
"""Fold constant literals in pyc code."""
def visit_code(self, code):
"""Visit code, folding literals."""
def build_tuple(tup):
out = []
for e in tup:
if isinstance(e, tuple):
out.append(build_tuple(e))
else:
out.append(('prim', type(e)))
return ('tuple', tuple(out))
folds = _FoldedOps()
for block in code.order:
stack = _Stack()
for op in block:
if isinstance(op, opcodes.LOAD_CONST):
elt = code.consts[op.arg]
if isinstance(elt, tuple):
typ = build_tuple(elt)
stack.push(_Constant(typ, elt, typ[1], op))
else:
stack.push(_Constant(('prim', type(elt)), elt, None, op))
elif isinstance(op, opcodes.BUILD_LIST):
stack.build(list, op)
elif isinstance(op, opcodes.BUILD_SET):
stack.build(set, op)
elif isinstance(op, opcodes.FORMAT_VALUE):
if op.arg & loadmarshal.FVS_MASK:
stack.build_str(2, op)
else:
stack.build_str(1, op)
elif isinstance(op, opcodes.BUILD_STRING):
stack.build_str(op.arg, op)
elif isinstance(op, opcodes.BUILD_MAP):
map_ = stack.fold_map_args(op.arg, op)
if map_:
typ = ('map', (map_.key_types, map_.value_types))
val = dict(zip(map_.keys, map_.values))
stack.push(_Constant(typ, val, map_.elements, op))
elif isinstance(op, opcodes.BUILD_CONST_KEY_MAP):
keys = stack.pop()
vals = stack.fold_args(op.arg, op)
if vals:
keys.op.folded = op
_, t = keys.typ
typ = ('map', (frozenset(t), vals.types))
val = dict(zip(keys.value, vals.values))
elements = dict(zip(keys.value, vals.elements))
stack.push(_Constant(typ, val, elements, op))
elif isinstance(op, opcodes.LIST_APPEND):
elements = stack.fold_args(2, op)
if elements:
lst, element = elements.elements
tag, et = lst.typ
assert tag == 'list'
typ = (tag, et | {element.typ})
value = lst.value + [element.value]
elements = lst.elements + (element,)
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.LIST_EXTEND):
elements = stack.fold_args(2, op)
if elements:
lst, other = elements.elements
tag, et = lst.typ
assert tag == 'list'
other_tag, other_et = other.typ
if other_tag == 'tuple':
# Deconstruct the tuple built in opcodes.LOAD_CONST above
other_elts = tuple(_Constant(('prim', e), v, None, other.op)
for (_, e), v in zip(other_et, other.value))
elif other_tag == 'prim':
assert other_et == str
other_et = {other.typ}
other_elts = tuple(_Constant(('prim', str), v, None, other.op)
for v in other.value)
else:
other_elts = other.elements
typ = (tag, et | set(other_et))
value = lst.value + list(other.value)
elements = lst.elements + other_elts
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.MAP_ADD):
elements = stack.fold_args(3, op)
if elements:
map_, key, val = elements.elements
tag, (kt, vt) = map_.typ
assert tag == 'map'
typ = (tag, (kt | {key.typ}, vt | {val.typ}))
value = {**map_.value, **{key.value: val.value}}
elements = {**map_.elements, **{key.value: val}}
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.DICT_UPDATE):
elements = stack.fold_args(2, op)
if elements:
map1, map2 = elements.elements
tag1, (kt1, vt1) = map1.typ
tag2, (kt2, vt2) = map2.typ
assert tag1 == tag2 == 'map | return self.stack.pop() | identifier_body |
constant_folding.py | """Collect the arguments to a build call."""
ret = _CollectionBuilder()
args = self._pop_args(n)
if args is None:
self.push(None)
return None
for elt in args:
ret.add(elt)
elt.op.folded = op
return ret.build()
def fold_map_args(self, n, op):
"""Collect the arguments to a BUILD_MAP call."""
ret = _MapBuilder()
args = self._pop_args(2 * n)
if args is None:
self.push(None)
return None
for i in range(0, 2 * n, 2):
v_elt, k_elt = args[i], args[i + 1]
ret.add(k_elt, v_elt)
k_elt.op.folded = op
v_elt.op.folded = op
return ret.build()
def build_str(self, n, op):
ret = self.fold_args(n, op)
if ret:
self.push(_Constant(('prim', str), '', None, op))
else:
self.push(None)
return ret
def build(self, python_type, op):
"""Build a folded type."""
collection = self.fold_args(op.arg, op)
if collection:
typename = python_type.__name__
typ = (typename, collection.types)
try:
value = python_type(collection.values)
except TypeError as e:
raise ConstantError(f'TypeError: {e.args[0]}', op) from e
elements = collection.elements
self.push(_Constant(typ, value, elements, op))
class _FoldedOps:
"""Mapping from a folded opcode to the top level constant that replaces it."""
def __init__(self):
self.folds = {}
def add(self, op):
self.folds[id(op)] = op.folded
def resolve(self, op):
f = op
while id(f) in self.folds:
f = self.folds[id(f)]
return f
class _FoldConstants:
"""Fold constant literals in pyc code."""
def visit_code(self, code):
"""Visit code, folding literals."""
def build_tuple(tup):
out = []
for e in tup:
if isinstance(e, tuple):
out.append(build_tuple(e))
else:
out.append(('prim', type(e)))
return ('tuple', tuple(out))
folds = _FoldedOps()
for block in code.order:
stack = _Stack()
for op in block:
if isinstance(op, opcodes.LOAD_CONST):
elt = code.consts[op.arg]
if isinstance(elt, tuple):
typ = build_tuple(elt)
stack.push(_Constant(typ, elt, typ[1], op))
else:
stack.push(_Constant(('prim', type(elt)), elt, None, op))
elif isinstance(op, opcodes.BUILD_LIST):
stack.build(list, op)
elif isinstance(op, opcodes.BUILD_SET):
stack.build(set, op)
elif isinstance(op, opcodes.FORMAT_VALUE):
if op.arg & loadmarshal.FVS_MASK:
stack.build_str(2, op)
else:
stack.build_str(1, op)
elif isinstance(op, opcodes.BUILD_STRING):
stack.build_str(op.arg, op)
elif isinstance(op, opcodes.BUILD_MAP):
map_ = stack.fold_map_args(op.arg, op)
if map_:
typ = ('map', (map_.key_types, map_.value_types))
val = dict(zip(map_.keys, map_.values))
stack.push(_Constant(typ, val, map_.elements, op))
elif isinstance(op, opcodes.BUILD_CONST_KEY_MAP):
keys = stack.pop()
vals = stack.fold_args(op.arg, op)
if vals:
keys.op.folded = op
_, t = keys.typ
typ = ('map', (frozenset(t), vals.types))
val = dict(zip(keys.value, vals.values))
elements = dict(zip(keys.value, vals.elements))
stack.push(_Constant(typ, val, elements, op))
elif isinstance(op, opcodes.LIST_APPEND):
elements = stack.fold_args(2, op)
if elements:
lst, element = elements.elements
tag, et = lst.typ
assert tag == 'list'
typ = (tag, et | {element.typ})
value = lst.value + [element.value]
elements = lst.elements + (element,)
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.LIST_EXTEND):
elements = stack.fold_args(2, op)
if elements:
lst, other = elements.elements
tag, et = lst.typ
assert tag == 'list'
other_tag, other_et = other.typ
if other_tag == 'tuple':
# Deconstruct the tuple built in opcodes.LOAD_CONST above
other_elts = tuple(_Constant(('prim', e), v, None, other.op)
for (_, e), v in zip(other_et, other.value))
elif other_tag == 'prim':
assert other_et == str
other_et = {other.typ}
other_elts = tuple(_Constant(('prim', str), v, None, other.op)
for v in other.value)
else:
other_elts = other.elements
typ = (tag, et | set(other_et))
value = lst.value + list(other.value)
elements = lst.elements + other_elts
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.MAP_ADD):
elements = stack.fold_args(3, op)
if elements:
map_, key, val = elements.elements
tag, (kt, vt) = map_.typ
assert tag == 'map'
typ = (tag, (kt | {key.typ}, vt | {val.typ}))
value = {**map_.value, **{key.value: val.value}}
elements = {**map_.elements, **{key.value: val}}
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.DICT_UPDATE):
elements = stack.fold_args(2, op)
if elements:
map1, map2 = elements.elements
tag1, (kt1, vt1) = map1.typ
tag2, (kt2, vt2) = map2.typ
assert tag1 == tag2 == 'map'
typ = (tag1, (kt1 | kt2, vt1 | vt2))
value = {**map1.value, **map2.value}
elements = {**map1.elements, **map2.elements}
stack.push(_Constant(typ, value, elements, op))
else:
# If we hit any other bytecode, we are no longer building a literal
# constant. Insert a None as a sentinel to the next BUILD op to
# not fold itself.
stack.push(None)
# Clear the stack to save any folded constants before exiting the block
stack.clear()
# Now rewrite the block to replace folded opcodes with a single
# LOAD_FOLDED_CONSTANT opcode.
out = []
for op in block:
if id(op) in stack.consts:
t = stack.consts[id(op)]
arg = t
pretty_arg = t
o = opcodes.LOAD_FOLDED_CONST(op.index, op.line, arg, pretty_arg)
o.next = op.next
o.target = op.target
o.block_target = op.block_target
o.code = op.code
op.folded = o
folds.add(op)
out.append(o)
elif op.folded:
folds.add(op)
else:
out.append(op)
block.code = out
# Adjust 'next' and 'target' pointers to account for folding.
for op in code.code_iter:
if op.next:
op.next = folds.resolve(op.next)
if op.target:
op.target = folds.resolve(op.target)
return code
def to_literal(typ, always_tuple=False):
"""Convert a typestruct item to a simplified form for ease of use."""
def expand(params):
return (to_literal(x) for x in params)
def union(params):
ret = tuple(sorted(expand(params), key=str))
if len(ret) == 1 and not always_tuple:
ret, = ret # pylint: disable=self-assigning-variable
return ret
tag, params = typ
if tag == 'prim':
return params
elif tag == 'tuple':
vals = tuple(expand(params))
return (tag, *vals)
elif tag == 'map':
k, v = params
return (tag, union(k), union(v))
else:
return (tag, union(params))
def from_literal(tup):
"""Convert from simple literal form to the more uniform typestruct."""
def expand(vals):
return [from_literal(x) for x in vals]
def union(vals):
if not isinstance(vals, tuple):
vals = (vals,)
v = expand(vals)
return frozenset(v)
if not isinstance(tup, tuple):
return ('prim', tup)
elif isinstance(tup[0], str):
tag, *vals = tup
if tag == 'prim':
return tup
elif tag == 'tuple':
| params = tuple(expand(vals))
return (tag, params) | conditional_block |
|
constant_folding.py | elements: Tuple[Any, ...]
@attrs.define
class _Map:
"""A dictionary."""
key_types: FrozenSet[Any]
keys: Tuple[Any, ...]
value_types: FrozenSet[Any]
values: Tuple[Any, ...]
elements: Dict[Any, Any]
class _CollectionBuilder:
"""Build up a collection of constants."""
def __init__(self):
self.types = set()
self.values = []
self.elements = []
def add(self, constant):
self.types.add(constant.typ)
self.elements.append(constant)
self.values.append(constant.value)
def build(self):
return _Collection(
types=frozenset(self.types),
values=tuple(reversed(self.values)),
elements=tuple(reversed(self.elements)))
class _MapBuilder:
"""Build up a map of constants."""
def __init__(self):
self.key_types = set()
self.value_types = set()
self.keys = []
self.values = []
self.elements = {}
def add(self, key, value):
self.key_types.add(key.typ)
self.value_types.add(value.typ)
self.keys.append(key.value)
self.values.append(value.value)
self.elements[key.value] = value
def build(self):
return _Map(
key_types=frozenset(self.key_types),
keys=tuple(reversed(self.keys)),
value_types=frozenset(self.value_types),
values=tuple(reversed(self.values)),
elements=self.elements)
class _Stack:
"""A simple opcode stack."""
def __init__(self):
self.stack = []
self.consts = {}
def __iter__(self):
return self.stack.__iter__()
def push(self, val):
self.stack.append(val)
def pop(self):
return self.stack.pop()
def _preserve_constant(self, c):
if c and (
not isinstance(c.op, opcodes.LOAD_CONST) or
isinstance(c.op, opcodes.BUILD_STRING)):
self.consts[id(c.op)] = c
def clear(self):
# Preserve any constants in the stack before clearing it.
for c in self.stack:
self._preserve_constant(c)
self.stack = []
def _pop_args(self, n):
"""Try to get n args off the stack for a BUILD call."""
if len(self.stack) < n:
# We have started a new block in the middle of constructing a literal
# (e.g. due to an inline function call). Clear the stack, since the
# literal is not constant.
self.clear()
return None
elif n and any(x is None for x in self.stack[-n:]):
# We have something other than constants in the arg list. Pop all the args
# for this op off the stack, preserving constants.
for _ in range(n):
self._preserve_constant(self.pop())
return None
else:
return [self.pop() for _ in range(n)]
def fold_args(self, n, op):
"""Collect the arguments to a build call."""
ret = _CollectionBuilder()
args = self._pop_args(n)
if args is None:
self.push(None)
return None
for elt in args:
ret.add(elt)
elt.op.folded = op
return ret.build()
def fold_map_args(self, n, op):
"""Collect the arguments to a BUILD_MAP call."""
ret = _MapBuilder()
args = self._pop_args(2 * n)
if args is None:
self.push(None)
return None
for i in range(0, 2 * n, 2):
v_elt, k_elt = args[i], args[i + 1]
ret.add(k_elt, v_elt)
k_elt.op.folded = op
v_elt.op.folded = op
return ret.build()
def build_str(self, n, op):
ret = self.fold_args(n, op)
if ret:
self.push(_Constant(('prim', str), '', None, op))
else:
self.push(None)
return ret
def | (self, python_type, op):
"""Build a folded type."""
collection = self.fold_args(op.arg, op)
if collection:
typename = python_type.__name__
typ = (typename, collection.types)
try:
value = python_type(collection.values)
except TypeError as e:
raise ConstantError(f'TypeError: {e.args[0]}', op) from e
elements = collection.elements
self.push(_Constant(typ, value, elements, op))
class _FoldedOps:
"""Mapping from a folded opcode to the top level constant that replaces it."""
def __init__(self):
self.folds = {}
def add(self, op):
self.folds[id(op)] = op.folded
def resolve(self, op):
f = op
while id(f) in self.folds:
f = self.folds[id(f)]
return f
class _FoldConstants:
"""Fold constant literals in pyc code."""
def visit_code(self, code):
"""Visit code, folding literals."""
def build_tuple(tup):
out = []
for e in tup:
if isinstance(e, tuple):
out.append(build_tuple(e))
else:
out.append(('prim', type(e)))
return ('tuple', tuple(out))
folds = _FoldedOps()
for block in code.order:
stack = _Stack()
for op in block:
if isinstance(op, opcodes.LOAD_CONST):
elt = code.consts[op.arg]
if isinstance(elt, tuple):
typ = build_tuple(elt)
stack.push(_Constant(typ, elt, typ[1], op))
else:
stack.push(_Constant(('prim', type(elt)), elt, None, op))
elif isinstance(op, opcodes.BUILD_LIST):
stack.build(list, op)
elif isinstance(op, opcodes.BUILD_SET):
stack.build(set, op)
elif isinstance(op, opcodes.FORMAT_VALUE):
if op.arg & loadmarshal.FVS_MASK:
stack.build_str(2, op)
else:
stack.build_str(1, op)
elif isinstance(op, opcodes.BUILD_STRING):
stack.build_str(op.arg, op)
elif isinstance(op, opcodes.BUILD_MAP):
map_ = stack.fold_map_args(op.arg, op)
if map_:
typ = ('map', (map_.key_types, map_.value_types))
val = dict(zip(map_.keys, map_.values))
stack.push(_Constant(typ, val, map_.elements, op))
elif isinstance(op, opcodes.BUILD_CONST_KEY_MAP):
keys = stack.pop()
vals = stack.fold_args(op.arg, op)
if vals:
keys.op.folded = op
_, t = keys.typ
typ = ('map', (frozenset(t), vals.types))
val = dict(zip(keys.value, vals.values))
elements = dict(zip(keys.value, vals.elements))
stack.push(_Constant(typ, val, elements, op))
elif isinstance(op, opcodes.LIST_APPEND):
elements = stack.fold_args(2, op)
if elements:
lst, element = elements.elements
tag, et = lst.typ
assert tag == 'list'
typ = (tag, et | {element.typ})
value = lst.value + [element.value]
elements = lst.elements + (element,)
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.LIST_EXTEND):
elements = stack.fold_args(2, op)
if elements:
lst, other = elements.elements
tag, et = lst.typ
assert tag == 'list'
other_tag, other_et = other.typ
if other_tag == 'tuple':
# Deconstruct the tuple built in opcodes.LOAD_CONST above
other_elts = tuple(_Constant(('prim', e), v, None, other.op)
for (_, e), v in zip(other_et, other.value))
elif other_tag == 'prim':
assert other_et == str
other_et = {other.typ}
other_elts = tuple(_Constant(('prim', str), v, None, other.op)
for v in other.value)
else:
other_elts = other.elements
typ = (tag, et | set(other_et))
value = lst.value + list(other.value)
elements = lst.elements + other_elts
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.MAP_ADD):
elements = stack.fold_args(3, op)
if elements:
map_, key, val = elements.elements
tag, (kt, vt) = map_.typ
assert tag == 'map'
typ = (tag, (kt | {key.typ}, vt | {val.typ}))
value = {**map_.value, **{key.value: val.value}}
elements = {**map_.elements, **{key.value: val}}
stack.push(_Constant(typ, value, elements, op))
elif isinstance(op, opcodes.DICT_UPDATE):
elements = stack.fold_args(2, op)
if elements:
map1, map2 = elements.elements
tag1, (kt1, vt1) = map1.typ
tag2, (kt2, vt2) = map2.typ
assert tag1 == tag2 == 'map | build | identifier_name |
parser.rs | whitespace and comments
fn space<'a>() -> Parser<'a, u8, ()> {
(ws() | comment()).repeat(0..).discard()
}
fn semi<'a>() -> Parser<'a, u8, ()> {
keyword(b";").name("semi")
}
fn to_eol<'a>() -> Parser<'a, u8, String> {
fn anything_else(term: u8) -> bool {
!is_cr(term) && !is_lf(term)
}
is_a(anything_else)
.repeat(0..)
.map(|u8s| String::from_utf8(u8s).expect("can only parse utf"))
}
fn line_comment<'a>() -> Parser<'a, u8, ()> {
(seq(b"//") * to_eol() - eol())
.discard()
.name("line comment")
}
fn eol<'a>() -> Parser<'a, u8, ()> {
((is_a(is_cr) * is_a(is_lf)) | is_a(is_lf) | is_a(is_cr)).discard()
}
fn keyword<'a>(keyword: &'static [u8]) -> Parser<'a, u8, ()> {
literal(keyword).discard().name("keyword")
}
fn literal<'a>(literal: &'static [u8]) -> Parser<'a, u8, String> {
spaced(seq(literal))
.map(|u8s| String::from_utf8(u8s.to_vec()).expect("can only parse utf"))
.name("literal")
}
fn star_comment<'a>() -> Parser<'a, u8, ()> {
fn anything_else(term: u8) -> bool {
term != b'*'
}
(seq(b"/*") * is_a(anything_else).repeat(0..) - seq(b"*/")).discard()
}
fn comment<'a>() -> Parser<'a, u8, ()> {
line_comment() | star_comment()
}
/// a parser wrapped in whitespace
fn spaced<'a, T>(parser: Parser<'a, u8, T>) -> Parser<'a, u8, T>
where
T: 'a,
{
space() * parser - space()
}
fn is_cr(term: u8) -> bool {
term == b'\r'
}
fn is_lf(term: u8) -> bool {
term == b'\n'
}
fn is_underscore(term: u8) -> bool {
term == b'_'
}
fn state_id<'a>() -> Parser<'a, u8, StateId> {
identifier().map(StateId)
}
fn identifier<'a>() -> Parser<'a, u8, String> {
let it = ((is_a(alpha) | is_a(is_underscore))
+ (is_a(alphanum) | is_a(is_underscore)).repeat(0..))
.map(|(first, rest)| format!("{}{}", first as char, String::from_utf8(rest).unwrap()));
spaced(it).name("name")
}
fn string<'a>() -> Parser<'a, u8, String> {
let special_char = sym(b'\\')
| sym(b'/')
| sym(b'"')
| sym(b'b').map(|_| b'\x08')
| sym(b'f').map(|_| b'\x0C')
| sym(b'n').map(|_| b'\n')
| sym(b'r').map(|_| b'\r')
| sym(b't').map(|_| b'\t');
let escape_sequence = sym(b'\\') * special_char;
let string = sym(b'"') * (none_of(b"\\\"") | escape_sequence).repeat(0..) - sym(b'"');
string.convert(String::from_utf8)
}
fn state<'a>() -> Parser<'a, u8, State> {
let raw = keyword(b"state") * identifier() + string().opt()
- semi();
raw.map(move |(identifier, description)| State {
id: StateId(identifier),
is_starting_state: false,
description
})
}
fn | <'a>() -> Parser<'a, u8, Vec<State>> {
fn tag_starting_state(idx: usize, state: State) -> State {
State {
is_starting_state: idx == 0,
..state
}
};
state().repeat(0..).map(|states| states.into_iter().enumerate().map(|(idx, state)| tag_starting_state(idx, state)).collect())
}
fn accept_states_list<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
accept_states_chain()
.repeat(0..)
.map(|chains| chains.into_iter().flatten().collect())
}
fn accept_states_chain<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
let raw = spaced(list(spaced(state_id()), keyword(b"->"))) - semi();
raw.map(move |state_ids| {
if state_ids.len() < 2 {
return vec![];
}
let mut result = vec![];
for i in 0..state_ids.len() -1 {
let left = state_ids[i].clone();
let right = state_ids[i+1].clone();
let accept = AcceptState(left, right);
result.push(accept);
}
return result;
})
}
pub fn state_machine<'a>() -> Parser<'a, u8, StateMachine> {
let header = keyword(b"machine") * identifier() - semi();
let raw = header
+ state_list()
+ accept_states_list();
raw.map(move |((name, states), accept_states)| StateMachine {
name,
states,
accept_states
})
}
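// Illustrative sketch (added commentary, not from the original file): using the
// top-level parser on an in-memory definition. Mirrors the tests further down.
//
// let input = b"machine traffic; state red; state green; red -> green;";
// let machine = state_machine().parse(input).expect("should parse");
// assert_eq!(machine.name, "traffic");
// assert!(machine.states[0].is_starting_state);
// assert_eq!(
//     machine.accept_states,
//     vec![AcceptState(StateId("red".into()), StateId("green".into()))]
// );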
#[cfg(test)]
mod test {
use super::*;
use std::cmp::min;
use std::path::{Path, PathBuf};
use std::{fs, io};
macro_rules! assert_consumes_all {
( $ parser: expr, $input: expr ) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
if let Err(_) = res {
panic!("parser failed to match and consume everything")
}
};
( $ parser: expr, $input: expr, $expected: expr) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
match res {
Ok(answer) => {
// it parsed, but was it right?
assert_eq!(answer, $expected)
}
Err(_) => {
//
panic!("parser failed to match and consume everything")
}
}
};
}
#[test]
fn parse_keywords() -> Result<()> {
assert_consumes_all![eol(), b"\r"];
assert_consumes_all![eol(), b"\r\n"];
assert_consumes_all![eol(), b"\n"];
assert_consumes_all![space(), b""];
assert_consumes_all![space(), b" "];
assert_consumes_all![space(), b" \t \n \r "];
assert_consumes_all![line_comment(), b"//\r"];
assert_consumes_all![line_comment(), b"//\n"];
assert_consumes_all![line_comment(), b"//\r\n"];
assert_consumes_all![line_comment(), b"// xyz \r\n"];
assert_consumes_all![star_comment(), b"/* thing */"];
assert_consumes_all![star_comment(), b"/* thing \r\n thing */"];
assert_consumes_all!(
identifier(),
b"foo"
);
assert_consumes_all!(
state_id(),
b"foo"
);
assert_consumes_all!(
accept_states_chain(),
b"foo-> bar -> baz;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
]
);
assert_consumes_all!(
accept_states_list(),
b"foo-> bar -> baz; baz -> quux;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
AcceptState(StateId("baz".into()), StateId("quux".into())),
]
);
Ok(())
}
#[test]
fn parse_state_machines() -> Result<()> {
let emptymachine = StateMachine {
name: "foo".into(),
states: Default::default(),
accept_states: vec![]
};
assert_consumes_all!(
state_machine(),
b"machine foo;",
emptymachine
);
assert_consumes_all!(
state_machine(),
b"
machine foo;
state bar \"it's a bar thing\";
state baz;
bar -> baz;
",
StateMachine {
name: "foo".into(),
states: vec![
State {
id: StateId("bar".into()),
is_starting_state: true,
description: Some("it's a bar thing".into())
},
State {
id: StateId("baz".into()),
is_starting_state: false,
description: None
},
],
accept_states: vec![
AcceptState(StateId("bar".into()), StateId("baz".into()))
]
| state_list | identifier_name |
parser.rs | fn to_eol<'a>() -> Parser<'a, u8, String> {
fn anything_else(term: u8) -> bool {
!is_cr(term) && !is_lf(term)
}
is_a(anything_else)
.repeat(0..)
.map(|u8s| String::from_utf8(u8s).expect("can only parse utf"))
}
fn line_comment<'a>() -> Parser<'a, u8, ()> {
(seq(b"//") * to_eol() - eol())
.discard()
.name("line comment")
}
fn eol<'a>() -> Parser<'a, u8, ()> {
((is_a(is_cr) * is_a(is_lf)) | is_a(is_lf) | is_a(is_cr)).discard()
}
fn keyword<'a>(keyword: &'static [u8]) -> Parser<'a, u8, ()> {
literal(keyword).discard().name("keyword")
}
fn literal<'a>(literal: &'static [u8]) -> Parser<'a, u8, String> {
spaced(seq(literal))
.map(|u8s| String::from_utf8(u8s.to_vec()).expect("can only parse utf"))
.name("literal")
}
fn star_comment<'a>() -> Parser<'a, u8, ()> {
fn anything_else(term: u8) -> bool {
term != b'*'
}
(seq(b"/*") * is_a(anything_else).repeat(0..) - seq(b"*/")).discard()
}
fn comment<'a>() -> Parser<'a, u8, ()> {
line_comment() | star_comment()
}
/// a parser wrapped in whitespace
fn spaced<'a, T>(parser: Parser<'a, u8, T>) -> Parser<'a, u8, T>
where
T: 'a,
{
space() * parser - space()
}
fn is_cr(term: u8) -> bool {
term == b'\r'
}
fn is_lf(term: u8) -> bool {
term == b'\n'
}
fn is_underscore(term: u8) -> bool {
term == b'_'
}
fn state_id<'a>() -> Parser<'a, u8, StateId> {
identifier().map(StateId)
}
fn identifier<'a>() -> Parser<'a, u8, String> {
let it = ((is_a(alpha) | is_a(is_underscore))
+ (is_a(alphanum) | is_a(is_underscore)).repeat(0..))
.map(|(first, rest)| format!("{}{}", first as char, String::from_utf8(rest).unwrap()));
spaced(it).name("name")
}
fn string<'a>() -> Parser<'a, u8, String> {
let special_char = sym(b'\\')
| sym(b'/')
| sym(b'"')
| sym(b'b').map(|_| b'\x08')
| sym(b'f').map(|_| b'\x0C')
| sym(b'n').map(|_| b'\n')
| sym(b'r').map(|_| b'\r')
| sym(b't').map(|_| b'\t');
let escape_sequence = sym(b'\\') * special_char;
let string = sym(b'"') * (none_of(b"\\\"") | escape_sequence).repeat(0..) - sym(b'"');
string.convert(String::from_utf8)
}
fn state<'a>() -> Parser<'a, u8, State> {
let raw = keyword(b"state") * identifier() + string().opt()
- semi();
raw.map(move |(identifier, description)| State {
id: StateId(identifier),
is_starting_state: false,
description
})
}
fn state_list<'a>() -> Parser<'a, u8, Vec<State>> {
fn tag_starting_state(idx: usize, state: State) -> State {
State {
is_starting_state: idx == 0,
..state
}
};
state().repeat(0..).map(|states| states.into_iter().enumerate().map(|(idx, state)| tag_starting_state(idx, state)).collect())
}
fn accept_states_list<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
accept_states_chain()
.repeat(0..)
.map(|chains| chains.into_iter().flatten().collect())
}
fn accept_states_chain<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
let raw = spaced(list(spaced(state_id()), keyword(b"->"))) - semi();
raw.map(move |state_ids| {
if state_ids.len() < 2 {
return vec![];
}
let mut result = vec![];
for i in 0..state_ids.len() -1 {
let left = state_ids[i].clone();
let right = state_ids[i+1].clone();
let accept = AcceptState(left, right);
result.push(accept);
}
return result;
})
}
pub fn state_machine<'a>() -> Parser<'a, u8, StateMachine> {
let header = keyword(b"machine") * identifier() - semi();
let raw = header
+ state_list()
+ accept_states_list();
raw.map(move |((name, states), accept_states)| StateMachine {
name,
states,
accept_states
})
}
#[cfg(test)]
mod test {
use super::*;
use std::cmp::min;
use std::path::{Path, PathBuf};
use std::{fs, io};
macro_rules! assert_consumes_all {
( $ parser: expr, $input: expr ) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
if let Err(_) = res {
panic!("parser failed to match and consume everything")
}
};
( $ parser: expr, $input: expr, $expected: expr) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
match res {
Ok(answer) => {
// it parsed, but was it right?
assert_eq!(answer, $expected)
}
Err(_) => {
//
panic!("parser failed to match and consume everything")
}
}
};
}
#[test]
fn parse_keywords() -> Result<()> {
assert_consumes_all![eol(), b"\r"];
assert_consumes_all![eol(), b"\r\n"];
assert_consumes_all![eol(), b"\n"];
assert_consumes_all![space(), b""];
assert_consumes_all![space(), b" "];
assert_consumes_all![space(), b" \t \n \r "];
assert_consumes_all![line_comment(), b"//\r"];
assert_consumes_all![line_comment(), b"//\n"];
assert_consumes_all![line_comment(), b"//\r\n"];
assert_consumes_all![line_comment(), b"// xyz \r\n"];
assert_consumes_all![star_comment(), b"/* thing */"];
assert_consumes_all![star_comment(), b"/* thing \r\n thing */"];
assert_consumes_all!(
identifier(),
b"foo"
);
assert_consumes_all!(
state_id(),
b"foo"
);
assert_consumes_all!(
accept_states_chain(),
b"foo-> bar -> baz;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
]
);
assert_consumes_all!(
accept_states_list(),
b"foo-> bar -> baz; baz -> quux;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
AcceptState(StateId("baz".into()), StateId("quux".into())),
]
);
Ok(())
}
#[test]
fn parse_state_machines() -> Result<()> {
let emptymachine = StateMachine {
name: "foo".into(),
states: Default::default(),
accept_states: vec![]
};
assert_consumes_all!(
state_machine(),
b"machine foo;",
emptymachine
);
assert_consumes_all!(
state_machine(),
b"
machine foo;
state bar \"it's a bar thing\";
state baz;
bar -> baz;
",
StateMachine {
name: "foo".into(),
states: vec![
State {
id: StateId("bar".into()),
is_starting_state: true,
description: Some("it's a bar thing".into())
},
State {
id: StateId("baz".into()),
is_starting_state: false,
description: None
},
],
accept_states: vec![
AcceptState(StateId("bar".into()), StateId("baz".into()))
]
}
);
Ok(())
}
fn count_lines(byte_slice: &[u8]) -> usize | {
let line_parser = (to_eol() - eol()).repeat(0..);
let parse_result = line_parser.parse(byte_slice).unwrap();
parse_result.len()
} | identifier_body |
|
parser.rs | (is_underscore)).repeat(0..))
.map(|(first, rest)| format!("{}{}", first as char, String::from_utf8(rest).unwrap()));
spaced(it).name("name")
}
fn string<'a>() -> Parser<'a, u8, String> {
let special_char = sym(b'\\')
| sym(b'/')
| sym(b'"')
| sym(b'b').map(|_| b'\x08')
| sym(b'f').map(|_| b'\x0C')
| sym(b'n').map(|_| b'\n')
| sym(b'r').map(|_| b'\r')
| sym(b't').map(|_| b'\t');
let escape_sequence = sym(b'\\') * special_char;
let string = sym(b'"') * (none_of(b"\\\"") | escape_sequence).repeat(0..) - sym(b'"');
string.convert(String::from_utf8)
}
fn state<'a>() -> Parser<'a, u8, State> {
let raw = keyword(b"state") * identifier() + string().opt()
- semi();
raw.map(move |(identifier, description)| State {
id: StateId(identifier),
is_starting_state: false,
description
})
}
fn state_list<'a>() -> Parser<'a, u8, Vec<State>> {
fn tag_starting_state(idx: usize, state: State) -> State {
State {
is_starting_state: idx == 0,
..state
}
};
state().repeat(0..).map(|states| states.into_iter().enumerate().map(|(idx, state)| tag_starting_state(idx, state)).collect())
}
fn accept_states_list<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
accept_states_chain()
.repeat(0..)
.map(|chains| chains.into_iter().flatten().collect())
}
fn accept_states_chain<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
let raw = spaced(list(spaced(state_id()), keyword(b"->"))) - semi();
raw.map(move |state_ids| {
if state_ids.len() < 2 {
return vec![];
}
let mut result = vec![];
for i in 0..state_ids.len() -1 {
let left = state_ids[i].clone();
let right = state_ids[i+1].clone();
let accept = AcceptState(left, right);
result.push(accept);
}
return result;
})
}
pub fn state_machine<'a>() -> Parser<'a, u8, StateMachine> {
let header = keyword(b"machine") * identifier() - semi();
let raw = header
+ state_list()
+ accept_states_list();
raw.map(move |((name, states), accept_states)| StateMachine {
name,
states,
accept_states
})
}
#[cfg(test)]
mod test {
use super::*;
use std::cmp::min;
use std::path::{Path, PathBuf};
use std::{fs, io};
macro_rules! assert_consumes_all {
( $ parser: expr, $input: expr ) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
if let Err(_) = res {
panic!("parser failed to match and consume everything")
}
};
( $ parser: expr, $input: expr, $expected: expr) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
match res {
Ok(answer) => {
// it parsed, but was it right?
assert_eq!(answer, $expected)
}
Err(_) => {
//
panic!("parser failed to match and consume everything")
}
}
};
}
#[test]
fn parse_keywords() -> Result<()> {
assert_consumes_all![eol(), b"\r"];
assert_consumes_all![eol(), b"\r\n"];
assert_consumes_all![eol(), b"\n"];
assert_consumes_all![space(), b""];
assert_consumes_all![space(), b" "];
assert_consumes_all![space(), b" \t \n \r "];
assert_consumes_all![line_comment(), b"//\r"];
assert_consumes_all![line_comment(), b"//\n"];
assert_consumes_all![line_comment(), b"//\r\n"];
assert_consumes_all![line_comment(), b"// xyz \r\n"];
assert_consumes_all![star_comment(), b"/* thing */"];
assert_consumes_all![star_comment(), b"/* thing \r\n thing */"];
assert_consumes_all!(
identifier(),
b"foo"
);
assert_consumes_all!(
state_id(),
b"foo"
);
assert_consumes_all!(
accept_states_chain(),
b"foo-> bar -> baz;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
]
);
assert_consumes_all!(
accept_states_list(),
b"foo-> bar -> baz; baz -> quux;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
AcceptState(StateId("baz".into()), StateId("quux".into())),
]
);
Ok(())
}
#[test]
fn parse_state_machines() -> Result<()> {
let emptymachine = StateMachine {
name: "foo".into(),
states: Default::default(),
accept_states: vec![]
};
assert_consumes_all!(
state_machine(),
b"machine foo;",
emptymachine
);
assert_consumes_all!(
state_machine(),
b"
machine foo;
state bar \"it's a bar thing\";
state baz;
bar -> baz;
",
StateMachine {
name: "foo".into(),
states: vec![
State {
id: StateId("bar".into()),
is_starting_state: true,
description: Some("it's a bar thing".into())
},
State {
id: StateId("baz".into()),
is_starting_state: false,
description: None
},
],
accept_states: vec![
AcceptState(StateId("bar".into()), StateId("baz".into()))
]
}
);
Ok(())
}
fn count_lines(byte_slice: &[u8]) -> usize {
let line_parser = (to_eol() - eol()).repeat(0..);
let parse_result = line_parser.parse(byte_slice).unwrap();
parse_result.len()
}
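// Added note: because each counted line must be terminated, under the parsers
// above count_lines(b"a\nb\nc\n") is 3 while count_lines(b"a\nb\nc") is 2 —
// a final unterminated line is not counted. assert_parse_file below relies on
// this when converting a byte offset into a 1-based line number.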
#[test]
fn line_counter_works() {
let file_path_str = "assets/fsml/simple-state-machine.fsml";
let byte_vec: Vec<u8> = std::fs::read(file_path_str).unwrap();
let actual = count_lines(&byte_vec);
assert_eq!(12, actual);
}
#[test]
fn parse_state_machine_file() {
let file_path_str = "assets/fsml/simple-state-machine.fsml";
assert_parse_file(PathBuf::from_str(file_path_str).unwrap().as_path());
}
#[test]
fn parse_all_files() -> Result<()> {
let mut entries = fs::read_dir("assets/fsml")?
.map(|res| res.map(|e| e.path()))
//.filter(|f| )
.collect::<std::result::Result<Vec<_>, io::Error>>()?;
entries.sort();
for file_path_str in entries {
println!("");
println!("{}", file_path_str.to_str().unwrap());
println!("");
assert_parse_file(file_path_str.as_path());
}
Ok(())
}
fn assert_parse_file(file_path_str: &Path) {
let byte_vec: Vec<u8> = std::fs::read(file_path_str).unwrap();
let file_content =
String::from_utf8(byte_vec.clone()).expect("should be able to read the file");
let byte_slice: &[u8] = &byte_vec;
let parser = state_machine();
let parse_result = match parser.parse(byte_slice) {
Ok(parse_result) => parse_result,
Err(pom::Error::Mismatch { message, position }) => | {
let start_str = &byte_vec[0..position];
let line = count_lines(start_str) + 1;
let end = min(position + 50, file_content.len() - 1);
let extract = &file_content[position..end];
let extract = extract
.to_string()
.replace("\n", "\\n")
.replace("\r", "\\r")
.replace("\t", "\\t");
let err_location = format!("{}:{}:{}", file_path_str.to_str().unwrap(), line, 1);
// thread 'idl_parser::test::parse_full_html5_file' panicked at 'whoops', src/idl_parser.rs:428:9
let better_message = format!(
"thread 'idl_parser::test::parse_full_html5_file' panicked at 'parsing', {}\n\n{}",
err_location, extract
);
println!("{}", better_message);
panic!("{}", message)
} | conditional_block |
|
parser.rs | /// whitespace and comments
fn space<'a>() -> Parser<'a, u8, ()> {
(ws() | comment()).repeat(0..).discard()
}
fn semi<'a>() -> Parser<'a, u8, ()> {
keyword(b";").name("semi")
}
fn to_eol<'a>() -> Parser<'a, u8, String> {
fn anything_else(term: u8) -> bool {
!is_cr(term) && !is_lf(term)
}
is_a(anything_else)
.repeat(0..)
.map(|u8s| String::from_utf8(u8s).expect("can only parse utf"))
}
fn line_comment<'a>() -> Parser<'a, u8, ()> {
(seq(b"//") * to_eol() - eol())
.discard()
.name("line comment")
}
fn eol<'a>() -> Parser<'a, u8, ()> {
((is_a(is_cr) * is_a(is_lf)) | is_a(is_lf) | is_a(is_cr)).discard()
}
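// Added note: the `\r\n` alternative is deliberately tried first; if a lone
// `\r` could match on its own, a Windows line ending would be split in two
// and the stray `\n` would corrupt the next match.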
fn keyword<'a>(keyword: &'static [u8]) -> Parser<'a, u8, ()> {
literal(keyword).discard().name("keyword")
}
fn literal<'a>(literal: &'static [u8]) -> Parser<'a, u8, String> {
spaced(seq(literal))
.map(|u8s| String::from_utf8(u8s.to_vec()).expect("can only parse utf"))
.name("literal")
}
fn star_comment<'a>() -> Parser<'a, u8, ()> {
fn anything_else(term: u8) -> bool {
term != b'*'
}
(seq(b"/*") * is_a(anything_else).repeat(0..) - seq(b"*/")).discard()
}
fn comment<'a>() -> Parser<'a, u8, ()> {
line_comment() | star_comment()
}
/// a parser wrapped in whitespace
fn spaced<'a, T>(parser: Parser<'a, u8, T>) -> Parser<'a, u8, T>
where
T: 'a,
{
space() * parser - space()
}
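// Added note: because `keyword` goes through `literal` and `spaced`, tokens
// tolerate arbitrary surrounding whitespace and comments — which is why
// inputs like b"foo-> bar" parse in the tests below.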
fn is_cr(term: u8) -> bool {
term == b'\r'
}
fn is_lf(term: u8) -> bool {
term == b'\n'
}
fn is_underscore(term: u8) -> bool {
term == b'_'
}
fn state_id<'a>() -> Parser<'a, u8, StateId> {
identifier().map(StateId)
}
fn identifier<'a>() -> Parser<'a, u8, String> {
let it = ((is_a(alpha) | is_a(is_underscore))
+ (is_a(alphanum) | is_a(is_underscore)).repeat(0..))
.map(|(first, rest)| format!("{}{}", first as char, String::from_utf8(rest).unwrap()));
spaced(it).name("name")
}
fn string<'a>() -> Parser<'a, u8, String> {
let special_char = sym(b'\\')
| sym(b'/')
| sym(b'"')
| sym(b'b').map(|_| b'\x08')
| sym(b'f').map(|_| b'\x0C')
| sym(b'n').map(|_| b'\n')
| sym(b'r').map(|_| b'\r')
| sym(b't').map(|_| b'\t');
let escape_sequence = sym(b'\\') * special_char;
let string = sym(b'"') * (none_of(b"\\\"") | escape_sequence).repeat(0..) - sym(b'"');
string.convert(String::from_utf8)
}
fn state<'a>() -> Parser<'a, u8, State> {
let raw = keyword(b"state") * identifier() + string().opt()
- semi();
raw.map(move |(identifier, description)| State {
id: StateId(identifier),
is_starting_state: false,
description
})
}
fn state_list<'a>() -> Parser<'a, u8, Vec<State>> {
fn tag_starting_state(idx: usize, state: State) -> State {
State {
is_starting_state: idx == 0,
..state | fn accept_states_list<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
accept_states_chain()
.repeat(0..)
.map(|chains| chains.into_iter().flatten().collect())
}
fn accept_states_chain<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
let raw = spaced(list(spaced(state_id()), keyword(b"->"))) - semi();
raw.map(move |state_ids| {
if state_ids.len() < 2 {
return vec![];
}
let mut result = vec![];
for i in 0..state_ids.len() -1 {
let left = state_ids[i].clone();
let right = state_ids[i+1].clone();
let accept = AcceptState(left, right);
result.push(accept);
}
return result;
})
}
pub fn state_machine<'a>() -> Parser<'a, u8, StateMachine> {
let header = keyword(b"machine") * identifier() - semi();
let raw = header
+ state_list()
+ accept_states_list();
raw.map(move |((name, states), accept_states)| StateMachine {
name,
states,
accept_states
})
}
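// Added summary (informal EBNF, reconstructed from the combinators above):
//   machine ::= "machine" identifier ";" state* chain*
//   state   ::= "state" identifier string? ";"
//   chain   ::= identifier ("->" identifier)* ";"
// A chain with a single identifier parses but contributes no AcceptState,
// and the first listed state is tagged as the starting state.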
#[cfg(test)]
mod test {
use super::*;
use std::cmp::min;
use std::path::{Path, PathBuf};
use std::{fs, io};
macro_rules! assert_consumes_all {
( $parser:expr, $input:expr ) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
if let Err(_) = res {
panic!("parser failed to match and consume everything")
}
};
( $parser:expr, $input:expr, $expected:expr ) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
match res {
Ok(answer) => {
// it parsed, but was it right?
assert_eq!(answer, $expected)
}
Err(_) => {
//
panic!("parser failed to match and consume everything")
}
}
};
}
#[test]
fn parse_keywords() -> Result<()> {
assert_consumes_all![eol(), b"\r"];
assert_consumes_all![eol(), b"\r\n"];
assert_consumes_all![eol(), b"\n"];
assert_consumes_all![space(), b""];
assert_consumes_all![space(), b" "];
assert_consumes_all![space(), b" \t \n \r "];
assert_consumes_all![line_comment(), b"//\r"];
assert_consumes_all![line_comment(), b"//\n"];
assert_consumes_all![line_comment(), b"//\r\n"];
assert_consumes_all![line_comment(), b"// xyz \r\n"];
assert_consumes_all![star_comment(), b"/* thing */"];
assert_consumes_all![star_comment(), b"/* thing \r\n thing */"];
assert_consumes_all!(
identifier(),
b"foo"
);
assert_consumes_all!(
state_id(),
b"foo"
);
assert_consumes_all!(
accept_states_chain(),
b"foo-> bar -> baz;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
]
);
assert_consumes_all!(
accept_states_list(),
b"foo-> bar -> baz; baz -> quux;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
AcceptState(StateId("baz".into()), StateId("quux".into())),
]
);
Ok(())
}
#[test]
fn parse_state_machines() -> Result<()> {
let emptymachine = StateMachine {
name: "foo".into(),
states: Default::default(),
accept_states: vec![]
};
assert_consumes_all!(
state_machine(),
b"machine foo;",
emptymachine
);
assert_consumes_all!(
state_machine(),
b"
machine foo;
state bar \"it's a bar thing\";
state baz;
bar -> baz;
",
StateMachine {
name: "foo".into(),
states: vec![
State {
id: StateId("bar".into()),
is_starting_state: true,
description: Some("it's a bar thing".into())
},
State {
id: StateId("baz".into()),
is_starting_state: false,
description: None
},
],
accept_states: vec![
AcceptState(StateId("bar".into()), StateId("baz".into()))
]
}
| }
};
state().repeat(0..).map(|states| states.into_iter().enumerate().map(|(idx, state)| tag_starting_state(idx, state)).collect())
}
| random_line_split |
lib.rs | null? args)
// (if (tail? next)
// c
// (list ’frame next c))
// (loop (cdr args)
// (compile (car args)
// (list ’argument c)))))])]
// [else
// (list ’constant x next)])))
pub fn compile(x: CoreLanguage, next: Code) -> Code {
match x {
Variable(str) => {
box REFER{var:str, k:next}
},
Quote(obj) => {
box CONSTANT{obj:ONil, k:next}
},
Lambda(vars, body) => {
box CLOSE{ vars:vars, body:compile(*body, box RETURN{unused:true}), k:next }
},
If(test, seq, alt) => {
let thenc = compile(*seq, next.clone());
let elsec = compile(*alt, next.clone());
compile(*test, box TEST{kthen:thenc, kelse:elsec})
},
Set(var, x) => {
compile(*x, box ASSIGN{var:var, k:next} )
},
CallCC(x) => {
let c = box CONTI{
k: box ARGUMENT{ k:compile(*x, box APPLY{unused:true}) }
};
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
},
List(x) => {
let args = x.slice_from(1);
let mut c = compile((*x[0]).clone(), box APPLY{unused:true});
for arg in args.iter() {
c = compile((**arg).clone(), box ARGUMENT{k:c});
}
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
}
_ =>
{ box CONSTANT{obj:ONil /*x*/, k:next} }
}
}
fn is_tail(x: &Code) -> bool {
match **x {
RETURN{..} => true,
_ => false
}
}
///////////////////////////////////////////////////////////////////////////////
// Opcode
// these are a dozen primitive instructions that implement scheme-like
// semantics. This is applicative-order lambda calculus with lexically-scoped
// environments: everything reduces to function calls where arguments are
// evaluated before application of function to arguments; variables are
// bound in their static (lexical) scope like Scheme, not in their dynamic
// (runtime) scope like earlier Lisps.
// Execution model is heap-based and there is support for call-with-current-continuation
// so exception semantics can be implemented easily in terms of call/cc.
#[deriving(Clone)]
pub enum Opcode {
HALT {unused:bool},
REFER {var: String, k: Code},
CONSTANT {obj: Obj, k: Code},
CLOSE {vars: Vec<String>, body: Code, k: Code},
TEST {kthen: Code, kelse: Code},
ASSIGN {var: String, k: Code},
CONTI {k: Code},
NUATE {s: Frame, var: String},
FRAME {k: Code, ret: Code},
ARGUMENT {k: Code},
APPLY {unused:bool},
INVOKE {method: String, k: Code},
RETURN {unused:bool},
}
pub type Code = Box<Opcode>;
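// Added illustration (not in the original source): compiling
// Variable("x".to_string()) with next = box HALT{unused:true} produces the
// one-step chain REFER{ var: "x", k: box HALT{unused:true} } — at run time
// REFER moves the binding of "x" into the accumulator A and HALT returns it.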
/// Scope is a dynamic environment: a set of bindings, implemented
/// as a map from variable names (as Str, representing symbols)
/// to runtime value (Obj? if not typing; or AxonVal derivatives)
#[deriving(Clone)]
struct Scope {
parent: Option<Box<Scope>>, // link to enclosing scope
//local: HashMap<String, Obj>// local vars (conceptually includes fn params)
vars: Vec<String>,
vals: Vec<Obj>
}
impl Scope
{
fn new(parent:Option<Box<Scope>>) -> Scope {
Scope { parent:parent, vars:vec!(), vals:vec!() }
}
fn get(&self, var: &String) -> Option<Obj> {
let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => Some(self.vals[ix].clone()),
None => self.parent.as_ref().and_then(|p| p.get(var)) // fall back to the enclosing scope
}
}
fn set(&mut self, var: &String, val: Obj) {
| let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => { *self.vals.get_mut(ix) = val },
None => {
self.vars.push(var.clone());
self.vals.push(val);
}
};
}
fn extend(&self, vars: Vec<String>, vals: Vec<Obj>) -> Scope {
Scope{
parent: Some(box self.clone()),
vars: vars,
vals: vals
}
}
}
/// Frame is the dynamic (runtime) representation of a function
/// execution. It captures the caller-frame and return-address,
/// so the complete dynamic context can be traced by walking back
/// thru the caller links; a bindings context (which pushes and
/// pops local scopes for variable definitions); the AST for the
/// function's code; and the instruction-pointer which indicates
/// the current point of execution in the code.
#[deriving(Clone)]
struct Frame {
// *X* when this frame returns, exec. resumes from caller.code[ret] (i.e. ret is an index into the caller's code)
ret: Code,
// *E* parms,locals
bindings: Scope,
// *R* accumulator of arg vals, to be combined w/ param names in extending env
valueRib: Vec<Obj>,
// *S* previous frame
caller: Option<Box<Frame>>,
//code: Code //belongs in Frame (there's a frame for every lambda definition)
}
impl Frame {
fn make(env:Scope, rib: Vec<Obj>, ret: Code, caller: Option<Box<Frame>>)
-> Frame
{
Frame { bindings:env, valueRib:rib, ret:ret, caller:caller }
}
}
/// closure captures the environment where it was created; when called,
/// it binds its params to actual-arg values (in left-to-right listed order)
/// and extends its environment with those bindings, and executes its
/// body with that extended environment.
#[deriving(Clone)]
pub struct Closure {
// names of parameters to be applied to closure
params: Vec<String>,
// static environment (lexical scope, captures scopes enclosing definition)
env: Scope,
// code implementing body of closure.
body: Code
}
impl Closure {
fn make(params: Vec<String>, env: Scope, body: Code) -> Closure {
Closure { params:params, env:env, body:body }
}
}
/// The VM below is fundamentally a state machine, of course, and
/// the five registers capture the entire current-state of that machine.
struct VMState
{
/////////////////////////////////////////////////////////////////////
// Machine Registers
// accumulator (most-recently-evaluated-expression value)
A: Obj,
// next instruction to be executed (source is compiled into a directed-graph of Opcode)
X: Code,
// current (lexical) environment (bindings map, context, ...)
E: Scope,
// value rib (accumulator for values of arguments to a fn application)
R: Vec<Obj>,
// control stack (ptr to top call frame; frames have link to prev frame)
S: Frame
}
impl VMState {
fn make(a:Obj, x:Code, e:Scope, r:Vec<Obj>, s:Frame) -> VMState {
VMState { A:a, X:x, E:e, R:r, S:s }
}
fn accumulator(&self) -> &Obj { &self.A }
fn program(&self) -> &Code { &self.X }
fn environment(&self) -> &Scope { &self.E }
fn arguments(&self) -> &Vec<Obj> { &self.R }
fn stackframe(&self) -> &Frame { &self.S }
}
///////////////////////////////////////////////////////////////////////////////
// axon machine: definition and implementation of virtual machine for
// scheme-like semantics
//
// let code be an in-memory graph (DAG) of instructions, where the entry-point
// to a sub-program is a single instruction.
// let instruction be a composition of:
// - opcode, an enumeration identifying its type
// - operands, compile-time constant arguments to the instruction
// - links, 0, 1, or 2 links to successor-instructions.
// note the single exception: the 'nuate instruction takes a Frame
// argument. This means that (as written), compiled code that
// includes call/cc won't be serializable, because the live control-stack
// frames aren't serializable. This only matters if we start thinking
// about serializing execution-in-process code and moving it to a
// different machine for resumption.
// ...
// ...
// A VM with 5 registers, 12 primitive instructions, and
// 3 basic data structures:
// - Frame captures a call-frame and maintains a dynamic control stack
// - Scope manages bindings of variables to values in lexically nested scopes
// - Closure binds parameters to actual args and executes code
struct Machine {
state: VMState
}
impl Machine
{
fn init(state: VMState) -> Machine { Machine { state:state } }
fn step(&mut self) -> Option<Obj> {
let (mut | identifier_name |
|
lib.rs | (*test, box TEST{kthen:thenc, kelse:elsec})
},
Set(var, x) => {
compile(*x, box ASSIGN{var:var, k:next} )
},
CallCC(x) => {
let c = box CONTI{
k: box ARGUMENT{ k:compile(*x, box APPLY{unused:true}) }
};
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
},
List(x) => {
let args = x.slice_from(1);
let mut c = compile((*x[0]).clone(), box APPLY{unused:true});
for arg in args.iter() {
c = compile((**arg).clone(), box ARGUMENT{k:c});
}
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
}
_ =>
{ box CONSTANT{obj:ONil /*x*/, k:next} }
}
}
fn is_tail(x: &Code) -> bool {
match **x {
RETURN{..} => true,
_ => false
}
}
///////////////////////////////////////////////////////////////////////////////
// Opcode
// these are a dozen primitive instructions that implement scheme-like
// semantics. This is applicative-order lambda calculus with lexically-scoped
// environments: everything reduces to function calls where arguments are
// evaluated before application of function to arguments; variables are
// bound in their static (lexical) scope like Scheme, not in their dynamic
// (runtime) scope like earlier Lisps.
// Execution model is heap-based and there is support for call-with-current-continuation
// so exception semantics can be implemented easily in terms of call/cc.
#[deriving(Clone)]
pub enum Opcode {
HALT {unused:bool},
REFER {var: String, k: Code},
CONSTANT {obj: Obj, k: Code},
CLOSE {vars: Vec<String>, body: Code, k: Code},
TEST {kthen: Code, kelse: Code},
ASSIGN {var: String, k: Code},
CONTI {k: Code},
NUATE {s: Frame, var: String},
FRAME {k: Code, ret: Code},
ARGUMENT {k: Code},
APPLY {unused:bool},
INVOKE {method: String, k: Code},
RETURN {unused:bool},
}
pub type Code = Box<Opcode>;
/// Scope is a dynamic environment: a set of bindings, implemented
/// as a map from variable names (as Str, representing symbols)
/// to runtime value (Obj? if not typing; or AxonVal derivatives)
#[deriving(Clone)]
struct Scope {
parent: Option<Box<Scope>>, // link to enclosing scope
//local: HashMap<String, Obj>// local vars (conceptually includes fn params)
vars: Vec<String>,
vals: Vec<Obj>
}
impl Scope
{
fn new(parent:Option<Box<Scope>>) -> Scope {
Scope { parent:parent, vars:vec!(), vals:vec!() }
}
fn get(&self, var: &String) -> Option<Obj> {
let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => Some(self.vals[ix].clone()),
None => self.parent.as_ref().and_then(|p| p.get(var)) // fall back to the enclosing scope
}
}
fn set(&mut self, var: &String, val: Obj) {
let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => { *self.vals.get_mut(ix) = val },
None => {
self.vars.push(var.clone());
self.vals.push(val);
}
};
}
fn extend(&self, vars: Vec<String>, vals: Vec<Obj>) -> Scope {
Scope{
parent: Some(box self.clone()),
vars: vars,
vals: vals
}
}
}
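// Added example: extension shadows rather than mutates. If `outer` binds
// "x" to v1, then `let inner = outer.extend(vec!["x".into()], vec![v2]);`
// gives a child scope where inner.get(&"x".into()) finds v2 locally, while
// outer still maps "x" to v1 through its own vars/vals.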
/// Frame is the dynamic (runtime) representation of a function
/// execution. It captures the caller-frame and return-address,
/// so the complete dynamic context can be traced by walking back
/// thru the caller links; a bindings context (which pushes and
/// pops local scopes for variable definitions); the AST for the
/// function's code; and the instruction-pointer which indicates
/// the current point of execution in the code.
#[deriving(Clone)]
struct Frame {
// *X* when this frame returns, exec. resumes from caller.code[ret] (i.e. ret is an index into the caller's code)
ret: Code,
// *E* parms,locals
bindings: Scope,
// *R* accumulator of arg vals, to be combined w/ param names in extending env
valueRib: Vec<Obj>,
// *S* previous frame
caller: Option<Box<Frame>>,
//code: Code //belongs in Frame (there's a frame for every lambda definition)
}
impl Frame {
fn make(env:Scope, rib: Vec<Obj>, ret: Code, caller: Option<Box<Frame>>)
-> Frame
{
Frame { bindings:env, valueRib:rib, ret:ret, caller:caller }
}
}
/// closure captures the environment where it was created; when called,
/// it binds its params to actual-arg values (in left-to-right listed order)
/// and extends its environment with those bindings, and executes its
/// body with that extended environment.
#[deriving(Clone)]
pub struct Closure {
// names of parameters to be applied to closure
params: Vec<String>,
// static environment (lexical scope, captures scopes enclosing definition)
env: Scope,
// code implementing body of closure.
body: Code
}
impl Closure {
fn make(params: Vec<String>, env: Scope, body: Code) -> Closure {
Closure { params:params, env:env, body:body }
}
}
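// Added sketch: applying a Closure conceptually evaluates
// `closure.env.extend(closure.params, args)` and then runs `closure.body`
// in that child scope; the APPLY opcode is where the machine performs this
// wiring, using the value rib R as the collected `args`.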
/// The VM below is fundamentally a state machine, of course, and
/// the five registers capture the entire current-state of that machine.
struct VMState
{
/////////////////////////////////////////////////////////////////////
// Machine Registers
// accumulator (most-recently-evaluated-expression value)
A: Obj,
// next instruction to be executed (source is compiled into a directed-graph of Opcode)
X: Code,
// current (lexical) environment (bindings map, context, ...)
E: Scope,
// value rib (accumulator for values of arguments to a fn application)
R: Vec<Obj>,
// control stack (ptr to top call frame; frames have link to prev frame)
S: Frame
}
impl VMState {
fn make(a:Obj, x:Code, e:Scope, r:Vec<Obj>, s:Frame) -> VMState {
VMState { A:a, X:x, E:e, R:r, S:s }
}
fn accumulator(&self) -> &Obj { &self.A }
fn program(&self) -> &Code { &self.X }
fn environment(&self) -> &Scope { &self.E }
fn arguments(&self) -> &Vec<Obj> { &self.R }
fn stackframe(&self) -> &Frame { &self.S }
}
///////////////////////////////////////////////////////////////////////////////
// axon machine: definition and implementation of virtual machine for
// scheme-like semantics
//
// let code be an in-memory graph (DAG) of instructions, where the entry-point
// to a sub-program is a single instruction.
// let instruction be a composition of:
// - opcode, an enumeration identifying its type
// - operands, compile-time constant arguments to the instruction
// - links, 0, 1, or 2 links to successor-instructions.
// note the single exception: the 'nuate instruction takes a Frame
// argument. This means that (as written), compiled code that
// includes call/cc won't be serializable, because the live control-stack
// frames aren't serializable. This only matters if we start thinking
// about serializing execution-in-process code and moving it to a
// different machine for resumption.
// ...
// ...
// A VM with 5 registers, 12 primitive instructions, and
// 3 basic data structures:
// - Frame captures a call-frame and maintains a dynamic control stack
// - Scope manages bindings of variables to values in lexically nested scopes
// - Closure binds parameters to actual args and executes code
struct Machine {
state: VMState
}
impl Machine
{
fn init(state: VMState) -> Machine { Machine { state:state } }
fn step(&mut self) -> Option<Obj> {
let (mut A,X,mut E,mut R,mut S) = (
self.state.A.clone(),
self.state.X.clone(),
self.state.E.clone(),
self.state.R.clone(),
self.state.S.clone()
);
let x = match *X {
// case HALT : return // and return A
HALT {..} => {
box HALT {unused:true}
},
// case REFER : I: REFER ; A = E[I.var]; X = I.next
REFER {var:ref var, k:ref k} => {
A = E.get(var).expect("yowza");
k.clone()
},
// case CONSTANT: I: CONSTANT; A = I.obj; X = I.next
CONSTANT {obj:ref obj, k:ref k} => {
A = obj.clone();
k.clone()
},
// | case CLOSE : I: CLOSE ; A = Closur | conditional_block |
|
lib.rs | (null? args)
// (if (tail? next)
// c
// (list ’frame next c))
// (loop (cdr args)
// (compile (car args)
// (list ’argument c)))))])]
// [else
// (list ’constant x next)])))
pub fn compile(x: CoreLanguage, next: Code) -> Code {
match x {
Variable(str) => {
box REFER{var:str, k:next}
},
Quote(obj) => {
box CONSTANT{obj:ONil, k:next}
},
Lambda(vars, body) => {
box CLOSE{ vars:vars, body:compile(*body, box RETURN{unused:true}), k:next }
},
If(test, seq, alt) => {
let thenc = compile(*seq, next.clone());
let elsec = compile(*alt, next.clone());
compile(*test, box TEST{kthen:thenc, kelse:elsec})
},
Set(var, x) => {
compile(*x, box ASSIGN{var:var, k:next} )
},
CallCC(x) => {
let c = box CONTI{
k: box ARGUMENT{ k:compile(*x, box APPLY{unused:true}) }
};
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
},
List(x) => {
let args = x.slice_from(1);
let mut c = compile((*x[0]).clone(), box APPLY{unused:true});
for arg in args.iter() {
c = compile((**arg).clone(), box ARGUMENT{k:c});
}
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
}
_ =>
{ box CONSTANT{obj:ONil /*x*/, k:next} }
}
}
fn is_tail(x: &Code) -> bool {
match **x {
RETURN{..} => true,
_ => false
}
}
///////////////////////////////////////////////////////////////////////////////
// Opcode
// these are a dozen primitive instructions that implement scheme-like
// semantics. This is applicative-order lambda calculus with lexically-scoped
// environments: everything reduces to function calls where arguments are
// evaluated before application of function to arguments; variables are
// bound in their static (lexical) scope like Scheme, not in their dynamic
// (runtime) scope like earlier Lisps.
// Execution model is heap-based and there is support for call-with-current-continuation
// so exception semantics can be implemented easily in terms of call/cc.
#[deriving(Clone)]
pub enum Opcode {
HALT {unused:bool},
REFER {var: String, k: Code},
CONSTANT {obj: Obj, k: Code},
CLOSE {vars: Vec<String>, body: Code, k: Code},
TEST {kthen: Code, kelse: Code},
ASSIGN {var: String, k: Code},
CONTI {k: Code},
NUATE {s: Frame, var: String},
FRAME {k: Code, ret: Code},
ARGUMENT {k: Code},
APPLY {unused:bool},
INVOKE {method: String, k: Code},
RETURN {unused:bool},
}
pub type Code = Box<Opcode>;
/// Scope is a dynamic environment: a set of bindings, implemented
/// as a map from variable names (as Str, representing symbols)
/// to runtime value (Obj? if not typing; or AxonVal derivatives)
#[deriving(Clone)]
struct Scope {
parent: Option<Box<Scope>>, // link to enclosing scope
//local: HashMap<String, Obj>// local vars (conceptually includes fn params)
vars: Vec<String>,
vals: Vec<Obj>
}
impl Scope
{
fn new(parent:Option<Box<Scope>>) -> Scope {
Scope { parent:parent, vars:vec!(), vals:vec!() }
}
fn get(&self, var: &String) -> Option<Obj> {
let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => Some(self.vals[ix].clone()),
None => self.parent.as_ref().and_then(|p| p.get(var)) // fall back to the enclosing scope
}
}
fn set(&mut self, var: &String, val: Obj) {
let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => { *self.vals.get_mut(ix) = val },
None => {
self.vars.push(var.clone());
self.vals.push(val);
}
};
}
fn extend(&self, vars: Vec<String>, vals: Vec<Obj>) -> Scope {
Scope{
parent: Some(box self.clone()),
vars: vars,
vals: vals
}
}
}
/// Frame is the dynamic (runtime) representation of a function
/// execution. It captures the caller-frame and return-address,
/// so the complete dynamic context can be traced by walking back
/// thru the caller links; a bindings context (which pushes and
/// pops local scopes for variable definitions); the AST for the
/// function's code; and the instruction-pointer which indicates
/// the current point of execution in the code.
#[deriving(Clone)]
struct Frame {
// *X* when this frame returns, exec. resumes from caller.code[ret] (i.e. ret is an index into the caller's code)
ret: Code,
// *E* parms,locals
bindings: Scope,
// *R* accumulator of arg vals, to be combined w/ param names in extending env
valueRib: Vec<Obj>,
// *S* previous frame
caller: Option<Box<Frame>>,
//code: Code //belongs in Frame (there's a frame for every lambda definition)
}
impl Frame {
fn make(env:Scope, rib: Vec<Obj>, ret: Code, caller: Option<Box<Frame>>)
-> Frame
{
Frame { bindings:env, valueRib:rib, ret:ret, caller:caller }
}
}
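// Added note: call-with-current-continuation works by capturing the live
// Frame chain (register S) inside a NUATE instruction; invoking the saved
// continuation reinstates that chain. This is also why the comment block
// further below warns that compiled call/cc code is not serializable.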
/// closure captures the environment where it was created; when called,
/// it binds its params to actual-arg values (in left-to-right listed order)
/// and extends its environment with those bindings, and executes its
/// body with that extended environment.
#[deriving(Clone)]
pub struct Closure {
// names of parameters to be applied to closure
params: Vec<String>,
// static environment (lexical scope, captures scopes enclosing definition)
env: Scope,
// code implementing body of closure.
body: Code
}
impl Closure {
fn make(params: Vec<String>, env: Scope, body: Code) -> Closure {
Closure { params:params, env:env, body:body }
}
}
/// The VM below is fundamentally a state machine, of course, and
/// the five registers capture the entire current-state of that machine.
struct VMState
{
/////////////////////////////////////////////////////////////////////
// Machine Registers
// accumulator (most-recently-evaluated-expression value)
A: Obj,
// next instruction to be executed (source is compiled into a directed-graph of Opcode)
X: Code,
// current (lexical) environment (bindings map, context, ...)
E: Scope,
// value rib (accumulator for values of arguments to a fn application)
R: Vec<Obj>, | S: Frame
}
impl VMState {
fn make(a:Obj, x:Code, e:Scope, r:Vec<Obj>, s:Frame) -> VMState {
VMState { A:a, X:x, E:e, R:r, S:s }
}
fn accumulator(&self) -> &Obj { &self.A }
fn program(&self) -> &Code { &self.X }
fn environment(&self) -> &Scope { &self.E }
fn arguments(&self) -> &Vec<Obj> { &self.R }
fn stackframe(&self) -> &Frame { &self.S }
}
///////////////////////////////////////////////////////////////////////////////
// axon machine: definition and implementation of virtual machine for
// scheme-like semantics
//
// let code be an in-memory graph (DAG) of instructions, where the entry-point
// to a sub-program is a single instruction.
// let instruction be a composition of:
// - opcode, an enumeration identifying its type
// - operands, compile-time constant arguments to the instruction
// - links, 0, 1, or 2 links to successor-instructions.
// note the single exception: the 'nuate instruction takes a Frame
// argument. This means that (as written), compiled code that
// includes call/cc won't be serializable, because the live control-stack
// frames aren't serializable. This only matters if we start thinking
// about serializing execution-in-process code and moving it to a
// different machine for resumption.
// ...
// ...
// A VM with 5 registers, 12 primitive instructions, and
// 3 basic data structures:
// - Frame captures a call-frame and maintains a dynamic control stack
// - Scope manages bindings of variables to values in lexically nested scopes
// - Closure binds parameters to actual args and executes code
struct Machine {
state: VMState
}
impl Machine
{
fn init(state: VMState) -> Machine { Machine { state:state } }
fn step(&mut self) -> Option<Obj> {
let (mut A |
// control stack (ptr to top call frame; frames have link to prev frame) | random_line_split |
nvg.rs | the scaling
/// since aforementioned pixel snapping.
///
/// While this may sound a little odd, the setup allows you to always render the
/// same way regardless of scaling.
///
/// Note: currently only solid color fill is supported for text.
pub fn create_font(
&self,
name: &str,
filename: &str,
) -> std::result::Result<Font, Box<dyn std::error::Error>> {
let name = std::ffi::CString::new(name).unwrap();
let filename = std::ffi::CString::new(filename).unwrap();
let handle = unsafe { sys::nvgCreateFont(self.ctx, name.as_ptr(), filename.as_ptr()) };
match handle {
-1 => Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
"unable to load font",
))),
_ => Ok(Font { handle }),
}
}
/// NanoVG allows you to load jpg, png, psd, tga, pic and gif files to be used for rendering.
/// In addition you can upload your own image. The image loading is provided by stb_image.
pub fn create_image(
&self,
filename: &str,
) -> std::result::Result<Image, Box<dyn std::error::Error>> {
let filename = std::ffi::CString::new(filename).unwrap();
let handle = unsafe { sys::nvgCreateImage(self.ctx, filename.as_ptr(), 0) };
match handle {
-1 => Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
"unable to load image",
))),
_ => Ok(Image {
ctx: self.ctx,
handle,
}),
}
}
}
impl Drop for Context {
fn drop(&mut self) {
unsafe {
sys::nvgDeleteInternal(self.ctx);
}
}
}
/// Methods to draw on a frame. See `Context::draw_frame`.
pub struct Frame {
ctx: *mut sys::NVGcontext,
}
impl Frame {
/// Draw a path.
pub fn draw_path<F: Fn(&Path) -> Result>(&self, style: &Style, f: F) -> Result {
unsafe {
// sys::nvgSave(self.ctx);
// sys::nvgReset(self.ctx);
sys::nvgBeginPath(self.ctx);
}
if let Some(stroke) = &style.stroke {
match stroke {
PaintOrColor::Paint(p) => unsafe {
sys::nvgStrokePaint(self.ctx, &p.0);
},
PaintOrColor::Color(c) => unsafe {
sys::nvgStrokeColor(self.ctx, &c.0);
},
}
}
if let Some(fill) = &style.fill {
match fill {
PaintOrColor::Paint(p) => unsafe {
sys::nvgFillPaint(self.ctx, &p.0);
},
PaintOrColor::Color(c) => unsafe {
sys::nvgFillColor(self.ctx, &c.0);
},
}
}
let path = Path { ctx: self.ctx };
let r = f(&path);
if style.stroke.is_some() {
unsafe {
sys::nvgStroke(self.ctx);
}
}
if style.fill.is_some() {
unsafe {
sys::nvgFill(self.ctx);
}
}
/*
unsafe {
sys::nvgRestore(self.ctx);
}
*/
r
}
}
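// Added usage sketch (hypothetical names; a `frame` from Context::draw_frame
// and a `style` built elsewhere are assumed):
//     frame.draw_path(&style, |path| {
//         path.move_to(10.0, 10.0);
//         path.line_to(90.0, 90.0);
//         path.close_path();
//         Ok(())
//     })
// The closure only describes geometry; stroke and/or fill are applied
// afterwards according to which parts of `style` are set.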
/// A path.
pub struct Path {
ctx: *mut sys::NVGcontext,
}
impl Path {
/// Starts new sub-path with specified point as first point.
pub fn move_to(&self, x: f32, y: f32) {
unsafe {
sys::nvgMoveTo(self.ctx, x, y);
}
}
/// Adds line segment from the last point in the path to the specified point.
pub fn line_to(&self, x: f32, y: f32) {
unsafe {
sys::nvgLineTo(self.ctx, x, y);
}
}
/// Adds cubic bezier segment from last point in the path via two control points to the specified point.
pub fn bezier_to(&self, c1x: f32, c1y: f32, c2x: f32, c2y: f32, x: f32, y: f32) {
unsafe {
sys::nvgBezierTo(self.ctx, c1x, c1y, c2x, c2y, x, y);
}
}
/// Adds quadratic bezier segment from last point in the path via a control point to the
/// specified point.
pub fn quad_to(&self, cx: f32, cy: f32, x: f32, y: f32) {
unsafe {
sys::nvgQuadTo(self.ctx, cx, cy, x, y);
}
}
/// Adds an arc segment at the corner defined by the last path point, and two specified points.
pub fn arc_to(&self, x1: f32, y1: f32, x2: f32, y2: f32, radius: f32) {
unsafe {
sys::nvgArcTo(self.ctx, x1, y1, x2, y2, radius);
}
}
/// Closes current sub-path with a line segment.
pub fn close_path(&self) {
unsafe {
sys::nvgClosePath(self.ctx);
}
}
/// Creates a new circle arc shaped sub-path. The arc center is at (`cx`,`cy`), the arc radius
/// is `r`, and the arc is drawn from angle `a0` to `a1`, and swept in direction `dir`.
/// Angles are in radians.
pub fn arc(&self, cx: f32, cy: f32, r: f32, a0: f32, a1: f32, dir: Direction) {
unsafe {
sys::nvgArc(self.ctx, cx, cy, r, a0, a1, dir.to_sys() as _);
}
}
/// Creates a new oval arc shaped sub-path. The arc center is at (`cx`, `cy`), the arc radius
/// is (`rx`, `ry`), and the arc is drawn from angle `a0` to `a1`, and swept in direction `dir`.
#[allow(clippy::too_many_arguments)]
pub fn elliptical_arc(
&self,
cx: f32,
cy: f32,
rx: f32,
ry: f32,
a0: f32,
a1: f32,
dir: Direction,
) {
unsafe {
sys::nvgEllipticalArc(self.ctx, cx, cy, rx, ry, a0, a1, dir.to_sys() as _);
}
}
/// Creates new rectangle shaped sub-path.
pub fn rect(&self, x: f32, y: f32, w: f32, h: f32) {
unsafe {
sys::nvgRect(self.ctx, x, y, w, h);
}
}
/// Creates a new rounded rectangle sub-path with rounded corners
#[allow(clippy::many_single_char_names)]
pub fn rounded_rect(&self, x: f32, y: f32, w: f32, h: f32, r: f32) {
unsafe {
sys::nvgRoundedRect(self.ctx, x, y, w, h, r);
}
}
/// Creates new rounded rectangle shaped sub-path with varying radii for each corner.
#[allow(clippy::too_many_arguments)]
#[allow(clippy::many_single_char_names)]
pub fn rounded_rect_varying(
&self,
x: f32,
y: f32,
w: f32,
h: f32,
rad_top_left: f32,
rad_top_right: f32,
rad_bottom_right: f32,
rad_bottom_left: f32,
) {
unsafe {
sys::nvgRoundedRectVarying(
self.ctx,
x,
y,
w,
h,
rad_top_left,
rad_top_right,
rad_bottom_right,
rad_bottom_left,
);
}
}
/// Creates a new ellipse shaped sub-path.
pub fn ellipse(&self, cx: f32, cy: f32, rx: f32, ry: f32) {
unsafe {
sys::nvgEllipse(self.ctx, cx, cy, rx, ry);
}
}
/// Creates a new circle shaped path.
pub fn circle(&self, cx: f32, cy: f32, r: f32) {
unsafe {
sys::nvgCircle(self.ctx, cx, cy, r);
}
}
// TODO: fill
}
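// Added sketch: a quarter-circle "pie slice" with the arc API — angles are
// in radians and swept in the given direction:
//     path.move_to(cx, cy);
//     path.arc(cx, cy, r, 0.0, std::f32::consts::FRAC_PI_2, Direction::Clockwise);
//     path.close_path();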
/// Winding direction
#[derive(Debug, Clone, Copy)]
pub enum Direction {
/// Winding for holes.
Clockwise,
/// Winding for solid shapes.
CounterClockwise,
}
impl Direction {
fn | to_sys | identifier_name |
|
nvg.rs | { handle }),
}
}
/// NanoVG allows you to load jpg, png, psd, tga, pic and gif files to be used for rendering.
/// In addition you can upload your own image. The image loading is provided by stb_image.
pub fn create_image(
&self,
filename: &str,
) -> std::result::Result<Image, Box<dyn std::error::Error>> {
let filename = std::ffi::CString::new(filename).unwrap();
let handle = unsafe { sys::nvgCreateImage(self.ctx, filename.as_ptr(), 0) };
match handle {
-1 => Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
"unable to load image",
))),
_ => Ok(Image {
ctx: self.ctx,
handle,
}),
}
}
}
impl Drop for Context {
fn drop(&mut self) {
unsafe {
sys::nvgDeleteInternal(self.ctx);
}
}
}
/// Methods to draw on a frame. See `Context::draw_frame`.
pub struct Frame {
ctx: *mut sys::NVGcontext,
}
impl Frame {
/// Draw a path.
pub fn draw_path<F: Fn(&Path) -> Result>(&self, style: &Style, f: F) -> Result {
unsafe {
// sys::nvgSave(self.ctx);
// sys::nvgReset(self.ctx);
sys::nvgBeginPath(self.ctx);
}
if let Some(stroke) = &style.stroke {
match stroke {
PaintOrColor::Paint(p) => unsafe {
sys::nvgStrokePaint(self.ctx, &p.0);
},
PaintOrColor::Color(c) => unsafe {
sys::nvgStrokeColor(self.ctx, &c.0);
},
}
}
if let Some(fill) = &style.fill {
match fill {
PaintOrColor::Paint(p) => unsafe {
sys::nvgFillPaint(self.ctx, &p.0);
},
PaintOrColor::Color(c) => unsafe {
sys::nvgFillColor(self.ctx, &c.0);
},
}
}
let path = Path { ctx: self.ctx };
let r = f(&path);
if style.stroke.is_some() {
unsafe {
sys::nvgStroke(self.ctx);
}
}
if style.fill.is_some() {
unsafe {
sys::nvgFill(self.ctx);
}
}
/*
unsafe {
sys::nvgRestore(self.ctx);
}
*/
r
}
}
/// A path.
pub struct Path {
ctx: *mut sys::NVGcontext,
}
impl Path {
/// Starts new sub-path with specified point as first point.
pub fn move_to(&self, x: f32, y: f32) {
unsafe {
sys::nvgMoveTo(self.ctx, x, y);
}
}
/// Adds line segment from the last point in the path to the specified point.
pub fn line_to(&self, x: f32, y: f32) {
unsafe {
sys::nvgLineTo(self.ctx, x, y);
}
}
/// Adds cubic bezier segment from last point in the path via two control points to the specified point.
pub fn bezier_to(&self, c1x: f32, c1y: f32, c2x: f32, c2y: f32, x: f32, y: f32) {
unsafe {
sys::nvgBezierTo(self.ctx, c1x, c1y, c2x, c2y, x, y);
}
}
/// Adds quadratic bezier segment from last point in the path via a control point to the
/// specified point.
pub fn quad_to(&self, cx: f32, cy: f32, x: f32, y: f32) {
unsafe {
sys::nvgQuadTo(self.ctx, cx, cy, x, y);
}
}
/// Adds an arc segment at the corner defined by the last path point, and two specified points.
pub fn arc_to(&self, x1: f32, y1: f32, x2: f32, y2: f32, radius: f32) {
unsafe {
sys::nvgArcTo(self.ctx, x1, y1, x2, y2, radius);
}
}
/// Closes current sub-path with a line segment.
pub fn close_path(&self) {
unsafe {
sys::nvgClosePath(self.ctx);
}
}
/// Creates a new circle arc shaped sub-path. The arc center is at (`cx`,`cy`), the arc radius
/// is `r`, and the arc is drawn from angle `a0` to `a1`, and swept in direction `dir`.
/// Angles are in radians.
pub fn arc(&self, cx: f32, cy: f32, r: f32, a0: f32, a1: f32, dir: Direction) {
unsafe {
sys::nvgArc(self.ctx, cx, cy, r, a0, a1, dir.to_sys() as _);
}
}
/// Creates a new oval arc shaped sub-path. The arc center is at (`cx`, `cy`), the arc radius
/// is (`rx`, `ry`), and the arc is drawn from angle `a0` to `a1`, and swept in direction `dir`.
#[allow(clippy::too_many_arguments)]
pub fn elliptical_arc(
&self,
cx: f32,
cy: f32,
rx: f32,
ry: f32,
a0: f32,
a1: f32,
dir: Direction,
) {
unsafe {
sys::nvgEllipticalArc(self.ctx, cx, cy, rx, ry, a0, a1, dir.to_sys() as _);
}
}
/// Creates new rectangle shaped sub-path.
pub fn rect(&self, x: f32, y: f32, w: f32, h: f32) {
unsafe {
sys::nvgRect(self.ctx, x, y, w, h);
}
}
/// Creates a new rounded rectangle sub-path with rounded corners
#[allow(clippy::many_single_char_names)]
pub fn rounded_rect(&self, x: f32, y: f32, w: f32, h: f32, r: f32) {
unsafe {
sys::nvgRoundedRect(self.ctx, x, y, w, h, r);
}
}
/// Creates new rounded rectangle shaped sub-path with varying radii for each corner.
#[allow(clippy::too_many_arguments)]
#[allow(clippy::many_single_char_names)]
pub fn rounded_rect_varying(
&self,
x: f32,
y: f32,
w: f32,
h: f32,
rad_top_left: f32,
rad_top_right: f32,
rad_bottom_right: f32,
rad_bottom_left: f32,
) {
unsafe {
sys::nvgRoundedRectVarying(
self.ctx,
x,
y,
w,
h,
rad_top_left,
rad_top_right,
rad_bottom_right,
rad_bottom_left,
);
}
}
/// Creates a new ellipse shaped sub-path.
pub fn ellipse(&self, cx: f32, cy: f32, rx: f32, ry: f32) {
unsafe {
sys::nvgEllipse(self.ctx, cx, cy, rx, ry);
}
}
/// Creates a new circle shaped path.
pub fn circle(&self, cx: f32, cy: f32, r: f32) {
unsafe {
sys::nvgCircle(self.ctx, cx, cy, r);
}
}
// TODO: fill
}
/// Winding direction
#[derive(Debug, Clone, Copy)]
pub enum Direction {
/// Winding for holes.
Clockwise,
/// Winding for solid shapes.
CounterClockwise,
}
impl Direction {
fn to_sys(self) -> sys::NVGwinding {
match self {
Direction::Clockwise => sys::NVGwinding_NVG_CW,
Direction::CounterClockwise => sys::NVGwinding_NVG_CCW,
}
}
}
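// Added note: winding is how NanoVG tells solid shapes from holes when
// filling — a clockwise sub-path drawn inside a counter-clockwise outline
// is rendered as a cut-out.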
#[derive(Debug)]
#[doc(hidden)]
pub enum PaintOrColor {
Paint(Paint),
Color(Color),
}
impl From<Paint> for PaintOrColor {
fn from(p: Paint) -> PaintOrColor {
PaintOrColor::Paint(p)
}
}
impl From<Color> for PaintOrColor {
fn from(c: Color) -> PaintOrColor {
PaintOrColor::Color(c)
}
}
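// Added note (an assumption about the builder below): these From impls exist
// so Style's setters can take `impl Into<PaintOrColor>`, letting callers pass
// either a flat Color or a gradient Paint without wrapping it themselves.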
/// The stroke and/or fill which will be applied to a path.
#[derive(Debug, Default)]
pub struct Style {
stroke: Option<PaintOrColor>,
fill: Option<PaintOrColor>, | }
impl Style {
/// Set the stroke of this style. | random_line_split |
|
deployment-center-state-manager.ts | $ = new BehaviorSubject<string>('');
public bitBucketToken$ = new BehaviorSubject<string>('');
public gitHubToken$ = new BehaviorSubject<string>('');
public replacementPublishUrl = '';
constructor(
private _cacheService: CacheService,
private _azureDevOpsService: AzureDevOpsService,
private _translateService: TranslateService,
private _scenarioService: ScenarioService,
private _githubService: GithubService,
private _logService: LogService,
private _siteService: SiteService,
userService: UserService,
subscriptionService: SubscriptionService
) {
this.resourceIdStream$
.switchMap(r => {
this._resourceId = r;
const siteDescriptor = new ArmSiteDescriptor(this._resourceId);
this.siteName = siteDescriptor.site;
this.slotName = siteDescriptor.slot;
// TODO (michinoy): Figure out a way to only generate this guid IF github actions build provider
// is selected. This might require refactoring a ton of stuff in step-complete component to understand
// what build provider is selected.
this.gitHubPublishProfileSecretGuid = Guid.newGuid()
.toLowerCase()
.replace(/[-]/g, '');
return forkJoin(
this._siteService.getSite(this._resourceId),
this._siteService.getSiteConfig(this._resourceId),
this._siteService.getAppSettings(this._resourceId),
this._siteService.fetchSiteConfigMetadata(this._resourceId),
this._siteService.getPublishingCredentials(this._resourceId),
subscriptionService.getSubscription(siteDescriptor.subscription)
);
})
.switchMap(result => {
const [site, config, appSettings, configMetadata, publishingCredentials, sub] = result;
this.siteArm = site.result;
this.isLinuxApp = this.siteArm.kind.toLowerCase().includes(Kinds.linux);
this.isFunctionApp = this.siteArm.kind.toLowerCase().includes(Kinds.functionApp);
this.subscriptionName = sub.result.displayName;
// NOTE(michinoy): temporary fix, while the backend reinstates the scm url in the publish url property.
this.replacementPublishUrl = this.isLinuxApp ? this._getScmUri(publishingCredentials) : null;
if (config.isSuccessful && appSettings.isSuccessful && configMetadata.isSuccessful) {
this._setStackAndVersion(config.result.properties, appSettings.result.properties, configMetadata.result.properties);
}
this.siteArmObj$.next(this.siteArm);
return this._scenarioService.checkScenarioAsync(ScenarioIds.vstsDeploymentHide, { site: this.siteArm });
})
.subscribe(vstsScenarioCheck => {
this.hideBuild = vstsScenarioCheck.status === 'disabled';
});
userService
.getStartupInfo()
.takeUntil(this._ngUnsubscribe$)
.subscribe(r => {
this._token = r.token;
});
}
public get wizardValues(): WizardForm {
return this.wizardForm.value;
}
public set wizardValues(values: WizardForm) {
this.wizardForm.patchValue(values);
}
public get sourceSettings(): FormGroup {
return (this.wizardForm && (this.wizardForm.controls.sourceSettings as FormGroup)) || null;
}
public get buildSettings(): FormGroup {
return (this.wizardForm && (this.wizardForm.controls.buildSettings as FormGroup)) || null;
}
public deploy(): Observable<{ status: string; statusMessage: string; result: any }> {
switch (this.wizardValues.buildProvider) {
case 'github':
// NOTE(michinoy): Only initiate writing a workflow configuration file if the branch does not already have it OR
// the user opted to overwrite it.
if (
!this.wizardValues.sourceSettings.githubActionWorkflowOption ||
this.wizardValues.sourceSettings.githubActionWorkflowOption === WorkflowOptions.Overwrite
) {
return this._deployGithubActions().map(result => ({ status: 'succeeded', statusMessage: null, result }));
} else {
return this._deployKudu().map(result => ({ status: 'succeeded', statusMessage: null, result }));
}
default:
return this._deployKudu().map(result => ({ status: 'succeeded', statusMessage: null, result }));
}
}
public fetchVSTSProfile() {
// if the first get fails, it's likely because the user doesn't have an account in vsts yet
// the fix for this is to do an empty post call on the same url and then get it
return this._cacheService
.get(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false))
.catch(() => {
return this._cacheService
.post(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false))
.switchMap(() => {
return this._cacheService.get(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false));
});
});
}
private _setStackAndVersion(
siteConfig: SiteConfig,
siteAppSettings: { [key: string]: string },
configMetadata: { [key: string]: string }
) {
if (this.isLinuxApp) {
this._setStackAndVersionForLinux(siteConfig);
} else {
this._setStackAndVersionForWindows(siteConfig, siteAppSettings, configMetadata);
}
}
private _setStackAndVersionForWindows(
siteConfig: SiteConfig,
siteAppSettings: { [key: string]: string },
configMetadata: { [key: string]: string }
) {
if (configMetadata['CURRENT_STACK']) {
const metadataStack = configMetadata['CURRENT_STACK'].toLowerCase();
// NOTE(michinoy): Java is special, so need to handle it carefully. Also in this case, use
// the string 'java' rather than any of the constants defined as it is not related to any of the
// defined constants.
if (metadataStack === 'java') {
this.stack = siteConfig.javaVersion === JavaVersions.WindowsVersion8 ? RuntimeStacks.java8 : RuntimeStacks.java11;
} else if (metadataStack === 'dotnet') {
this.stack = RuntimeStacks.aspnet;
} else {
this.stack = metadataStack;
}
}
if (this.stack === RuntimeStacks.node) {
this.stackVersion = siteAppSettings[Constants.nodeVersionAppSettingName];
} else if (this.stack === RuntimeStacks.python) {
this.stackVersion = siteConfig.pythonVersion;
} else if (this.stack === RuntimeStacks.java8 || this.stack === RuntimeStacks.java11) {
this.stackVersion = `${siteConfig.javaVersion}|${siteConfig.javaContainer}|${siteConfig.javaContainerVersion}`;
} else if (this.stack === RuntimeStacks.aspnet && !!siteConfig.netFrameworkVersion) {
this.stackVersion = siteConfig.netFrameworkVersion;
} else if (this.stack === '') {
this.stackVersion = '';
}
}
private _setStackAndVersionForLinux(siteConfig: SiteConfig) {
const linuxFxVersionParts = siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion.split('|') : [];
const runtimeStack = linuxFxVersionParts.length > 0 ? linuxFxVersionParts[0].toLocaleLowerCase() : '';
// NOTE(michinoy): Java is special, so need to handle it carefully.
if (runtimeStack === JavaContainers.JavaSE || runtimeStack === JavaContainers.Tomcat) {
const fxVersionParts = !!siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion.split('-') : [];
const fxStack = fxVersionParts.length === 2 ? fxVersionParts[1].toLocaleLowerCase() : '';
if (fxStack === JavaVersions.LinuxVersion8 || fxStack === JavaVersions.LinuxVersion11) {
this.stack = fxStack === JavaVersions.LinuxVersion8 ? RuntimeStacks.java8 : RuntimeStacks.java11;
} else {
this.stack = '';
}
} else {
// NOTE(michinoy): So it seems that in the stack API the stack value is 'aspnet', whereas from site config, the stack identifier is
// 'dotnetcore'. Due to this mismatch, we need to hard code the conversion on the client side.
this.stack = siteConfig.linuxFxVersion.toLocaleLowerCase() === 'dotnetcore|5.0' ? RuntimeStacks.aspnet : runtimeStack;
}
this.stackVersion = !!siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion : '';
}
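// Added illustration (constant values assumed): a linuxFxVersion of
// 'TOMCAT|8.5-java11' takes the Java branch ('tomcat' container) and the
// '-java11' suffix selects RuntimeStacks.java11, while 'DOTNETCORE|5.0' is
// special-cased above to the 'aspnet' stack id expected by the stacks API.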
private _deployGithubActions() | {
const repo = this.wizardValues.sourceSettings.repoUrl.replace(`${DeploymentCenterConstants.githubUri}/`, '');
const branch = this.wizardValues.sourceSettings.branch || 'master';
const workflowInformation = this._githubService.getWorkflowInformation(
this.wizardValues.buildSettings,
this.wizardValues.sourceSettings,
this.isLinuxApp,
this.gitHubPublishProfileSecretGuid,
this.siteName,
this.slotName
);
const commitInfo: GitHubCommit = {
repoName: repo,
branchName: branch,
filePath: `.github/workflows/${workflowInformation.fileName}`,
message: this._translateService.instant(PortalResources.githubActionWorkflowCommitMessage),
contentBase64Encoded: btoa(workflowInformation.content),
committer: {
name: 'Azure App Service', | identifier_body |
|
deployment-center-state-manager.ts | stsScenarioCheck => {
this.hideBuild = vstsScenarioCheck.status === 'disabled';
});
userService
.getStartupInfo()
.takeUntil(this._ngUnsubscribe$)
.subscribe(r => {
this._token = r.token;
});
}
public get wizardValues(): WizardForm {
return this.wizardForm.value;
}
public set wizardValues(values: WizardForm) {
this.wizardForm.patchValue(values);
}
public get sourceSettings(): FormGroup {
return (this.wizardForm && (this.wizardForm.controls.sourceSettings as FormGroup)) || null;
}
public get buildSettings(): FormGroup {
return (this.wizardForm && (this.wizardForm.controls.buildSettings as FormGroup)) || null;
}
public deploy(): Observable<{ status: string; statusMessage: string; result: any }> {
switch (this.wizardValues.buildProvider) {
case 'github':
// NOTE(michinoy): Only initiate writing a workflow configuration file if the branch does not already have it OR
// the user opted to overwrite it.
if (
!this.wizardValues.sourceSettings.githubActionWorkflowOption ||
this.wizardValues.sourceSettings.githubActionWorkflowOption === WorkflowOptions.Overwrite
) {
return this._deployGithubActions().map(result => ({ status: 'succeeded', statusMessage: null, result }));
} else {
return this._deployKudu().map(result => ({ status: 'succeeded', statusMessage: null, result }));
}
default:
return this._deployKudu().map(result => ({ status: 'succeeded', statusMessage: null, result }));
}
}
public fetchVSTSProfile() {
// if the first get fails, it's likely because the user doesn't have an account in vsts yet
// the fix for this is to do an empty post call on the same url and then get it
return this._cacheService
.get(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false))
.catch(() => {
return this._cacheService
.post(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false))
.switchMap(() => {
return this._cacheService.get(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false));
});
});
}
private _setStackAndVersion(
siteConfig: SiteConfig,
siteAppSettings: { [key: string]: string },
configMetadata: { [key: string]: string }
) {
if (this.isLinuxApp) {
this._setStackAndVersionForLinux(siteConfig);
} else {
this._setStackAndVersionForWindows(siteConfig, siteAppSettings, configMetadata);
}
}
private _setStackAndVersionForWindows(
siteConfig: SiteConfig,
siteAppSettings: { [key: string]: string },
configMetadata: { [key: string]: string }
) {
if (configMetadata['CURRENT_STACK']) {
const metadataStack = configMetadata['CURRENT_STACK'].toLowerCase();
// NOTE(michinoy): Java is special, so need to handle it carefully. Also in this case, use
// the string 'java' rather than any of the constants defined as it is not related to any of the
// defined constants.
if (metadataStack === 'java') {
this.stack = siteConfig.javaVersion === JavaVersions.WindowsVersion8 ? RuntimeStacks.java8 : RuntimeStacks.java11;
} else if (metadataStack === 'dotnet') {
this.stack = RuntimeStacks.aspnet;
} else {
this.stack = metadataStack;
}
}
if (this.stack === RuntimeStacks.node) {
this.stackVersion = siteAppSettings[Constants.nodeVersionAppSettingName];
} else if (this.stack === RuntimeStacks.python) {
this.stackVersion = siteConfig.pythonVersion;
} else if (this.stack === RuntimeStacks.java8 || this.stack === RuntimeStacks.java11) {
this.stackVersion = `${siteConfig.javaVersion}|${siteConfig.javaContainer}|${siteConfig.javaContainerVersion}`;
} else if (this.stack === RuntimeStacks.aspnet && !!siteConfig.netFrameworkVersion) {
this.stackVersion = siteConfig.netFrameworkVersion;
} else if (this.stack === '') {
this.stackVersion = '';
}
}
private _setStackAndVersionForLinux(siteConfig: SiteConfig) {
const linuxFxVersionParts = siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion.split('|') : [];
const runtimeStack = linuxFxVersionParts.length > 0 ? linuxFxVersionParts[0].toLocaleLowerCase() : '';
// NOTE(michinoy): Java is special, so need to handle it carefully.
if (runtimeStack === JavaContainers.JavaSE || runtimeStack === JavaContainers.Tomcat) {
const fxVersionParts = !!siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion.split('-') : [];
const fxStack = fxVersionParts.length === 2 ? fxVersionParts[1].toLocaleLowerCase() : '';
if (fxStack === JavaVersions.LinuxVersion8 || fxStack === JavaVersions.LinuxVersion11) {
this.stack = fxStack === JavaVersions.LinuxVersion8 ? RuntimeStacks.java8 : RuntimeStacks.java11;
} else {
this.stack = '';
}
} else {
// NOTE(michinoy): So it seems that in the stack API the stack value is 'aspnet', whereas from site config, the stack identifier is
// 'dotnetcore'. Due to this mismatch, we need to hard code the conversion on the client side.
this.stack = siteConfig.linuxFxVersion.toLocaleLowerCase() === 'dotnetcore|5.0' ? RuntimeStacks.aspnet : runtimeStack;
}
this.stackVersion = !!siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion : '';
}
private _deployGithubActions() {
const repo = this.wizardValues.sourceSettings.repoUrl.replace(`${DeploymentCenterConstants.githubUri}/`, '');
const branch = this.wizardValues.sourceSettings.branch || 'master';
const workflowInformation = this._githubService.getWorkflowInformation(
this.wizardValues.buildSettings,
this.wizardValues.sourceSettings,
this.isLinuxApp,
this.gitHubPublishProfileSecretGuid,
this.siteName,
this.slotName
);
const commitInfo: GitHubCommit = {
repoName: repo,
branchName: branch,
filePath: `.github/workflows/${workflowInformation.fileName}`,
message: this._translateService.instant(PortalResources.githubActionWorkflowCommitMessage),
contentBase64Encoded: btoa(workflowInformation.content),
committer: {
name: 'Azure App Service',
email: '[email protected]',
},
};
return this._githubService
.fetchWorkflowConfiguration(this.gitHubToken$.getValue(), this.wizardValues.sourceSettings.repoUrl, repo, branch, commitInfo.filePath)
.switchMap(fileContentResponse => {
if (fileContentResponse) {
commitInfo.sha = fileContentResponse.sha;
}
const requestContent: GitHubActionWorkflowRequestContent = {
resourceId: this._resourceId,
secretName: workflowInformation.secretName,
commit: commitInfo,
};
return this._githubService.createOrUpdateActionWorkflow(
this.getToken(),
this.gitHubToken$.getValue(),
requestContent,
this.replacementPublishUrl
);
})
.switchMap(_ => {
return this._deployKudu();
});
}
private _deployKudu() {
const payload = this.wizardValues.sourceSettings;
payload.isGitHubAction = this.wizardValues.buildProvider === 'github';
payload.isManualIntegration = this.wizardValues.sourceProvider === 'external';
if (this.wizardValues.sourceProvider === 'localgit') {
return this._cacheService
.patchArm(`${this._resourceId}/config/web`, ARMApiVersions.antaresApiVersion20181101, {
properties: {
scmType: 'LocalGit',
},
})
.map(r => r.json());
} else {
return this._cacheService
.putArm(`${this._resourceId}/sourcecontrols/web`, ARMApiVersions.antaresApiVersion20181101, {
properties: payload,
})
.map(r => r.json())
.catch((err, _) => {
if (payload.isGitHubAction && this._isApiSyncError(err.json())) {
// NOTE(michinoy): If the save operation was being done for GitHub Action, and
// we are experiencing the API sync error, populate the source controls properties
// manually.
this._logService.error(LogCategories.cicd, 'apiSyncErrorWorkaround', { resourceId: this._resourceId });
return this._updateGitHubActionSourceControlPropertiesManually(payload);
} else {
return Observable.throw(err);
}
});
}
}
private _updateGitHubActionSourceControlPropertiesManually(sourceSettingsPayload: SourceSettings) {
return this._fetchMetadata()
.switchMap(r => {
if (r && r.result && r.result.properties) | {
return this._updateMetadata(r.result.properties, sourceSettingsPayload);
} | conditional_block |
|
// deployment-center-state-manager.ts
.hideBuild = vstsScenarioCheck.status === 'disabled';
});
userService
.getStartupInfo()
.takeUntil(this._ngUnsubscribe$)
.subscribe(r => {
this._token = r.token;
});
}
public get wizardValues(): WizardForm {
return this.wizardForm.value;
}
public set wizardValues(values: WizardForm) {
this.wizardForm.patchValue(values);
}
public get sourceSettings(): FormGroup {
return (this.wizardForm && (this.wizardForm.controls.sourceSettings as FormGroup)) || null;
}
public get buildSettings(): FormGroup {
return (this.wizardForm && (this.wizardForm.controls.buildSettings as FormGroup)) || null;
}
public deploy(): Observable<{ status: string; statusMessage: string; result: any }> {
switch (this.wizardValues.buildProvider) {
case 'github':
// NOTE(michinoy): Only initiate writing a workflow configuration file if the branch does not already have it OR
// the user opted to overwrite it.
if (
!this.wizardValues.sourceSettings.githubActionWorkflowOption ||
this.wizardValues.sourceSettings.githubActionWorkflowOption === WorkflowOptions.Overwrite
) {
return this._deployGithubActions().map(result => ({ status: 'succeeded', statusMessage: null, result }));
} else {
return this._deployKudu().map(result => ({ status: 'succeeded', statusMessage: null, result }));
}
default:
return this._deployKudu().map(result => ({ status: 'succeeded', statusMessage: null, result }));
}
}
public fetchVSTSProfile() {
// if the first get fails, it's likely because the user doesn't have an account in vsts yet
// the fix for this is to do an empty post call on the same url and then get it
return this._cacheService
.get(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false))
.catch(() => {
return this._cacheService
.post(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false))
.switchMap(() => {
return this._cacheService.get(AzureDevOpsService.AzDevProfileUri, true, this._azureDevOpsService.getAzDevDirectHeaders(false));
});
});
}
private _setStackAndVersion(
siteConfig: SiteConfig,
siteAppSettings: { [key: string]: string },
configMetadata: { [key: string]: string }
) {
if (this.isLinuxApp) {
this._setStackAndVersionForLinux(siteConfig);
} else {
this._setStackAndVersionForWindows(siteConfig, siteAppSettings, configMetadata);
}
}
private _setStackAndVersionForWindows(
siteConfig: SiteConfig,
siteAppSettings: { [key: string]: string },
configMetadata: { [key: string]: string }
) {
if (configMetadata['CURRENT_STACK']) {
const metadataStack = configMetadata['CURRENT_STACK'].toLowerCase();
// NOTE(michinoy): Java is special, so we need to handle it carefully. Also, in this case, use
// the string 'java' rather than any of the defined constants, since it is not related to
// any of them.
if (metadataStack === 'java') {
this.stack = siteConfig.javaVersion === JavaVersions.WindowsVersion8 ? RuntimeStacks.java8 : RuntimeStacks.java11;
} else if (metadataStack === 'dotnet') {
this.stack = RuntimeStacks.aspnet;
} else {
this.stack = metadataStack;
}
}
if (this.stack === RuntimeStacks.node) {
this.stackVersion = siteAppSettings[Constants.nodeVersionAppSettingName];
} else if (this.stack === RuntimeStacks.python) {
this.stackVersion = siteConfig.pythonVersion;
} else if (this.stack === RuntimeStacks.java8 || this.stack === RuntimeStacks.java11) {
this.stackVersion = `${siteConfig.javaVersion}|${siteConfig.javaContainer}|${siteConfig.javaContainerVersion}`;
} else if (this.stack === RuntimeStacks.aspnet && !!siteConfig.netFrameworkVersion) {
this.stackVersion = siteConfig.netFrameworkVersion;
} else if (this.stack === '') {
this.stackVersion = '';
}
}
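// Worked example (hypothetical values): with configMetadata['CURRENT_STACK'] = 'dotnet',
// the stack resolves to RuntimeStacks.aspnet and stackVersion is then read from
// siteConfig.netFrameworkVersion (e.g. 'v4.0').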
private _setStackAndVersionForLinux(siteConfig: SiteConfig) {
const linuxFxVersionParts = siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion.split('|') : [];
const runtimeStack = linuxFxVersionParts.length > 0 ? linuxFxVersionParts[0].toLocaleLowerCase() : '';
// NOTE(michinoy): Java is special, so we need to handle it carefully.
if (runtimeStack === JavaContainers.JavaSE || runtimeStack === JavaContainers.Tomcat) {
const fxVersionParts = !!siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion.split('-') : [];
const fxStack = fxVersionParts.length === 2 ? fxVersionParts[1].toLocaleLowerCase() : '';
if (fxStack === JavaVersions.LinuxVersion8 || fxStack === JavaVersions.LinuxVersion11) {
this.stack = fxStack === JavaVersions.LinuxVersion8 ? RuntimeStacks.java8 : RuntimeStacks.java11;
} else {
this.stack = '';
}
} else {
// NOTE(michinoy): So it seems that in the stack API the stack value is 'aspnet', whereas from site config, the stack identifier is
// 'dotnetcore'. Due to this mismatch, we need to hard code the conversion on the client side.
this.stack = siteConfig.linuxFxVersion.toLocaleLowerCase() === 'dotnetcore|5.0' ? RuntimeStacks.aspnet : runtimeStack;
}
this.stackVersion = !!siteConfig.linuxFxVersion ? siteConfig.linuxFxVersion : '';
}
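// Worked example (hypothetical config): linuxFxVersion = 'TOMCAT|9.0-java11' yields
// runtimeStack 'tomcat' and fxVersionParts ['TOMCAT|9.0', 'java11'], so the stack becomes
// RuntimeStacks.java11 and stackVersion stays 'TOMCAT|9.0-java11'.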
private _deployGithubActions() {
const repo = this.wizardValues.sourceSettings.repoUrl.replace(`${DeploymentCenterConstants.githubUri}/`, '');
const branch = this.wizardValues.sourceSettings.branch || 'master';
const workflowInformation = this._githubService.getWorkflowInformation(
this.wizardValues.buildSettings,
this.wizardValues.sourceSettings,
this.isLinuxApp,
this.gitHubPublishProfileSecretGuid,
this.siteName,
this.slotName
);
const commitInfo: GitHubCommit = {
repoName: repo,
branchName: branch,
filePath: `.github/workflows/${workflowInformation.fileName}`,
message: this._translateService.instant(PortalResources.githubActionWorkflowCommitMessage),
contentBase64Encoded: btoa(workflowInformation.content),
committer: {
name: 'Azure App Service',
email: '[email protected]',
},
};
return this._githubService
.fetchWorkflowConfiguration(this.gitHubToken$.getValue(), this.wizardValues.sourceSettings.repoUrl, repo, branch, commitInfo.filePath)
.switchMap(fileContentResponse => {
if (fileContentResponse) {
commitInfo.sha = fileContentResponse.sha;
}
const requestContent: GitHubActionWorkflowRequestContent = {
resourceId: this._resourceId,
secretName: workflowInformation.secretName,
commit: commitInfo,
};
return this._githubService.createOrUpdateActionWorkflow(
this.getToken(),
this.gitHubToken$.getValue(),
requestContent,
this.replacementPublishUrl
);
})
.switchMap(_ => {
return this._deployKudu();
});
}
private _deployKudu() {
const payload = this.wizardValues.sourceSettings;
payload.isGitHubAction = this.wizardValues.buildProvider === 'github';
payload.isManualIntegration = this.wizardValues.sourceProvider === 'external';
if (this.wizardValues.sourceProvider === 'localgit') {
return this._cacheService
.patchArm(`${this._resourceId}/config/web`, ARMApiVersions.antaresApiVersion20181101, {
properties: {
scmType: 'LocalGit',
},
})
.map(r => r.json());
} else {
return this._cacheService
.putArm(`${this._resourceId}/sourcecontrols/web`, ARMApiVersions.antaresApiVersion20181101, {
properties: payload,
})
.map(r => r.json())
.catch((err, _) => {
if (payload.isGitHubAction && this._isApiSyncError(err.json())) {
// NOTE(michinoy): If the save operation was being done for GitHub Action, and
// we are experiencing the API sync error, populate the source controls properties
// manually.
this._logService.error(LogCategories.cicd, 'apiSyncErrorWorkaround', { resourceId: this._resourceId });
return this._updateGitHubActionSourceControlPropertiesManually(payload);
} else {
return Observable.throw(err);
}
});
}
}
private _updateGitHubActionSourceControlPropertiesManually(sourceSettingsPayload: SourceSettings) {
return this._fetchMetadata()
.switchMap(r => {
if (r && r.result && r.result.properties) {
return this._updateMetadata(r.result.properties, sourceSettingsPayload);
} else {
return Observable.throw(r);
}
})
.switchMap(r => {
if (r && r.status === 200) {
return this._patchSiteConfigForGitHubAction();
} else {
return Observable.throw(r);
}
})
.catch(r => Observable.throw(r))
.map(r => r.json());
}
// actix.rs
// (`fn wire` signature reconstructed from its body and the `wire(scope)` call below.)
fn wire(&self, mut output: Self::Backend) -> Self::Backend {
for handler in self.handlers.clone() {
let inner = handler.inner;
output = output.route(&handler.name, handler.method.clone(), move |request| {
inner(request)
});
}
output
}
}
impl ExtendApiBackend for actix_web::Scope<()> {
fn extend<'a, I>(mut self, items: I) -> Self
where
I: IntoIterator<Item = (&'a str, &'a ApiScope)>,
{
for item in items {
self = self.nested(&item.0, move |scope| item.1.actix_backend.wire(scope))
}
self
}
}
impl ResponseError for api::Error {
fn error_response(&self) -> HttpResponse {
match self {
api::Error::BadRequest(err) => HttpResponse::BadRequest().body(err.to_string()),
api::Error::InternalError(err) => {
HttpResponse::InternalServerError().body(err.to_string())
}
api::Error::Io(err) => HttpResponse::InternalServerError().body(err.to_string()),
api::Error::Storage(err) => HttpResponse::InternalServerError().body(err.to_string()),
api::Error::Gone => HttpResponse::Gone().finish(),
api::Error::MovedPermanently(new_location) => HttpResponse::MovedPermanently()
.header(header::LOCATION, new_location.clone())
.finish(),
api::Error::NotFound(err) => HttpResponse::NotFound().body(err.to_string()),
api::Error::Unauthorized => HttpResponse::Unauthorized().finish(),
}
}
}
/// Creates a `HttpResponse` object from the provided JSON value.
/// Depending on the `actuality` parameter value, the warning about endpoint
/// being deprecated can be added.
fn json_response<T: Serialize>(actuality: Actuality, json_value: T) -> HttpResponse {
let mut response = HttpResponse::Ok();
if let Actuality::Deprecated {
ref discontinued_on,
ref description,
} = actuality
{
// There is a proposal for creating special deprecation header within HTTP,
// but currently it's only a draft. So the conventional way to notify API user
// about endpoint deprecation is setting the `Warning` header.
let expiration_note = match discontinued_on {
// Date is formatted according to HTTP-date format.
Some(date) => format!(
"The old API is maintained until {}.",
date.format("%a, %d %b %Y %T GMT")
),
None => "Currently there is no specific date for disabling this endpoint.".into(),
};
let mut warning_text = format!(
"Deprecated API: This endpoint is deprecated, \
see the service documentation to find an alternative. \
{}",
expiration_note
);
if let Some(description) = description {
warning_text = format!("{} Additional information: {}.", warning_text, description);
}
let warning_string = create_warning_header(&warning_text);
response.header(header::WARNING, warning_string);
}
response.json(json_value)
}
/// Formats warning string according to the following format:
/// "<warn-code> <warn-agent> \"<warn-text>\" [<warn-date>]"
/// <warn-code> in our case is 299, which means a miscellaneous persistent warning.
/// <warn-agent> is optional, so we set it to "-".
/// <warn-text> is a warning description, which is taken as an only argument.
/// <warn-date> is not required.
/// For details you can see RFC 7234, section 5.5: Warning.
fn create_warning_header(warning_text: &str) -> String {
format!("299 - \"{}\"", warning_text)
}
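// A minimal format check for the helper above -- a sketch added for illustration, not
// part of the original module:
#[test]
fn warning_header_format_example() {
    assert_eq!(
        create_warning_header("Deprecated API"),
        "299 - \"Deprecated API\""
    );
}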
impl From<EndpointMutability> for actix_web::http::Method {
fn from(mutability: EndpointMutability) -> Self {
match mutability {
EndpointMutability::Immutable => actix_web::http::Method::GET,
EndpointMutability::Mutable => actix_web::http::Method::POST,
}
}
}
impl<Q, I, F> From<NamedWith<Q, I, api::Result<I>, F>> for RequestHandler
where
F: Fn(Q) -> api::Result<I> + 'static + Send + Sync + Clone,
Q: DeserializeOwned + 'static,
I: Serialize + 'static,
{
fn from(f: NamedWith<Q, I, api::Result<I>, F>) -> Self {
// Convert handler that returns a `Result` into handler that will return `FutureResult`.
let handler = f.inner.handler;
let future_endpoint = move |query| -> Box<dyn Future<Item = I, Error = api::Error>> {
let future = handler(query).into_future();
Box::new(future)
};
let named_with_future = NamedWith::new(f.name, future_endpoint, f.mutability);
// Then we can create a `RequestHandler` with the `From` specialization for future result.
RequestHandler::from(named_with_future)
}
}
/// Takes `HttpRequest` as a parameter and extracts query:
/// - If request is immutable, the query is parsed from query string,
/// - If request is mutable, the query is parsed from the request body as JSON.
fn extract_query<Q>(
request: HttpRequest,
mutability: EndpointMutability,
) -> impl Future<Item = Q, Error = actix_web::error::Error>
where
Q: DeserializeOwned + 'static,
{
match mutability {
EndpointMutability::Immutable => {
let future = Query::from_request(&request, &Default::default())
.map(Query::into_inner)
.map_err(From::from)
.into_future();
Either::A(future)
}
EndpointMutability::Mutable => {
let future = request.json().from_err();
Either::B(future)
}
}
}
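// Sketch of a typical query type used with `extract_query` (the names here are hypothetical):
// #[derive(Deserialize)]
// struct BlockQuery { height: u64 }
// An immutable endpoint parses it from the query string `?height=10`; a mutable one reads
// the JSON body `{"height": 10}` instead.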
impl<Q, I, F> From<NamedWith<Q, I, FutureResult<I>, F>> for RequestHandler
where
F: Fn(Q) -> FutureResult<I> + 'static + Clone + Send + Sync,
Q: DeserializeOwned + 'static,
I: Serialize + 'static,
{
fn from(f: NamedWith<Q, I, FutureResult<I>, F>) -> Self {
let handler = f.inner.handler;
let actuality = f.inner.actuality;
let mutability = f.mutability;
let index = move |request: HttpRequest| -> FutureResponse {
let handler = handler.clone();
let actuality = actuality.clone();
extract_query(request, mutability)
.and_then(move |query| {
handler(query)
.map(|value| json_response(actuality, value))
.map_err(From::from)
})
.responder()
};
Self {
name: f.name,
method: f.mutability.into(),
inner: Arc::from(index) as Arc<RawHandler>,
}
}
}
/// Creates `actix_web::App` for the given aggregator and runtime configuration.
pub(crate) fn create_app(aggregator: &ApiAggregator, runtime_config: ApiRuntimeConfig) -> App {
let app_config = runtime_config.app_config;
let access = runtime_config.access;
let mut app = App::new();
app = app.scope("api", |scope| aggregator.extend_backend(access, scope));
if let Some(app_config) = app_config {
app = app_config(app);
}
app
}
/// Configuration parameters for the `App` runtime.
#[derive(Clone)]
pub struct ApiRuntimeConfig {
/// The socket address to bind.
pub listen_address: SocketAddr,
/// API access level.
pub access: ApiAccess,
/// Optional App configuration.
pub app_config: Option<AppConfig>,
}
impl ApiRuntimeConfig {
/// Creates API runtime configuration for the given address and access level.
pub fn new(listen_address: SocketAddr, access: ApiAccess) -> Self {
Self {
listen_address,
access,
app_config: Default::default(),
}
}
}
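// Usage sketch (address and access level are illustrative):
// let config = ApiRuntimeConfig::new("127.0.0.1:8000".parse().unwrap(), ApiAccess::Public);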
impl fmt::Debug for ApiRuntimeConfig {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ApiRuntimeConfig")
.field("listen_address", &self.listen_address)
.field("access", &self.access)
.field("app_config", &self.app_config.as_ref().map(drop))
.finish()
}
}
/// Configuration parameters for the actix system runtime.
#[derive(Debug, Clone)]
pub struct SystemRuntimeConfig {
/// Active API runtimes.
pub api_runtimes: Vec<ApiRuntimeConfig>,
/// API aggregator.
pub api_aggregator: ApiAggregator,
/// The interval in milliseconds between attempts of restarting HTTP-server in case
/// the server failed to restart
pub server_restart_retry_timeout: u64,
/// The attempts counts of restarting HTTP-server in case the server failed to restart
pub server_restart_max_retries: u16,
}
/// Actix system runtime handle.
pub struct SystemRuntime {
system_thread: JoinHandle<result::Result<(), Error>>,
system: System,
}
impl SystemRuntimeConfig {
/// Starts actix system runtime along with all web runtimes.
pub fn start(
self,
endpoints_rx: mpsc::Receiver<UpdateEndpoints>,
) -> result::Result<SystemRuntime, Error> {
// Creates a system thread.
let (system_tx, system_rx) = mpsc::unbounded();
let system_thread = thread::spawn(move || -> result::Result<(), Error> {
let system = System::new("http-server");
system_tx.unbounded_send(System::current())?;
ApiManager::new(self, endpoints_rx).start();
// Starts actix-web runtime.
let code = system.run();
trace!("Actix runtime finished with code {}", code);
ensure!(
code == 0,
"Actix runtime finished with the non zero error code: {}",
code
);
Ok(())
});
// Receives addresses of runtime items.
let system = system_rx
.wait()
.next()
.ok_or_else(|| format_err!("Unable to receive actix system handle"))?
.map_err(|()| format_err!("Unable to receive actix system handle"))?;
Ok(SystemRuntime {
system_thread,
system,
})
}
}
impl SystemRuntime {
/// Stops the actix system runtime along with all web runtimes.
pub fn stop(self) -> result::Result<(), Error> {
// Stop actix system runtime.
self.system.stop();
self.system_thread.join().map_err(|e| {
format_err!(
"Unable to join actix web api thread, an error occurred: {:?}",
e
)
})?
}
}
impl fmt::Debug for SystemRuntime {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SystemRuntime").finish()
}
}
/// CORS header specification.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AllowOrigin {
/// Allows access from any host.
Any,
/// Allows access only from the specified hosts.
Whitelist(Vec<String>),
}
impl ser::Serialize for AllowOrigin {
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
match *self {
AllowOrigin::Any => "*".serialize(serializer),
AllowOrigin::Whitelist(ref hosts) => {
if hosts.len() == 1 {
hosts[0].serialize(serializer)
} else {
hosts.serialize(serializer)
}
}
}
}
}
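// Serialization sketch for the special cases above (assumes `serde_json` is available, as
// elsewhere in the project):
#[test]
fn allow_origin_serialization_example() {
    let one = AllowOrigin::Whitelist(vec!["http://a.example".to_owned()]);
    assert_eq!(serde_json::to_string(&one).unwrap(), r#""http://a.example""#);
    assert_eq!(serde_json::to_string(&AllowOrigin::Any).unwrap(), r#""*""#);
    let two = AllowOrigin::Whitelist(vec![
        "http://a.example".to_owned(),
        "http://b.example".to_owned(),
    ]);
    assert_eq!(
        serde_json::to_string(&two).unwrap(),
        r#"["http://a.example","http://b.example"]"#
    );
}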
impl<'de> de::Deserialize<'de> for AllowOrigin {
fn deserialize<D>(d: D) -> result::Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
type Value = AllowOrigin;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a list of hosts or \"*\"")
}
fn visit_str<E>(self, value: &str) -> result::Result<AllowOrigin, E>
where
E: de::Error,
{
# seq2seq.py
# (Class header reconstructed from later usage; the file's imports and the _Seq2SeqBase
# base class -- which provides self.gru() -- are elided in this fragment.)
class RNNEncoder(_Seq2SeqBase):
def __init__(self, units, bidirectional=False, merge_mode=None):
rnn_model = partial(self.gru(), units=units, return_sequences=True, return_state=True, unroll=True)
self.forward_rnn = rnn_model(go_backwards=False, name='enc_forward_rnn')
self.backward_rnn = rnn_model(go_backwards=True, name='enc_backward_rnn') if bidirectional else None
self.merge_mode = merge_mode
def __call__(self, inputs):
forward_results = self.forward_rnn(inputs)
if self.backward_rnn:
backward_results = self.backward_rnn(inputs)
if not self.merge_mode:
# follow Bahdanau's paper
backward_results[0] = layers.Concatenate()([forward_results[0], backward_results[0]])
final_results = backward_results
else:
merge_func = layers.Concatenate() if self.merge_mode == 'concat' else layers.Add()
final_results = [merge_func([i, j]) for i, j in zip(forward_results, backward_results)]
else:
final_results = forward_results
output, hidden = final_results[0], final_results[1:]
hidden = [layers.Dense(units=self.forward_rnn.units, activation='tanh')(x) for x in hidden]
return output, hidden
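# Shape sketch (using the settings from __main__ below): embedded inputs of shape
# (batch, 20, 8) produce `output` of shape (batch, 20, units) and `hidden` as a list with a
# single (batch, units) GRU state ('sum' adds the two directions elementwise, while
# 'concat' would double the feature size of `output`).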
class RNNWithAttentionDecoder(_Seq2SeqBase):
def __init__(self, units, n_classes, dec_max_time_steps, eos_token=0,
attn_method='concat', attn_before_rnn=True, **kwargs):
self.rnn = self.gru()(units=units, return_state=True)
self.attn_score = self.build_attn_score_func(units, attn_method, **kwargs)
self.attn_combine = layers.Dense(units=units, activation='tanh', name='dec_attn_combine')
self.attn_before_rnn = attn_before_rnn
self.output_fc = layers.Dense(units=n_classes, name='dec_output_fc')
self.dec_max_time_steps = dec_max_time_steps
self.eos_token = eos_token # todo: early stopping
@staticmethod
def build_attn_score_func(units, attn_method, **kwargs): # todo: share?
if attn_method == 'concat':
fcs = [
tf.layers.Dense(units=units, activation='tanh', name='w'),
tf.layers.Dense(units=1, name='r')
]
def f(*args):
_, h, e = args
h = tf.expand_dims(h, axis=1) # ?*1*N
h = tf.tile(h, multiples=[1, e.shape[1], 1]) # ?*20*N
x = tf.concat([e, h], axis=-1)
for layer in fcs:
x = layer(x)
return x # ?*20*1
return f
elif attn_method == 'location':
enc_max_time_steps = kwargs.get('enc_max_time_steps', None)
assert enc_max_time_steps
fc = tf.layers.Dense(units=enc_max_time_steps)
def f(*args):
x = fc(tf.concat(args[:-1], axis=-1)) # ?*20
return tf.expand_dims(x, axis=-1) # ?*20*1
return f
elif attn_method == 'dot':
def f(*args):
_, h, e = args
h = tf.expand_dims(h, axis=-1) # ?*32*1
return tf.matmul(e, h) # ?*20*1
return f
else:
raise NotImplementedError
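# All three scorers return unnormalized scores of shape (batch, enc_steps, 1):
# 'concat' is Bahdanau-style additive attention, 'dot' is Luong-style multiplicative
# attention, and 'location' scores from the decoder input and state alone (per Luong's paper).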
def __call__(self, inputs, encoder_output, encoder_state, teacher_forcing, **kwargs):
hidden_state = encoder_state
outputs = []
def without_teacher_forcing():
embed = kwargs.get('embed', None)
assert embed
return embed(tf.argmax(pred, axis=1))
for step in range(self.dec_max_time_steps):
if step == 0:
x = inputs[:, 0, :]
else:
x = tf.cond(teacher_forcing, true_fn=lambda: inputs[:, step, :],
false_fn=without_teacher_forcing, name='dec_switch_teacher_forcing')
'''calculate attention'''
h_state = hidden_state[0]
atten_scores = self.attn_score(x, h_state, encoder_output)
atten_weights = tf.nn.softmax(atten_scores, dim=1)
atten_context = tf.multiply(encoder_output, atten_weights) # ?*20*32 ?*20*1
atten_context = tf.reduce_sum(atten_context, axis=1)
'''across rnn'''
if self.attn_before_rnn:
x = tf.expand_dims(tf.concat([atten_context, x], axis=-1), axis=1) # todo: delete x?
results = self.rnn(x, initial_state=hidden_state)
output, hidden_state = results[0], results[1:]
else:
# follow Luong's paper (slightly different: attention is combined after the RNN step)
x = tf.expand_dims(x, axis=1)
results = self.rnn(x, initial_state=hidden_state)
output, hidden_state = results[0], results[1:]
x = tf.concat([atten_context, output], axis=-1)
output = self.attn_combine(x)
pred = self.output_fc(output) # logits
outputs.append(pred)
outputs = tf.stack(outputs, axis=1)
return outputs
def _default_batchify_fn(data):
if isinstance(data[0], np.ndarray):
return np.stack(data)
elif isinstance(data[0], tuple):
data = zip(*data)
return [_default_batchify_fn(i) for i in data]
else:
data = np.asarray(data)
return data
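# Minimal self-test of the collate function above (added for illustration; the
# shapes are arbitrary). Guarded so it only runs when executed as a script.
if __name__ == '__main__':
    import numpy as np  # also imported in the main block below; harmless
    _batch = [(np.zeros(20), np.zeros(20)) for _ in range(4)]
    _xs, _ys = _default_batchify_fn(_batch)
    assert _xs.shape == (4, 20) and _ys.shape == (4, 20)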
class _MMetric(object):
def __init__(self):
self.num = 0
self.total = 0
def update(self, num, total):
self.num += num
self.total += total
def get(self):
return self.num / self.total
def reset(self):
self.num = 0
self.total = 0
if __name__ == '__main__':
import warnings
import os
import numpy as np
import pandas as pd
from mxnet.gluon.data import ArrayDataset, DataLoader
from sklearn.model_selection import train_test_split
from tqdm import tqdm
warnings.filterwarnings('ignore')
os.environ["CUDA_VISIBLE_DEVICES"] = ""
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.InteractiveSession(config=config)
hidden_size = 32
sos_token = 10
use_teacher_forcing_ratio = 0.5
'''build encoder'''
encoder_input = tf.placeholder(tf.int32, shape=(None, 20))
encoder_embedding = layers.Embedding(input_dim=11, output_dim=8, trainable=True)
encoder = RNNEncoder(units=hidden_size, bidirectional=True, merge_mode='sum')
encoder_output, encoder_state = encoder(inputs=encoder_embedding(encoder_input))
'''build decoder'''
decoder_input = tf.placeholder(tf.int32, shape=(None, None))
teacher_forcing = tf.placeholder_with_default(False, shape=None)
decoder = RNNWithAttentionDecoder(
units=hidden_size,
n_classes=10,
enc_max_time_steps=20,
dec_max_time_steps=20,
attn_method='dot',
attn_before_rnn=False
)
decoder_output = decoder(inputs=encoder_embedding(decoder_input), encoder_output=encoder_output,
encoder_state=encoder_state, teacher_forcing=teacher_forcing,
embed=encoder_embedding)
softmax_label = tf.placeholder(tf.int64, shape=(None, 20))
'''build loss'''
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=decoder_output, labels=softmax_label)
loss = tf.reduce_mean(loss)
'''build optimizer'''
opt = tf.train.AdamOptimizer(learning_rate=0.02).minimize(loss)
'''build metric'''
pred_label = tf.argmax(decoder_output, axis=-1)
n_true = tf.reduce_all(tf.equal(pred_label, softmax_label), axis=1)
n_true = tf.cast(n_true, dtype=tf.int32)
n_true = tf.reduce_sum(n_true)
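# Added note: n_true counts sequences whose predictions match the label at all
# 20 positions (sequence-level exact match); a single wrong digit disqualifies
# the whole sequence. Dividing by the batch size (accumulated via _MMetric
# above) yields exact-match accuracy rather than per-token accuracy.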
'''load data'''
def load_data(path):
return pd.read_csv(path, header=None).values
X_train = load_data('./dataset/task8_train_input.csv')
y_train = load_data('./dataset/task8_train_output.csv')
X_test = load_data('./dataset/task8_test_input.csv')
y_test = load_data('./dataset/task8_test_output.csv')
X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, train_size=0.9, random_state=0)
print('TrainSet Shape:{}'.format(X_train.shape))
print('TestSet Shape:{}'.format(X_test.shape))
build_dataloader = partial(DataLoader, batch_size=32, shuffle=False, last_batch='keep',
batchify_fn=_default_batchify_fn)
train_dataloader = build_dataloader(dataset=ArrayDataset(X_train, y_train))
test_dataloader = build_dataloader(dataset=ArrayDataset(X_test, y_test))
val_dataloader = build_dataloader(dataset=ArrayDataset(X_val, y_val))
'''start training'''
sess.run(tf.global_variables_initializer())
train_loss, train_acc = _MMetric(), _MMetric()
print_freq = 50
for step, (x, y) in enumerate(tqdm(train_dataloader, desc='Training', position=0)):
sos_input = np.ones(shape=(len(y), 1), dtype=np.int32) * sos_token
t = np.random.rand() < use_teacher_forcing_ratio
# shift targets right for teacher forcing: the input at step s is y[s-1], not y[s]
d = sos_input if not t else np.concatenate((sos_input, y[:, :-1]), axis=1)
feed_dict = {encoder | self.rnn = self.gru()(units=units, return_state=True)
self.attn_score = self.build_attn_score_func(units, attn_method, **kwargs)
self.attn_combine = layers.Dense(units=units, activation='tanh', name='dec_attn_combine')
self.attn_before_rnn = attn_before_rnn
self.output_fc = layers.Dense(units=n_classes, name='dec_output_fc')
self.dec_max_time_steps = dec_max_time_steps
self.eos_token = eos_token # todo: early stopping | identifier_body |
seq2seq.py | _results = self.backward_rnn(inputs)
if not self.merge_mode:
# follow Bahdanau's paper
backward_results[0] = layers.Concatenate()([forward_results[0], backward_results[0]])
final_results = backward_results
else:
merge_func = layers.Concatenate() if self.merge_mode == 'concat' else layers.Add()
final_results = [merge_func([i, j]) for i, j in zip(forward_results, backward_results)]
else:
final_results = forward_results
output, hidden = final_results[0], final_results[1:]
hidden = [layers.Dense(units=self.forward_rnn.units, activation='tanh')(x) for x in hidden]
return output, hidden
class RNNWithAttentionDecoder(_Seq2SeqBase):
def __init__(self, units, n_classes, dec_max_time_steps, eos_token=0,
attn_method='concat', attn_before_rnn=True, **kwargs):
self.rnn = self.gru()(units=units, return_state=True)
self.attn_score = self.build_attn_score_func(units, attn_method, **kwargs)
self.attn_combine = layers.Dense(units=units, activation='tanh', name='dec_attn_combine')
self.attn_before_rnn = attn_before_rnn
self.output_fc = layers.Dense(units=n_classes, name='dec_output_fc')
self.dec_max_time_steps = dec_max_time_steps
self.eos_token = eos_token # todo: early stopping
@staticmethod
def build_attn_score_func(units, attn_method, **kwargs): # todo: share?
if attn_method == 'concat':
fcs = [
tf.layers.Dense(units=units, activation='tanh', name='w'),
tf.layers.Dense(units=1, name='r')
]
def f(*args):
_, h, e = args
h = tf.expand_dims(h, axis=1) # ?*1*N
h = tf.tile(h, multiples=[1, e.shape[1], 1]) # ?*20*N
x = tf.concat([e, h], axis=-1)
for layer in fcs:
x = layer(x)
return x # ?*20*1
return f
elif attn_method == 'location':
enc_max_time_steps = kwargs.get('enc_max_time_steps', None)
assert enc_max_time_steps
fc = tf.layers.Dense(units=enc_max_time_steps)
def f(*args):
x = fc(tf.concat(args[:-1], axis=-1)) # ?*20
return tf.expand_dims(x, axis=-1) # ?*20*1
return f
elif attn_method == 'dot':
def f(*args):
_, h, e = args
h = tf.expand_dims(h, axis=-1) # ?*32*1
return tf.matmul(e, h) # ?*20*1
return f
else:
raise NotImplementedError('unknown attn_method: {}'.format(attn_method))
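# Added note (not in the original): shape walk-through of the three scoring
# functions above, writing B for batch size, enc_T = 20 source steps, N = units.
#   concat (Bahdanau): tile h to B*enc_T*N, concat with e, Dense(tanh) then
#     Dense(1) -> B*enc_T*1
#   location: Dense([x, h]) -> B*enc_T, expand_dims -> B*enc_T*1 (never reads e)
#   dot (Luong): expand h to B*N*1, matmul(e, h) -> B*enc_T*1
# Each returns one unnormalized score per source step; the softmax over axis=1
# in __call__ turns the scores into attention weights.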
def __call__(self, inputs, encoder_output, encoder_state, teacher_forcing, **kwargs):
hidden_state = encoder_state
outputs = []
def without_teacher_forcing():
# feed back the embedding of the previous step's own prediction
embed = kwargs.get('embed', None)
assert embed is not None, 'an embed layer is required when teacher forcing is off'
return embed(tf.argmax(pred, axis=1))
for step in range(self.dec_max_time_steps):
if step == 0:
x = inputs[:, 0, :]
else:
x = tf.cond(teacher_forcing, true_fn=lambda: inputs[:, step, :],
false_fn=without_teacher_forcing, name='dec_switch_teacher_forcing')
'''calculate attention'''
h_state = hidden_state[0]
atten_scores = self.attn_score(x, h_state, encoder_output)
atten_weights = tf.nn.softmax(atten_scores, axis=1) # 'dim' is deprecated; normalize over source steps
atten_context = tf.multiply(encoder_output, atten_weights) # broadcast: ?*20*32 times ?*20*1
atten_context = tf.reduce_sum(atten_context, axis=1) # weighted sum over steps -> ?*32
'''across rnn'''
if self.attn_before_rnn:
x = tf.expand_dims(tf.concat([atten_context, x], axis=-1), axis=1) # todo: delete x?
results = self.rnn(x, initial_state=hidden_state)
output, hidden_state = results[0], results[1:]
else:
# follow Luong's paper (slightly different: attention is combined after the RNN step)
x = tf.expand_dims(x, axis=1)
results = self.rnn(x, initial_state=hidden_state)
output, hidden_state = results[0], results[1:]
x = tf.concat([atten_context, output], axis=-1)
output = self.attn_combine(x)
pred = self.output_fc(output) # logits
outputs.append(pred)
outputs = tf.stack(outputs, axis=1)
return outputs
def _default_batchify_fn(data):
if isinstance(data[0], np.ndarray):
return np.stack(data)
elif isinstance(data[0], tuple):
data = zip(*data)
return [_default_batchify_fn(i) for i in data]
else:
data = np.asarray(data)
return data
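# Minimal self-test of the collate function above (added for illustration; the
# shapes are arbitrary). Guarded so it only runs when executed as a script.
if __name__ == '__main__':
    import numpy as np  # also imported in the main block below; harmless
    _batch = [(np.zeros(20), np.zeros(20)) for _ in range(4)]
    _xs, _ys = _default_batchify_fn(_batch)
    assert _xs.shape == (4, 20) and _ys.shape == (4, 20)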
class _MMetric(object):
def __init__(self):
self.num = 0
self.total = 0
def update(self, num, total):
self.num += num
self.total += total
def get(self):
return self.num / self.total
def reset(self):
self.num = 0
self.total = 0
if __name__ == '__main__':
import warnings
import os
import numpy as np
import pandas as pd
from mxnet.gluon.data import ArrayDataset, DataLoader
from sklearn.model_selection import train_test_split
from tqdm import tqdm
warnings.filterwarnings('ignore')
os.environ["CUDA_VISIBLE_DEVICES"] = ""
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.InteractiveSession(config=config)
hidden_size = 32
sos_token = 10
use_teacher_forcing_ratio = 0.5
'''build encoder'''
encoder_input = tf.placeholder(tf.int32, shape=(None, 20))
encoder_embedding = layers.Embedding(input_dim=11, output_dim=8, trainable=True)
encoder = RNNEncoder(units=hidden_size, bidirectional=True, merge_mode='sum')
encoder_output, encoder_state = encoder(inputs=encoder_embedding(encoder_input))
'''build decoder'''
decoder_input = tf.placeholder(tf.int32, shape=(None, None))
teacher_forcing = tf.placeholder_with_default(False, shape=None)
decoder = RNNWithAttentionDecoder(
units=hidden_size,
n_classes=10,
enc_max_time_steps=20,
dec_max_time_steps=20,
attn_method='dot',
attn_before_rnn=False
)
decoder_output = decoder(inputs=encoder_embedding(decoder_input), encoder_output=encoder_output,
encoder_state=encoder_state, teacher_forcing=teacher_forcing,
embed=encoder_embedding)
softmax_label = tf.placeholder(tf.int64, shape=(None, 20))
'''build loss'''
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=decoder_output, labels=softmax_label)
loss = tf.reduce_mean(loss)
'''build optimizer'''
opt = tf.train.AdamOptimizer(learning_rate=0.02).minimize(loss)
'''build metric'''
pred_label = tf.argmax(decoder_output, axis=-1)
n_true = tf.reduce_all(tf.equal(pred_label, softmax_label), axis=1)
n_true = tf.cast(n_true, dtype=tf.int32)
n_true = tf.reduce_sum(n_true)
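# Added note: n_true counts sequences whose predictions match the label at all
# 20 positions (sequence-level exact match); a single wrong digit disqualifies
# the whole sequence. Dividing by the batch size (accumulated via _MMetric
# above) yields exact-match accuracy rather than per-token accuracy.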
'''load data'''
def load_data(path):
return pd.read_csv(path, header=None).values
X_train = load_data('./dataset/task8_train_input.csv')
y_train = load_data('./dataset/task8_train_output.csv') | print('TestSet Shape:{}'.format(X_test.shape))
build_dataloader = partial(DataLoader, batch_size=32, shuffle=False, last_batch='keep',
batchify_fn=_default_batchify_fn)
train_dataloader = build_dataloader(dataset=ArrayDataset(X_train, y_train))
test_dataloader = build_dataloader(dataset=ArrayDataset(X_test, y_test))
val_dataloader = build_dataloader(dataset=ArrayDataset(X_val, y_val))
'''start training'''
sess.run(tf.global_variables_initializer())
train_loss, train_acc = _MMetric(), _MMetric()
print_freq = 50
for step, (x, y) in enumerate(tqdm(train_dataloader, desc='Training', position=0)):
sos_input = np.ones(shape=(len(y), 1), dtype=np.int32) * sos_token
t = np.random.rand() < use_teacher_forcing_ratio
# shift targets right for teacher forcing: the input at step s is y[s-1], not y[s]
d = sos_input if not t else np.concatenate((sos_input, y[:, :-1]), axis=1)
feed_dict = {encoder | X_test = load_data('./dataset/task8_test_input.csv')
y_test = load_data('./dataset/task8_test_output.csv')
X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, train_size=0.9, random_state=0)
print('TrainSet Shape:{}'.format(X_train.shape)) | random_line_split |
mock.rs | atories: u16 = 4;
pub const UnsignedPriority: u64 = 100;
pub const EthNetworkId: <Runtime as Config>::NetworkId = 0;
}
#[derive(PartialEq, Eq, Clone, Encode, Decode, Debug)]
pub struct MyTestXt<Call, Extra> {
/// Signature of the extrinsic.
pub signature: Option<(AccountId, Extra)>,
/// Call of the extrinsic.
pub call: Call,
}
parity_util_mem::malloc_size_of_is_0!(any: MyTestXt<Call, Extra>);
impl<Call: Codec + Sync + Send, Context, Extra> Checkable<Context> for MyTestXt<Call, Extra> {
type Checked = Self;
fn check(self, _c: &Context) -> Result<Self::Checked, TransactionValidityError> {
Ok(self)
}
}
impl<Call: Codec + Sync + Send, Extra> traits::Extrinsic for MyTestXt<Call, Extra> {
type Call = Call;
type SignaturePayload = (AccountId, Extra);
fn is_signed(&self) -> Option<bool> {
Some(self.signature.is_some())
}
fn new(c: Call, sig: Option<Self::SignaturePayload>) -> Option<Self> {
Some(MyTestXt {
signature: sig,
call: c,
})
}
}
impl SignedExtension for MyExtra {
type AccountId = AccountId;
type Call = Call;
type AdditionalSigned = ();
type Pre = ();
const IDENTIFIER: &'static str = "testextension";
fn additional_signed(&self) -> Result<Self::AdditionalSigned, TransactionValidityError> {
Ok(())
}
}
impl<Origin, Call, Extra> Applyable for MyTestXt<Call, Extra>
where
Call:
'static + Sized + Send + Sync + Clone + Eq + Codec + Debug + Dispatchable<Origin = Origin>,
Extra: SignedExtension<AccountId = AccountId, Call = Call>,
Origin: From<Option<AccountId32>>,
{
type Call = Call;
/// Checks to see if this is a valid *transaction*. It returns information on it if so.
fn validate<U: ValidateUnsigned<Call = Self::Call>>(
&self,
_source: TransactionSource,
_info: &DispatchInfoOf<Self::Call>,
_len: usize,
) -> TransactionValidity {
Ok(Default::default())
}
/// Executes all necessary logic needed prior to dispatch and deconstructs into function call,
/// index and sender.
fn apply<U: ValidateUnsigned<Call = Self::Call>>(
self,
info: &DispatchInfoOf<Self::Call>,
len: usize,
) -> ApplyExtrinsicResultWithInfo<PostDispatchInfoOf<Self::Call>> {
let maybe_who = if let Some((who, extra)) = self.signature | else {
Extra::pre_dispatch_unsigned(&self.call, info, len)?;
None
};
Ok(self.call.dispatch(maybe_who.into()))
}
}
impl<Call, Extra> Serialize for MyTestXt<Call, Extra>
where
MyTestXt<Call, Extra>: Encode,
{
fn serialize<S>(&self, seq: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.using_encoded(|bytes| seq.serialize_bytes(bytes))
}
}
impl<Call: Encode, Extra: Encode> GetDispatchInfo for MyTestXt<Call, Extra> {
fn get_dispatch_info(&self) -> DispatchInfo {
// for testing: weight == size.
DispatchInfo {
weight: self.encode().len() as _,
pays_fee: Pays::No,
..Default::default()
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Encode, Decode)]
pub struct MyExtra;
pub type TestExtrinsic = MyTestXt<Call, MyExtra>;
parameter_types! {
pub const BlockHashCount: u64 = 250;
pub const MaximumBlockWeight: Weight = 1024;
pub const MaximumBlockLength: u32 = 2 * 1024;
pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);
pub const ExistentialDeposit: u128 = 0;
}
impl frame_system::Config for Runtime {
type BaseCallFilter = ();
type BlockWeights = ();
type BlockLength = ();
type Origin = Origin;
type Call = Call;
type Index = u64;
type BlockNumber = u64;
type Hash = H256;
type Hashing = BlakeTwo256;
type AccountId = AccountId;
type Lookup = IdentityLookup<Self::AccountId>;
type Header = Header;
type Event = Event;
type BlockHashCount = BlockHashCount;
type DbWeight = ();
type Version = ();
type AccountData = pallet_balances::AccountData<Balance>;
type OnNewAccount = ();
type OnKilledAccount = ();
type SystemWeightInfo = ();
type PalletInfo = PalletInfo;
type SS58Prefix = ();
}
impl<T: SigningTypes> frame_system::offchain::SignMessage<T> for Runtime {
type SignatureData = ();
fn sign_message(&self, _message: &[u8]) -> Self::SignatureData {
unimplemented!()
}
fn sign<TPayload, F>(&self, _f: F) -> Self::SignatureData
where
F: Fn(&Account<T>) -> TPayload,
TPayload: frame_system::offchain::SignedPayload<T>,
{
unimplemented!()
}
}
impl<LocalCall> frame_system::offchain::CreateSignedTransaction<LocalCall> for Runtime
where
Call: From<LocalCall>,
{
fn create_transaction<C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>>(
call: Call,
_public: <Signature as Verify>::Signer,
account: <Runtime as frame_system::Config>::AccountId,
_index: <Runtime as frame_system::Config>::Index,
) -> Option<(
Call,
<TestExtrinsic as sp_runtime::traits::Extrinsic>::SignaturePayload,
)> {
Some((call, (account, MyExtra {})))
}
}
impl frame_system::offchain::SigningTypes for Runtime {
type Public = <Signature as Verify>::Signer;
type Signature = Signature;
}
impl<C> frame_system::offchain::SendTransactionTypes<C> for Runtime
where
Call: From<C>,
{
type OverarchingCall = Call;
type Extrinsic = TestExtrinsic;
}
impl pallet_balances::Config for Runtime {
/// The type for recording an account's balance.
type Balance = Balance;
/// The ubiquitous event type.
type Event = Event;
type DustRemoval = ();
type ExistentialDeposit = ExistentialDeposit;
type AccountStore = System;
type WeightInfo = ();
type MaxLocks = ();
}
impl tokens::Config for Runtime {
type Event = Event;
type Balance = Balance;
type Amount = Amount;
type CurrencyId = <Runtime as assets::Config>::AssetId;
type WeightInfo = ();
type ExistentialDeposits = ExistentialDeposits;
type OnDust = ();
}
impl currencies::Config for Runtime {
type Event = Event;
type MultiCurrency = Tokens;
type NativeCurrency = BasicCurrencyAdapter<Runtime, Balances, Amount, BlockNumber>;
type GetNativeCurrencyId = <Runtime as assets::Config>::GetBaseAssetId;
type WeightInfo = ();
}
impl assets::Config for Runtime {
type Event = Event;
type ExtraAccountId = [u8; 32];
type ExtraAssetRecordArg =
common::AssetIdExtraAssetRecordArg<common::DEXId, common::LiquiditySourceType, [u8; 32]>;
type AssetId = common::AssetId32<PredefinedAssetId>;
type GetBaseAssetId = GetBaseAssetId;
type Currency = currencies::Module<Runtime>;
type WeightInfo = ();
}
impl common::Config for Runtime {
type DEXId = common::DEXId;
type LstId = common::LiquiditySourceType;
}
impl permissions::Config for Runtime {
type Event = Event;
}
impl bridge_multisig::Config for Runtime {
type Call = Call;
type Event = Event;
type Currency = Balances;
type DepositBase = DepositBase;
type DepositFactor = DepositFactor;
type MaxSignatories = MaxSignatories;
type WeightInfo = ();
}
impl pallet_sudo::Config for Runtime {
type Call = Call;
type Event = Event;
}
impl crate::Config for Runtime {
type PeerId = crate::crypto::TestAuthId;
type Call = Call;
type Event = Event;
type NetworkId = u32;
type GetEthNetworkId = EthNetworkId;
type WeightInfo = ();
}
impl sp_runtime::traits::ExtrinsicMetadata for TestExtrinsic {
const VERSION: u8 = 1;
type SignedExtensions = ();
}
construct_runtime!(
pub enum Runtime where
Block = Block,
NodeBlock = Block,
UncheckedExtrinsic = Unchecked | {
Extra::pre_dispatch(extra, &who, &self.call, info, len)?;
Some(who)
} | conditional_block |
mock.rs | atories: u16 = 4;
pub const UnsignedPriority: u64 = 100;
pub const EthNetworkId: <Runtime as Config>::NetworkId = 0;
}
#[derive(PartialEq, Eq, Clone, Encode, Decode, Debug)]
pub struct MyTestXt<Call, Extra> {
/// Signature of the extrinsic.
pub signature: Option<(AccountId, Extra)>,
/// Call of the extrinsic.
pub call: Call,
}
parity_util_mem::malloc_size_of_is_0!(any: MyTestXt<Call, Extra>);
impl<Call: Codec + Sync + Send, Context, Extra> Checkable<Context> for MyTestXt<Call, Extra> {
type Checked = Self;
fn check(self, _c: &Context) -> Result<Self::Checked, TransactionValidityError> {
Ok(self)
}
}
impl<Call: Codec + Sync + Send, Extra> traits::Extrinsic for MyTestXt<Call, Extra> {
type Call = Call;
type SignaturePayload = (AccountId, Extra);
fn is_signed(&self) -> Option<bool> {
Some(self.signature.is_some())
}
fn new(c: Call, sig: Option<Self::SignaturePayload>) -> Option<Self> {
Some(MyTestXt {
signature: sig,
call: c,
})
}
}
impl SignedExtension for MyExtra {
type AccountId = AccountId;
type Call = Call;
type AdditionalSigned = ();
type Pre = ();
const IDENTIFIER: &'static str = "testextension";
fn additional_signed(&self) -> Result<Self::AdditionalSigned, TransactionValidityError> {
Ok(())
}
}
impl<Origin, Call, Extra> Applyable for MyTestXt<Call, Extra>
where
Call:
'static + Sized + Send + Sync + Clone + Eq + Codec + Debug + Dispatchable<Origin = Origin>,
Extra: SignedExtension<AccountId = AccountId, Call = Call>,
Origin: From<Option<AccountId32>>,
{
type Call = Call;
/// Checks to see if this is a valid *transaction*. It returns information on it if so.
fn validate<U: ValidateUnsigned<Call = Self::Call>>(
&self,
_source: TransactionSource,
_info: &DispatchInfoOf<Self::Call>,
_len: usize,
) -> TransactionValidity {
Ok(Default::default())
}
/// Executes all necessary logic needed prior to dispatch and deconstructs into function call,
/// index and sender.
fn apply<U: ValidateUnsigned<Call = Self::Call>>(
self,
info: &DispatchInfoOf<Self::Call>,
len: usize,
) -> ApplyExtrinsicResultWithInfo<PostDispatchInfoOf<Self::Call>> {
let maybe_who = if let Some((who, extra)) = self.signature {
Extra::pre_dispatch(extra, &who, &self.call, info, len)?;
Some(who)
} else {
Extra::pre_dispatch_unsigned(&self.call, info, len)?;
None
};
Ok(self.call.dispatch(maybe_who.into()))
}
}
impl<Call, Extra> Serialize for MyTestXt<Call, Extra>
where
MyTestXt<Call, Extra>: Encode,
{
fn serialize<S>(&self, seq: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.using_encoded(|bytes| seq.serialize_bytes(bytes))
}
}
impl<Call: Encode, Extra: Encode> GetDispatchInfo for MyTestXt<Call, Extra> {
fn get_dispatch_info(&self) -> DispatchInfo {
// for testing: weight == size.
DispatchInfo {
weight: self.encode().len() as _,
pays_fee: Pays::No,
..Default::default()
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Encode, Decode)]
pub struct MyExtra;
pub type TestExtrinsic = MyTestXt<Call, MyExtra>;
parameter_types! {
pub const BlockHashCount: u64 = 250;
pub const MaximumBlockWeight: Weight = 1024;
pub const MaximumBlockLength: u32 = 2 * 1024;
pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);
pub const ExistentialDeposit: u128 = 0;
}
impl frame_system::Config for Runtime {
type BaseCallFilter = ();
type BlockWeights = ();
type BlockLength = ();
type Origin = Origin;
type Call = Call;
type Index = u64;
type BlockNumber = u64;
type Hash = H256;
type Hashing = BlakeTwo256;
type AccountId = AccountId;
type Lookup = IdentityLookup<Self::AccountId>;
type Header = Header;
type Event = Event;
type BlockHashCount = BlockHashCount;
type DbWeight = ();
type Version = ();
type AccountData = pallet_balances::AccountData<Balance>;
type OnNewAccount = ();
type OnKilledAccount = ();
type SystemWeightInfo = ();
type PalletInfo = PalletInfo;
type SS58Prefix = ();
}
impl<T: SigningTypes> frame_system::offchain::SignMessage<T> for Runtime {
type SignatureData = ();
fn sign_message(&self, _message: &[u8]) -> Self::SignatureData {
unimplemented!()
}
fn sign<TPayload, F>(&self, _f: F) -> Self::SignatureData
where
F: Fn(&Account<T>) -> TPayload,
TPayload: frame_system::offchain::SignedPayload<T>,
{
unimplemented!()
}
}
impl<LocalCall> frame_system::offchain::CreateSignedTransaction<LocalCall> for Runtime
where
Call: From<LocalCall>,
{
fn create_transaction<C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>>(
call: Call,
_public: <Signature as Verify>::Signer,
account: <Runtime as frame_system::Config>::AccountId,
_index: <Runtime as frame_system::Config>::Index,
) -> Option<(
Call,
<TestExtrinsic as sp_runtime::traits::Extrinsic>::SignaturePayload,
)> {
Some((call, (account, MyExtra {})))
}
}
impl frame_system::offchain::SigningTypes for Runtime {
type Public = <Signature as Verify>::Signer;
type Signature = Signature;
}
impl<C> frame_system::offchain::SendTransactionTypes<C> for Runtime
where
Call: From<C>,
{
type OverarchingCall = Call;
type Extrinsic = TestExtrinsic;
}
impl pallet_balances::Config for Runtime {
/// The type for recording an account's balance.
type Balance = Balance;
/// The ubiquitous event type.
type Event = Event;
type DustRemoval = ();
type ExistentialDeposit = ExistentialDeposit;
type AccountStore = System;
type WeightInfo = ();
type MaxLocks = ();
}
impl tokens::Config for Runtime {
type Event = Event;
type Balance = Balance;
type Amount = Amount;
type CurrencyId = <Runtime as assets::Config>::AssetId;
type WeightInfo = ();
type ExistentialDeposits = ExistentialDeposits;
type OnDust = ();
}
impl currencies::Config for Runtime {
type Event = Event;
type MultiCurrency = Tokens;
type NativeCurrency = BasicCurrencyAdapter<Runtime, Balances, Amount, BlockNumber>;
type GetNativeCurrencyId = <Runtime as assets::Config>::GetBaseAssetId;
type WeightInfo = ();
}
impl assets::Config for Runtime {
type Event = Event;
type ExtraAccountId = [u8; 32];
type ExtraAssetRecordArg =
common::AssetIdExtraAssetRecordArg<common::DEXId, common::LiquiditySourceType, [u8; 32]>;
type AssetId = common::AssetId32<PredefinedAssetId>;
type GetBaseAssetId = GetBaseAssetId;
type Currency = currencies::Module<Runtime>;
type WeightInfo = ();
}
impl common::Config for Runtime {
type DEXId = common::DEXId;
type LstId = common::LiquiditySourceType;
}
impl permissions::Config for Runtime {
type Event = Event;
}
impl bridge_multisig::Config for Runtime {
type Call = Call;
type Event = Event;
type Currency = Balances;
type DepositBase = DepositBase;
type DepositFactor = DepositFactor;
type MaxSignatories = MaxSignatories;
type WeightInfo = ();
}
impl pallet_sudo::Config for Runtime {
type Call = Call;
type Event = Event;
}
impl crate::Config for Runtime {
type PeerId = crate::crypto::TestAuthId;
type Call = Call;
type Event = Event;
type NetworkId = u32; | impl sp_runtime::traits::ExtrinsicMetadata for TestExtrinsic {
const VERSION: u8 = 1;
type SignedExtensions = ();
}
construct_runtime!(
pub enum Runtime where
Block = Block,
NodeBlock = Block,
UncheckedExtrinsic = UncheckedEx | type GetEthNetworkId = EthNetworkId;
type WeightInfo = ();
}
| random_line_split |
mock.rs | atories: u16 = 4;
pub const UnsignedPriority: u64 = 100;
pub const EthNetworkId: <Runtime as Config>::NetworkId = 0;
}
#[derive(PartialEq, Eq, Clone, Encode, Decode, Debug)]
pub struct MyTestXt<Call, Extra> {
/// Signature of the extrinsic.
pub signature: Option<(AccountId, Extra)>,
/// Call of the extrinsic.
pub call: Call,
}
parity_util_mem::malloc_size_of_is_0!(any: MyTestXt<Call, Extra>);
impl<Call: Codec + Sync + Send, Context, Extra> Checkable<Context> for MyTestXt<Call, Extra> {
type Checked = Self;
fn check(self, _c: &Context) -> Result<Self::Checked, TransactionValidityError> {
Ok(self)
}
}
impl<Call: Codec + Sync + Send, Extra> traits::Extrinsic for MyTestXt<Call, Extra> {
type Call = Call;
type SignaturePayload = (AccountId, Extra);
fn is_signed(&self) -> Option<bool> {
Some(self.signature.is_some())
}
fn new(c: Call, sig: Option<Self::SignaturePayload>) -> Option<Self> {
Some(MyTestXt {
signature: sig,
call: c,
})
}
}
impl SignedExtension for MyExtra {
type AccountId = AccountId;
type Call = Call;
type AdditionalSigned = ();
type Pre = ();
const IDENTIFIER: &'static str = "testextension";
fn additional_signed(&self) -> Result<Self::AdditionalSigned, TransactionValidityError> {
Ok(())
}
}
impl<Origin, Call, Extra> Applyable for MyTestXt<Call, Extra>
where
Call:
'static + Sized + Send + Sync + Clone + Eq + Codec + Debug + Dispatchable<Origin = Origin>,
Extra: SignedExtension<AccountId = AccountId, Call = Call>,
Origin: From<Option<AccountId32>>,
{
type Call = Call;
/// Checks to see if this is a valid *transaction*. It returns information on it if so.
fn validate<U: ValidateUnsigned<Call = Self::Call>>(
&self,
_source: TransactionSource,
_info: &DispatchInfoOf<Self::Call>,
_len: usize,
) -> TransactionValidity {
Ok(Default::default())
}
/// Executes all necessary logic needed prior to dispatch and deconstructs into function call,
/// index and sender.
fn apply<U: ValidateUnsigned<Call = Self::Call>>(
self,
info: &DispatchInfoOf<Self::Call>,
len: usize,
) -> ApplyExtrinsicResultWithInfo<PostDispatchInfoOf<Self::Call>> {
let maybe_who = if let Some((who, extra)) = self.signature {
Extra::pre_dispatch(extra, &who, &self.call, info, len)?;
Some(who)
} else {
Extra::pre_dispatch_unsigned(&self.call, info, len)?;
None
};
Ok(self.call.dispatch(maybe_who.into()))
}
}
impl<Call, Extra> Serialize for MyTestXt<Call, Extra>
where
MyTestXt<Call, Extra>: Encode,
{
fn serialize<S>(&self, seq: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.using_encoded(|bytes| seq.serialize_bytes(bytes))
}
}
impl<Call: Encode, Extra: Encode> GetDispatchInfo for MyTestXt<Call, Extra> {
fn get_dispatch_info(&self) -> DispatchInfo {
// for testing: weight == size.
DispatchInfo {
weight: self.encode().len() as _,
pays_fee: Pays::No,
..Default::default()
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Encode, Decode)]
pub struct | ;
pub type TestExtrinsic = MyTestXt<Call, MyExtra>;
parameter_types! {
pub const BlockHashCount: u64 = 250;
pub const MaximumBlockWeight: Weight = 1024;
pub const MaximumBlockLength: u32 = 2 * 1024;
pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);
pub const ExistentialDeposit: u128 = 0;
}
impl frame_system::Config for Runtime {
type BaseCallFilter = ();
type BlockWeights = ();
type BlockLength = ();
type Origin = Origin;
type Call = Call;
type Index = u64;
type BlockNumber = u64;
type Hash = H256;
type Hashing = BlakeTwo256;
type AccountId = AccountId;
type Lookup = IdentityLookup<Self::AccountId>;
type Header = Header;
type Event = Event;
type BlockHashCount = BlockHashCount;
type DbWeight = ();
type Version = ();
type AccountData = pallet_balances::AccountData<Balance>;
type OnNewAccount = ();
type OnKilledAccount = ();
type SystemWeightInfo = ();
type PalletInfo = PalletInfo;
type SS58Prefix = ();
}
impl<T: SigningTypes> frame_system::offchain::SignMessage<T> for Runtime {
type SignatureData = ();
fn sign_message(&self, _message: &[u8]) -> Self::SignatureData {
unimplemented!()
}
fn sign<TPayload, F>(&self, _f: F) -> Self::SignatureData
where
F: Fn(&Account<T>) -> TPayload,
TPayload: frame_system::offchain::SignedPayload<T>,
{
unimplemented!()
}
}
impl<LocalCall> frame_system::offchain::CreateSignedTransaction<LocalCall> for Runtime
where
Call: From<LocalCall>,
{
fn create_transaction<C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>>(
call: Call,
_public: <Signature as Verify>::Signer,
account: <Runtime as frame_system::Config>::AccountId,
_index: <Runtime as frame_system::Config>::Index,
) -> Option<(
Call,
<TestExtrinsic as sp_runtime::traits::Extrinsic>::SignaturePayload,
)> {
Some((call, (account, MyExtra {})))
}
}
impl frame_system::offchain::SigningTypes for Runtime {
type Public = <Signature as Verify>::Signer;
type Signature = Signature;
}
impl<C> frame_system::offchain::SendTransactionTypes<C> for Runtime
where
Call: From<C>,
{
type OverarchingCall = Call;
type Extrinsic = TestExtrinsic;
}
impl pallet_balances::Config for Runtime {
/// The type for recording an account's balance.
type Balance = Balance;
/// The ubiquitous event type.
type Event = Event;
type DustRemoval = ();
type ExistentialDeposit = ExistentialDeposit;
type AccountStore = System;
type WeightInfo = ();
type MaxLocks = ();
}
impl tokens::Config for Runtime {
type Event = Event;
type Balance = Balance;
type Amount = Amount;
type CurrencyId = <Runtime as assets::Config>::AssetId;
type WeightInfo = ();
type ExistentialDeposits = ExistentialDeposits;
type OnDust = ();
}
impl currencies::Config for Runtime {
type Event = Event;
type MultiCurrency = Tokens;
type NativeCurrency = BasicCurrencyAdapter<Runtime, Balances, Amount, BlockNumber>;
type GetNativeCurrencyId = <Runtime as assets::Config>::GetBaseAssetId;
type WeightInfo = ();
}
impl assets::Config for Runtime {
type Event = Event;
type ExtraAccountId = [u8; 32];
type ExtraAssetRecordArg =
common::AssetIdExtraAssetRecordArg<common::DEXId, common::LiquiditySourceType, [u8; 32]>;
type AssetId = common::AssetId32<PredefinedAssetId>;
type GetBaseAssetId = GetBaseAssetId;
type Currency = currencies::Module<Runtime>;
type WeightInfo = ();
}
impl common::Config for Runtime {
type DEXId = common::DEXId;
type LstId = common::LiquiditySourceType;
}
impl permissions::Config for Runtime {
type Event = Event;
}
impl bridge_multisig::Config for Runtime {
type Call = Call;
type Event = Event;
type Currency = Balances;
type DepositBase = DepositBase;
type DepositFactor = DepositFactor;
type MaxSignatories = MaxSignatories;
type WeightInfo = ();
}
impl pallet_sudo::Config for Runtime {
type Call = Call;
type Event = Event;
}
impl crate::Config for Runtime {
type PeerId = crate::crypto::TestAuthId;
type Call = Call;
type Event = Event;
type NetworkId = u32;
type GetEthNetworkId = EthNetworkId;
type WeightInfo = ();
}
impl sp_runtime::traits::ExtrinsicMetadata for TestExtrinsic {
const VERSION: u8 = 1;
type SignedExtensions = ();
}
construct_runtime!(
pub enum Runtime where
Block = Block,
NodeBlock = Block,
UncheckedExtrinsic = Unchecked | MyExtra | identifier_name |
mock.rs | Number = u64;
type Hash = H256;
type Hashing = BlakeTwo256;
type AccountId = AccountId;
type Lookup = IdentityLookup<Self::AccountId>;
type Header = Header;
type Event = Event;
type BlockHashCount = BlockHashCount;
type DbWeight = ();
type Version = ();
type AccountData = pallet_balances::AccountData<Balance>;
type OnNewAccount = ();
type OnKilledAccount = ();
type SystemWeightInfo = ();
type PalletInfo = PalletInfo;
type SS58Prefix = ();
}
impl<T: SigningTypes> frame_system::offchain::SignMessage<T> for Runtime {
type SignatureData = ();
fn sign_message(&self, _message: &[u8]) -> Self::SignatureData {
unimplemented!()
}
fn sign<TPayload, F>(&self, _f: F) -> Self::SignatureData
where
F: Fn(&Account<T>) -> TPayload,
TPayload: frame_system::offchain::SignedPayload<T>,
{
unimplemented!()
}
}
impl<LocalCall> frame_system::offchain::CreateSignedTransaction<LocalCall> for Runtime
where
Call: From<LocalCall>,
{
fn create_transaction<C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>>(
call: Call,
_public: <Signature as Verify>::Signer,
account: <Runtime as frame_system::Config>::AccountId,
_index: <Runtime as frame_system::Config>::Index,
) -> Option<(
Call,
<TestExtrinsic as sp_runtime::traits::Extrinsic>::SignaturePayload,
)> {
Some((call, (account, MyExtra {})))
}
}
impl frame_system::offchain::SigningTypes for Runtime {
type Public = <Signature as Verify>::Signer;
type Signature = Signature;
}
impl<C> frame_system::offchain::SendTransactionTypes<C> for Runtime
where
Call: From<C>,
{
type OverarchingCall = Call;
type Extrinsic = TestExtrinsic;
}
impl pallet_balances::Config for Runtime {
/// The type for recording an account's balance.
type Balance = Balance;
/// The ubiquitous event type.
type Event = Event;
type DustRemoval = ();
type ExistentialDeposit = ExistentialDeposit;
type AccountStore = System;
type WeightInfo = ();
type MaxLocks = ();
}
impl tokens::Config for Runtime {
type Event = Event;
type Balance = Balance;
type Amount = Amount;
type CurrencyId = <Runtime as assets::Config>::AssetId;
type WeightInfo = ();
type ExistentialDeposits = ExistentialDeposits;
type OnDust = ();
}
impl currencies::Config for Runtime {
type Event = Event;
type MultiCurrency = Tokens;
type NativeCurrency = BasicCurrencyAdapter<Runtime, Balances, Amount, BlockNumber>;
type GetNativeCurrencyId = <Runtime as assets::Config>::GetBaseAssetId;
type WeightInfo = ();
}
impl assets::Config for Runtime {
type Event = Event;
type ExtraAccountId = [u8; 32];
type ExtraAssetRecordArg =
common::AssetIdExtraAssetRecordArg<common::DEXId, common::LiquiditySourceType, [u8; 32]>;
type AssetId = common::AssetId32<PredefinedAssetId>;
type GetBaseAssetId = GetBaseAssetId;
type Currency = currencies::Module<Runtime>;
type WeightInfo = ();
}
impl common::Config for Runtime {
type DEXId = common::DEXId;
type LstId = common::LiquiditySourceType;
}
impl permissions::Config for Runtime {
type Event = Event;
}
impl bridge_multisig::Config for Runtime {
type Call = Call;
type Event = Event;
type Currency = Balances;
type DepositBase = DepositBase;
type DepositFactor = DepositFactor;
type MaxSignatories = MaxSignatories;
type WeightInfo = ();
}
impl pallet_sudo::Config for Runtime {
type Call = Call;
type Event = Event;
}
impl crate::Config for Runtime {
type PeerId = crate::crypto::TestAuthId;
type Call = Call;
type Event = Event;
type NetworkId = u32;
type GetEthNetworkId = EthNetworkId;
type WeightInfo = ();
}
impl sp_runtime::traits::ExtrinsicMetadata for TestExtrinsic {
const VERSION: u8 = 1;
type SignedExtensions = ();
}
construct_runtime!(
pub enum Runtime where
Block = Block,
NodeBlock = Block,
UncheckedExtrinsic = UncheckedExtrinsic
{
System: frame_system::{Module, Call, Config, Storage, Event<T>},
Balances: pallet_balances::{Module, Call, Storage, Config<T>, Event<T>},
Multisig: bridge_multisig::{Module, Call, Storage, Config<T>, Event<T>},
Tokens: tokens::{Module, Call, Storage, Config<T>, Event<T>},
Currencies: currencies::{Module, Call, Storage, Event<T>},
Assets: assets::{Module, Call, Storage, Config<T>, Event<T>},
Permissions: permissions::{Module, Call, Storage, Config<T>, Event<T>},
Sudo: pallet_sudo::{Module, Call, Storage, Config<T>, Event<T>},
EthBridge: eth_bridge::{Module, Call, Storage, Config<T>, Event<T>},
}
);
pub type SubstrateAccountId = <<Signature as Verify>::Signer as IdentifyAccount>::AccountId;
pub struct State {
pub networks: HashMap<u32, ExtendedNetworkConfig>,
pub authority_account_id: AccountId32,
pub pool_state: Arc<RwLock<PoolState>>,
pub offchain_state: Arc<RwLock<OffchainState>>,
}
#[derive(Clone, Debug)]
pub struct ExtendedNetworkConfig {
pub ocw_keypairs: Vec<(MultiSigner, AccountId32, [u8; 32])>,
pub config: NetworkConfig<Runtime>,
}
pub struct ExtBuilder {
pub networks: HashMap<u32, ExtendedNetworkConfig>,
last_network_id: u32,
root_account_id: AccountId32,
}
impl Default for ExtBuilder {
fn default() -> Self {
let mut builder = Self {
networks: Default::default(),
last_network_id: Default::default(),
root_account_id: get_account_id_from_seed::<sr25519::Public>("Alice"),
};
builder.add_network(
vec![
AssetConfig::Thischain { id: PSWAP.into() },
AssetConfig::Sidechain {
id: XOR.into(),
sidechain_id: sp_core::H160::from_str(
"40fd72257597aa14c7231a7b1aaa29fce868f677",
)
.unwrap(),
owned: true,
precision: DEFAULT_BALANCE_PRECISION,
},
AssetConfig::Sidechain {
id: VAL.into(),
sidechain_id: sp_core::H160::from_str(
"3f9feac97e5feb15d8bf98042a9a01b515da3dfb",
)
.unwrap(),
owned: true,
precision: DEFAULT_BALANCE_PRECISION,
},
],
Some(vec![
(XOR.into(), common::balance!(350000)),
(VAL.into(), common::balance!(33900000)),
]),
Some(4),
);
builder
}
}
impl ExtBuilder {
pub fn new() -> Self {
Self {
networks: Default::default(),
last_network_id: Default::default(),
root_account_id: get_account_id_from_seed::<sr25519::Public>("Alice"),
}
}
pub fn add_currency(
&mut self,
network_id: u32,
currency: AssetConfig<AssetId32<PredefinedAssetId>>,
) {
self.networks
.get_mut(&network_id)
.unwrap()
.config
.assets
.push(currency);
}
pub fn add_network(
&mut self,
assets: Vec<AssetConfig<AssetId32<PredefinedAssetId>>>,
reserves: Option<Vec<(AssetId32<PredefinedAssetId>, Balance)>>,
peers_num: Option<usize>,
) -> u32 | {
let net_id = self.last_network_id;
let multisig_account_id = bridge_multisig::Module::<Runtime>::multi_account_id(
&self.root_account_id,
1,
net_id as u64 + 10,
);
let peers_keys = gen_peers_keys(&format!("OCW{}", net_id), peers_num.unwrap_or(4));
self.networks.insert(
net_id,
ExtendedNetworkConfig {
config: NetworkConfig {
initial_peers: peers_keys.iter().map(|(_, id, _)| id).cloned().collect(),
bridge_account_id: multisig_account_id.clone(),
assets,
bridge_contract_address: Default::default(),
reserves: reserves.unwrap_or_default(),
},
ocw_keypairs: peers_keys,
}, | identifier_body |
|
lofsigrank.py | genes by burden of LOF mutations
gene_scores = sorted(lof_sig_scores(lof_table, samples),
key=lambda pair: pair[1])
# Step_3: Compare gene LOF scores to a simulated "background" distribution
if args.permutations:
# Calculate gene score percentiles
orig_pctiles = numpy.arange(1, 0, -1. / len(gene_scores))
# Calculate percentiles for simulated "background" scores
perm_scores = simulate_lof_scores(data_table, args.permutations,
genes, samples, summary_function)
# Calculate FDR for each gene
table_header = ["Gene", "Obs.Score", "Obs.Pctile", "Sim.Score",
"Sim.Pctile", "FDR"]
table_rows = []
perm_pctiles = numpy.arange(1, 0, -1. / len(perm_scores))
perm_pctiles_rev = perm_pctiles[::-1]
for (gene, obs_score), obs_pctile in zip(gene_scores, orig_pctiles):
score_rank = perm_scores.searchsorted(obs_score)
if score_rank == len(perm_scores):
exp_pctile = 0
fdr = 0.0
else:
exp_pctile = perm_pctiles[score_rank]
# FDR: % false positives / % true positives
fdr = min(1.0, exp_pctile / obs_pctile)
exp_score = perm_scores[len(perm_scores) - 1 -
perm_pctiles_rev.searchsorted(obs_pctile)]
table_rows.append((gene, obs_score, obs_pctile, exp_score,
exp_pctile, fdr))
out_table = pandas.DataFrame.from_records(table_rows,
columns=table_header)
else:
out_table = pandas.DataFrame.from_records(gene_scores,
columns=["Gene", "Score"])
# Output as a table to file or screen
if args.output:
out_table.to_csv(args.output, index=False)
else:
print(out_table.to_string(index=False))
def read_list(fname):
"""Parse a "list" file of one string per line."""
with open(fname) as handle:
items = [line.strip() for line in handle]
return items
# _____________________________________________________________________________
# Step_1: Calculate gene-level mutational statistics
def make_lof_table(data_table, my_genes, my_samples, summary_func):
|
gs_lookup = group_data_by_gs(data_table)
for gene in my_genes:
synonymous = missense_benign = missense_possibly = missense_probably = \
missense_na = frameshift = nonsense = splice = indel = 0
out_row = [gene]
for sample in my_samples:
normalized = [0]
# Count mutations of each type for this gene and sample
for entry in gs_lookup[gene][sample]:
if entry['muttype'] == 'Silent':
synonymous += 1
continue
if entry['muttype'] == 'Intron':
# Shouldn't be here; ignore
continue
if entry['muttype'] == 'Missense_Mutation':
if entry['consequence'] == 'benign':
missense_benign += 1
elif entry['consequence'] == 'possibly':
missense_possibly += 1
elif entry['consequence'] == 'probably':
missense_probably += 1
elif entry['consequence'] == 'NA':
missense_na += 1
else:
print("Unhandled missense consequence level:",
entry['consequence'], file=sys.stderr)
elif entry['muttype'] == 'Nonsense_Mutation':
nonsense += 1
elif entry['muttype'] == 'Splice_Site':
splice += 1
elif entry['muttype'] in ('Frame_Shift_Ins', 'Frame_Shift_Del'):
frameshift += 1
elif entry['muttype'] in ('In_Frame_Ins', 'In_Frame_Del'):
indel += 1
else:
print("Unhandled mutation type:", entry['muttype'],
file=sys.stderr)
continue
normalized.append(entry['normalized'])
# Summarize the normalized mutation counts for this gene and sample
out_row.append(summary_func(normalized))
out_row.extend((missense_benign, missense_possibly, missense_probably,
missense_na, indel, nonsense, frameshift, splice,
synonymous))
table_records.append(out_row)
return pandas.DataFrame.from_records(table_records, columns=table_header)
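# Illustrative example (added; numbers made up): with a capped-sum summary such
# as summary_func = lambda nmafs: min(sum(nmafs), 2), a gene hit by two
# non-synonymous mutations with NMAF 0.8 and 0.6 in one sample gets
# min(0.8 + 0.6, 2) = 1.4 in that sample's column (estimated alleles lost),
# unmutated samples get 0, and the nine trailing columns carry the raw
# per-type mutation counts summed over all samples.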
def group_data_by_gs(data_table):
"""Group relevant fields in a data table by gene and sample."""
gene_data = collections.defaultdict(lambda: collections.defaultdict(list))
for _idx, row in data_table.iterrows():
samp = row['sample']
gene = row['gene']
gene_data[gene][samp].append({
'muttype': row['type'].strip(),
'normalized': row['Normalized'], # NMAF in the manuscript
'consequence': row['MissenseConsequence'].strip(),
})
return gene_data
# _____________________________________________________________________________
# Step_2: Rank genes by burden of LOF mutations
def lof_sig_scores(table, samples, verbose=True):
"""Calculate LOF mutation burden scores for genes in the processed table."""
mut_probdam = 'Missense:Probably'
mut_syn = 'Synonymous'
mut_trunc = ['Nonsense', 'Frameshift', 'Splice-site']
mut_other = ['Missense:Benign', 'Missense:Possibly', 'MissenseNA', 'Indel']
mut_all = [mut_probdam, mut_syn] + mut_trunc + mut_other
# Calculate the global nonsynonymous:synonymous ratio ---------------------
# Within each mutation category, sum counts (across all genes)
tot_count_probdam = sum(table[mut_probdam])
tot_count_syn = sum(table[mut_syn])
tot_count_trunc = sum(itertools.chain(*(list(table[col])
for col in mut_trunc)))
tot_count_other = sum(itertools.chain(*(list(table[col])
for col in mut_other)))
# Global mutation count across all categories and genes (= 3504)
tot_count_all = sum((tot_count_probdam, tot_count_syn, tot_count_trunc,
tot_count_other))
if verbose:
print("Counted", tot_count_all, "mutations across", len(table), "genes",
"and", len(samples), "samples", file=sys.stderr)
# Fraction of global mutations in each category of interest
tot_frac_probdam = tot_count_probdam / tot_count_all
tot_frac_syn = tot_count_syn / tot_count_all
tot_frac_trunc = tot_count_trunc / tot_count_all
# Global nonsynonymous:synonymous ratio = (1-syn)/syn (= 2.13697)
tot_ns_s_ratio = (1 - tot_frac_syn) / tot_frac_syn
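# Worked check (added): the comments quote 3504 total mutations and a ratio of
# 2.13697, which back-solves to tot_frac_syn = 1 / (1 + 2.13697) ~= 0.3188,
# i.e. roughly 0.3188 * 3504 ~= 1117 synonymous calls, and indeed
# (1 - 0.3188) / 0.3188 ~= 2.137.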
# Calculate each gene's mutation score ------------------------------------
for _idx, row in table.iterrows():
gene_count_all = sum([row[col] for col in mut_all])
if not gene_count_all:
# Gene is not mutated at all --> zero score
yield (row['Gene'], 0.0)
continue
# Initial score is the sum of the 'Normalized' (NMAF) values across all samples
raw_score = sum(row[sid] for sid in samples)
# Adjust for NS:S ratio
gene_count_syn = row[mut_syn]
syn_factor = max(1 - tot_ns_s_ratio * gene_count_syn / gene_count_all,
0)
new_score = raw_score * syn_factor
# Adjust for "probably damaging" missense and truncating mutations
gene_frac_probdam = row[mut_probdam] / gene_count_all
probdam_factor = 1 + gene_frac_probdam - tot_frac_probdam
gene_frac_trunc = sum([row[col] for col in mut_trunc]) / gene_count_all
trunc_factor = gene_frac_trunc / tot_frac_trunc
final_score = new_score * probdam_factor * trunc_factor
yield (row['Gene | """Calculate gene-level mutational statistics from a table of mutations.
Input: nested dict of genes -> samples -> list of mut. type, NMAF, Polyphen
Output: table stratifying the mutational status of a gene in each sample.
The output table has a row for each gene and a column for each sample, in
which there is a number ranging from 0-2 that corresponds to the estimated
number of alleles lost in the sample. This value is calculated by summing
the normalized mutant allele frequencies (NMAF) of all non-synonymous
mutations striking the gene in this sample, capped at 2. In addition, the
final 9 columns of output are the counts of each mutation type (not weighted
by MAF).
This output is used as input to Step 2 to calculate the LOF burden.
"""
table_header = ["Gene"] + my_samples + [
"Missense:Benign", "Missense:Possibly", "Missense:Probably",
"MissenseNA", "Indel", "Nonsense", "Frameshift", "Splice-site",
"Synonymous"]
table_records = [] | identifier_body |
lofsigrank.py | genes by burden of LOF mutations
gene_scores = sorted(lof_sig_scores(lof_table, samples),
key=lambda pair: pair[1])
# Step_3: Compare gene LOF scores to a simulated "background" distribution
if args.permutations:
# Calculate gene score percentiles
orig_pctiles = numpy.arange(1, 0, -1. / len(gene_scores))
# Calculate percentiles for simulated "background" scores
perm_scores = simulate_lof_scores(data_table, args.permutations,
genes, samples, summary_function)
# Calculate FDR for each gene
table_header = ["Gene", "Obs.Score", "Obs.Pctile", "Sim.Score",
"Sim.Pctile", "FDR"]
table_rows = []
perm_pctiles = numpy.arange(1, 0, -1. / len(perm_scores))
perm_pctiles_rev = perm_pctiles[::-1]
for (gene, obs_score), obs_pctile in zip(gene_scores, orig_pctiles):
score_rank = perm_scores.searchsorted(obs_score)
if score_rank == len(perm_scores):
exp_pctile = 0
fdr = 0.0
else:
exp_pctile = perm_pctiles[score_rank]
# FDR: % false positives / % true positives
fdr = min(1.0, exp_pctile / obs_pctile)
exp_score = perm_scores[len(perm_scores) - 1 -
perm_pctiles_rev.searchsorted(obs_pctile)]
table_rows.append((gene, obs_score, obs_pctile, exp_score,
exp_pctile, fdr))
out_table = pandas.DataFrame.from_records(table_rows,
columns=table_header)
else:
out_table = pandas.DataFrame.from_records(gene_scores,
columns=["Gene", "Score"])
# Output as a table to file or screen
if args.output:
out_table.to_csv(args.output, index=False)
else:
print(out_table.to_string(index=False))
def read_list(fname):
"""Parse a "list" file of one string per line."""
with open(fname) as handle:
items = [line.strip() for line in handle]
return items
# _____________________________________________________________________________
# Step_1: Calculate gene-level mutational statistics
def make_lof_table(data_table, my_genes, my_samples, summary_func):
"""Calculate gene-level mutational statistics from a table of mutations.
Input: nested dict of genes -> samples -> list of mut. type, NMAF, Polyphen
Output: table stratifying the mutational status of a gene in each sample.
The output table has a row for each gene and a column for each sample, in
which there is a number ranging from 0-2 that corresponds to the estimated
number of alleles lost in the sample. This value is calculated by summing
the normalized mutant allele frequencies (NMAF) of all non-synonymous
mutations striking the gene in this sample, capped at 2. In addition, the
final 9 columns of output are the counts of each mutation type (not weighted
by MAF).
This output is used as input to Step 2 to calculate the LOF burden.
"""
table_header = ["Gene"] + my_samples + [
"Missense:Benign", "Missense:Possibly", "Missense:Probably",
"MissenseNA", "Indel", "Nonsense", "Frameshift", "Splice-site",
"Synonymous"]
table_records = []
gs_lookup = group_data_by_gs(data_table)
for gene in my_genes:
synonymous = missense_benign = missense_possibly = missense_probably = \
missense_na = frameshift = nonsense = splice = indel = 0
out_row = [gene]
for sample in my_samples:
normalized = [0]
# Count mutations of each type for this gene and sample
for entry in gs_lookup[gene][sample]:
if entry['muttype'] == 'Silent':
synonymous += 1
continue
if entry['muttype'] == 'Intron':
# Shouldn't be here; ignore
continue
if entry['muttype'] == 'Missense_Mutation':
if entry['consequence'] == 'benign':
missense_benign += 1
elif entry['consequence'] == 'possibly':
missense_possibly += 1
elif entry['consequence'] == 'probably':
missense_probably += 1
elif entry['consequence'] == 'NA':
missense_na += 1
else:
print("Unhandled missense consequence level:",
entry['consequence'], file=sys.stderr)
elif entry['muttype'] == 'Nonsense_Mutation':
nonsense += 1
elif entry['muttype'] == 'Splice_Site':
splice += 1
elif entry['muttype'] in ('Frame_Shift_Ins', 'Frame_Shift_Del'):
frameshift += 1
elif entry['muttype'] in ('In_Frame_Ins', 'In_Frame_Del'):
indel += 1
else:
print("Unhandled mutation type:", entry['muttype'],
file=sys.stderr)
continue
normalized.append(entry['normalized'])
# Summarize the normalized mutation counts for this gene and sample
out_row.append(summary_func(normalized))
out_row.extend((missense_benign, missense_possibly, missense_probably,
missense_na, indel, nonsense, frameshift, splice,
synonymous))
table_records.append(out_row)
return pandas.DataFrame.from_records(table_records, columns=table_header)
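# Illustrative example (added; numbers made up): with a capped-sum summary such
# as summary_func = lambda nmafs: min(sum(nmafs), 2), a gene hit by two
# non-synonymous mutations with NMAF 0.8 and 0.6 in one sample gets
# min(0.8 + 0.6, 2) = 1.4 in that sample's column (estimated alleles lost),
# unmutated samples get 0, and the nine trailing columns carry the raw
# per-type mutation counts summed over all samples.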
def group_data_by_gs(data_table):
"""Group relevant fields in a data table by gene and sample."""
gene_data = collections.defaultdict(lambda: collections.defaultdict(list))
for _idx, row in data_table.iterrows():
|
return gene_data
# _____________________________________________________________________________
# Step_2: Rank genes by burden of LOF mutations
def lof_sig_scores(table, samples, verbose=True):
"""Calculate LOF mutation burden scores for genes in the processed table."""
mut_probdam = 'Missense:Probably'
mut_syn = 'Synonymous'
mut_trunc = ['Nonsense', 'Frameshift', 'Splice-site']
mut_other = ['Missense:Benign', 'Missense:Possibly', 'MissenseNA', 'Indel']
mut_all = [mut_probdam, mut_syn] + mut_trunc + mut_other
# Calculate the global nonsynonymous:synonymous ratio ---------------------
# Within each mutation category, sum counts (across all genes)
tot_count_probdam = sum(table[mut_probdam])
tot_count_syn = sum(table[mut_syn])
tot_count_trunc = sum(itertools.chain(*(list(table[col])
for col in mut_trunc)))
tot_count_other = sum(itertools.chain(*(list(table[col])
for col in mut_other)))
# Global mutation count across all categories and genes (= 3504)
tot_count_all = sum((tot_count_probdam, tot_count_syn, tot_count_trunc,
tot_count_other))
if verbose:
print("Counted", tot_count_all, "mutations across", len(table), "genes",
"and", len(samples), "samples", file=sys.stderr)
# Fraction of global mutations in each category of interest
tot_frac_probdam = tot_count_probdam / tot_count_all
tot_frac_syn = tot_count_syn / tot_count_all
tot_frac_trunc = tot_count_trunc / tot_count_all
# Global nonsynonymous:synonymous ratio = (1-syn)/syn (= 2.13697)
tot_ns_s_ratio = (1 - tot_frac_syn) / tot_frac_syn
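# Worked check (added): the comments quote 3504 total mutations and a ratio of
# 2.13697, which back-solves to tot_frac_syn = 1 / (1 + 2.13697) ~= 0.3188,
# i.e. roughly 0.3188 * 3504 ~= 1117 synonymous calls, and indeed
# (1 - 0.3188) / 0.3188 ~= 2.137.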
# Calculate each gene's mutation score ------------------------------------
for _idx, row in table.iterrows():
gene_count_all = sum([row[col] for col in mut_all])
if not gene_count_all:
# Gene is not mutated at all --> zero score
yield (row['Gene'], 0.0)
continue
# Initial score is the sum of the 'Normalized' (NMAF) values across all samples
raw_score = sum(row[sid] for sid in samples)
# Adjust for NS:S ratio
gene_count_syn = row[mut_syn]
syn_factor = max(1 - tot_ns_s_ratio * gene_count_syn / gene_count_all,
0)
new_score = raw_score * syn_factor
# Adjust for "probably damaging" missense and truncating mutations
gene_frac_probdam = row[mut_probdam] / gene_count_all
probdam_factor = 1 + gene_frac_probdam - tot_frac_probdam
gene_frac_trunc = sum([row[col] for col in mut_trunc]) / gene_count_all
trunc_factor = gene_frac_trunc / tot_frac_trunc
final_score = new_score * probdam_factor * trunc_factor
yield (row['Gene | samp = row['sample']
gene = row['gene']
gene_data[gene][samp].append({
'muttype': row['type'].strip(),
'normalized': row['Normalized'], # NMAF in the manuscript
'consequence': row['MissenseConsequence'].strip(),
}) | conditional_block |
lofsigrank.py | genes by burden of LOF mutations
gene_scores = sorted(lof_sig_scores(lof_table, samples),
key=lambda pair: pair[1])
# Step_3: Compare gene LOF scores to a simulated "background" distribution
if args.permutations:
# Calculate gene score percentiles
orig_pctiles = numpy.arange(1, 0, -1. / len(gene_scores))
# Calculate percentiles for simulated "background" scores
perm_scores = simulate_lof_scores(data_table, args.permutations,
genes, samples, summary_function)
# Calculate FDR for each gene
table_header = ["Gene", "Obs.Score", "Obs.Pctile", "Sim.Score",
"Sim.Pctile", "FDR"]
table_rows = []
perm_pctiles = numpy.arange(1, 0, -1. / len(perm_scores))
perm_pctiles_rev = perm_pctiles[::-1]
for (gene, obs_score), obs_pctile in zip(gene_scores, orig_pctiles):
score_rank = perm_scores.searchsorted(obs_score)
if score_rank == len(perm_scores):
exp_pctile = 0
fdr = 0.0
else:
exp_pctile = perm_pctiles[score_rank]
# FDR: % false positives / % true positives
fdr = min(1.0, exp_pctile / obs_pctile)
exp_score = perm_scores[len(perm_scores) - 1 -
perm_pctiles_rev.searchsorted(obs_pctile)]
table_rows.append((gene, obs_score, obs_pctile, exp_score,
exp_pctile, fdr))
out_table = pandas.DataFrame.from_records(table_rows,
columns=table_header)
else:
out_table = pandas.DataFrame.from_records(gene_scores,
columns=["Gene", "Score"])
# Output as a table to file or screen
if args.output:
out_table.to_csv(args.output, index=False)
else:
print(out_table.to_string(index=False))
def read_list(fname):
"""Parse a "list" file of one string per line."""
with open(fname) as handle:
items = [line.strip() for line in handle]
return items
# _____________________________________________________________________________
# Step_1: Calculate gene-level mutational statistics
def make_lof_table(data_table, my_genes, my_samples, summary_func):
"""Calculate gene-level mutational statistics from a table of mutations.
Input: nested dict of genes -> samples -> list of mut. type, NMAF, Polyphen
Output: table stratifying the mutational status of a gene in each sample.
The output table has a row for each gene and a column for each sample, in
which there is a number ranging from 0-2 that corresponds to the estimated
number of alleles lost in the sample. This value is calculated by summing
the normalized mutant allele frequencies (NMAF) of all non-synonymous
mutations striking the gene in this sample, capped at 2. In addition, the
final 9 columns of output are the counts of each mutation type (not weighted
by MAF).
This output is used as input to Step 2 to calculate the LOF burden.
"""
table_header = ["Gene"] + my_samples + [
"Missense:Benign", "Missense:Possibly", "Missense:Probably",
"MissenseNA", "Indel", "Nonsense", "Frameshift", "Splice-site",
"Synonymous"]
table_records = []
gs_lookup = group_data_by_gs(data_table)
for gene in my_genes:
synonymous = missense_benign = missense_possibly = missense_probably = \
missense_na = frameshift = nonsense = splice = indel = 0
out_row = [gene] | synonymous += 1
continue
if entry['muttype'] == 'Intron':
# Shouldn't be here; ignore
continue
if entry['muttype'] == 'Missense_Mutation':
if entry['consequence'] == 'benign':
missense_benign += 1
elif entry['consequence'] == 'possibly':
missense_possibly += 1
elif entry['consequence'] == 'probably':
missense_probably += 1
elif entry['consequence'] == 'NA':
missense_na += 1
else:
print("Unhandled missense consequence level:",
entry['consequence'], file=sys.stderr)
elif entry['muttype'] == 'Nonsense_Mutation':
nonsense += 1
elif entry['muttype'] == 'Splice_Site':
splice += 1
elif entry['muttype'] in ('Frame_Shift_Ins', 'Frame_Shift_Del'):
frameshift += 1
elif entry['muttype'] in ('In_Frame_Ins', 'In_Frame_Del'):
indel += 1
else:
print("Unhandled mutation type:", entry['muttype'],
file=sys.stderr)
continue
normalized.append(entry['normalized'])
# Summarize the normalized mutation counts for this gene and sample
out_row.append(summary_func(normalized))
out_row.extend((missense_benign, missense_possibly, missense_probably,
missense_na, indel, nonsense, frameshift, splice,
synonymous))
table_records.append(out_row)
return pandas.DataFrame.from_records(table_records, columns=table_header)
def group_data_by_gs(data_table):
"""Group relevant fields in a data table by gene and sample."""
gene_data = collections.defaultdict(lambda: collections.defaultdict(list))
for _idx, row in data_table.iterrows():
samp = row['sample']
gene = row['gene']
gene_data[gene][samp].append({
'muttype': row['type'].strip(),
'normalized': row['Normalized'], # NMAF in the manuscript
'consequence': row['MissenseConsequence'].strip(),
})
return gene_data
# _____________________________________________________________________________
# Step_2: Rank genes by burden of LOF mutations
def lof_sig_scores(table, samples, verbose=True):
"""Calculate LOF mutation burden scores for genes in the processed table."""
mut_probdam = 'Missense:Probably'
mut_syn = 'Synonymous'
mut_trunc = ['Nonsense', 'Frameshift', 'Splice-site']
mut_other = ['Missense:Benign', 'Missense:Possibly', 'MissenseNA', 'Indel']
mut_all = [mut_probdam, mut_syn] + mut_trunc + mut_other
# Calculate the global nonsynonymous:synonymous ratio ---------------------
# Within each mutation category, sum counts (across all genes)
tot_count_probdam = sum(table[mut_probdam])
tot_count_syn = sum(table[mut_syn])
tot_count_trunc = sum(itertools.chain(*(list(table[col])
for col in mut_trunc)))
tot_count_other = sum(itertools.chain(*(list(table[col])
for col in mut_other)))
# Global mutation count across all categories and genes (= 3504)
tot_count_all = sum((tot_count_probdam, tot_count_syn, tot_count_trunc,
tot_count_other))
if verbose:
print("Counted", tot_count_all, "mutations across", len(table), "genes",
"and", len(samples), "samples", file=sys.stderr)
# Fraction of global mutations in each category of interest
tot_frac_probdam = tot_count_probdam / tot_count_all
tot_frac_syn = tot_count_syn / tot_count_all
tot_frac_trunc = tot_count_trunc / tot_count_all
# Global nonsynonymous:synonymous ratio = (1-syn)/syn (= 2.13697)
tot_ns_s_ratio = (1 - tot_frac_syn) / tot_frac_syn
# Calculate each gene's mutation score ------------------------------------
for _idx, row in table.iterrows():
gene_count_all = sum([row[col] for col in mut_all])
if not gene_count_all:
# Gene is not mutated at all --> zero score
yield (row['Gene'], 0.0)
continue
# Initial score is the sum the 'Normalized' values across all samples
raw_score = sum(row[sid] for sid in samples)
# Adjust for NS:S ratio
gene_count_syn = row[mut_syn]
syn_factor = max(1 - tot_ns_s_ratio * gene_count_syn / gene_count_all,
0)
new_score = raw_score * syn_factor
# Adjust for "probably damaging" missense and truncating mutations
gene_frac_probdam = row[mut_probdam] / gene_count_all
probdam_factor = 1 + gene_frac_probdam - tot_frac_probdam
gene_frac_trunc = sum([row[col] for col in mut_trunc]) / gene_count_all
trunc_factor = gene_frac_trunc / tot_frac_trunc
final_score = new_score * probdam_factor * trunc_factor
yield (row['Gene'], | for sample in my_samples:
normalized = [0]
# Count mutations of each type for this gene and sample
for entry in gs_lookup[gene][sample]:
if entry['muttype'] == 'Silent': | random_line_split |
lofsigrank.py | genes by burden of LOF mutations
gene_scores = sorted(lof_sig_scores(lof_table, samples),
key=lambda pair: pair[1])
# Step_3: Compare gene LOF scores to a simulated "background" distribution
if args.permutations:
# Calculate gene score percentiles
orig_pctiles = numpy.arange(1, 0, -1. / len(gene_scores))
# Calculate percentiles for simulated "background" scores
perm_scores = simulate_lof_scores(data_table, args.permutations,
genes, samples, summary_function)
# Calculate FDR for each gene
table_header = ["Gene", "Obs.Score", "Obs.Pctile", "Sim.Score",
"Sim.Pctile", "FDR"]
table_rows = []
perm_pctiles = numpy.arange(1, 0, -1. / len(perm_scores))
perm_pctiles_rev = perm_pctiles[::-1]
for (gene, obs_score), obs_pctile in zip(gene_scores, orig_pctiles):
score_rank = perm_scores.searchsorted(obs_score)
if score_rank == len(perm_scores):
exp_pctile = 0
fdr = 0.0
else:
exp_pctile = perm_pctiles[score_rank]
# FDR: % false positives / % true positives
fdr = min(1.0, exp_pctile / obs_pctile)
exp_score = perm_scores[len(perm_scores) - 1 -
perm_pctiles_rev.searchsorted(obs_pctile)]
table_rows.append((gene, obs_score, obs_pctile, exp_score,
exp_pctile, fdr))
out_table = pandas.DataFrame.from_records(table_rows,
columns=table_header)
else:
out_table = pandas.DataFrame.from_records(gene_scores,
columns=["Gene", "Score"])
# Output as a table to file or screen
if args.output:
out_table.to_csv(args.output, index=False)
else:
print(out_table.to_string(index=False))
def read_list(fname):
"""Parse a "list" file of one string per line."""
with open(fname) as handle:
items = [line.strip() for line in handle]
return items
# _____________________________________________________________________________
# Step_1: Calculate gene-level mutational statistics
def make_lof_table(data_table, my_genes, my_samples, summary_func):
"""Calculate gene-level mutational statistics from a table of mutations.
Input: nested dict of genes -> samples -> list of mut. type, NMAF, Polyphen
Output: table stratifying the mutational status of a gene in each sample.
The output table has a row for each gene and a column for each sample, in
which there is a number ranging from 0-2 that corresponds to the estimated
number of alleles lost in the sample. This value is calculated by summing
the normalized mutant alleles frequencies (NMAF) of all non-synonymous
mutations striking the gene in this sample, capped at 2. In addition, the
final 9 columns of output are the counts of each mutation type (not weighted
by MAF).
This output is used as input to Step 2 to calculate the LOF burden.
"""
table_header = ["Gene"] + my_samples + [
"Missense:Benign", "Missense:Possibly", "Missense:Probably",
"MissenseNA", "Indel", "Nonsense", "Frameshift", "Splice-site",
"Synonymous"]
table_records = []
gs_lookup = group_data_by_gs(data_table)
for gene in my_genes:
synonymous = missense_benign = missense_possibly = missense_probably = \
missense_na = frameshift = nonsense = splice = indel = 0
out_row = [gene]
for sample in my_samples:
normalized = [0]
# Count mutations of each type for this gene and sample
for entry in gs_lookup[gene][sample]:
if entry['muttype'] == 'Silent':
synonymous += 1
continue
if entry['muttype'] == 'Intron':
# Shouldn't be here; ignore
continue
if entry['muttype'] == 'Missense_Mutation':
if entry['consequence'] == 'benign':
missense_benign += 1
elif entry['consequence'] == 'possibly':
missense_possibly += 1
elif entry['consequence'] == 'probably':
missense_probably += 1
elif entry['consequence'] == 'NA':
missense_na += 1
else:
print("Unhandled missense consequence level:",
entry['consequence'], file=sys.stderr)
elif entry['muttype'] == 'Nonsense_Mutation':
nonsense += 1
elif entry['muttype'] == 'Splice_Site':
splice += 1
elif entry['muttype'] in ('Frame_Shift_Ins', 'Frame_Shift_Del'):
frameshift += 1
elif entry['muttype'] in ('In_Frame_Ins', 'In_Frame_Del'):
indel += 1
else:
print("Unhandled mutation type:", entry['muttype'],
file=sys.stderr)
continue
normalized.append(entry['normalized'])
# Summarize the normalized mutation counts for this gene and sample
out_row.append(summary_func(normalized))
out_row.extend((missense_benign, missense_possibly, missense_probably,
missense_na, indel, nonsense, frameshift, splice,
synonymous))
table_records.append(out_row)
return pandas.DataFrame.from_records(table_records, columns=table_header)
def | (data_table):
"""Group relevant fields in a data table by gene and sample."""
gene_data = collections.defaultdict(lambda: collections.defaultdict(list))
for _idx, row in data_table.iterrows():
samp = row['sample']
gene = row['gene']
gene_data[gene][samp].append({
'muttype': row['type'].strip(),
'normalized': row['Normalized'], # NMAF in the manuscript
'consequence': row['MissenseConsequence'].strip(),
})
return gene_data
# _____________________________________________________________________________
# Step_2: Rank genes by burden of LOF mutations
def lof_sig_scores(table, samples, verbose=True):
"""Calculate LOF mutation burden scores for genes in the processed table."""
mut_probdam = 'Missense:Probably'
mut_syn = 'Synonymous'
mut_trunc = ['Nonsense', 'Frameshift', 'Splice-site']
mut_other = ['Missense:Benign', 'Missense:Possibly', 'MissenseNA', 'Indel']
mut_all = [mut_probdam, mut_syn] + mut_trunc + mut_other
# Calculate the global nonsynonymous:synonymous ratio ---------------------
# Within each mutation category, sum counts (across all genes)
tot_count_probdam = sum(table[mut_probdam])
tot_count_syn = sum(table[mut_syn])
tot_count_trunc = sum(itertools.chain(*(list(table[col])
for col in mut_trunc)))
tot_count_other = sum(itertools.chain(*(list(table[col])
for col in mut_other)))
# Global mutation count across all categories and genes (= 3504)
tot_count_all = sum((tot_count_probdam, tot_count_syn, tot_count_trunc,
tot_count_other))
if verbose:
print("Counted", tot_count_all, "mutations across", len(table), "genes",
"and", len(samples), "samples", file=sys.stderr)
# Fraction of global mutations in each category of interest
tot_frac_probdam = tot_count_probdam / tot_count_all
tot_frac_syn = tot_count_syn / tot_count_all
tot_frac_trunc = tot_count_trunc / tot_count_all
# Global nonsynonymous:synonymous ratio = (1-syn)/syn (= 2.13697)
tot_ns_s_ratio = (1 - tot_frac_syn) / tot_frac_syn
# Calculate each gene's mutation score ------------------------------------
for _idx, row in table.iterrows():
gene_count_all = sum([row[col] for col in mut_all])
if not gene_count_all:
# Gene is not mutated at all --> zero score
yield (row['Gene'], 0.0)
continue
# Initial score is the sum the 'Normalized' values across all samples
raw_score = sum(row[sid] for sid in samples)
# Adjust for NS:S ratio
gene_count_syn = row[mut_syn]
syn_factor = max(1 - tot_ns_s_ratio * gene_count_syn / gene_count_all,
0)
new_score = raw_score * syn_factor
# Adjust for "probably damaging" missense and truncating mutations
gene_frac_probdam = row[mut_probdam] / gene_count_all
probdam_factor = 1 + gene_frac_probdam - tot_frac_probdam
gene_frac_trunc = sum([row[col] for col in mut_trunc]) / gene_count_all
trunc_factor = gene_frac_trunc / tot_frac_trunc
final_score = new_score * probdam_factor * trunc_factor
yield (row['Gene | group_data_by_gs | identifier_name |
helpers.py | return name1
return name
def get_field_attr(name):
"""leidt veldnaam, type en lengte af uit de definities in models.py
"""
# de variant met een repeating group (entiteit, dataitem) levert hier nog een probleem op.
# is dat omdat er twee entiteiten in 1 scherm staan?
fields = []
opts = my.rectypes[name]._meta
for x in opts.get_fields(): # fields:
fldname = x.name
fldtype = x.get_internal_type()
if fldname == 'id' or fldtype in ('ForeignKey', 'ManyToManyField'):
# if fldname == 'id' or any((x.many2one, x.many2many, x.one2many))
continue
try:
length = x.max_length
except AttributeError:
length = -1
fields.append((fldname, fldtype[:-5], length))
return fields
def get_relation_fields(name):
"""deze functie is van de vorige afgesplitst (afwijkend pad als tweede argument alles = True)
enig gemeenschappelijke is loopen over get_fields
deze werd bovendien nergens gebruikt
"""
fields = []
opts = my.rectypes[name]._meta
for rel in opts.get_fields():
# print(rel, rel.one_to_many or rel.many_to_many)
if rel.one_to_many or rel.many_to_many:
try:
fields.append((rel.name, rel.get_internal_type(), rel.max_length))
except AttributeError:
fields.append((rel.name, rel.get_internal_type(), -1))
return fields
def get_new_numberkey_for_soort(owner_proj, soort):
"""generate new id for certain document types
"""
if soort == 'userwijz':
sel = owner_proj.rfcs
elif soort == 'userprob':
sel = owner_proj.probs
elif soort == 'bevinding':
sel = owner_proj.tbev
else:
return ''
ny = str(datetime.date.today().year)
h = ''
try:
last_id = sel.latest("datum_in").nummer
except ObjectDoesNotExist:
pass
else:
yr, nr = last_id.split('-')
if yr == ny:
h = '-'.join((yr, '%04i' % (int(nr) + 1)))
if h == '':
h = '-'.join((ny, '0001'))
return h
def get_stats_texts(proj, action_type):
"""get certain texts for certain document types (also registered in actiereg)
"""
first = _("(nog) geen")
if action_type == 'userwijz':
all_objects = my.Userwijz.objects.filter(project=proj)
second = _('ingediend')
hlp = _("gerealiseerd"), _('in behandeling via')
elif action_type == 'probleem':
all_objects = my.Userprob.objects.filter(project=proj)
second = _("gemeld")
hlp = _('opgelost'), _('doorgekoppeld naar')
elif action_type == 'bevinding':
all_objects = my.Bevinding.objects.filter(project=proj)
second = _("opgevoerd")
hlp = _('opgelost'), _('doorgekoppeld naar')
else:
return '', ''
solved = all_objects.filter(gereed=True).count()
working = all_objects.filter(gereed=False).filter(actie__isnull=False).count()
if all_objects.count() != 0:
first = all_objects.count()
second = str(_("waarvan {} {} en {} {} Actiereg").format(solved, hlp[0], working, hlp[1]))
return first, second
def get_names_for_type(typename):
"get verbose names from model definition"
return (my.rectypes[typename]._meta.verbose_name,
my.rectypes[typename]._meta.verbose_name_plural,
my.rectypes[typename].section)
def get_projectlist():
"return list of all the projects"
return my.Project.objects.all().order_by('naam')
def get_ordered_objectlist(proj, soort):
"return ordered list of objects of the given type for the given project"
# if soort in my.rectypes: -- overbodige test volgens mij
# return None
# if proj:
lijst = my.rectypes[soort].objects.filter(project=proj)
# else:
# lijst = my.rectypes[soort].objects.select_related()
# ik denk dat het voorgaande nooit gewerkt heeft. Om te beginnen omdat het vanaf het begin af aan
# select.related heeft gestaan en dat heeft noit bestaan, dus ik denk dat je hier nooit komt met een
# leeg project (want dan ga je naar get_projectlist) - dus maar weghalen:w
# if soort in ('userwijz', 'userprob', 'bevinding'):
if 'naam' in [x[0] for x in get_field_attr(soort)]:
return lijst.order_by('naam')
return lijst.order_by('nummer') | def get_object(soort, id, new=False):
"return specified document object"
if soort not in my.rectypes:
raise Http404('Onbekend type `{}`'.format(soort))
if new:
o = my.rectypes[soort]()
else:
try:
o = my.rectypes[soort].objects.get(pk=id)
except ObjectDoesNotExist:
raise Http404(str(id).join((soort + ' ', _(' bestaat niet'))))
return o
def determine_adjacent(all_items, o):
"return keys for previous and next object"
prev = next = 0
nog_een = False
for x in all_items:
if nog_een:
next = x.id
nog_een = False
break
if x == o:
nog_een = True
else:
prev = x.id
return prev, next
def get_list_title_attrs(proj, soort, srt, id, rel):
"return title, name (single and plural) and section for object type"
soortnm_ev, soortnm_mv, sect = get_names_for_type(soort)
if srt:
srtnm_ev, srtnm_mv = get_names_for_type(srt)[:2]
if proj:
pr = my.Project.objects.get(pk=proj)
title = _(' bij project ').join((soortnm_mv.capitalize(), pr.naam))
else:
pr = None
title = _('Lijst ') + str(soortnm_mv)
if rel:
document = my.rectypes[srt].objects.get(pk=id)
if srt in ('userwijz', 'userprob', 'bevinding'):
docid = document.nummer
else:
docid = document.naam
itemoms = '{} "{}"'.format(srtnm_ev, docid)
relstr = str(_('{} relateren aan {}'))
if rel == 'from':
title = relstr.format(itemoms, soortnm_ev)
else:
title = relstr.format(soortnm_ev, itemoms)
if pr: # is dit niet dubbel? Ja zeker
title = "Project {0} - {1}".format(pr.naam, title)
return title, soortnm_ev, soortnm_mv, sect
def init_infodict_for_detail(proj, soort, edit, meld):
return {'start': '', 'soort': soort, 'prev': '', 'notnw': 'new', 'next': '', "sites": SITES,
'proj': '' if proj == 'proj' else proj, 'sect': '', 'meld': meld,
'projecten': get_projectlist(),
# 'edit': 'view' if edit else '',
# 'view': 'edit' if not edit else '',
'mode': 'edit' if edit else 'view',
'new': 'nieuw' if edit == 'new' else ''}
def get_update_url(proj, edit, soort='', id='', srt='', verw=''):
"return url to view that does the actual update"
if edit == 'new': # form action for new document
if soort:
ref = '{}/{}/'.format(srt, verw) if srt else ''
return "/{}/{}/mut/{}".format(proj, soort, ref)
return "/proj/mut/"
elif edit: # form action for existing
if soort:
return "/{}/{}/{}/mut/".format(proj, soort, id)
return "/{}/mut/".format(proj)
return ''
def get_fieldlengths(soort):
"return dictionary of maxlength per field"
return {x: z for x, y, z in get_field_attr(soort)}
def get_margins_for_type(typename):
"geeft voor een aantal soorten afwijkende marges terug"
left_margin = {"project": 140,
"userspec": 230,
"funcdoc": 1 | random_line_split |
|
helpers.py |
def remove_relation(o, soort, r, srt):
attr_name, multiple = get_relation(soort, srt)
if multiple:
o.__getattribute__(attr_name).remove(r)
else:
o.__setattr__(attr_name, None)
o.save()
def corr_naam(name):
"""convert name used in program to model name and back
Note: all names must be unique!
"""
names = (("techtaak", 'techtask'), ("programma", 'procproc'))
for name1, name2 in names:
if name == name1:
return name2
if name == name2:
return name1
return name
def get_field_attr(name):
"""leidt veldnaam, type en lengte af uit de definities in models.py
"""
# de variant met een repeating group (entiteit, dataitem) levert hier nog een probleem op.
# is dat omdat er twee entiteiten in 1 scherm staan?
fields = []
opts = my.rectypes[name]._meta
for x in opts.get_fields(): # fields:
fldname = x.name
fldtype = x.get_internal_type()
if fldname == 'id' or fldtype in ('ForeignKey', 'ManyToManyField'):
# if fldname == 'id' or any((x.many2one, x.many2many, x.one2many))
continue
try:
length = x.max_length
except AttributeError:
length = -1
fields.append((fldname, fldtype[:-5], length))
return fields
def get_relation_fields(name):
"""deze functie is van de vorige afgesplitst (afwijkend pad als tweede argument alles = True)
enig gemeenschappelijke is loopen over get_fields
deze werd bovendien nergens gebruikt
"""
fields = []
opts = my.rectypes[name]._meta
for rel in opts.get_fields():
# print(rel, rel.one_to_many or rel.many_to_many)
if rel.one_to_many or rel.many_to_many:
try:
fields.append((rel.name, rel.get_internal_type(), rel.max_length))
except AttributeError:
fields.append((rel.name, rel.get_internal_type(), -1))
return fields
def get_new_numberkey_for_soort(owner_proj, soort):
"""generate new id for certain document types
"""
if soort == 'userwijz':
sel = owner_proj.rfcs
elif soort == 'userprob':
sel = owner_proj.probs
elif soort == 'bevinding':
sel = owner_proj.tbev
else:
return ''
ny = str(datetime.date.today().year)
h = ''
try:
last_id = sel.latest("datum_in").nummer
except ObjectDoesNotExist:
pass
else:
yr, nr = last_id.split('-')
if yr == ny:
h = '-'.join((yr, '%04i' % (int(nr) + 1)))
if h == '':
h = '-'.join((ny, '0001'))
return h
def get_stats_texts(proj, action_type):
"""get certain texts for certain document types (also registered in actiereg)
"""
first = _("(nog) geen")
if action_type == 'userwijz':
all_objects = my.Userwijz.objects.filter(project=proj)
second = _('ingediend')
hlp = _("gerealiseerd"), _('in behandeling via')
elif action_type == 'probleem':
all_objects = my.Userprob.objects.filter(project=proj)
second = _("gemeld")
hlp = _('opgelost'), _('doorgekoppeld naar')
elif action_type == 'bevinding':
all_objects = my.Bevinding.objects.filter(project=proj)
second = _("opgevoerd")
hlp = _('opgelost'), _('doorgekoppeld naar')
else:
return '', ''
solved = all_objects.filter(gereed=True).count()
working = all_objects.filter(gereed=False).filter(actie__isnull=False).count()
if all_objects.count() != 0:
first = all_objects.count()
second = str(_("waarvan {} {} en {} {} Actiereg").format(solved, hlp[0], working, hlp[1]))
return first, second
def get_names_for_type(typename):
"get verbose names from model definition"
return (my.rectypes[typename]._meta.verbose_name,
my.rectypes[typename]._meta.verbose_name_plural,
my.rectypes[typename].section)
def get_projectlist():
"return list of all the projects"
return my.Project.objects.all().order_by('naam')
def get_ordered_objectlist(proj, soort):
"return ordered list of objects of the given type for the given project"
# if soort in my.rectypes: -- overbodige test volgens mij
# return None
# if proj:
lijst = my.rectypes[soort].objects.filter(project=proj)
# else:
# lijst = my.rectypes[soort].objects.select_related()
# ik denk dat het voorgaande nooit gewerkt heeft. Om te beginnen omdat het vanaf het begin af aan
# select.related heeft gestaan en dat heeft noit bestaan, dus ik denk dat je hier nooit komt met een
# leeg project (want dan ga je naar get_projectlist) - dus maar weghalen:w
# if soort in ('userwijz', 'userprob', 'bevinding'):
if 'naam' in [x[0] for x in get_field_attr(soort)]:
return lijst.order_by('naam')
return lijst.order_by('nummer')
def get_object(soort, id, new=False):
"return specified document object"
if soort not in my.rectypes:
raise Http404('Onbekend type `{}`'.format(soort))
if new:
o = my.rectypes[soort]()
else:
try:
o = my.rectypes[soort].objects.get(pk=id)
except ObjectDoesNotExist:
raise Http404(str(id).join((soort + ' ', _(' bestaat niet'))))
return o
def determine_adjacent(all_items, o):
"return keys for previous and next object"
prev = next = 0
nog_een = False
for x in all_items:
if nog_een:
next = x.id
nog_een = False
break
if x == o:
nog_een = True
else:
prev = x.id
return prev, next
def get_list_title_attrs(proj, soort, srt, id, rel):
"return title, name (single and plural) and section for object type"
soortnm_ev, soortnm_mv, sect = get_names_for_type(soort)
if srt:
srtnm_ev, srtnm_mv = get_names_for_type(srt)[:2]
if proj:
pr = my.Project.objects.get(pk=proj)
title = _(' bij project ').join((soortnm_mv.capitalize(), pr.naam))
else:
pr = None
title = _('Lijst ') + str(soortnm_mv)
if rel:
document = my.rectypes[srt].objects.get(pk=id)
if srt in ('userwijz', 'userprob', 'bevinding'):
docid = document.nummer
else:
docid = document.naam
itemoms = '{} "{}"'.format(srtnm_ev, docid)
relstr = str(_('{} relateren aan {}'))
if rel == 'from':
title = relstr.format(itemoms, soortnm_ev)
else:
title = relstr.format(soortnm_ev, itemoms)
if pr: # is dit niet dubbel? Ja zeker
title = "Project {0} - {1}".format(pr.naam, title)
return title, soortnm_ev, soortnm_mv, sect
def init_infodict_for_detail(proj, soort, edit, meld):
return {'start': '', 'soort': soort, 'prev': '', 'notnw': 'new', 'next': '', "sites": SITES,
'proj': '' if proj == 'proj' else proj, 'sect': '', 'meld': meld,
'projecten': get_projectlist(),
# 'edit': 'view' if edit else '',
# 'view': 'edit' if not edit else '',
'mode': 'edit' if edit else 'view',
'new': 'nieuw' if edit == 'new' else ''}
def get_update_url(proj, edit, soort='', id='', srt='', verw=''):
"return url to view that does the actual update"
if edit == 'new': # form action for new document
if so | attr_name, multiple = get_relation(soort, srt)
if multiple:
o.__getattribute__(attr_name).add(r)
else:
o.__setattr__(attr_name, r)
o.save() | identifier_body |
|
helpers.py | name1
return name
def get_field_attr(name):
"""leidt veldnaam, type en lengte af uit de definities in models.py
"""
# de variant met een repeating group (entiteit, dataitem) levert hier nog een probleem op.
# is dat omdat er twee entiteiten in 1 scherm staan?
fields = []
opts = my.rectypes[name]._meta
for x in opts.get_fields(): # fields:
fldname = x.name
fldtype = x.get_internal_type()
if fldname == 'id' or fldtype in ('ForeignKey', 'ManyToManyField'):
# if fldname == 'id' or any((x.many2one, x.many2many, x.one2many))
continue
try:
length = x.max_length
except AttributeError:
length = -1
fields.append((fldname, fldtype[:-5], length))
return fields
def get_relation_fields(name):
"""deze functie is van de vorige afgesplitst (afwijkend pad als tweede argument alles = True)
enig gemeenschappelijke is loopen over get_fields
deze werd bovendien nergens gebruikt
"""
fields = []
opts = my.rectypes[name]._meta
for rel in opts.get_fields():
# print(rel, rel.one_to_many or rel.many_to_many)
if rel.one_to_many or rel.many_to_many:
try:
fields.append((rel.name, rel.get_internal_type(), rel.max_length))
except AttributeError:
fields.append((rel.name, rel.get_internal_type(), -1))
return fields
def get_new_numberkey_for_soort(owner_proj, soort):
"""generate new id for certain document types
"""
if soort == 'userwijz':
sel = owner_proj.rfcs
elif soort == 'userprob':
sel = owner_proj.probs
elif soort == 'bevinding':
sel = owner_proj.tbev
else:
return ''
ny = str(datetime.date.today().year)
h = ''
try:
last_id = sel.latest("datum_in").nummer
except ObjectDoesNotExist:
pass
else:
yr, nr = last_id.split('-')
if yr == ny:
h = '-'.join((yr, '%04i' % (int(nr) + 1)))
if h == '':
h = '-'.join((ny, '0001'))
return h
def get_stats_texts(proj, action_type):
"""get certain texts for certain document types (also registered in actiereg)
"""
first = _("(nog) geen")
if action_type == 'userwijz':
all_objects = my.Userwijz.objects.filter(project=proj)
second = _('ingediend')
hlp = _("gerealiseerd"), _('in behandeling via')
elif action_type == 'probleem':
all_objects = my.Userprob.objects.filter(project=proj)
second = _("gemeld")
hlp = _('opgelost'), _('doorgekoppeld naar')
elif action_type == 'bevinding':
all_objects = my.Bevinding.objects.filter(project=proj)
second = _("opgevoerd")
hlp = _('opgelost'), _('doorgekoppeld naar')
else:
return '', ''
solved = all_objects.filter(gereed=True).count()
working = all_objects.filter(gereed=False).filter(actie__isnull=False).count()
if all_objects.count() != 0:
|
return first, second
def get_names_for_type(typename):
"get verbose names from model definition"
return (my.rectypes[typename]._meta.verbose_name,
my.rectypes[typename]._meta.verbose_name_plural,
my.rectypes[typename].section)
def get_projectlist():
"return list of all the projects"
return my.Project.objects.all().order_by('naam')
def get_ordered_objectlist(proj, soort):
"return ordered list of objects of the given type for the given project"
# if soort in my.rectypes: -- overbodige test volgens mij
# return None
# if proj:
lijst = my.rectypes[soort].objects.filter(project=proj)
# else:
# lijst = my.rectypes[soort].objects.select_related()
# ik denk dat het voorgaande nooit gewerkt heeft. Om te beginnen omdat het vanaf het begin af aan
# select.related heeft gestaan en dat heeft noit bestaan, dus ik denk dat je hier nooit komt met een
# leeg project (want dan ga je naar get_projectlist) - dus maar weghalen:w
# if soort in ('userwijz', 'userprob', 'bevinding'):
if 'naam' in [x[0] for x in get_field_attr(soort)]:
return lijst.order_by('naam')
return lijst.order_by('nummer')
def get_object(soort, id, new=False):
"return specified document object"
if soort not in my.rectypes:
raise Http404('Onbekend type `{}`'.format(soort))
if new:
o = my.rectypes[soort]()
else:
try:
o = my.rectypes[soort].objects.get(pk=id)
except ObjectDoesNotExist:
raise Http404(str(id).join((soort + ' ', _(' bestaat niet'))))
return o
def determine_adjacent(all_items, o):
"return keys for previous and next object"
prev = next = 0
nog_een = False
for x in all_items:
if nog_een:
next = x.id
nog_een = False
break
if x == o:
nog_een = True
else:
prev = x.id
return prev, next
def get_list_title_attrs(proj, soort, srt, id, rel):
"return title, name (single and plural) and section for object type"
soortnm_ev, soortnm_mv, sect = get_names_for_type(soort)
if srt:
srtnm_ev, srtnm_mv = get_names_for_type(srt)[:2]
if proj:
pr = my.Project.objects.get(pk=proj)
title = _(' bij project ').join((soortnm_mv.capitalize(), pr.naam))
else:
pr = None
title = _('Lijst ') + str(soortnm_mv)
if rel:
document = my.rectypes[srt].objects.get(pk=id)
if srt in ('userwijz', 'userprob', 'bevinding'):
docid = document.nummer
else:
docid = document.naam
itemoms = '{} "{}"'.format(srtnm_ev, docid)
relstr = str(_('{} relateren aan {}'))
if rel == 'from':
title = relstr.format(itemoms, soortnm_ev)
else:
title = relstr.format(soortnm_ev, itemoms)
if pr: # is dit niet dubbel? Ja zeker
title = "Project {0} - {1}".format(pr.naam, title)
return title, soortnm_ev, soortnm_mv, sect
def init_infodict_for_detail(proj, soort, edit, meld):
return {'start': '', 'soort': soort, 'prev': '', 'notnw': 'new', 'next': '', "sites": SITES,
'proj': '' if proj == 'proj' else proj, 'sect': '', 'meld': meld,
'projecten': get_projectlist(),
# 'edit': 'view' if edit else '',
# 'view': 'edit' if not edit else '',
'mode': 'edit' if edit else 'view',
'new': 'nieuw' if edit == 'new' else ''}
def get_update_url(proj, edit, soort='', id='', srt='', verw=''):
"return url to view that does the actual update"
if edit == 'new': # form action for new document
if soort:
ref = '{}/{}/'.format(srt, verw) if srt else ''
return "/{}/{}/mut/{}".format(proj, soort, ref)
return "/proj/mut/"
elif edit: # form action for existing
if soort:
return "/{}/{}/{}/mut/".format(proj, soort, id)
return "/{}/mut/".format(proj)
return ''
def get_fieldlengths(soort):
"return dictionary of maxlength per field"
return {x: z for x, y, z in get_field_attr(soort)}
def get_margins_for_type(typename):
"geeft voor een aantal soorten afwijkende marges terug"
left_margin = {"project": 140,
"userspec": 230,
"funcdoc": 16 | first = all_objects.count()
second = str(_("waarvan {} {} en {} {} Actiereg").format(solved, hlp[0], working, hlp[1])) | conditional_block |
helpers.py | enig gemeenschappelijke is loopen over get_fields
deze werd bovendien nergens gebruikt
"""
fields = []
opts = my.rectypes[name]._meta
for rel in opts.get_fields():
# print(rel, rel.one_to_many or rel.many_to_many)
if rel.one_to_many or rel.many_to_many:
try:
fields.append((rel.name, rel.get_internal_type(), rel.max_length))
except AttributeError:
fields.append((rel.name, rel.get_internal_type(), -1))
return fields
def get_new_numberkey_for_soort(owner_proj, soort):
"""generate new id for certain document types
"""
if soort == 'userwijz':
sel = owner_proj.rfcs
elif soort == 'userprob':
sel = owner_proj.probs
elif soort == 'bevinding':
sel = owner_proj.tbev
else:
return ''
ny = str(datetime.date.today().year)
h = ''
try:
last_id = sel.latest("datum_in").nummer
except ObjectDoesNotExist:
pass
else:
yr, nr = last_id.split('-')
if yr == ny:
h = '-'.join((yr, '%04i' % (int(nr) + 1)))
if h == '':
h = '-'.join((ny, '0001'))
return h
def get_stats_texts(proj, action_type):
"""get certain texts for certain document types (also registered in actiereg)
"""
first = _("(nog) geen")
if action_type == 'userwijz':
all_objects = my.Userwijz.objects.filter(project=proj)
second = _('ingediend')
hlp = _("gerealiseerd"), _('in behandeling via')
elif action_type == 'probleem':
all_objects = my.Userprob.objects.filter(project=proj)
second = _("gemeld")
hlp = _('opgelost'), _('doorgekoppeld naar')
elif action_type == 'bevinding':
all_objects = my.Bevinding.objects.filter(project=proj)
second = _("opgevoerd")
hlp = _('opgelost'), _('doorgekoppeld naar')
else:
return '', ''
solved = all_objects.filter(gereed=True).count()
working = all_objects.filter(gereed=False).filter(actie__isnull=False).count()
if all_objects.count() != 0:
first = all_objects.count()
second = str(_("waarvan {} {} en {} {} Actiereg").format(solved, hlp[0], working, hlp[1]))
return first, second
def get_names_for_type(typename):
"get verbose names from model definition"
return (my.rectypes[typename]._meta.verbose_name,
my.rectypes[typename]._meta.verbose_name_plural,
my.rectypes[typename].section)
def get_projectlist():
"return list of all the projects"
return my.Project.objects.all().order_by('naam')
def get_ordered_objectlist(proj, soort):
"return ordered list of objects of the given type for the given project"
# if soort in my.rectypes: -- overbodige test volgens mij
# return None
# if proj:
lijst = my.rectypes[soort].objects.filter(project=proj)
# else:
# lijst = my.rectypes[soort].objects.select_related()
# ik denk dat het voorgaande nooit gewerkt heeft. Om te beginnen omdat het vanaf het begin af aan
# select.related heeft gestaan en dat heeft noit bestaan, dus ik denk dat je hier nooit komt met een
# leeg project (want dan ga je naar get_projectlist) - dus maar weghalen:w
# if soort in ('userwijz', 'userprob', 'bevinding'):
if 'naam' in [x[0] for x in get_field_attr(soort)]:
return lijst.order_by('naam')
return lijst.order_by('nummer')
def get_object(soort, id, new=False):
"return specified document object"
if soort not in my.rectypes:
raise Http404('Onbekend type `{}`'.format(soort))
if new:
o = my.rectypes[soort]()
else:
try:
o = my.rectypes[soort].objects.get(pk=id)
except ObjectDoesNotExist:
raise Http404(str(id).join((soort + ' ', _(' bestaat niet'))))
return o
def determine_adjacent(all_items, o):
"return keys for previous and next object"
prev = next = 0
nog_een = False
for x in all_items:
if nog_een:
next = x.id
nog_een = False
break
if x == o:
nog_een = True
else:
prev = x.id
return prev, next
def get_list_title_attrs(proj, soort, srt, id, rel):
"return title, name (single and plural) and section for object type"
soortnm_ev, soortnm_mv, sect = get_names_for_type(soort)
if srt:
srtnm_ev, srtnm_mv = get_names_for_type(srt)[:2]
if proj:
pr = my.Project.objects.get(pk=proj)
title = _(' bij project ').join((soortnm_mv.capitalize(), pr.naam))
else:
pr = None
title = _('Lijst ') + str(soortnm_mv)
if rel:
document = my.rectypes[srt].objects.get(pk=id)
if srt in ('userwijz', 'userprob', 'bevinding'):
docid = document.nummer
else:
docid = document.naam
itemoms = '{} "{}"'.format(srtnm_ev, docid)
relstr = str(_('{} relateren aan {}'))
if rel == 'from':
title = relstr.format(itemoms, soortnm_ev)
else:
title = relstr.format(soortnm_ev, itemoms)
if pr: # is dit niet dubbel? Ja zeker
title = "Project {0} - {1}".format(pr.naam, title)
return title, soortnm_ev, soortnm_mv, sect
def init_infodict_for_detail(proj, soort, edit, meld):
return {'start': '', 'soort': soort, 'prev': '', 'notnw': 'new', 'next': '', "sites": SITES,
'proj': '' if proj == 'proj' else proj, 'sect': '', 'meld': meld,
'projecten': get_projectlist(),
# 'edit': 'view' if edit else '',
# 'view': 'edit' if not edit else '',
'mode': 'edit' if edit else 'view',
'new': 'nieuw' if edit == 'new' else ''}
def get_update_url(proj, edit, soort='', id='', srt='', verw=''):
"return url to view that does the actual update"
if edit == 'new': # form action for new document
if soort:
ref = '{}/{}/'.format(srt, verw) if srt else ''
return "/{}/{}/mut/{}".format(proj, soort, ref)
return "/proj/mut/"
elif edit: # form action for existing
if soort:
return "/{}/{}/{}/mut/".format(proj, soort, id)
return "/{}/mut/".format(proj)
return ''
def get_fieldlengths(soort):
"return dictionary of maxlength per field"
return {x: z for x, y, z in get_field_attr(soort)}
def get_margins_for_type(typename):
"geeft voor een aantal soorten afwijkende marges terug"
left_margin = {"project": 140,
"userspec": 230,
"funcdoc": 160,
"gebrtaak": 240,
"funcproc": 160,
"entiteit": 140,
"techtaak": 200,
"techproc": 140,
"testplan": 140,
"bevinding": 140} .get(typename, 120)
leftw = "{0}px".format(left_margin)
rightw = "{0}px".format(910 - left_margin)
rightm = "{0}px".format(left_margin + 5)
return leftw, rightw, rightm
def get_detail_title(soort, edit, obj):
"""geeft titel zonder "DocTool!" terug"""
naam_ev = get_names_for_type(soort)[0]
if edit == 'new':
return _('Nieuw(e) ') + str(naam_ev)
try:
title = " ".join((naam_ev.capitalize(), obj.naam))
except AttributeError:
title = " ".join((naam_ev.capitalize(), obj.nummer))
return title
def | get_relation_buttons | identifier_name |
|
js_lua_state.rs |
// close. Is there a more explicit way to close event listeners, or is relying on
// the GC a normal/reasonable approach?
let lua = unsafe { Lua::unsafe_new_with(self.libraries) };
self.lua = Arc::new(lua)
}
}
impl Default for LuaState {
fn default() -> Self |
}
fn flag_into_std_lib(flag: u32) -> Option<StdLib> {
const ALL_SAFE: u32 = u32::MAX - 1;
match flag {
#[cfg(any(feature = "lua54", feature = "lua53", feature = "lua52"))]
0x1 => Some(StdLib::COROUTINE),
0x2 => Some(StdLib::TABLE),
0x4 => Some(StdLib::IO),
0x8 => Some(StdLib::OS),
0x10 => Some(StdLib::STRING),
#[cfg(any(feature = "lua54", feature = "lua53"))]
0x20 => Some(StdLib::UTF8),
#[cfg(any(feature = "lua52", feature = "luajit"))]
0x40 => Some(StdLib::BIT),
0x80 => Some(StdLib::MATH),
0x100 => Some(StdLib::PACKAGE),
#[cfg(any(feature = "luajit"))]
0x200 => Some(StdLib::JIT),
#[cfg(any(feature = "luajit"))]
0x4000_0000 => Some(StdLib::FFI),
0x8000_0000 => Some(StdLib::DEBUG),
u32::MAX => Some(StdLib::ALL),
ALL_SAFE => Some(StdLib::ALL_SAFE),
_ => None,
}
}
/// These correspond to our JS Enum. Used for a clearer error notification when including them in
/// incompatible versions.
fn flag_to_string(flag: u32) -> String {
const ALL_SAFE: u32 = u32::MAX - 1;
match flag {
0x1 => String::from("Coroutine"),
0x2 => String::from("Table"),
0x4 => String::from("Io"),
0x8 => String::from("Os"),
0x10 => String::from("String"),
0x20 => String::from("Utf8"),
0x40 => String::from("Bit"),
0x80 => String::from("Math"),
0x100 => String::from("Package"),
0x200 => String::from("Jit"),
0x4000_0000 => String::from("Ffi"),
0x8000_0000 => String::from("Debug"),
u32::MAX => String::from("All"),
ALL_SAFE => String::from("AllSafe"),
_ => flag.to_string(),
}
}
fn build_libraries_option(
mut cx: CallContext<JsUndefined>,
libs: Handle<JsValue>,
) -> NeonResult<StdLib> {
if libs.is_a::<JsArray>() {
let libflags: Vec<Handle<JsValue>> = libs
.downcast_or_throw::<JsArray, CallContext<JsUndefined>>(&mut cx)?
.to_vec(&mut cx)?;
// Hack to get a StdLib(0)
let mut libset = StdLib::TABLE ^ StdLib::TABLE;
for value in libflags.into_iter() {
let flag = value
.downcast_or_throw::<JsNumber, CallContext<JsUndefined>>(&mut cx)?
.value() as u32;
if let Some(lib) = flag_into_std_lib(flag) {
libset |= lib;
} else {
return cx.throw_error(format!(
"unrecognized Library flag \"{}\" for {}",
flag_to_string(flag),
lua_version()
));
}
}
Ok(libset)
} else if libs.is_a::<JsUndefined>() {
Ok(StdLib::ALL_SAFE)
} else {
cx.throw_error("Expected 'libraries' to be an an array")
}
}
fn init(mut cx: CallContext<JsUndefined>) -> NeonResult<LuaState> {
let opt_options = cx.argument_opt(0);
if let None = opt_options {
return Ok(LuaState::default());
};
let options: Handle<JsObject> = opt_options.unwrap().downcast_or_throw(&mut cx)?;
let libraries_key = cx.string("libraries");
let libs = options.get(&mut cx, libraries_key)?;
let libraries = build_libraries_option(cx, libs)?;
// Because we're allowing the end user to dynamically choose their libraries,
// we're using the unsafe call in case they include `debug`. We need to notify
// the end user in the documentation about the caveats of `debug`.
let lua = unsafe {
let lua = Lua::unsafe_new_with(libraries);
Arc::new(lua)
};
Ok(LuaState { lua, libraries })
}
fn do_string_sync(
mut cx: MethodContext<JsLuaState>,
code: String,
name: Option<String>,
) -> JsResult<JsValue> {
let this = cx.this();
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
match lua_execution::do_string_sync(lua, code, name) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn do_file_sync(
mut cx: MethodContext<JsLuaState>,
filename: String,
chunk_name: Option<String>,
) -> JsResult<JsValue> {
match fs::read_to_string(filename) {
Ok(contents) => do_string_sync(cx, contents, chunk_name),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn call_chunk<'a>(
mut cx: MethodContext<'a, JsLuaState>,
code: String,
chunk_name: Option<String>,
js_args: Handle<'a, JsArray>,
) -> JsResult<'a, JsValue> {
let this = cx.this();
let mut args: Vec<Value> = vec![];
let js_args = js_args.to_vec(&mut cx)?;
for arg in js_args.iter() {
let value = Value::from_js(*arg, &mut cx)?;
args.push(value);
}
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
match lua_execution::call_chunk(&lua, code, chunk_name, args) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn register_function<'a>(
mut cx: MethodContext<'a, JsLuaState>,
name: String,
cb: Handle<JsFunction>,
) -> JsResult<'a, JsValue> {
let this = cx.this();
let handler = EventHandler::new(&cx, this, cb);
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
let callback = move |values: Vec<Value>| {
let handler = handler.clone();
thread::spawn(move || {
handler.schedule_with(move |event_ctx, this, callback| {
let arr = JsArray::new(event_ctx, values.len() as u32);
// TODO remove unwraps, handle errors, and pass to callback if needed.
for (i, value) in values.into_iter().enumerate() {
let js_val = value.to_js(event_ctx).unwrap();
arr.set(event_ctx, i as u32, js_val).unwrap();
}
// TODO How to pass an error via on('error') vs the current setup?
let args: Vec<Handle<JsValue>> = vec![arr.upcast()];
let _result = callback.call(event_ctx, this, args);
});
});
};
match lua_execution::register_function(lua, name, callback) {
Ok(_) => Ok(cx.undefined().upcast()),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn set_global<'a>(
mut cx: MethodContext<'a, JsLuaState>,
name: String,
handle: Handle<'a, JsValue>,
) -> JsResult<'a, JsValue> {
let this: Handle<JsLuaState> = cx.this();
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
let set_value = Value::from_js(handle, &mut cx)?;
match lua_execution::set_global(lua, name, set_value) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn get_global(mut cx: MethodContext<JsLuaState>, name | {
LuaState {
libraries: StdLib::ALL_SAFE,
lua: Arc::new(Lua::new_with(StdLib::ALL_SAFE).unwrap()),
}
} | identifier_body |
js_lua_state.rs |
// close. Is there a more explicit way to close event listeners, or is relying on
// the GC a normal/reasonable approach?
let lua = unsafe { Lua::unsafe_new_with(self.libraries) };
self.lua = Arc::new(lua)
}
}
impl Default for LuaState {
fn default() -> Self {
LuaState {
libraries: StdLib::ALL_SAFE,
lua: Arc::new(Lua::new_with(StdLib::ALL_SAFE).unwrap()),
}
}
}
fn flag_into_std_lib(flag: u32) -> Option<StdLib> {
const ALL_SAFE: u32 = u32::MAX - 1;
match flag {
#[cfg(any(feature = "lua54", feature = "lua53", feature = "lua52"))]
0x1 => Some(StdLib::COROUTINE),
0x2 => Some(StdLib::TABLE),
0x4 => Some(StdLib::IO),
0x8 => Some(StdLib::OS),
0x10 => Some(StdLib::STRING),
#[cfg(any(feature = "lua54", feature = "lua53"))]
0x20 => Some(StdLib::UTF8),
#[cfg(any(feature = "lua52", feature = "luajit"))]
0x40 => Some(StdLib::BIT),
0x80 => Some(StdLib::MATH),
0x100 => Some(StdLib::PACKAGE),
#[cfg(any(feature = "luajit"))]
0x200 => Some(StdLib::JIT),
#[cfg(any(feature = "luajit"))]
0x4000_0000 => Some(StdLib::FFI),
0x8000_0000 => Some(StdLib::DEBUG),
u32::MAX => Some(StdLib::ALL),
ALL_SAFE => Some(StdLib::ALL_SAFE),
_ => None,
}
}
/// These correspond to our JS Enum. Used for a clearer error notification when including them in
/// incompatible versions.
fn flag_to_string(flag: u32) -> String {
const ALL_SAFE: u32 = u32::MAX - 1;
match flag {
0x1 => String::from("Coroutine"),
0x2 => String::from("Table"),
0x4 => String::from("Io"),
0x8 => String::from("Os"),
0x10 => String::from("String"),
0x20 => String::from("Utf8"),
0x40 => String::from("Bit"),
0x80 => String::from("Math"),
0x100 => String::from("Package"),
0x200 => String::from("Jit"),
0x4000_0000 => String::from("Ffi"),
0x8000_0000 => String::from("Debug"),
u32::MAX => String::from("All"),
ALL_SAFE => String::from("AllSafe"),
_ => flag.to_string(),
}
}
fn build_libraries_option(
mut cx: CallContext<JsUndefined>,
libs: Handle<JsValue>,
) -> NeonResult<StdLib> {
if libs.is_a::<JsArray>() {
let libflags: Vec<Handle<JsValue>> = libs
.downcast_or_throw::<JsArray, CallContext<JsUndefined>>(&mut cx)?
.to_vec(&mut cx)?;
// Hack to get a StdLib(0)
let mut libset = StdLib::TABLE ^ StdLib::TABLE;
for value in libflags.into_iter() {
let flag = value
.downcast_or_throw::<JsNumber, CallContext<JsUndefined>>(&mut cx)?
.value() as u32;
if let Some(lib) = flag_into_std_lib(flag) {
libset |= lib;
} else {
return cx.throw_error(format!(
"unrecognized Library flag \"{}\" for {}",
flag_to_string(flag),
lua_version()
));
}
}
Ok(libset)
} else if libs.is_a::<JsUndefined>() {
Ok(StdLib::ALL_SAFE)
} else {
cx.throw_error("Expected 'libraries' to be an an array")
}
}
fn init(mut cx: CallContext<JsUndefined>) -> NeonResult<LuaState> {
let opt_options = cx.argument_opt(0);
if let None = opt_options {
return Ok(LuaState::default());
};
let options: Handle<JsObject> = opt_options.unwrap().downcast_or_throw(&mut cx)?;
let libraries_key = cx.string("libraries"); | let libraries = build_libraries_option(cx, libs)?;
// Because we're allowing the end user to dynamically choose their libraries,
// we're using the unsafe call in case they include `debug`. We need to notify
// the end user in the documentation about the caveats of `debug`.
let lua = unsafe {
let lua = Lua::unsafe_new_with(libraries);
Arc::new(lua)
};
Ok(LuaState { lua, libraries })
}
fn do_string_sync(
mut cx: MethodContext<JsLuaState>,
code: String,
name: Option<String>,
) -> JsResult<JsValue> {
let this = cx.this();
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
match lua_execution::do_string_sync(lua, code, name) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn do_file_sync(
mut cx: MethodContext<JsLuaState>,
filename: String,
chunk_name: Option<String>,
) -> JsResult<JsValue> {
match fs::read_to_string(filename) {
Ok(contents) => do_string_sync(cx, contents, chunk_name),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn call_chunk<'a>(
mut cx: MethodContext<'a, JsLuaState>,
code: String,
chunk_name: Option<String>,
js_args: Handle<'a, JsArray>,
) -> JsResult<'a, JsValue> {
let this = cx.this();
let mut args: Vec<Value> = vec![];
let js_args = js_args.to_vec(&mut cx)?;
for arg in js_args.iter() {
let value = Value::from_js(*arg, &mut cx)?;
args.push(value);
}
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
match lua_execution::call_chunk(&lua, code, chunk_name, args) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn register_function<'a>(
mut cx: MethodContext<'a, JsLuaState>,
name: String,
cb: Handle<JsFunction>,
) -> JsResult<'a, JsValue> {
let this = cx.this();
let handler = EventHandler::new(&cx, this, cb);
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
let callback = move |values: Vec<Value>| {
let handler = handler.clone();
thread::spawn(move || {
handler.schedule_with(move |event_ctx, this, callback| {
let arr = JsArray::new(event_ctx, values.len() as u32);
// TODO remove unwraps, handle errors, and pass to callback if needed.
for (i, value) in values.into_iter().enumerate() {
let js_val = value.to_js(event_ctx).unwrap();
arr.set(event_ctx, i as u32, js_val).unwrap();
}
// TODO How to pass an error via on('error') vs the current setup?
let args: Vec<Handle<JsValue>> = vec![arr.upcast()];
let _result = callback.call(event_ctx, this, args);
});
});
};
match lua_execution::register_function(lua, name, callback) {
Ok(_) => Ok(cx.undefined().upcast()),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn set_global<'a>(
mut cx: MethodContext<'a, JsLuaState>,
name: String,
handle: Handle<'a, JsValue>,
) -> JsResult<'a, JsValue> {
let this: Handle<JsLuaState> = cx.this();
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
let set_value = Value::from_js(handle, &mut cx)?;
match lua_execution::set_global(lua, name, set_value) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn get_global(mut cx: MethodContext<JsLuaState>, name: | let libs = options.get(&mut cx, libraries_key)?; | random_line_split |
js_lua_state.rs |
// close. Is there a more explicit way to close event listeners, or is relying on
// the GC a normal/reasonable approach?
let lua = unsafe { Lua::unsafe_new_with(self.libraries) };
self.lua = Arc::new(lua)
}
}
impl Default for LuaState {
fn default() -> Self {
LuaState {
libraries: StdLib::ALL_SAFE,
lua: Arc::new(Lua::new_with(StdLib::ALL_SAFE).unwrap()),
}
}
}
fn flag_into_std_lib(flag: u32) -> Option<StdLib> {
const ALL_SAFE: u32 = u32::MAX - 1;
match flag {
#[cfg(any(feature = "lua54", feature = "lua53", feature = "lua52"))]
0x1 => Some(StdLib::COROUTINE),
0x2 => Some(StdLib::TABLE),
0x4 => Some(StdLib::IO),
0x8 => Some(StdLib::OS),
0x10 => Some(StdLib::STRING),
#[cfg(any(feature = "lua54", feature = "lua53"))]
0x20 => Some(StdLib::UTF8),
#[cfg(any(feature = "lua52", feature = "luajit"))]
0x40 => Some(StdLib::BIT),
0x80 => Some(StdLib::MATH),
0x100 => Some(StdLib::PACKAGE),
#[cfg(any(feature = "luajit"))]
0x200 => Some(StdLib::JIT),
#[cfg(any(feature = "luajit"))]
0x4000_0000 => Some(StdLib::FFI),
0x8000_0000 => Some(StdLib::DEBUG),
u32::MAX => Some(StdLib::ALL),
ALL_SAFE => Some(StdLib::ALL_SAFE),
_ => None,
}
}
/// These correspond to our JS Enum. Used for a clearer error notification when including them in
/// incompatible versions.
fn flag_to_string(flag: u32) -> String {
const ALL_SAFE: u32 = u32::MAX - 1;
match flag {
0x1 => String::from("Coroutine"),
0x2 => String::from("Table"),
0x4 => String::from("Io"),
0x8 => String::from("Os"),
0x10 => String::from("String"),
0x20 => String::from("Utf8"),
0x40 => String::from("Bit"),
0x80 => String::from("Math"),
0x100 => String::from("Package"),
0x200 => String::from("Jit"),
0x4000_0000 => String::from("Ffi"),
0x8000_0000 => String::from("Debug"),
u32::MAX => String::from("All"),
ALL_SAFE => String::from("AllSafe"),
_ => flag.to_string(),
}
}
fn build_libraries_option(
mut cx: CallContext<JsUndefined>,
libs: Handle<JsValue>,
) -> NeonResult<StdLib> {
if libs.is_a::<JsArray>() {
let libflags: Vec<Handle<JsValue>> = libs
.downcast_or_throw::<JsArray, CallContext<JsUndefined>>(&mut cx)?
.to_vec(&mut cx)?;
// Hack to get a StdLib(0)
let mut libset = StdLib::TABLE ^ StdLib::TABLE;
for value in libflags.into_iter() {
let flag = value
.downcast_or_throw::<JsNumber, CallContext<JsUndefined>>(&mut cx)?
.value() as u32;
if let Some(lib) = flag_into_std_lib(flag) {
libset |= lib;
} else {
return cx.throw_error(format!(
"unrecognized Library flag \"{}\" for {}",
flag_to_string(flag),
lua_version()
));
}
}
Ok(libset)
} else if libs.is_a::<JsUndefined>() {
Ok(StdLib::ALL_SAFE)
} else {
cx.throw_error("Expected 'libraries' to be an an array")
}
}
fn init(mut cx: CallContext<JsUndefined>) -> NeonResult<LuaState> {
let opt_options = cx.argument_opt(0);
if let None = opt_options {
return Ok(LuaState::default());
};
let options: Handle<JsObject> = opt_options.unwrap().downcast_or_throw(&mut cx)?;
let libraries_key = cx.string("libraries");
let libs = options.get(&mut cx, libraries_key)?;
let libraries = build_libraries_option(cx, libs)?;
// Because we're allowing the end user to dynamically choose their libraries,
// we're using the unsafe call in case they include `debug`. We need to notify
// the end user in the documentation about the caveats of `debug`.
let lua = unsafe {
let lua = Lua::unsafe_new_with(libraries);
Arc::new(lua)
};
Ok(LuaState { lua, libraries })
}
fn | (
mut cx: MethodContext<JsLuaState>,
code: String,
name: Option<String>,
) -> JsResult<JsValue> {
let this = cx.this();
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
match lua_execution::do_string_sync(lua, code, name) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn do_file_sync(
mut cx: MethodContext<JsLuaState>,
filename: String,
chunk_name: Option<String>,
) -> JsResult<JsValue> {
match fs::read_to_string(filename) {
Ok(contents) => do_string_sync(cx, contents, chunk_name),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn call_chunk<'a>(
mut cx: MethodContext<'a, JsLuaState>,
code: String,
chunk_name: Option<String>,
js_args: Handle<'a, JsArray>,
) -> JsResult<'a, JsValue> {
let this = cx.this();
let mut args: Vec<Value> = vec![];
let js_args = js_args.to_vec(&mut cx)?;
for arg in js_args.iter() {
let value = Value::from_js(*arg, &mut cx)?;
args.push(value);
}
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
match lua_execution::call_chunk(&lua, code, chunk_name, args) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn register_function<'a>(
mut cx: MethodContext<'a, JsLuaState>,
name: String,
cb: Handle<JsFunction>,
) -> JsResult<'a, JsValue> {
let this = cx.this();
let handler = EventHandler::new(&cx, this, cb);
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
let callback = move |values: Vec<Value>| {
let handler = handler.clone();
thread::spawn(move || {
handler.schedule_with(move |event_ctx, this, callback| {
let arr = JsArray::new(event_ctx, values.len() as u32);
// TODO remove unwraps, handle errors, and pass to callback if needed.
for (i, value) in values.into_iter().enumerate() {
let js_val = value.to_js(event_ctx).unwrap();
arr.set(event_ctx, i as u32, js_val).unwrap();
}
// TODO How to pass an error via on('error') vs the current setup?
let args: Vec<Handle<JsValue>> = vec![arr.upcast()];
let _result = callback.call(event_ctx, this, args);
});
});
};
match lua_execution::register_function(lua, name, callback) {
Ok(_) => Ok(cx.undefined().upcast()),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn set_global<'a>(
mut cx: MethodContext<'a, JsLuaState>,
name: String,
handle: Handle<'a, JsValue>,
) -> JsResult<'a, JsValue> {
let this: Handle<JsLuaState> = cx.this();
let lua: &Lua = {
let guard = cx.lock();
let state = this.borrow(&guard);
&state.lua.clone()
};
let set_value = Value::from_js(handle, &mut cx)?;
match lua_execution::set_global(lua, name, set_value) {
Ok(v) => v.to_js(&mut cx),
Err(e) => cx.throw_error(e.to_string()),
}
}
fn get_global(mut cx: MethodContext<JsLuaState>, name: | do_string_sync | identifier_name |
controllerserver.go | (ctx context.Context, req *csi.CreateVolumeRequest) (*csi.CreateVolumeResponse, error) {
if err := cs.validateControllerServiceRequest(csi.ControllerServiceCapability_RPC_CREATE_DELETE_VOLUME); err != nil {
glog.V(3).Infof("invalid create volume req: %v", req)
return nil, err
}
// Check arguments
if len(req.GetName()) == 0 {
return nil, status.Error(codes.InvalidArgument, "Name missing in request")
}
caps := req.GetVolumeCapabilities()
if caps == nil {
return nil, status.Error(codes.InvalidArgument, "Volume Capabilities missing in request")
}
// Keep a record of the requested access types.
var accessTypeMount, accessTypeBlock bool
for _, ca := range caps {
if ca.GetBlock() != nil {
accessTypeBlock = true
}
if ca.GetMount() != nil {
accessTypeMount = true
}
}
// A real driver would also need to check that the other
// fields in VolumeCapabilities are sane. The check above is
// just enough to pass the "[Testpattern: Dynamic PV (block
// volmode)] volumeMode should fail in binding dynamic
// provisioned PV to PVC" storage E2E test.
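	// As a hedged illustration only (not something this driver implements),
	// such a sanity check might reject access modes the backend cannot honor:
	//
	//	for _, ca := range caps {
	//		if am := ca.GetAccessMode(); am != nil &&
	//			am.GetMode() == csi.VolumeCapability_AccessMode_MULTI_NODE_MULTI_WRITER {
	//			return nil, status.Error(codes.InvalidArgument, "multi-node multi-writer access is not supported")
	//		}
	//	}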
if accessTypeBlock && accessTypeMount {
return nil, status.Error(codes.InvalidArgument, "cannot have both block and mount access type")
}
var requestedAccessType accessType
if accessTypeBlock {
requestedAccessType = blockAccess
} else {
// Default to mount.
requestedAccessType = mountAccess
}
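	// The chosen access type decides what createHostpathVolume builds below:
	// a plain directory for mount volumes, or a backing file for block volumes.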
	// Check that the requested capacity does not exceed the maximum allowed
capacity := int64(req.GetCapacityRange().GetRequiredBytes())
if capacity >= maxStorageCapacity {
return nil, status.Errorf(codes.OutOfRange, "Requested capacity %d exceeds maximum allowed %d", capacity, maxStorageCapacity)
}
	topologies := []*csi.Topology{
		{
			Segments: map[string]string{TopologyKeyNode: cs.nodeID},
		},
	}
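	// Hostpath volumes live on a single node's local filesystem, so the only
	// topology segment reported is this node; the scheduler uses it to place
	// workloads on the node that holds their volume.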
	// Need to check for an already existing volume with the same name; if one
	// is found, check the requested capacity against the already allocated
	// capacity (per the CSI spec, CreateVolume must be idempotent).
if exVol, err := getVolumeByName(req.GetName()); err == nil {
// Since err is nil, it means the volume with the same name already exists
// need to check if the size of existing volume is the same as in new
// request
if exVol.VolSize < capacity {
return nil, status.Errorf(codes.AlreadyExists, "Volume with the same name: %s but with different size already exist", req.GetName())
}
if req.GetVolumeContentSource() != nil {
volumeSource := req.VolumeContentSource
switch volumeSource.Type.(type) {
case *csi.VolumeContentSource_Snapshot:
if volumeSource.GetSnapshot() != nil && exVol.ParentSnapID != "" && exVol.ParentSnapID != volumeSource.GetSnapshot().GetSnapshotId() {
return nil, status.Error(codes.AlreadyExists, "existing volume source snapshot id not matching")
}
case *csi.VolumeContentSource_Volume:
if volumeSource.GetVolume() != nil && exVol.ParentVolID != volumeSource.GetVolume().GetVolumeId() {
return nil, status.Error(codes.AlreadyExists, "existing volume source volume id not matching")
}
default:
return nil, status.Errorf(codes.InvalidArgument, "%v not a proper volume source", volumeSource)
}
}
// TODO (sbezverk) Do I need to make sure that volume still exists?
return &csi.CreateVolumeResponse{
Volume: &csi.Volume{
VolumeId: exVol.VolID,
CapacityBytes: int64(exVol.VolSize),
VolumeContext: req.GetParameters(),
ContentSource: req.GetVolumeContentSource(),
AccessibleTopology: topologies,
},
}, nil
}
volumeID := uuid.New().String()
vol, err := createHostpathVolume(volumeID, req.GetName(), capacity, requestedAccessType, false /* ephemeral */)
if err != nil {
return nil, status.Errorf(codes.Internal, "failed to create volume %v: %v", volumeID, err)
}
glog.V(4).Infof("created volume %s at path %s", vol.VolID, vol.VolPath)
if req.GetVolumeContentSource() != nil {
path := getVolumePath(volumeID)
volumeSource := req.VolumeContentSource
switch volumeSource.Type.(type) {
case *csi.VolumeContentSource_Snapshot:
if snapshot := volumeSource.GetSnapshot(); snapshot != nil {
err = loadFromSnapshot(capacity, snapshot.GetSnapshotId(), path, requestedAccessType)
vol.ParentSnapID = snapshot.GetSnapshotId()
}
case *csi.VolumeContentSource_Volume:
if srcVolume := volumeSource.GetVolume(); srcVolume != nil {
err = loadFromVolume(capacity, srcVolume.GetVolumeId(), path, requestedAccessType)
vol.ParentVolID = srcVolume.GetVolumeId()
}
default:
err = status.Errorf(codes.InvalidArgument, "%v not a proper volume source", volumeSource)
}
if err != nil {
glog.V(4).Infof("VolumeSource error: %v", err)
if delErr := deleteHostpathVolume(volumeID); delErr != nil {
glog.V(2).Infof("deleting hostpath volume %v failed: %v", volumeID, delErr)
}
return nil, err
}
glog.V(4).Infof("successfully populated volume %s", vol.VolID)
}
return &csi.CreateVolumeResponse{
Volume: &csi.Volume{
VolumeId: volumeID,
CapacityBytes: req.GetCapacityRange().GetRequiredBytes(),
VolumeContext: req.GetParameters(),
ContentSource: req.GetVolumeContentSource(),
AccessibleTopology: topologies,
},
}, nil
}
func (cs controllerServer) DeleteVolume(ctx context.Context, req *csi.DeleteVolumeRequest) (*csi.DeleteVolumeResponse, error) {
// Check arguments
if len(req.GetVolumeId()) == 0 {
return nil, status.Error(codes.InvalidArgument, "Volume ID missing in request")
}
if err := cs.validateControllerServiceRequest(csi.ControllerServiceCapability_RPC_CREATE_DELETE_VOLUME); err != nil {
glog.V(3).Infof("invalid delete volume req: %v", req)
return nil, err
}
volId := req.GetVolumeId()
if err := deleteHostpathVolume(volId); err != nil {
return nil, status.Errorf(codes.Internal, "failed to delete volume %v: %v", volId, err)
}
glog.V(4).Infof("volume %v successfully deleted", volId)
return &csi.DeleteVolumeResponse{}, nil
}
func (cs controllerServer) ControllerPublishVolume(ctx context.Context, request *csi.ControllerPublishVolumeRequest) (*csi.ControllerPublishVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerUnpublishVolume(ctx context.Context, request *csi.ControllerUnpublishVolumeRequest) (*csi.ControllerUnpublishVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ValidateVolumeCapabilities(ctx context.Context, request *csi.ValidateVolumeCapabilitiesRequest) (*csi.ValidateVolumeCapabilitiesResponse, error) {
panic("implement me")
}
func (cs controllerServer) ListVolumes(ctx context.Context, req *csi.ListVolumesRequest) (*csi.ListVolumesResponse, error) {
volumeRes := &csi.ListVolumesResponse{
Entries: []*csi.ListVolumesResponse_Entry{},
}
var (
startIdx, volumesLength, maxLength int64
hpVolume hostPathVolume
)
volumeIds := getSortedVolumeIDs()
if req.StartingToken == "" {
req.StartingToken = "1"
}
startIdx, err := strconv.ParseInt(req.StartingToken, 10, 32)
if err != nil {
return nil, status.Error(codes.Aborted, "The type of startingToken should be integer")
}
volumesLength = int64(len(volumeIds))
maxLength = int64(req.MaxEntries)
if maxLength > volumesLength || maxLength <= 0 {
maxLength = volumesLength
}
for index := startIdx - 1; index < volumesLength && index < maxLength; index++ {
hpVolume = hostPathVolumes[volumeIds[index]]
healthy, msg := doHealthCheckInControllerSide(volumeIds[index])
glog.V(3).Infof("Healthy state: %s Volume: %t", hpVolume.VolName, healthy)
volumeRes.Entries = append(volumeRes.Entries, &csi.ListVolumesResponse_Entry{
Volume: &csi.Volume{
VolumeId: hpVolume.VolID,
CapacityBytes: hpVolume.VolSize,
},
Status: &csi.ListVolumesResponse_VolumeStatus{
PublishedNodeIds: []string{hpVolume.NodeID},
VolumeCondition: &csi.VolumeCondition{
Abnormal: !healthy,
Message: msg,
},
},
})
}
glog.V(5).Infof("Volumes are: %+v", *volumeRes)
return volumeRes, nil
}
| CreateVolume | identifier_name |
|
controllerserver.go | Capacity)
}
topologies := []*csi.Topology{
&csi.Topology{
Segments: map[string]string{TopologyKeyNode: cs.nodeID},
},
}
// Need to check for already existing volume name, and if found
// check for the requested capacity and already allocated capacity
if exVol, err := getVolumeByName(req.GetName()); err == nil {
// Since err is nil, it means the volume with the same name already exists
// need to check if the size of existing volume is the same as in new
// request
if exVol.VolSize < capacity {
return nil, status.Errorf(codes.AlreadyExists, "Volume with the same name: %s but with different size already exist", req.GetName())
}
if req.GetVolumeContentSource() != nil {
volumeSource := req.VolumeContentSource
switch volumeSource.Type.(type) {
case *csi.VolumeContentSource_Snapshot:
if volumeSource.GetSnapshot() != nil && exVol.ParentSnapID != "" && exVol.ParentSnapID != volumeSource.GetSnapshot().GetSnapshotId() {
return nil, status.Error(codes.AlreadyExists, "existing volume source snapshot id not matching")
}
case *csi.VolumeContentSource_Volume:
if volumeSource.GetVolume() != nil && exVol.ParentVolID != volumeSource.GetVolume().GetVolumeId() {
return nil, status.Error(codes.AlreadyExists, "existing volume source volume id not matching")
}
default:
return nil, status.Errorf(codes.InvalidArgument, "%v not a proper volume source", volumeSource)
}
}
// TODO (sbezverk) Do I need to make sure that volume still exists?
return &csi.CreateVolumeResponse{
Volume: &csi.Volume{
VolumeId: exVol.VolID,
CapacityBytes: int64(exVol.VolSize),
VolumeContext: req.GetParameters(),
ContentSource: req.GetVolumeContentSource(),
AccessibleTopology: topologies,
},
}, nil
}
volumeID := uuid.New().String()
vol, err := createHostpathVolume(volumeID, req.GetName(), capacity, requestedAccessType, false /* ephemeral */)
if err != nil {
return nil, status.Errorf(codes.Internal, "failed to create volume %v: %v", volumeID, err)
}
glog.V(4).Infof("created volume %s at path %s", vol.VolID, vol.VolPath)
if req.GetVolumeContentSource() != nil {
path := getVolumePath(volumeID)
volumeSource := req.VolumeContentSource
switch volumeSource.Type.(type) {
case *csi.VolumeContentSource_Snapshot:
if snapshot := volumeSource.GetSnapshot(); snapshot != nil {
err = loadFromSnapshot(capacity, snapshot.GetSnapshotId(), path, requestedAccessType)
vol.ParentSnapID = snapshot.GetSnapshotId()
}
case *csi.VolumeContentSource_Volume:
if srcVolume := volumeSource.GetVolume(); srcVolume != nil {
err = loadFromVolume(capacity, srcVolume.GetVolumeId(), path, requestedAccessType)
vol.ParentVolID = srcVolume.GetVolumeId()
}
default:
err = status.Errorf(codes.InvalidArgument, "%v not a proper volume source", volumeSource)
}
if err != nil {
glog.V(4).Infof("VolumeSource error: %v", err)
if delErr := deleteHostpathVolume(volumeID); delErr != nil {
glog.V(2).Infof("deleting hostpath volume %v failed: %v", volumeID, delErr)
}
return nil, err
}
glog.V(4).Infof("successfully populated volume %s", vol.VolID)
}
return &csi.CreateVolumeResponse{
Volume: &csi.Volume{
VolumeId: volumeID,
CapacityBytes: req.GetCapacityRange().GetRequiredBytes(),
VolumeContext: req.GetParameters(),
ContentSource: req.GetVolumeContentSource(),
AccessibleTopology: topologies,
},
}, nil
}
func (cs controllerServer) DeleteVolume(ctx context.Context, req *csi.DeleteVolumeRequest) (*csi.DeleteVolumeResponse, error) {
// Check arguments
if len(req.GetVolumeId()) == 0 {
return nil, status.Error(codes.InvalidArgument, "Volume ID missing in request")
}
if err := cs.validateControllerServiceRequest(csi.ControllerServiceCapability_RPC_CREATE_DELETE_VOLUME); err != nil {
glog.V(3).Infof("invalid delete volume req: %v", req)
return nil, err
}
volId := req.GetVolumeId()
if err := deleteHostpathVolume(volId); err != nil {
return nil, status.Errorf(codes.Internal, "failed to delete volume %v: %v", volId, err)
}
glog.V(4).Infof("volume %v successfully deleted", volId)
return &csi.DeleteVolumeResponse{}, nil
}
func (cs controllerServer) ControllerPublishVolume(ctx context.Context, request *csi.ControllerPublishVolumeRequest) (*csi.ControllerPublishVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerUnpublishVolume(ctx context.Context, request *csi.ControllerUnpublishVolumeRequest) (*csi.ControllerUnpublishVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ValidateVolumeCapabilities(ctx context.Context, request *csi.ValidateVolumeCapabilitiesRequest) (*csi.ValidateVolumeCapabilitiesResponse, error) {
panic("implement me")
}
func (cs controllerServer) ListVolumes(ctx context.Context, req *csi.ListVolumesRequest) (*csi.ListVolumesResponse, error) {
volumeRes := &csi.ListVolumesResponse{
Entries: []*csi.ListVolumesResponse_Entry{},
}
var (
startIdx, volumesLength, maxLength int64
hpVolume hostPathVolume
)
volumeIds := getSortedVolumeIDs()
if req.StartingToken == "" {
req.StartingToken = "1"
}
startIdx, err := strconv.ParseInt(req.StartingToken, 10, 32)
if err != nil {
return nil, status.Error(codes.Aborted, "The type of startingToken should be integer")
}
volumesLength = int64(len(volumeIds))
maxLength = int64(req.MaxEntries)
if maxLength > volumesLength || maxLength <= 0 {
maxLength = volumesLength
}
for index := startIdx - 1; index < volumesLength && index < maxLength; index++ {
hpVolume = hostPathVolumes[volumeIds[index]]
healthy, msg := doHealthCheckInControllerSide(volumeIds[index])
glog.V(3).Infof("Healthy state: %s Volume: %t", hpVolume.VolName, healthy)
volumeRes.Entries = append(volumeRes.Entries, &csi.ListVolumesResponse_Entry{
Volume: &csi.Volume{
VolumeId: hpVolume.VolID,
CapacityBytes: hpVolume.VolSize,
},
Status: &csi.ListVolumesResponse_VolumeStatus{
PublishedNodeIds: []string{hpVolume.NodeID},
VolumeCondition: &csi.VolumeCondition{
Abnormal: !healthy,
Message: msg,
},
},
})
}
glog.V(5).Infof("Volumes are: %+v", *volumeRes)
return volumeRes, nil
}
func (cs controllerServer) GetCapacity(ctx context.Context, request *csi.GetCapacityRequest) (*csi.GetCapacityResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerGetCapabilities(ctx context.Context, request *csi.ControllerGetCapabilitiesRequest) (*csi.ControllerGetCapabilitiesResponse, error) {
panic("implement me")
}
func (cs controllerServer) CreateSnapshot(ctx context.Context, request *csi.CreateSnapshotRequest) (*csi.CreateSnapshotResponse, error) {
panic("implement me")
}
func (cs controllerServer) DeleteSnapshot(ctx context.Context, request *csi.DeleteSnapshotRequest) (*csi.DeleteSnapshotResponse, error) {
panic("implement me")
}
func (cs controllerServer) ListSnapshots(ctx context.Context, request *csi.ListSnapshotsRequest) (*csi.ListSnapshotsResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerExpandVolume(ctx context.Context, request *csi.ControllerExpandVolumeRequest) (*csi.ControllerExpandVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerGetVolume(ctx context.Context, request *csi.ControllerGetVolumeRequest) (*csi.ControllerGetVolumeResponse, error) {
panic("implement me")
}
func NewControllerServer(ephemeral bool, nodeID string) *controllerServer {
if ephemeral {
return &controllerServer{caps: getControllerServiceCapabilities(nil), nodeID: nodeID}
}
return &controllerServer{
caps: getControllerServiceCapabilities(
[]csi.ControllerServiceCapability_RPC_Type{
csi.ControllerServiceCapability_RPC_CREATE_DELETE_VOLUME,
csi.ControllerServiceCapability_RPC_GET_VOLUME,
csi.ControllerServiceCapability_RPC_CREATE_DELETE_SNAPSHOT,
csi.ControllerServiceCapability_RPC_LIST_SNAPSHOTS,
csi.ControllerServiceCapability_RPC_LIST_VOLUMES,
csi.ControllerServiceCapability_RPC_CLONE_VOLUME,
csi.ControllerServiceCapability_RPC_EXPAND_VOLUME, | csi.ControllerServiceCapability_RPC_VOLUME_CONDITION, | random_line_split |
|
controllerserver.go | also need to check that the other
// fields in VolumeCapabilities are sane. The check above is
// just enough to pass the "[Testpattern: Dynamic PV (block
// volmode)] volumeMode should fail in binding dynamic
// provisioned PV to PVC" storage E2E test.
if accessTypeBlock && accessTypeMount {
return nil, status.Error(codes.InvalidArgument, "cannot have both block and mount access type")
}
var requestedAccessType accessType
if accessTypeBlock {
requestedAccessType = blockAccess
} else {
// Default to mount.
requestedAccessType = mountAccess
}
// Check for maximum available capacity
capacity := int64(req.GetCapacityRange().GetRequiredBytes())
if capacity >= maxStorageCapacity {
return nil, status.Errorf(codes.OutOfRange, "Requested capacity %d exceeds maximum allowed %d", capacity, maxStorageCapacity)
}
topologies := []*csi.Topology{
&csi.Topology{
Segments: map[string]string{TopologyKeyNode: cs.nodeID},
},
}
// Need to check for already existing volume name, and if found
// check for the requested capacity and already allocated capacity
if exVol, err := getVolumeByName(req.GetName()); err == nil {
// Since err is nil, it means the volume with the same name already exists
// need to check if the size of existing volume is the same as in new
// request
if exVol.VolSize < capacity {
return nil, status.Errorf(codes.AlreadyExists, "Volume with the same name: %s but with different size already exist", req.GetName())
}
if req.GetVolumeContentSource() != nil {
volumeSource := req.VolumeContentSource
switch volumeSource.Type.(type) {
case *csi.VolumeContentSource_Snapshot:
if volumeSource.GetSnapshot() != nil && exVol.ParentSnapID != "" && exVol.ParentSnapID != volumeSource.GetSnapshot().GetSnapshotId() {
return nil, status.Error(codes.AlreadyExists, "existing volume source snapshot id not matching")
}
case *csi.VolumeContentSource_Volume:
if volumeSource.GetVolume() != nil && exVol.ParentVolID != volumeSource.GetVolume().GetVolumeId() {
return nil, status.Error(codes.AlreadyExists, "existing volume source volume id not matching")
}
default:
return nil, status.Errorf(codes.InvalidArgument, "%v not a proper volume source", volumeSource)
}
}
// TODO (sbezverk) Do I need to make sure that volume still exists?
return &csi.CreateVolumeResponse{
Volume: &csi.Volume{
VolumeId: exVol.VolID,
CapacityBytes: int64(exVol.VolSize),
VolumeContext: req.GetParameters(),
ContentSource: req.GetVolumeContentSource(),
AccessibleTopology: topologies,
},
}, nil
}
volumeID := uuid.New().String()
vol, err := createHostpathVolume(volumeID, req.GetName(), capacity, requestedAccessType, false /* ephemeral */)
if err != nil {
return nil, status.Errorf(codes.Internal, "failed to create volume %v: %v", volumeID, err)
}
glog.V(4).Infof("created volume %s at path %s", vol.VolID, vol.VolPath)
if req.GetVolumeContentSource() != nil {
path := getVolumePath(volumeID)
volumeSource := req.VolumeContentSource
switch volumeSource.Type.(type) {
case *csi.VolumeContentSource_Snapshot:
if snapshot := volumeSource.GetSnapshot(); snapshot != nil {
err = loadFromSnapshot(capacity, snapshot.GetSnapshotId(), path, requestedAccessType)
vol.ParentSnapID = snapshot.GetSnapshotId()
}
case *csi.VolumeContentSource_Volume:
if srcVolume := volumeSource.GetVolume(); srcVolume != nil {
err = loadFromVolume(capacity, srcVolume.GetVolumeId(), path, requestedAccessType)
vol.ParentVolID = srcVolume.GetVolumeId()
}
default:
err = status.Errorf(codes.InvalidArgument, "%v not a proper volume source", volumeSource)
}
if err != nil {
glog.V(4).Infof("VolumeSource error: %v", err)
if delErr := deleteHostpathVolume(volumeID); delErr != nil {
glog.V(2).Infof("deleting hostpath volume %v failed: %v", volumeID, delErr)
}
return nil, err
}
glog.V(4).Infof("successfully populated volume %s", vol.VolID)
}
return &csi.CreateVolumeResponse{
Volume: &csi.Volume{
VolumeId: volumeID,
CapacityBytes: req.GetCapacityRange().GetRequiredBytes(),
VolumeContext: req.GetParameters(),
ContentSource: req.GetVolumeContentSource(),
AccessibleTopology: topologies,
},
}, nil
}
func (cs controllerServer) DeleteVolume(ctx context.Context, req *csi.DeleteVolumeRequest) (*csi.DeleteVolumeResponse, error) {
// Check arguments
if len(req.GetVolumeId()) == 0 {
return nil, status.Error(codes.InvalidArgument, "Volume ID missing in request")
}
if err := cs.validateControllerServiceRequest(csi.ControllerServiceCapability_RPC_CREATE_DELETE_VOLUME); err != nil {
glog.V(3).Infof("invalid delete volume req: %v", req)
return nil, err
}
volId := req.GetVolumeId()
if err := deleteHostpathVolume(volId); err != nil {
return nil, status.Errorf(codes.Internal, "failed to delete volume %v: %v", volId, err)
}
glog.V(4).Infof("volume %v successfully deleted", volId)
return &csi.DeleteVolumeResponse{}, nil
}
func (cs controllerServer) ControllerPublishVolume(ctx context.Context, request *csi.ControllerPublishVolumeRequest) (*csi.ControllerPublishVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerUnpublishVolume(ctx context.Context, request *csi.ControllerUnpublishVolumeRequest) (*csi.ControllerUnpublishVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ValidateVolumeCapabilities(ctx context.Context, request *csi.ValidateVolumeCapabilitiesRequest) (*csi.ValidateVolumeCapabilitiesResponse, error) {
panic("implement me")
}
func (cs controllerServer) ListVolumes(ctx context.Context, req *csi.ListVolumesRequest) (*csi.ListVolumesResponse, error) {
volumeRes := &csi.ListVolumesResponse{
Entries: []*csi.ListVolumesResponse_Entry{},
}
var (
startIdx, volumesLength, maxLength int64
hpVolume hostPathVolume
)
volumeIds := getSortedVolumeIDs()
if req.StartingToken == "" {
req.StartingToken = "1"
}
startIdx, err := strconv.ParseInt(req.StartingToken, 10, 32)
if err != nil {
return nil, status.Error(codes.Aborted, "The type of startingToken should be integer")
}
volumesLength = int64(len(volumeIds))
maxLength = int64(req.MaxEntries)
if maxLength > volumesLength || maxLength <= 0 {
maxLength = volumesLength
}
for index := startIdx - 1; index < volumesLength && index < maxLength; index++ {
hpVolume = hostPathVolumes[volumeIds[index]]
healthy, msg := doHealthCheckInControllerSide(volumeIds[index])
glog.V(3).Infof("Healthy state: %s Volume: %t", hpVolume.VolName, healthy)
volumeRes.Entries = append(volumeRes.Entries, &csi.ListVolumesResponse_Entry{
Volume: &csi.Volume{
VolumeId: hpVolume.VolID,
CapacityBytes: hpVolume.VolSize,
},
Status: &csi.ListVolumesResponse_VolumeStatus{
PublishedNodeIds: []string{hpVolume.NodeID},
VolumeCondition: &csi.VolumeCondition{
Abnormal: !healthy,
Message: msg,
},
},
})
}
glog.V(5).Infof("Volumes are: %+v", *volumeRes)
return volumeRes, nil
}
func (cs controllerServer) GetCapacity(ctx context.Context, request *csi.GetCapacityRequest) (*csi.GetCapacityResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerGetCapabilities(ctx context.Context, request *csi.ControllerGetCapabilitiesRequest) (*csi.ControllerGetCapabilitiesResponse, error) {
panic("implement me")
}
func (cs controllerServer) CreateSnapshot(ctx context.Context, request *csi.CreateSnapshotRequest) (*csi.CreateSnapshotResponse, error) {
panic("implement me")
}
func (cs controllerServer) DeleteSnapshot(ctx context.Context, request *csi.DeleteSnapshotRequest) (*csi.DeleteSnapshotResponse, error) {
panic("implement me")
}
func (cs controllerServer) ListSnapshots(ctx context.Context, request *csi.ListSnapshotsRequest) (*csi.ListSnapshotsResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerExpandVolume(ctx context.Context, request *csi.ControllerExpandVolumeRequest) (*csi.ControllerExpandVolumeResponse, error) | {
panic("implement me")
} | identifier_body |
|
controllerserver.go | "Name missing in request")
}
caps := req.GetVolumeCapabilities()
if caps == nil {
return nil, status.Error(codes.InvalidArgument, "Volume Capabilities missing in request")
}
// Keep a record of the requested access types.
var accessTypeMount, accessTypeBlock bool
for _, ca := range caps {
if ca.GetBlock() != nil {
accessTypeBlock = true
}
if ca.GetMount() != nil |
}
// A real driver would also need to check that the other
// fields in VolumeCapabilities are sane. The check above is
// just enough to pass the "[Testpattern: Dynamic PV (block
// volmode)] volumeMode should fail in binding dynamic
// provisioned PV to PVC" storage E2E test.
if accessTypeBlock && accessTypeMount {
return nil, status.Error(codes.InvalidArgument, "cannot have both block and mount access type")
}
var requestedAccessType accessType
if accessTypeBlock {
requestedAccessType = blockAccess
} else {
// Default to mount.
requestedAccessType = mountAccess
}
// Check for maximum available capacity
capacity := int64(req.GetCapacityRange().GetRequiredBytes())
if capacity >= maxStorageCapacity {
return nil, status.Errorf(codes.OutOfRange, "Requested capacity %d exceeds maximum allowed %d", capacity, maxStorageCapacity)
}
topologies := []*csi.Topology{
&csi.Topology{
Segments: map[string]string{TopologyKeyNode: cs.nodeID},
},
}
// Need to check for already existing volume name, and if found
// check for the requested capacity and already allocated capacity
if exVol, err := getVolumeByName(req.GetName()); err == nil {
// Since err is nil, it means the volume with the same name already exists
// need to check if the size of existing volume is the same as in new
// request
if exVol.VolSize < capacity {
return nil, status.Errorf(codes.AlreadyExists, "Volume with the same name: %s but with different size already exist", req.GetName())
}
if req.GetVolumeContentSource() != nil {
volumeSource := req.VolumeContentSource
switch volumeSource.Type.(type) {
case *csi.VolumeContentSource_Snapshot:
if volumeSource.GetSnapshot() != nil && exVol.ParentSnapID != "" && exVol.ParentSnapID != volumeSource.GetSnapshot().GetSnapshotId() {
return nil, status.Error(codes.AlreadyExists, "existing volume source snapshot id not matching")
}
case *csi.VolumeContentSource_Volume:
if volumeSource.GetVolume() != nil && exVol.ParentVolID != volumeSource.GetVolume().GetVolumeId() {
return nil, status.Error(codes.AlreadyExists, "existing volume source volume id not matching")
}
default:
return nil, status.Errorf(codes.InvalidArgument, "%v not a proper volume source", volumeSource)
}
}
// TODO (sbezverk) Do I need to make sure that volume still exists?
return &csi.CreateVolumeResponse{
Volume: &csi.Volume{
VolumeId: exVol.VolID,
CapacityBytes: int64(exVol.VolSize),
VolumeContext: req.GetParameters(),
ContentSource: req.GetVolumeContentSource(),
AccessibleTopology: topologies,
},
}, nil
}
volumeID := uuid.New().String()
vol, err := createHostpathVolume(volumeID, req.GetName(), capacity, requestedAccessType, false /* ephemeral */)
if err != nil {
return nil, status.Errorf(codes.Internal, "failed to create volume %v: %v", volumeID, err)
}
glog.V(4).Infof("created volume %s at path %s", vol.VolID, vol.VolPath)
if req.GetVolumeContentSource() != nil {
path := getVolumePath(volumeID)
volumeSource := req.VolumeContentSource
switch volumeSource.Type.(type) {
case *csi.VolumeContentSource_Snapshot:
if snapshot := volumeSource.GetSnapshot(); snapshot != nil {
err = loadFromSnapshot(capacity, snapshot.GetSnapshotId(), path, requestedAccessType)
vol.ParentSnapID = snapshot.GetSnapshotId()
}
case *csi.VolumeContentSource_Volume:
if srcVolume := volumeSource.GetVolume(); srcVolume != nil {
err = loadFromVolume(capacity, srcVolume.GetVolumeId(), path, requestedAccessType)
vol.ParentVolID = srcVolume.GetVolumeId()
}
default:
err = status.Errorf(codes.InvalidArgument, "%v not a proper volume source", volumeSource)
}
if err != nil {
glog.V(4).Infof("VolumeSource error: %v", err)
if delErr := deleteHostpathVolume(volumeID); delErr != nil {
glog.V(2).Infof("deleting hostpath volume %v failed: %v", volumeID, delErr)
}
return nil, err
}
glog.V(4).Infof("successfully populated volume %s", vol.VolID)
}
return &csi.CreateVolumeResponse{
Volume: &csi.Volume{
VolumeId: volumeID,
CapacityBytes: req.GetCapacityRange().GetRequiredBytes(),
VolumeContext: req.GetParameters(),
ContentSource: req.GetVolumeContentSource(),
AccessibleTopology: topologies,
},
}, nil
}
func (cs controllerServer) DeleteVolume(ctx context.Context, req *csi.DeleteVolumeRequest) (*csi.DeleteVolumeResponse, error) {
// Check arguments
if len(req.GetVolumeId()) == 0 {
return nil, status.Error(codes.InvalidArgument, "Volume ID missing in request")
}
if err := cs.validateControllerServiceRequest(csi.ControllerServiceCapability_RPC_CREATE_DELETE_VOLUME); err != nil {
glog.V(3).Infof("invalid delete volume req: %v", req)
return nil, err
}
volId := req.GetVolumeId()
if err := deleteHostpathVolume(volId); err != nil {
return nil, status.Errorf(codes.Internal, "failed to delete volume %v: %v", volId, err)
}
glog.V(4).Infof("volume %v successfully deleted", volId)
return &csi.DeleteVolumeResponse{}, nil
}
func (cs controllerServer) ControllerPublishVolume(ctx context.Context, request *csi.ControllerPublishVolumeRequest) (*csi.ControllerPublishVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerUnpublishVolume(ctx context.Context, request *csi.ControllerUnpublishVolumeRequest) (*csi.ControllerUnpublishVolumeResponse, error) {
panic("implement me")
}
func (cs controllerServer) ValidateVolumeCapabilities(ctx context.Context, request *csi.ValidateVolumeCapabilitiesRequest) (*csi.ValidateVolumeCapabilitiesResponse, error) {
panic("implement me")
}
func (cs controllerServer) ListVolumes(ctx context.Context, req *csi.ListVolumesRequest) (*csi.ListVolumesResponse, error) {
volumeRes := &csi.ListVolumesResponse{
Entries: []*csi.ListVolumesResponse_Entry{},
}
var (
startIdx, volumesLength, maxLength int64
hpVolume hostPathVolume
)
volumeIds := getSortedVolumeIDs()
if req.StartingToken == "" {
req.StartingToken = "1"
}
startIdx, err := strconv.ParseInt(req.StartingToken, 10, 32)
if err != nil {
return nil, status.Error(codes.Aborted, "The type of startingToken should be integer")
}
volumesLength = int64(len(volumeIds))
maxLength = int64(req.MaxEntries)
if maxLength > volumesLength || maxLength <= 0 {
maxLength = volumesLength
}
for index := startIdx - 1; index < volumesLength && index < maxLength; index++ {
hpVolume = hostPathVolumes[volumeIds[index]]
healthy, msg := doHealthCheckInControllerSide(volumeIds[index])
glog.V(3).Infof("Healthy state: %s Volume: %t", hpVolume.VolName, healthy)
volumeRes.Entries = append(volumeRes.Entries, &csi.ListVolumesResponse_Entry{
Volume: &csi.Volume{
VolumeId: hpVolume.VolID,
CapacityBytes: hpVolume.VolSize,
},
Status: &csi.ListVolumesResponse_VolumeStatus{
PublishedNodeIds: []string{hpVolume.NodeID},
VolumeCondition: &csi.VolumeCondition{
Abnormal: !healthy,
Message: msg,
},
},
})
}
glog.V(5).Infof("Volumes are: %+v", *volumeRes)
return volumeRes, nil
}
func (cs controllerServer) GetCapacity(ctx context.Context, request *csi.GetCapacityRequest) (*csi.GetCapacityResponse, error) {
panic("implement me")
}
func (cs controllerServer) ControllerGetCapabilities(ctx context.Context, request *csi.ControllerGetCapabilitiesRequest) (*csi.ControllerGetCapabilitiesResponse, error) {
panic("implement me")
}
func (cs controllerServer) CreateSnapshot(ctx context.Context, request *csi.CreateSnapshotRequest) (*csi.CreateSnapshot | {
accessTypeMount = true
} | conditional_block |
AwardEditModal.js | .create()
export default class extends React.Component {
state = {
fileList :[],
type:4
}
getFile = (fileList) => {
this.setState({
fileList
})
}
onCancel =()=>{
this.onProbabilityChange(this.props.row.probability)
this.props.form.resetFields()
this.props.onCancel && this.props.onCancel()
}
onClick = ()=>{
this.props.form.validateFields((err,values)=>{
if(!err){
values.img_path = this.state.fileList[0] && this.state.fileList[0].url
let row = this.props.row
row = {...this.props.row,...values}
row.probability = Number(row.probability)
this.props.onOk && this.props.onOk(row)
this.props.form.resetFields()
this.props.onCancel && this.props.onCancel()
}
})
}
componentDidUpdate(prevProps){
if(this.props.visible && !prevProps.visible){
let row = { ...this.props.row }
if(!row.type){
row.type = 4
row.name = '谢谢参与'
}
this.setState({
type:row.type
},()=>{
this.props.form.setFieldsValue(row)
})
let {img_path} = this.props.row
if(!img_path) img_path = imgMap[row.type]
if(img_path){
this.setState({
fileList:[{
uid: '-1',
name: img_path,
status: 'done',
url: img_path
}]
})
}else{
this.setState({
fileList:[]
})
}
}
}
validatorByProbability= (rule,value,callback) =>{
if(this.props.probability < 0){
callback(`中奖概率之和不能大于100`)
}else{
callback()
}
}
onProbabilityChange = (value) =>{
let {row,probabilityChange} = this.props
let o = {...row}
o.probability = value
probabilityChange && probabilityChange(o)
}
typeChange=(type)=>{
this.props.form.resetFields()
this.onProbabilityChange(this.props.row.probability)
this.setState({
type
},()=>{
let name = ''
if(type === 4) name= '谢谢参与'
this.props.form.setFieldsValue({name})
let img_path = imgMap[type]
let fileList = []
if(img_path){
fileList = [{
uid: '-1',
name: img_path,
status: 'done',
url: img_path
}]
}
this.setState({fileList})
})
}
prizeChange = (value) => {
const {type} = this.state
const {setFieldsValue} = this.props.form
let name = ''
switch(type){
case 1:
name = `${value}积分`
break
case 3:
name = `${value}元红包`
break
default:
return
}
setFieldsValue({name})
}
render() {
const formItemLayout = {
labelCol: {span: 5},
wrapperCol: {span: 18},
}
const { visible , probability, from} = this.props
const { getFieldDecorator } = this.props.form
const {type} = this.state
let TYPES = ACTIVIT_TYPE.filter(i=>{
if(i.value === 4) return true
return from.prize_type.indexOf(i.value) > -1
})
return <Modal
visible={visible}
title="编辑"
okText="确定"
cancelText="取消"
destroyOnClose
onCancel={this.onCancel}
onOk={this.onClick}
width={480}
>
<Form>
<FormItem label="奖品类型" {...formItemLayout}>
{getFieldDecorator('type', {
rules:[
{required:true,message:'请选择奖品类型'}
],
initialValue:4
})(
<Select
placeholder='请选择奖品类型'
onChange={this.typeChange}
getPopupContainer={triggerNode => triggerNode.parentNode}
>
{
TYPES.map((item) => {
return <Option key={item.value} value={item.value}>{item.label}</Option>
})
}
</Select>
)}
</FormItem>
{type === 1||type === 3 ? <FormItem label="奖品面额" {...formItemLayout}>
{getFieldDecorator('prize_value', {
rules:[
{required:true,message:'请输入奖品面额'}
]
})(
<InputNumber
min={1}
max={type===1?99999:200}
onChange={this.prizeChange}
step={1}
precision={type===1?0:2}
style={{width:'100%'}}
placeholder={`${type===1?'单位:积分':'1.00~200元'}`} /> |
<FormItem label="奖品名称" {...formItemLayout}>
{getFieldDecorator('name', {
rules:[
{required:true,message:'请输入奖品名称'}
]
})(
<HzInput maxLength={type===1?7:6} placeholder='请输入奖品名称' />
)}
</FormItem>
{type !== 4 ? <FormItem label="奖品数量" {...formItemLayout}>
{getFieldDecorator('number', {
rules:[
{required:true,message:'请输入奖品数量'}
]
})(
<InputNumber
min={1}
max={99999}
step={1}
precision={0}
style={{width:'100%'}}
placeholder='大于0正整数' />
)}
</FormItem>:null}
<FormItem label="中奖概率" {...formItemLayout}>
{getFieldDecorator('probability', {
validateTrigger:'onBlur',
rules:[
{required:true,message:'请输入中奖概率'},
{validator:this.validatorByProbability}
]
})(
<InputNumber
min={0}
step={1}
precision={0}
onChange={this.onProbabilityChange}
style={{width:'100%'}}
placeholder='请输入中奖概率' />
)}
<span style={{fontSize:12,color:'#9EA8B1',display:'block',marginTop: '-6px'}}>还剩{probability}%的中奖概率 </span>
</FormItem>
<UploadContainer {...this.props} getFile={this.getFile} fileList={this.state.fileList} />
</Form>
</Modal>
}
}
@connect(({shop_fitment}) => ({ shop_fitment }))
class UploadContainer extends React.Component {
state = {
fileList: [],
showUploadIcon: true,
previewVisible: false,
previewImage: ''
}
componentDidMount(){
this.props.dispatch({
type:'shop_fitment/getToken',
payload: {
type: 'image',
}
})
}
static getDerivedStateFromProps(nextProps, prevState) {
return {
fileList: nextProps.fileList,
showUploadIcon: nextProps.fileList.length === 0 || (nextProps.fileList[0] && nextProps.fileList[0].status !== 'done'),
previewImage:nextProps.fileList[0] && nextProps.fileList[0].url
}
}
handleCancel = () => {
this.setState({
previewVisible: false,
previewImage: ''
})
}
setShowUploadIcon = (status) => {
setTimeout(_ => {
this.setState({
showUploadIcon: status
})
}, 400)
}
handlePreview = (fileList) => {
if (fileList && fileList[0]) {
this.setState({
previewVisible: true,
previewImage: fileList[0].url
})
}
}
beforeUpload = (file, fileList) => {
const isJPG = file.type === 'image/jpeg' || file.type === 'image/png'
if (!isJPG) {
message.error('只能上传jpg、jpeg和png格式的图片!')
}
const isLt2M = file.size / 1024 <= 100
if (!isLt2M) {
message.error('图片大小不能超过100KB!')
}
const maxPic = this.state.fileList.length + fileList.length <= 1
if (!maxPic) {
message.error('最多只能上传1张图片!')
}
return isJPG && isLt2M && maxPic
}
handleChange = (info) => {
const { fileList } = info
const photoPrefix = this.props.shop_fitment.photoPrefix
if (info.file.status === 'uploading') {
this.props.getFile && this.props.getFile(fileList)
}
if (info.file.status === 'done') {
fileList.map((file) => {
if (file.response) {
file.url = `https://${photoPrefix}/${file.response.key}`
file.key = file.response.key
}
return file
})
this.props.getFile && this.props.getFile(fileList)
// this.setState({ fileList }, () => {
// this.setShowUploadIcon(fileList.length === 0)
// })
}
}
handleRemove = (file) => {
const { fileList } = this.state
for (let | )}
</FormItem>: null} | random_line_split |
AwardEditModal.js | if(!row.type){
row.type = 4
row.name = '谢谢参与'
}
this.setState({
type:row.type
},()=>{
this.props.form.setFieldsValue(row)
})
let {img_path} = this.props.row
if(!img_path) img_path = imgMap[row.type]
if(img_path){
this.setState({
fileList:[{
uid: '-1',
name: img_path,
status: 'done',
url: img_path
}]
})
}else{
this.setState({
fileList:[]
})
}
}
}
validatorByProbability= (rule,value,callback) =>{
if(this.props.probability < 0){
callback(`中奖概率之和不能大于100`)
}else{
callback()
}
}
onProbabilityChange = (value) =>{
let {row,probabilityChange} = this.props
let o = {...row}
o.probability = value
probabilityChange && probabilityChange(o)
}
typeChange=(type)=>{
this.props.form.resetFields()
this.onProbabilityChange(this.props.row.probability)
this.setState({
type
},()=>{
let name = ''
if(type === 4) name= '谢谢参与'
this.props.form.setFieldsValue({name})
let img_path = imgMap[type]
let fileList = []
if(img_path){
fileList = [{
uid: '-1',
name: img_path,
status: 'done',
url: img_path
}]
}
this.setState({fileList})
})
}
prizeChange = (value) => {
const {type} = this.state
const {setFieldsValue} = this.props.form
let name = ''
switch(type){
case 1:
name = `${value}积分`
break
case 3:
name = `${value}元红包`
break
default:
return
}
setFieldsValue({name})
}
render() {
const formItemLayout = {
labelCol: {span: 5},
wrapperCol: {span: 18},
}
const { visible , probability, from} = this.props
const { getFieldDecorator } = this.props.form
const {type} = this.state
let TYPES = ACTIVIT_TYPE.filter(i=>{
if(i.value === 4) return true
return from.prize_type.indexOf(i.value) > -1
})
return <Modal
visible={visible}
title="编辑"
okText="确定"
cancelText="取消"
destroyOnClose
onCancel={this.onCancel}
onOk={this.onClick}
width={480}
>
<Form>
<FormItem label="奖品类型" {...formItemLayout}>
{getFieldDecorator('type', {
rules:[
{required:true,message:'请选择奖品类型'}
],
initialValue:4
})(
<Select
placeholder='请选择奖品类型'
onChange={this.typeChange}
getPopupContainer={triggerNode => triggerNode.parentNode}
>
{
TYPES.map((item) => {
return <Option key={item.value} value={item.value}>{item.label}</Option>
})
}
</Select>
)}
</FormItem>
{type === 1||type === 3 ? <FormItem label="奖品面额" {...formItemLayout}>
{getFieldDecorator('prize_value', {
rules:[
{required:true,message:'请输入奖品面额'}
]
})(
<InputNumber
min={1}
max={type===1?99999:200}
onChange={this.prizeChange}
step={1}
precision={type===1?0:2}
style={{width:'100%'}}
placeholder={`${type===1?'单位:积分':'1.00~200元'}`} />
)}
</FormItem>: null}
<FormItem label="奖品名称" {...formItemLayout}>
{getFieldDecorator('name', {
rules:[
{required:true,message:'请输入奖品名称'}
]
})(
<HzInput maxLength={type===1?7:6} placeholder='请输入奖品名称' />
)}
</FormItem>
{type !== 4 ? <FormItem label="奖品数量" {...formItemLayout}>
{getFieldDecorator('number', {
rules:[
{required:true,message:'请输入奖品数量'}
]
})(
<InputNumber
min={1}
max={99999}
step={1}
precision={0}
style={{width:'100%'}}
placeholder='大于0正整数' />
)}
</FormItem>:null}
<FormItem label="中奖概率" {...formItemLayout}>
{getFieldDecorator('probability', {
validateTrigger:'onBlur',
rules:[
{required:true,message:'请输入中奖概率'},
{validator:this.validatorByProbability}
]
})(
<InputNumber
min={0}
step={1}
precision={0}
onChange={this.onProbabilityChange}
style={{width:'100%'}}
placeholder='请输入中奖概率' />
)}
<span style={{fontSize:12,color:'#9EA8B1',display:'block',marginTop: '-6px'}}>还剩{probability}%的中奖概率 </span>
</FormItem>
<UploadContainer {...this.props} getFile={this.getFile} fileList={this.state.fileList} />
</Form>
</Modal>
}
}
@connect(({shop_fitment}) => ({ shop_fitment }))
class UploadContainer extends React.Component {
state = {
fileList: [],
showUploadIcon: true,
previewVisible: false,
previewImage: ''
}
componentDidMount(){
this.props.dispatch({
type:'shop_fitment/getToken',
payload: {
type: 'image',
}
})
}
static getDerivedStateFromProps(nextProps, prevState) {
return {
fileList: nextProps.fileList,
showUploadIcon: nextProps.fileList.length === 0 || (nextProps.fileList[0] && nextProps.fileList[0].status !== 'done'),
previewImage:nextProps.fileList[0] && nextProps.fileList[0].url
}
}
handleCancel = () => {
this.setState({
previewVisible: false,
previewImage: ''
})
}
setShowUploadIcon = (status) => {
setTimeout(_ => {
this.setState({
showUploadIcon: status
})
}, 400)
}
handlePreview = (fileList) => {
if (fileList && fileList[0]) {
this.setState({
previewVisible: true,
previewImage: fileList[0].url
})
}
}
beforeUpload = (file, fileList) => {
const isJPG = file.type === 'image/jpeg' || file.type === 'image/png'
if (!isJPG) {
message.error('只能上传jpg、jpeg和png格式的图片!')
}
const isLt2M = file.size / 1024 <= 100
if (!isLt2M) {
message.error('图片大小不能超过100KB!')
}
const maxPic = this.state.fileList.length + fileList.length <= 1
if (!maxPic) {
message.error('最多只能上传1张图片!')
}
return isJPG && isLt2M && maxPic
}
handleChange = (info) => {
const { fileList } = info
const photoPrefix = this.props.shop_fitment.photoPrefix
if (info.file.status === 'uploading') {
this.props.getFile && this.props.getFile(fileList)
}
if (info.file.status === 'done') {
fileList.map((file) => {
if (file.response) {
file.url = `https://${photoPrefix}/${file.response.key}`
file.key = file.response.key
}
return file
})
this.props.getFile && this.props.getFile(fileList)
// this.setState({ fileList }, () => {
// this.setShowUploadIcon(fileList.length === 0)
// })
}
}
handleRemove = (file) => {
const { fileList } = this.state
for (let [i, v] of fileList.entries()) {
if (v.uid === file.uid) {
fileList.splice(i, 1)
this.props.getFile && this.props.getFile([])
// this.setState({ fileList, showUploadIcon: fileList.length === 0 }, () => {
this.props.form.validateFields(['images'], { force: true })
// })
return
}
}
}
validatorByImg = (rule, value, callback) =>{
const {fileList} = this.state
if(fileList.length && fileList[0].url){
callback()
}else{
callback('请上传图片')
}
}
render() {
const fileList = this.state.fileList
const photoToken = this.props.shop_fitment.photoToken
const formItemLayout = {
labelCol: {span: 5},
wrapperCol: {span: 18},
}
const uploadProps = {
name: 'file',
action: '//upload.qi | niup.c | identifier_name |
|
AwardEditModal.js | .create()
export default class extends React.Component {
state = {
fileList :[],
type:4
}
getFile = (fileList) => {
this.setState({
fileList
})
}
onCancel =()=>{
this.onProbabilityChange(this.props.row.probability)
this.props.form.resetFields()
this.props.onCancel && this.props.onCancel()
}
onClick = ()=>{
this.props.form.validateFields((err,values)=>{
if(!err){
values.img_path = this.state.fileList[0] && this.state.fileList[0].url
let row = this.props.row
row = {...this.props.row,...values}
row.probability = Number(row.probability)
this.props.onOk && this.props.onOk(row)
this.props.form.resetFields()
this.props.onCancel && this.props.onCancel()
}
})
}
componentDidUpdate(prevProps){
if(this.props.visible && !prevProps.visible){
let row = { ...this.props.row }
if(!row.type){
row.type = 4
row.name = '谢谢参与'
}
this.setState({
type:row.type
},()=>{
this.props.form.setFieldsValue(row)
})
let {img_path} = this.props.row
if(!img_path) img_path = imgMap[row.type]
if(img_path){
this.setState({
fileList:[{
uid: '-1',
name: img_path,
status: 'done',
url: img_path
}]
})
}else{
this.setState({
fileList:[]
})
}
}
}
validatorByProbability= (rule,value,callback) =>{
if(this.props.probability < 0){
|
}
}
onProbabilityChange = (value) =>{
let {row,probabilityChange} = this.props
let o = {...row}
o.probability = value
probabilityChange && probabilityChange(o)
}
typeChange=(type)=>{
this.props.form.resetFields()
this.onProbabilityChange(this.props.row.probability)
this.setState({
type
},()=>{
let name = ''
if(type === 4) name= '谢谢参与'
this.props.form.setFieldsValue({name})
let img_path = imgMap[type]
let fileList = []
if(img_path){
fileList = [{
uid: '-1',
name: img_path,
status: 'done',
url: img_path
}]
}
this.setState({fileList})
})
}
prizeChange = (value) => {
const {type} = this.state
const {setFieldsValue} = this.props.form
let name = ''
switch(type){
case 1:
name = `${value}积分`
break
case 3:
name = `${value}元红包`
break
default:
return
}
setFieldsValue({name})
}
render() {
const formItemLayout = {
labelCol: {span: 5},
wrapperCol: {span: 18},
}
const { visible , probability, from} = this.props
const { getFieldDecorator } = this.props.form
const {type} = this.state
let TYPES = ACTIVIT_TYPE.filter(i=>{
if(i.value === 4) return true
return from.prize_type.indexOf(i.value) > -1
})
return <Modal
visible={visible}
title="编辑"
okText="确定"
cancelText="取消"
destroyOnClose
onCancel={this.onCancel}
onOk={this.onClick}
width={480}
>
<Form>
<FormItem label="奖品类型" {...formItemLayout}>
{getFieldDecorator('type', {
rules:[
{required:true,message:'请选择奖品类型'}
],
initialValue:4
})(
<Select
placeholder='请选择奖品类型'
onChange={this.typeChange}
getPopupContainer={triggerNode => triggerNode.parentNode}
>
{
TYPES.map((item) => {
return <Option key={item.value} value={item.value}>{item.label}</Option>
})
}
</Select>
)}
</FormItem>
{type === 1||type === 3 ? <FormItem label="奖品面额" {...formItemLayout}>
{getFieldDecorator('prize_value', {
rules:[
{required:true,message:'请输入奖品面额'}
]
})(
<InputNumber
min={1}
max={type===1?99999:200}
onChange={this.prizeChange}
step={1}
precision={type===1?0:2}
style={{width:'100%'}}
placeholder={`${type===1?'单位:积分':'1.00~200元'}`} />
)}
</FormItem>: null}
<FormItem label="奖品名称" {...formItemLayout}>
{getFieldDecorator('name', {
rules:[
{required:true,message:'请输入奖品名称'}
]
})(
<HzInput maxLength={type===1?7:6} placeholder='请输入奖品名称' />
)}
</FormItem>
{type !== 4 ? <FormItem label="奖品数量" {...formItemLayout}>
{getFieldDecorator('number', {
rules:[
{required:true,message:'请输入奖品数量'}
]
})(
<InputNumber
min={1}
max={99999}
step={1}
precision={0}
style={{width:'100%'}}
placeholder='大于0正整数' />
)}
</FormItem>:null}
<FormItem label="中奖概率" {...formItemLayout}>
{getFieldDecorator('probability', {
validateTrigger:'onBlur',
rules:[
{required:true,message:'请输入中奖概率'},
{validator:this.validatorByProbability}
]
})(
<InputNumber
min={0}
step={1}
precision={0}
onChange={this.onProbabilityChange}
style={{width:'100%'}}
placeholder='请输入中奖概率' />
)}
<span style={{fontSize:12,color:'#9EA8B1',display:'block',marginTop: '-6px'}}>还剩{probability}%的中奖概率 </span>
</FormItem>
<UploadContainer {...this.props} getFile={this.getFile} fileList={this.state.fileList} />
</Form>
</Modal>
}
}
@connect(({shop_fitment}) => ({ shop_fitment }))
class UploadContainer extends React.Component {
state = {
fileList: [],
showUploadIcon: true,
previewVisible: false,
previewImage: ''
}
componentDidMount(){
this.props.dispatch({
type:'shop_fitment/getToken',
payload: {
type: 'image',
}
})
}
static getDerivedStateFromProps(nextProps, prevState) {
return {
fileList: nextProps.fileList,
showUploadIcon: nextProps.fileList.length === 0 || (nextProps.fileList[0] && nextProps.fileList[0].status !== 'done'),
previewImage:nextProps.fileList[0] && nextProps.fileList[0].url
}
}
handleCancel = () => {
this.setState({
previewVisible: false,
previewImage: ''
})
}
setShowUploadIcon = (status) => {
setTimeout(_ => {
this.setState({
showUploadIcon: status
})
}, 400)
}
handlePreview = (fileList) => {
if (fileList && fileList[0]) {
this.setState({
previewVisible: true,
previewImage: fileList[0].url
})
}
}
beforeUpload = (file, fileList) => {
const isJPG = file.type === 'image/jpeg' || file.type === 'image/png'
if (!isJPG) {
message.error('只能上传jpg、jpeg和png格式的图片!')
}
const isLt2M = file.size / 1024 <= 100
if (!isLt2M) {
message.error('图片大小不能超过100KB!')
}
const maxPic = this.state.fileList.length + fileList.length <= 1
if (!maxPic) {
message.error('最多只能上传1张图片!')
}
return isJPG && isLt2M && maxPic
}
handleChange = (info) => {
const { fileList } = info
const photoPrefix = this.props.shop_fitment.photoPrefix
if (info.file.status === 'uploading') {
this.props.getFile && this.props.getFile(fileList)
}
if (info.file.status === 'done') {
fileList.map((file) => {
if (file.response) {
file.url = `https://${photoPrefix}/${file.response.key}`
file.key = file.response.key
}
return file
})
this.props.getFile && this.props.getFile(fileList)
// this.setState({ fileList }, () => {
// this.setShowUploadIcon(fileList.length === 0)
// })
}
}
handleRemove = (file) => {
const { fileList } = this.state
for | callback(`中奖概率之和不能大于100`)
}else{
callback() | conditional_block |
PublicFunction.go | Only: false,
MaxAge: maxAge}
r.AddCookie(uid_cookie)
}
func GetTotal(price string, num string) string {
fPrice, err1 := strconv.ParseFloat(price, 64)
fnum, err2 := strconv.ParseFloat(num, 64)
if err1 == nil && err2 == nil {
return fmt.Sprintf("%1.2f", fPrice*fnum)
}
return ""
}
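// Example (hypothetical usage, not part of the original file):
//   GetTotal("19.99", "3") returns "59.97"
//   GetTotal("abc", "3") returns "" because the price fails to parse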
func RemovePath(path string) bool {
//Log("upload picture Task is running...")
//curdir := GetCurDir()
//fullPath := curdir + "/" + path + "/"
if ExistsPath(path) {
err := os.RemoveAll(path)
if err != nil {
Log("remove fail " + path)
return false
} else {
// removal succeeded (file remove OK)
return true
}
} else {
return false
}
}
func RemoveFile(path string) bool {
//Log("upload picture Task is running...")
//curdir := GetCurDir()
//fullPath := curdir + "/" + path + "/"
if ExistsPath(path) {
err := os.Remove(path) // delete the file
if err != nil {
Log("remove fail " + path)
return false
} else {
// removal succeeded (file remove OK)
return true
}
} else {
return false
}
}
func SavePictureTask(res http.ResponseWriter, req *http.Request, path stri | e()会报错的
CreatePath(curdir + "/" + path + "/")
}
var (
status int
err error
)
defer func() {
if nil != err {
http.Error(res, err.Error(), status)
}
}()
// parse request
const _24K = (1 << 20) * 24
if err = req.ParseMultipartForm(_24K); nil != err {
status = http.StatusInternalServerError
return ""
}
for _, fheaders := range req.MultipartForm.File {
for _, hdr := range fheaders {
// open uploaded
var infile multipart.File
if infile, err = hdr.Open(); nil != err {
status = http.StatusInternalServerError
return ""
}
filename := hdr.Filename
if strings.Contains(strings.ToLower(filename), ".mp3") || strings.Contains(strings.ToLower(filename), ".mov") {
// audio files go straight into the Picture folder instead of the temp folder
path = "Picture/" + userid + "/" + typeid
CreatePath(curdir + "/" + path + "/")
}
// open destination
var outfile *os.File
savePath := curdir + "/" + path + "/" + filename
if outfile, err = os.Create(savePath); nil != err {
status = http.StatusInternalServerError
return ""
}
// 32K buffer copy
//var written int64
if _, err = io.Copy(outfile, infile); nil != err {
status = http.StatusInternalServerError
return ""
}
infile.Close()
outfile.Close()
//CreatePath(curdir + "/" + path + "/thumbnial")
//ImageFile_resize(infile, curdir+"/"+path+"/thumbnial/"+hdr.Filename, 200, 200)
fileNames += "," + hdr.Filename
//outfile.Close()
//res.Write([]byte("uploaded file:" + hdr.Filename + ";length:" + strconv.Itoa(int(written))))
}
}
}
fileNames = strings.Replace(fileNames, "#,", "", -1)
fileNames = strings.Replace(fileNames, "#", "", -1)
return fileNames
}
func SaveConfigTask(res http.ResponseWriter, req *http.Request, path string, filename string) string {
//Log("upload picture Task is running...")
curdir := GetCurDir()
var fileNames string = "#"
if req.Method == "GET" {
} else {
ff, errr := os.Open(curdir + "/" + path + "/")
if errr != nil && os.IsNotExist(errr) {
Log(ff, path+" does not exist, creating it") // ff is nil when the path does not exist, so a deferred ff.Close() here would panic
CreatePath(curdir + "/" + path + "/")
} else if ff != nil {
ff.Close() // release the handle when the directory already exists
}
var (
status int
err error
)
defer func() {
if nil != err {
http.Error(res, err.Error(), status)
}
}()
// parse request
const _24K = (1 << 20) * 24
if err = req.ParseMultipartForm(_24K); nil != err {
status = http.StatusInternalServerError
return ""
}
for _, fheaders := range req.MultipartForm.File {
for _, hdr := range fheaders {
// open uploaded
var infile multipart.File
if infile, err = hdr.Open(); nil != err {
status = http.StatusInternalServerError
return ""
}
//filename := hdr.Filename
// open destination
var outfile *os.File
savePath := curdir + "/" + path + "/" + filename
if outfile, err = os.Create(savePath); nil != err {
status = http.StatusInternalServerError
return ""
}
// 32K buffer copy
//var written int64
if _, err = io.Copy(outfile, infile); nil != err {
status = http.StatusInternalServerError
return ""
}
infile.Close()
outfile.Close()
//CreatePath(curdir + "/" + path + "/thumbnial")
//ImageFile_resize(infile, curdir+"/"+path+"/thumbnial/"+hdr.Filename, 200, 200)
fileNames += "," + hdr.Filename
//outfile.Close()
//res.Write([]byte("uploaded file:" + hdr.Filename + ";length:" + strconv.Itoa(int(written))))
}
}
}
fileNames = strings.Replace(fileNames, "#,", "", -1)
fileNames = strings.Replace(fileNames, "#", "", -1)
return fileNames
}
func SaveUploadPictureTask(res http.ResponseWriter, req *http.Request, path string) string {
//Log("upload picture Task is running...")
curdir := GetCurDir()
var fileNames string = "#"
if req.Method == "GET" {
} else {
defer func() {
if err := recover(); err != nil {
Log("SaveUploadPictureTask")
Log(err)
}
}()
ff, errr := os.Open(curdir + "/" + path + "/")
if errr != nil && os.IsNotExist(errr) {
Log(ff, path+" does not exist, creating it") // ff is nil when the path does not exist, so a deferred ff.Close() here would panic
CreatePath(curdir + "/" + path + "/")
} else if ff != nil {
ff.Close() // release the handle when the directory already exists
}
var (
status int
err error
)
defer func() {
if nil != err {
http.Error(res, err.Error(), status)
}
}()
// parse request
const _24K = (1 << 20) * 24
if err = req.ParseMultipartForm(_24K); nil != err {
status = http.StatusInternalServerError
return ""
}
for _, fheaders := range req.MultipartForm.File {
for _, hdr := range fheaders {
// open uploaded
var infile multipart.File
if infile, err = hdr.Open(); nil != err {
status = http.StatusInternalServerError
return ""
}
filename := hdr.Filename
// open destination
var outfile *os.File
savePath := curdir + "/" + path + "/" + filename
// if the file already exists, fall back to a random file name
if ExistsPath(savePath) {
filename = GetRandomFileName(hdr.Filename)
savePath = curdir + "/" + path + "/" + filename
}
if outfile, err = os.Create(savePath); nil != err {
status = http.StatusInternalServerError
return ""
}
// 32K buffer copy
//var written int64
if _, err = io.Copy(outfile, infile); nil != err {
status = http.StatusInternalServerError
return ""
}
infile.Close()
outfile.Close()
//CreatePath(curdir + "/" + path + "/thumbnial")
//ImageFile_resize(infile, curdir+"/"+path+"/thumbnial/"+hdr.Filename, 200, 200)
| ng, userid string, typeid string) string {
//Log("upload picture Task is running...")
curdir := GetCurDir()
var fileNames string = "#"
if req.Method == "GET" {
} else {
ff, errr := os.Open(curdir + "/" + path + "/")
if errr != nil && os.IsNotExist(errr) {
Log(ff, ""+path+"文件不存在,创建") //为什么打印nil 是这样的如果file不存在 返回f文件的指针是nil的 所以我们不能使用defer f.Clos | identifier_body |
PublicFunction.go | }
for _, address := range addrs {
// check the address type and if it is not a loopback the display it
if ipnet, ok := address.(*net.IPNet); ok && !ipnet.IP.IsLoopback() {
if ipnet.IP.To4() != nil {
ipstr := ipnet.IP.String()
index := strings.Index(ipstr, "127.0")
if index > -1 {
continue
}
index = strings.Index(ipstr, "192.168.")
if index > -1 {
return ipstr
}
index = strings.Index(ipstr, "169.254.")
if index > -1 {
continue
}
return ipstr
}
}
}
return ""
}
func GetLocalIP() string {
conn, err := net.Dial("udp", "8.8.8.8:80")
if err == nil {
defer conn.Close()
localAddr := conn.LocalAddr().(*net.UDPAddr)
return localAddr.IP.String()
} else {
return GetLocalIPP()
}
}
func GetLocalIPP() string {
//GetIpList()
var ipstr string = ""
// Windows: resolve an IP via hostname lookup
host, _ := os.Hostname()
addrss, err := net.LookupIP(host)
if err != nil {
Log("error", err.Error())
//return ""
}
var ipArray []string
for _, addr := range addrss {
if ipv4 := addr.To4(); ipv4 != nil {
Log("ippppp=: ", ipv4)
ipstr = ipv4.String()
if !strings.HasPrefix(ipstr, "127.0") && !strings.HasPrefix(ipstr, "169.254") && !strings.HasPrefix(ipstr, "172.16") {
ipArray = append(ipArray, ipstr)
}
}
}
// pick out a public IP first
//var pubIpArray []string
for i := 0; i < len(ipArray); i++ {
//Log("pubip===" + ipArray[i])
if !strings.HasPrefix(ipArray[i], "10.") && !strings.HasPrefix(ipArray[i], "192.168") && !strings.HasPrefix(ipArray[i], "172.") {
return ipArray[i]
//pubIpArray = append(pubIpArray, ipstr)
}
}
// if there is no public IP, fall back to a local one
if len(ipArray) > 0 {
return ipArray[0]
}
// Linux: enumerate network interfaces for an IP
if ipstr == "" {
ifaces, errr := net.Interfaces()
// handle err
if errr != nil {
Log("error", errr.Error())
return ""
}
for _, i := range ifaces {
addrs, _ := i.Addrs()
// handle err
for _, addr := range addrs {
var ip net.IP
switch v := addr.(type) {
case *net.IPNet:
ip = v.IP
case *net.IPAddr:
ip = v.IP
}
// process IP address
//Log("ip=", ip)
ipstr = fmt.Sprintf("%s", ip)
Log("ipstr=", ipstr)
index := strings.Index(ipstr, "127.0")
if index > -1 {
continue
}
index = strings.Index(ipstr, "192.168.")
if index > -1 {
return ipstr
}
index = strings.Index(ipstr, "169.254.")
if index > -1 {
continue
}
if len(ipstr) > 6 {
array := strings.Split(ipstr, ".")
if len(array) == 4 {
return ipstr
}
}
}
}
}
return ""
}
func HttpPost(url string, paras string) string {
//Log("url=" + url + " paras=" + paras)
client := &http.Client{}
req, err := http.NewRequest("POST",
url,
strings.NewReader(paras))
if err != nil {
// handle error
return ""
}
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
//req.Header.Set("Cookie", "name=anny")
resp, err := client.Do(req)
if err != nil {
// handle error: resp is nil on failure, so it must not be dereferenced
return ""
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
// handle error
return ""
}
//Log(string(body))
return string(body)
}
func HttpGet(url string) string {
//Log("get =" + url)
resp, err := http.Get(url)
if err != nil {
// handle error
Log(err.Error())
return ""
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
// handle error
Log(err.Error())
return ""
}
//Log("response =" + string(body))
return string(body)
}
func HttpDownloadFile(url string, toPath string) {
//Log("get =" + url)
	res, err := http.Get(url)
	if err != nil {
		Log(err)
		return
	}
	defer res.Body.Close()
	f, err := os.Create(toPath)
	if err != nil {
		Log(err)
		return
	}
	defer f.Close()
	if _, err = io.Copy(f, res.Body); err != nil {
		Log(err)
	}
//Log("size =" + size)
}
// IntToBytes encodes an int as a 4-byte big-endian slice.
func IntToBytes(n int) []byte {
tmp := int32(n)
bytesBuffer := bytes.NewBuffer([]byte{})
binary.Write(bytesBuffer, binary.BigEndian, tmp)
return bytesBuffer.Bytes()
}
// BytesToInt decodes a 4-byte big-endian slice back to an int.
func BytesToInt(b []byte) int {
bytesBuffer := bytes.NewBuffer(b)
var tmp int32
binary.Read(bytesBuffer, binary.BigEndian, &tmp)
return int(tmp)
}
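// Round-trip sketch for the two helpers above: a 4-byte big-endian encoding,
// handy as a length prefix when framing TCP messages.
func ExampleIntBytesRoundTrip() {
	buf := IntToBytes(1024) // []byte{0x00, 0x00, 0x04, 0x00}
	n := BytesToInt(buf)    // 1024 again
	Log("encoded=", buf, "decoded=", n)
}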
// RealIPHand is middleware that rewrites RemoteAddr to the proxy-reported client IP.
func RealIPHand(h http.Handler) http.Handler {
fn := func(w http.ResponseWriter, r *http.Request) {
if rip := RealIP(r); rip != "" {
r.RemoteAddr = rip
}
h.ServeHTTP(w, r)
}
return http.HandlerFunc(fn)
}
// http.CanonicalHeaderKey normalizes case, so one variable per header suffices.
var xForwardedFor = http.CanonicalHeaderKey("X-Forwarded-For")
var xRealIP = http.CanonicalHeaderKey("X-Real-IP")
var xRealClientIP = http.CanonicalHeaderKey("X-Real-Client-IP")
var ProxyClientIP = http.CanonicalHeaderKey("Proxy-Client-IP")
var WLProxyClientIP = http.CanonicalHeaderKey("WL-Proxy-Client-IP")
var HTTPXFORWARDEDFOR = http.CanonicalHeaderKey("HTTP_X_FORWARDED_FOR")
// RealIP extracts the client IP, preferring proxy-set headers over RemoteAddr.
func RealIP(r *http.Request) string {
	PrintHead(r)
	if xff := r.Header.Get(xForwardedFor); xff != "" {
		// X-Forwarded-For may carry a comma-separated chain; the first hop is the client.
		if i := strings.Index(xff, ","); i > -1 {
			return strings.TrimSpace(xff[:i])
		}
		return xff
	}
	for _, key := range []string{xRealIP, xRealClientIP, ProxyClientIP, WLProxyClientIP} {
		if ip := r.Header.Get(key); ip != "" {
			return ip
		}
	}
	//return realip.FromRequest(r)
	return r.RemoteAddr
}
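// Minimal sketch of RealIP against a proxied request; the header value is an
// illustrative assumption. Requires the standard net/http/httptest import.
func ExampleRealIP() {
	req := httptest.NewRequest("GET", "http://example.com/", nil)
	req.Header.Set("X-Forwarded-For", "203.0.113.7, 10.0.0.1")
	Log("client ip=" + RealIP(req)) // -> 203.0.113.7
}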
func PrintHead(r *http.Request) {
realip := r.Header.Get(xForwardedFor)
if len(realip) == 0 {
realip = r.Header.Get("http_client_ip")
}
if len(realip) == 0 {
//Log(xRealIP)
realip = r.Header.Get(xRealIP)
}
if len(realip) == 0 {
//Log(ProxyClientIP)
realip = r.Header.Get(ProxyClientIP)
}
if len(realip) == 0 {
//Log(WLProxyClientIP)
realip = r.Header.Get(WLProxyClientIP)
}
if len(realip) == 0 {
//Log(HTTPXFORWARDEDFOR)
realip = r.Header.Get(HTTPXFORWARDEDFOR)
}
if len(realip) == 0 {
realip = r.RemoteAddr
}
//Log("ip=" + r.RemoteAddr)
// | Log("realip=" + | conditional_block |
|
// Reconstructed signature (the original function name did not survive):
// serializes an int-keyed map to a comma-joined JSON string in key order.
func MapListToJsonStr(MapList map[int]interface{}) string {
var str string = "##"
sorted_keys := make([]int, 0)
	for k := range MapList {
sorted_keys = append(sorted_keys, k)
}
	// sort the int keys in increasing order
sort.Ints(sorted_keys)
for _, k := range sorted_keys {
//fmt.Printf("k=%v, v=%v\n", k, MapList[k])
jsonStr, err := json.Marshal(MapList[k])
if err != nil {
Log(err)
}
//Log("map to json", string(str))
str += "," + string(jsonStr)
}
str = strings.Replace(str, "##,", "", -1)
str = strings.Replace(str, "##", "", -1)
return str
}
// ConverToStr renders a scalar as a string. Numeric branches use fmt so that
// e.g. 65 becomes "65" (the old string(v.(int)) produced the rune "A").
func ConverToStr(v interface{}) string {
	if v == nil {
		return ""
	}
	var str string
	switch val := v.(type) {
	case string:
		str = val
	case int, int8, int16, int32, int64:
		str = fmt.Sprintf("%d", val)
	case float32, float64:
		str = fmt.Sprintf("%f", val)
	default:
		str = fmt.Sprintf("%v", val)
	}
	return strings.Replace(str, ".000000", "", -1)
}
func GetCurDateTime() string {
return time.Now().Format("2006-01-02 15:04:05")
}
func GetCurDay() string {
return time.Now().Format("2006-01-02")
}
func GetNameSinceNow(after int) string {
day := time.Now().AddDate(0, 0, after).Format("2006-01-02")
day = strings.Replace(day, "-", "", -1)
return day
}
func GetDaySinceNow(after int) string {
return time.Now().AddDate(0, 0, after).Format("2006-01-02")
}
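// Quick sketch of the date helpers above: yesterday as a plain day string and
// as the compact dashless name used for file naming.
func ExampleDateHelpers() {
	Log("now       =", GetCurDateTime())    // e.g. 2017-12-12 12:11:11
	Log("yesterday =", GetDaySinceNow(-1))  // e.g. 2017-12-11
	Log("name      =", GetNameSinceNow(-1)) // e.g. 20171211
}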
// ReplaceStr strips a few obvious SQL-injection tokens; see the
// parameterized-query sketch below for the robust approach.
func ReplaceStr(str string) string {
	str = strings.Replace(str, "'", "", -1)
	//str = strings.Replace(str, "-", "\\-", -1)
	str = strings.Replace(str, "exec", "exe.c", -1)
	return str // -1 replaces all occurrences
}
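// A parameterized query is the robust defense that ReplaceStr only
// approximates. Sketch only: the *sql.DB handle, the users table, and the
// MySQL-style "?" placeholder are assumptions; requires the standard
// database/sql import plus a driver.
func FindUserSafe(db *sql.DB, name string) (*sql.Rows, error) {
	// The driver binds the value, so no manual escaping is needed.
	return db.Query("SELECT id, name FROM users WHERE name = ?", name)
}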
var logfile *os.File
var oldFileName string
func Log(a ...interface{}) (n int, err error) {
//log.SetFlags(log.LstdFlags | log.Lshortfile)
log.Println(a...)
return 1, nil
}
// GetLocalIP22 returns the first non-loopback local IPv4 of the host.
func GetLocalIP22() string {
addrs, err := net.InterfaceAddrs()
if err != nil {
return ""
}
for _, address := range addrs {
// check the address type and if it is not a loopback the display it
if ipnet, ok := address.(*net.IPNet); ok && !ipnet.IP.IsLoopback() {
if ipnet.IP.To4() != nil {
ipstr := ipnet.IP.String()
index := strings.Index(ipstr, "127.0")
if index > -1 {
continue
}
index = strings.Index(ipstr, "192.168.")
				if index > -1 {
					return ipstr
				}
index = strings.Index(ipstr, "169.254.")
if index > -1 {
continue
}
return ipstr
}
}
}
return ""
}
// SaveConfigTask stores a single uploaded file under path/ with a fixed filename.
func SaveConfigTask(res http.ResponseWriter, req *http.Request, path string, filename string) string {
//Log("upload picture Task is running...")
curdir := GetCurDir()
var fileNames string = "#"
if req.Method == "GET" {
} else {
		// os.Stat avoids leaking the *os.File that os.Open would return on success.
		if _, errr := os.Stat(curdir + "/" + path + "/"); os.IsNotExist(errr) {
			Log("" + path + " does not exist; creating it")
			CreatePath(curdir + "/" + path + "/")
		}
var (
status int
err error
)
defer func() {
if nil != err {
http.Error(res, err.Error(), status)
}
}()
		// parse the multipart request, buffering up to 24 MB in memory
		// (the historical name _24K is misleading: (1 << 20) * 24 is 24 MB)
		const _24K = (1 << 20) * 24
if err = req.ParseMultipartForm(_24K); nil != err {
status = http.StatusInternalServerError
return ""
}
for _, fheaders := range req.MultipartForm.File {
for _, hdr := range fheaders {
// open uploaded
var infile multipart.File
if infile, err = hdr.Open(); nil != err {
status = http.StatusInternalServerError
return ""
}
//filename := hdr.Filename
// open destination
var outfile *os.File
savePath := curdir + "/" + path + "/" + filename
if outfile, err = os.Create(savePath); nil != err {
status = http.StatusInternalServerError
return ""
}
// 32K buffer copy
//var written int64
if _, err = io.Copy(outfile, infile); nil != err {
status = http.StatusInternalServerError
return ""
}
infile.Close()
outfile.Close()
//CreatePath(curdir + "/" + path + "/thumbnial")
//ImageFile_resize(infile, curdir+"/"+path+"/thumbnial/"+hdr.Filename, 200, 200)
fileNames += "," + hdr.Filename
//outfile.Close()
//res.Write([]byte("uploaded file:" + hdr.Filename + ";length:" + strconv.Itoa(int(written))))
}
}
}
fileNames = strings.Replace(fileNames, "#,", "", -1)
fileNames = strings.Replace(fileNames, "#", "", -1)
return fileNames
}
func SaveUploadPictureTask(res http.ResponseWriter, req *http.Request, path string) string {
//Log("upload picture Task is running...")
curdir := GetCurDir()
var fileNames string = "#"
if req.Method == "GET" {
} else {
defer func() {
if err := recover(); err != nil {
Log("SaveUploadPictureTask")
Log(err)
}
}()
		// os.Stat avoids leaking the *os.File that os.Open would return on success.
		if _, errr := os.Stat(curdir + "/" + path + "/"); os.IsNotExist(errr) {
			Log("" + path + " does not exist; creating it")
			CreatePath(curdir + "/" + path + "/")
		}
var (
status int
err error
)
defer func() {
if nil != err {
http.Error(res, err.Error(), status)
}
}()
		// parse the multipart request, buffering up to 24 MB in memory
		const _24K = (1 << 20) * 24
if err = req.ParseMultipartForm(_24K); nil != err {
status = http.StatusInternalServerError
return ""
}
for _, fheaders := range req.MultipartForm.File {
for _, hdr := range fheaders {
// open uploaded
var infile multipart.File
if infile, err = hdr.Open(); nil != err {
status = http.StatusInternalServerError
return ""
}
filename := hdr.Filename
// open destination
var outfile *os.File
savePath := curdir + "/" + path + "/" + filename
				// if the file already exists, fall back to a random name
if ExistsPath(savePath) {
filename = GetRandomFileName(hdr.Filename)
savePath = curdir + "/" + path + "/" + filename
}
if outfile, err = os.Create(savePath); nil != err {
status = http.StatusInternalServerError
return ""
}
// 32K buffer copy
//var written int64
if _, err = io.Copy(outfile, infile); nil != err {
status = http.StatusInternalServerError
return ""
}
infile.Close()
outfile.Close()
//CreatePath(curdir + "/" + path + "/thumbnial")
//ImageFile_resize(infile, curdir+"/"+path+"/thumbnial/"+hdr.Filename, 200, 200)
fileNames += "," + filename
//outfile.Close()
//res.Write([]byte("uploaded file:" + hdr.Filename + ";length:" + strconv.Itoa(int(written))))
}
}
}
fileNames = strings.Replace(fileNames, "#,", "", -1)
fileNames = strings.Replace(fileNames, "#", "", -1)
return fileNames
}
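// Wiring sketch: exposing SaveUploadPictureTask on an upload route. The route
// path and the "uploads" directory are illustrative assumptions.
func RegisterUploadRoute() {
	http.HandleFunc("/upload", func(w http.ResponseWriter, r *http.Request) {
		names := SaveUploadPictureTask(w, r, "uploads")
		w.Write([]byte("saved: " + names))
	})
}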
// GetRandomFileName keeps the extension but replaces the base name with a random token.
func GetRandomFileName(name string) string {
//name := hdr.Filename
arr := strings.Split(name, ".")
extent := arr[len(arr)-1]
return GetRandom() + "." + extent
}
func CopyFile(src, dst string) error {
	in, err := os.Open(src)
	if err != nil {
		Log(err.Error())
		return err
	}
	defer in.Close()
	out, err := os.Create(dst)
	if err != nil {
		Log(err.Error())
		return err
	}
	if _, err = io.Copy(out, in); err != nil {
		Log(err.Error())
		out.Close()
		return err
	}
	if ExistsPath(dst) {
		//Log("copy success" + dst)
	}
	// Close flushes and reports write errors, so call it exactly once and
	// return its error instead of also deferring a second Close.
	return out.Close()
}
// CopyFiles copies a file tree from source to dest via "github.com/otiai10/copy".
func CopyFiles(source, dest string) bool {
	if source == "" || dest == "" {
		Log("source or dest is null")
		return false
	}
	return copy.Copy(source, dest) == nil
}
// NetWorkStatus shells out to ping; -c/-W are Linux flags (Windows uses -n/-w).
// A portable alternative follows below.
func NetWorkStatus() bool {
	cmd := exec.Command("ping", "baidu.com", "-c", "1", "-W", "5")
fmt.Println("NetWorkStatus Start:", time.Now().Unix())
err := cmd.Run()
fmt.Println("NetWorkStatus End :", time.Now().Unix())
if err != nil {
fmt.Println(err.Error())
return false
} else {
fmt.Println("Net Status , OK")
}
return true
}
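// A minimal portable sketch (the probe endpoint baidu.com:80 is an assumption):
// dial a TCP connection instead of shelling out to ping, avoiding per-OS flags.
func NetWorkStatusDial() bool {
	conn, err := net.DialTimeout("tcp", "baidu.com:80", 5*time.Second)
	if err != nil {
		return false
	}
	conn.Close()
	return true
}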
func GetMapByJsonStr(jsonstr string) map[string]interface{} {
if !strings.Contains(jsonstr, "{") {
Log("bad json=" + jsonstr)
return nil
}
jsonstr = strings.Replace(jsonstr, "\x00", "", -1)
if len(jsonstr) > 4 {
var d map[string]interface{}
err := json.Unmarshal([]byte(jsonstr), &d)
if err != nil {
log.Printf("error decoding sakura response: %v", err)
if e, ok := err.(*json.SyntaxError); ok {
log.Printf("syntax error at byte offset %d", e.Offset)
}
//log.Printf("sakura response: %q", resBody)
Log("bad json" + jsonstr)
Log(err)
//panic("bad json")
return nil
}
return d
}
return nil
}
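// Usage sketch for GetMapByJsonStr: json.Unmarshal decodes JSON numbers into
// float64, hence the assertion before converting to int.
func ExampleGetMapByJsonStr() {
	m := GetMapByJsonStr(`{"code": 200, "msg": "ok"}`)
	if m != nil {
		Log("code=", int(m["code"].(float64)), "msg=", m["msg"])
	}
}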
func GetMessageMapByJsonKey(jsonstr string, keystr string) map[string]interface{} {
//var jsonstr:='{\"data\": { \"mes\": [ {\"fromuserid\": \"25\", \"touserid\": \"56\",\"message\": \"hhhhhaaaaaa\",\"time\": \"2017-12-12 12:11:11\"}]}}';
	// trim everything before the first '{' and the same number of trailing
	// characters — this assumes the wrapper is symmetric in length
	index := strings.IndexRune(jsonstr, '{')
	jsonstr = jsonstr[index : len(jsonstr)-index]
if len(jsonstr) > 4 && strings.Index(jsonstr, "{") > -1 && strings.Index(jsonstr, "}") > -1 {
mapp := GetMapByJsonStr(jsonstr)
//Log(mapp)
mappp := mapp[keystr]
//Log(mappp)
//kll := mapp.(map[string]interface{})[keystr]
//Log(kll)
mymap := mappp.(map[string]interface{})
//Log(mymap["Fromuserid"])
return mymap
}
return nil
}
// GetMessageMapByJson: minimal reconstruction (the original body did not
// survive); assumes the payload sits under a top-level "data" key, mirroring
// GetMessageMapByJsonKey.
func GetMessageMapByJson(jsonstr string) map[string]interface{} {
	//var jsonstr:='{\"data\": { \"mes\": [ {\"fromuserid\": \"25\", \"touserid\": \"56\",\"message\": \"hhhhhaaaaaa\",\"time\": \"2017-12-12 12:11:11\"}]}}';
	return GetMessageMapByJsonKey(jsonstr, "data")
}
// bot.js
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// bot.js is your main bot dialog entry point for handling activity types
// Import required Bot Builder modules.
const { ActionTypes, ActivityTypes, CardFactory } = require('botbuilder');
const { LuisRecognizer } = require('botbuilder-ai');
const { DialogSet, WaterfallDialog } = require('botbuilder-dialogs');
const { OAuthHelpers, LOGIN_PROMPT } = require('./oauth-helpers');
const CONNECTION_SETTING_NAME = '<MS Graph API Connection Name>';
/**
* Demonstrates the following concepts:
* Displaying a Welcome Card, using Adaptive Card technology
* Use LUIS to model Greetings, Help, and Cancel interactions
* Use a Waterfall dialog to model multi-turn conversation flow
* Use custom prompts to validate user input
* Store conversation and user state
* Handle conversation interruptions
*/
class BasicBot {
/**
 * Constructs the four pieces necessary for this bot to operate:
 * 1. StatePropertyAccessor for conversation state
 * 2. StatePropertyAccessor for user state
* 3. LUIS client
* 4. DialogSet to handle our GreetingDialog
*
* @param {ConversationState} conversationState property accessor
 * @param {LuisApplication} application LUIS application settings
 * @param {LuisPredictionOptions} luisPredictionOptions LUIS prediction options
 * @param {boolean} includeApiResults whether raw LUIS API results are included
*/
constructor(conversationState, application, luisPredictionOptions, includeApiResults) {
        this.luisRecognizer = new LuisRecognizer(application, luisPredictionOptions, includeApiResults);
this.conversationState = conversationState;
        // DialogState property accessor, used to persist dialog state when using DialogSet.
this.dialogState = conversationState.createProperty('dialogState');
this.commandState = conversationState.createProperty('commandState');
// Instructions for the user with information about commands that this bot may handle.
        this.helpMessage = `You can type "send <recipient_email>" to send an email, "recent" to view recent unread mail,` +
            ` "me" to see information about yourself, or "help" to view the commands` +
            ` again. For anything else, LUIS displays the intent with its score.`;
// Create a DialogSet that contains the OAuthPrompt.
this.dialogs = new DialogSet(this.dialogState);
// Add an OAuthPrompt with the connection name as specified on the Bot's settings blade in Azure.
this.dialogs.add(OAuthHelpers.prompt(CONNECTION_SETTING_NAME));
this._graphDialogId = 'graphDialog';
// Logs in the user and calls proceeding dialogs, if login is successful.
this.dialogs.add(new WaterfallDialog(this._graphDialogId, [
this.promptStep.bind(this),
this.processStep.bind(this)
]));
}
/**
* Driver code that does one of the following:
* 1. Display a welcome card upon receiving ConversationUpdate activity
* 2. Use LUIS to recognize intents for incoming user message
* 3. Start a greeting dialog
* 4. Optionally handle Cancel or Help interruptions
*
* @param {Context} turnContext turn context from the adapter
*/
async onTurn(turnContext) {
const dc = await this.dialogs.createContext(turnContext);
const results = await this.luisRecognizer.recognize(turnContext);
switch (turnContext._activity.type) {
case ActivityTypes.Message:
this.luisResult = results;
await this.processInput(dc);
break;
case ActivityTypes.Event:
case ActivityTypes.Invoke:
if (turnContext._activity.type === ActivityTypes.Invoke && turnContext._activity.channelId !== 'msteams') {
throw new Error('The Invoke type is only valid on the MS Teams channel.');
};
await dc.continueDialog();
if (!turnContext.responded) {
await dc.beginDialog(this._graphDialogId);
};
break;
case ActivityTypes.ConversationUpdate:
await this.sendWelcomeMessage(turnContext);
break;
default:
await turnContext.sendActivity(`[${ turnContext._activity.type }]-type activity detected.`);
}
await this.conversationState.saveChanges(turnContext);
}
async sendWelcomeMessage(turnContext) {
const activity = turnContext.activity;
if (activity && activity.membersAdded) {
const heroCard = CardFactory.heroCard(
'Welcome to LUIS with MSGraph API Authentication BOT!',
CardFactory.images(['https://botframeworksamples.blob.core.windows.net/samples/aadlogo.png']),
CardFactory.actions([
{
type: ActionTypes.ImBack,
title: 'Log me in',
value: 'login'
},
{
type: ActionTypes.ImBack,
title: 'Me',
value: 'me'
},
{
type: ActionTypes.ImBack,
title: 'Recent',
value: 'recent'
},
{
type: ActionTypes.ImBack,
title: 'View Token',
value: 'viewToken'
},
{
type: ActionTypes.ImBack,
title: 'Help',
value: 'help'
},
{
type: ActionTypes.ImBack,
title: 'Signout',
value: 'signout'
}
])
);
for (const idx in activity.membersAdded) {
if (activity.membersAdded[idx].id !== activity.recipient.id) {
await turnContext.sendActivity({ attachments: [heroCard] });
}
}
}
}
    async processInput(dc) {
//console.log(dc);
switch (dc.context.activity.text.toLowerCase()) {
case 'signout':
case 'logout':
case 'signoff':
case 'logoff':
// The bot adapter encapsulates the authentication processes and sends
// activities to from the Bot Connector Service.
const botAdapter = dc.context.adapter;
await botAdapter.signOutUser(dc.context, CONNECTION_SETTING_NAME);
// Let the user know they are signed out.
await dc.context.sendActivity('You are now signed out.');
break;
case 'help':
await dc.context.sendActivity(this.helpMessage);
break;
default:
// The user has input a command that has not been handled yet,
// begin the waterfall dialog to handle the input.
await dc.continueDialog();
if (!dc.context.responded) {
await dc.beginDialog(this._graphDialogId);
}
}
};
async promptStep(step) {
        const activity = step.context.activity;
        // Ignore 6-digit messages: those are the magic codes consumed by the OAuth prompt.
        if (activity.type === ActivityTypes.Message && !(/\d{6}/).test(activity.text)) {
await this.commandState.set(step.context, activity.text);
await this.conversationState.saveChanges(step.context);
}
return await step.beginDialog(LOGIN_PROMPT);
}
    async processStep(step) {
//console.log(step);
// We do not need to store the token in the bot. When we need the token we can
// send another prompt. If the token is valid the user will not need to log back in.
// The token will be available in the Result property of the task.
const tokenResponse = step.result;
// If the user is authenticated the bot can use the token to make API calls.
if (tokenResponse !== undefined) {
let parts = await this.commandState.get(step.context);
if (!parts) {
parts = step.context.activity.text;
}
const command = parts.split(' ')[0].toLowerCase();
console.log(command);
if(command === 'login' || command === 'signin'){
await step.context.sendActivity(`You have already loggedin!`);
}
else if (command === 'me') {
await OAuthHelpers.listMe(step.context, tokenResponse);
} else if (command === 'send') {
await OAuthHelpers.sendMail(step.context, tokenResponse, parts.split(' ')[1].toLowerCase());
} else if (command === 'recent') {
await OAuthHelpers.listRecentMail(step.context, tokenResponse);
} else if(command.toLowerCase() === 'viewtoken'){
await step.context.sendActivity(`Your token is: ${ tokenResponse.token }`);
}else{
console.log(this.luisResult);
const topIntent = this.luisResult.luisResult.topScoringIntent;
                if(topIntent.intent !== 'None'){
await step.context.sendActivity(`LUIS Top Scoring Intent: ${ topIntent.intent }, Score: ${ topIntent.score }`);
}else{
await step.context.sendActivity(`Please try something else!`);
// If the top scoring intent was "None" tell the user no valid intents were found and provide help.
// await step.context.sendActivity(`No LUIS intents were found.
// \nThis sample is about identifying two user intents:
// \n - 'Calendar.Add'
// \n - 'Calendar.Find'
// \nTry typing 'Add Event' or 'Show me tomorrow'.`);
}
}
} else {
// Ask the user to try logging in later as they are not logged in.
await step.context.sendActivity(`We couldn't log you in. Please try again later.`);
}
return await step.endDialog();
};
};
exports.BasicBot = BasicBot;