Dataset schema (one row per fill-in-the-middle example; each row lists file_name, prefix, suffix, middle, and fim_type, in that order):

    file_name   large_string   lengths 4 to 140
    prefix      large_string   lengths 0 to 39k
    suffix      large_string   lengths 0 to 36.1k
    middle      large_string   lengths 0 to 29.4k
    fim_type    large_string   4 distinct values
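Each record splits one source file into a prefix, a held-out middle, and a suffix; the model's task is to produce the middle, and concatenating the three fields recovers the original file. Below is a minimal sketch of that invariant in Go; the struct, field names, and sample values are illustrative assumptions, not dataset tooling.

```go
package main

import "fmt"

// fimRecord mirrors the columns listed above. This struct and the
// sample values are illustrative, not part of the dataset itself.
type fimRecord struct {
	FileName string
	Prefix   string
	Middle   string // the span the model must fill in
	Suffix   string
	FimType  string // one of the 4 classes, e.g. "identifier_name"
}

// reassemble recovers the original source text from a record.
func reassemble(r fimRecord) string {
	return r.Prefix + r.Middle + r.Suffix
}

func main() {
	r := fimRecord{
		FileName: "regexp.go",
		Prefix:   "func (cclass *_CharClass) ",
		Middle:   "matches",
		Suffix:   "(c int) bool { /* ... */ }",
		FimType:  "identifier_name",
	}
	// Prints: func (cclass *_CharClass) matches(c int) bool { /* ... */ }
	fmt.Println(reassemble(r))
}
```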
regexp.go
// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // The testing package implements a simple regular expression library. // It is a reduced version of the regular expression package suitable // for use in tests; it avoids many dependencies. // // The syntax of the regular expressions accepted is: // // regexp: // concatenation { '|' concatenation } // concatenation: // { closure } // closure: // term [ '*' | '+' | '?' ] // term: // '^' // '$' // '.' // character // '[' [ '^' ] character-ranges ']' // '(' regexp ')' // package testing import ( "utf8"; ) var debug = false // Error codes returned by failures to parse an expression. var ( ErrInternal = "internal error"; ErrUnmatchedLpar = "unmatched ''"; ErrUnmatchedRpar = "unmatched ''"; ErrUnmatchedLbkt = "unmatched '['"; ErrUnmatchedRbkt = "unmatched ']'"; ErrBadRange = "bad range in character class"; ErrExtraneousBackslash = "extraneous backslash"; ErrBadClosure = "repeated closure **, ++, etc."; ErrBareClosure = "closure applies to nothing"; ErrBadBackslash = "illegal backslash escape"; ) // An instruction executed by the NFA type instr interface { kind() int; // the type of this instruction: _CHAR, _ANY, etc. next() instr; // the instruction to execute after this one setNext(i instr); index() int; setIndex(i int); print(); } // Fields and methods common to all instructions type common struct { _next instr; _index int; } func (c *common) next() instr { return c._next } func (c *common) setNext(i instr) { c._next = i } func (c *common) index() int { return c._index } func (c *common) setIndex(i int) { c._index = i } // The representation of a compiled regular expression. // The public interface is entirely through methods. type Regexp struct { expr string; // the original expression inst []instr; start instr; nbra int; // number of brackets in expression, for subexpressions } const ( _START = // beginning of program iota; _END; // end of program: success _BOT; // '^' beginning of text _EOT; // '$' end of text _CHAR; // 'a' regular character _CHARCLASS; // [a-z] character class _ANY; // '.' any character including newline _NOTNL; // [^\n] special case: any character but newline _BRA; // '(' parenthesized expression _EBRA; // ')'; end of '(' parenthesized expression _ALT; // '|' alternation _NOP; // do nothing; makes it easy to link without patching ) // --- START start of program type _Start struct { common; } func (start *_Start) kind() int { return _START } func (start *_Start) print() { print("start") } // --- END end of program type _End struct { common; } func (end *_End) kind() int { return _END } func (end *_End) print() { print("end") } // --- BOT beginning of text type _Bot struct { common; } func (bot *_Bot) kind() int { return _BOT } func (bot *_Bot) print() { print("bot") } // --- EOT end of text type _Eot struct { common; } func (eot *_Eot) kind() int { return _EOT } func (eot *_Eot) print() { print("eot") } // --- CHAR a regular character type _Char struct { common; char int; } func (char *_Char) kind() int { return _CHAR } func (char *_Char) print() { print("char ", string(char.char)) } func newChar(char int) *_Char { c := new(_Char); c.char = char; return c; } // --- CHARCLASS [a-z] type _CharClass struct { common; char int; negate bool; // is character class negated? 
([^a-z]) // stored pairwise: [a-z] is (a,z); x is (x,x): ranges []int; } func (cclass *_CharClass) kind() int { return _CHARCLASS } func (cclass *_CharClass) print() { print("charclass"); if cclass.negate { print(" (negated)") } for i := 0; i < len(cclass.ranges); i += 2 { l := cclass.ranges[i]; r := cclass.ranges[i+1]; if l == r { print(" [", string(l), "]") } else { print(" [", string(l), "-", string(r), "]") } } } func (cclass *_CharClass) addRange(a, b int) { // range is a through b inclusive n := len(cclass.ranges); if n >= cap(cclass.ranges) { nr := make([]int, n, 2*n); for i, j := range nr { nr[i] = j } cclass.ranges = nr; } cclass.ranges = cclass.ranges[0 : n+2]; cclass.ranges[n] = a; n++; cclass.ranges[n] = b; n++; } func (cclass *_CharClass) matches(c int) bool { for i := 0; i < len(cclass.ranges); i = i + 2 { min := cclass.ranges[i]; max := cclass.ranges[i+1]; if min <= c && c <= max { return !cclass.negate } } return cclass.negate; } func newCharClass() *_CharClass { c := new(_CharClass); c.ranges = make([]int, 0, 20); return c; } // --- ANY any character type _Any struct { common; } func (any *_Any) kind() int { return _ANY } func (any *_Any) print() { print("any") } // --- NOTNL any character but newline type _NotNl struct { common; } func (notnl *_NotNl) kind() int { return _NOTNL } func (notnl *_NotNl) print() { print("notnl") } // --- BRA parenthesized expression type _Bra struct { common; n int; // subexpression number } func (bra *_Bra) kind() int { return _BRA } func (bra *_Bra) print() { print("bra", bra.n) } // --- EBRA end of parenthesized expression type _Ebra struct { common; n int; // subexpression number } func (ebra *_Ebra) kind() int { return _EBRA } func (ebra *_Ebra) print() { print("ebra ", ebra.n) } // --- ALT alternation type _Alt struct { common; left instr; // other branch } func (alt *_Alt) kind() int { return _ALT } func (alt *_Alt) print() { print("alt(", alt.left.index(), ")") } // --- NOP no operation type _Nop struct { common; } func (nop *_Nop) kind() int { return _NOP } func (nop *_Nop) print() { print("nop") } func (re *Regexp) add(i instr) instr { n := len(re.inst); i.setIndex(len(re.inst)); if n >= cap(re.inst) { ni := make([]instr, n, 2*n); for i, j := range re.inst { ni[i] = j } re.inst = ni; } re.inst = re.inst[0 : n+1]; re.inst[n] = i; return i; } type parser struct { re *Regexp; error string; nlpar int; // number of unclosed lpars pos int; ch int; } const endOfFile = -1 func (p *parser) c() int { return p.ch } func (p *parser) nextc() int { if p.pos >= len(p.re.expr) { p.ch = endOfFile } else { c, w := utf8.DecodeRuneInString(p.re.expr[p.pos:len(p.re.expr)]); p.ch = c; p.pos += w; } return p.ch; } func newParser(re *Regexp) *parser { p := new(parser); p.re = re; p.nextc(); // load p.ch return p; } func special(c int) bool { s := `\.+*?()|[]^$`; for i := 0; i < len(s); i++ { if c == int(s[i]) { return true } } return false; } func specialcclass(c int) bool { s := `\-[]`; for i := 0; i < len(s); i++ { if c == int(s[i]) { return true } } return false; } func (p *parser) charClass() instr { cc := newCharClass(); if p.c() == '^' { cc.negate = true; p.nextc(); } left := -1; for { switch c := p.c(); c { case ']', endOfFile: if left >= 0 { p.error = ErrBadRange; return nil; } // Is it [^\n]? 
if cc.negate && len(cc.ranges) == 2 && cc.ranges[0] == '\n' && cc.ranges[1] == '\n' { nl := new(_NotNl); p.re.add(nl); return nl; } p.re.add(cc); return cc; case '-': // do this before backslash processing p.error = ErrBadRange; return nil; case '\\': c = p.nextc(); switch { case c == endOfFile: p.error = ErrExtraneousBackslash; return nil; case c == 'n': c = '\n' case specialcclass(c): // c is as delivered default: p.error = ErrBadBackslash; return nil; } fallthrough; default: p.nextc(); switch { case left < 0: // first of pair if p.c() == '-' { // range p.nextc(); left = c; } else { // single char cc.addRange(c, c) } case left <= c: // second of pair cc.addRange(left, c); left = -1; default: p.error = ErrBadRange; return nil; } } } return nil; } func (p *parser) term() (start, end instr) { // term() is the leaf of the recursion, so it's sufficient to pick off the // error state here for early exit. // The other functions (closure(), concatenation() etc.) assume // it's safe to recur to here. if p.error != "" { return } switch c := p.c(); c { case '|', endOfFile: return nil, nil case '*', '+': p.error = ErrBareClosure; return; case ')': if p.nlpar == 0 { p.error = ErrUnmatchedRpar; return; } return nil, nil; case ']': p.error = ErrUnmatchedRbkt; return; case '^': p.nextc(); start = p.re.add(new(_Bot)); return start, start; case '$': p.nextc(); start = p.re.add(new(_Eot)); return start, start; case '.': p.nextc(); start = p.re.add(new(_Any)); return start, start; case '[': p.nextc(); start = p.charClass(); if p.error != "" { return } if p.c() != ']' { p.error = ErrUnmatchedLbkt; return; } p.nextc(); return start, start; case '(': p.nextc(); p.nlpar++; p.re.nbra++; // increment first so first subexpr is \1 nbra := p.re.nbra; start, end = p.regexp(); if p.c() != ')' { p.error = ErrUnmatchedLpar; return; } p.nlpar--; p.nextc(); bra := new(_Bra); p.re.add(bra); ebra := new(_Ebra); p.re.add(ebra); bra.n = nbra; ebra.n = nbra; if start == nil { if end == nil { p.error = ErrInternal; return; } start = ebra; } else { end.setNext(ebra) } bra.setNext(start); return bra, ebra; case '\\': c = p.nextc(); switch { case c == endOfFile: p.error = ErrExtraneousBackslash; return; case c == 'n': c = '\n' case special(c): // c is as delivered default: p.error = ErrBadBackslash; return; } fallthrough; default: p.nextc(); start = newChar(c); p.re.add(start); return start, start; } panic("unreachable"); } func (p *parser) closure() (start, end instr) { start, end = p.term(); if start == nil || p.error != "" { return } switch p.c() { case '*': // (start,end)*: alt := new(_Alt); p.re.add(alt); end.setNext(alt); // after end, do alt alt.left = start; // alternate brach: return to start start = alt; // alt becomes new (start, end) end = alt; case '+': // (start,end)+: alt := new(_Alt); p.re.add(alt); end.setNext(alt); // after end, do alt alt.left = start; // alternate brach: return to start end = alt; // start is unchanged; end is alt case '?': // (start,end)?: alt := new(_Alt); p.re.add(alt); nop := new(_Nop); p.re.add(nop); alt.left = start; // alternate branch is start alt.setNext(nop); // follow on to nop end.setNext(nop); // after end, go to nop start = alt; // start is now alt end = nop; // end is nop pointed to by both branches default: return } switch p.nextc() { case '*', '+', '?': p.error = ErrBadClosure } return; } func (p *parser) concatenation() (start, end instr) { for { nstart, nend := p.closure(); if p.error != "" { return } switch { case nstart == nil: // end of this concatenation if start == nil { 
// this is the empty string nop := p.re.add(new(_Nop)); return nop, nop; } return; case start == nil: // this is first element of concatenation start, end = nstart, nend default: end.setNext(nstart); end = nend; } } panic("unreachable"); } func (p *parser) regexp() (start, end instr) { start, end = p.concatenation(); if p.error != "" { return } for { switch p.c() { default: return case '|': p.nextc(); nstart, nend := p.concatenation(); if p.error != "" { return } alt := new(_Alt); p.re.add(alt); alt.left = start; alt.setNext(nstart); nop := new(_Nop); p.re.add(nop); end.setNext(nop); nend.setNext(nop); start, end = alt, nop; } } panic("unreachable"); } func unNop(i instr) instr { for i.kind() == _NOP { i = i.next() } return i; } func (re *Regexp) eliminateNops()
func (re *Regexp) doParse() string { p := newParser(re); start := new(_Start); re.add(start); s, e := p.regexp(); if p.error != "" { return p.error } start.setNext(s); re.start = start; e.setNext(re.add(new(_End))); re.eliminateNops(); return p.error; } // CompileRegexp parses a regular expression and returns, if successful, a Regexp // object that can be used to match against text. func CompileRegexp(str string) (regexp *Regexp, error string) { regexp = new(Regexp); regexp.expr = str; regexp.inst = make([]instr, 0, 20); error = regexp.doParse(); return; } // MustCompileRegexp is like CompileRegexp but panics if the expression cannot be parsed. // It simplifies safe initialization of global variables holding compiled regular // expressions. func MustCompile(str string) *Regexp { regexp, error := CompileRegexp(str); if error != "" { panicln(`regexp: compiling "`, str, `": `, error) } return regexp; } type state struct { inst instr; // next instruction to execute match []int; // pairs of bracketing submatches. 0th is start,end } // Append new state to to-do list. Leftmost-longest wins so avoid // adding a state that's already active. func addState(s []state, inst instr, match []int) []state { index := inst.index(); l := len(s); pos := match[0]; // TODO: Once the state is a vector and we can do insert, have inputs always // go in order correctly and this "earlier" test is never necessary, for i := 0; i < l; i++ { if s[i].inst.index() == index && // same instruction s[i].match[0] < pos { // earlier match already going; lefmost wins return s } } if l == cap(s) { s1 := make([]state, 2*l)[0:l]; for i := 0; i < l; i++ { s1[i] = s[i] } s = s1; } s = s[0 : l+1]; s[l].inst = inst; s[l].match = match; return s; } // Accepts either string or bytes - the logic is identical either way. // If bytes == nil, scan str. 
func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int { var s [2][]state; // TODO: use a vector when state values (not ptrs) can be vector elements s[0] = make([]state, 10)[0:0]; s[1] = make([]state, 10)[0:0]; in, out := 0, 1; var final state; found := false; end := len(str); if bytes != nil { end = len(bytes) } for pos <= end { if !found { // prime the pump if we haven't seen a match yet match := make([]int, 2*(re.nbra+1)); for i := 0; i < len(match); i++ { match[i] = -1 // no match seen; catches cases like "a(b)?c" on "ac" } match[0] = pos; s[out] = addState(s[out], re.start.next(), match); } in, out = out, in; // old out state is new in state s[out] = s[out][0:0]; // clear out state if len(s[in]) == 0 { // machine has completed break } charwidth := 1; c := endOfFile; if pos < end { if bytes == nil { c, charwidth = utf8.DecodeRuneInString(str[pos:end]) } else { c, charwidth = utf8.DecodeRune(bytes[pos:end]) } } for i := 0; i < len(s[in]); i++ { st := s[in][i]; switch s[in][i].inst.kind() { case _BOT: if pos == 0 { s[in] = addState(s[in], st.inst.next(), st.match) } case _EOT: if pos == end { s[in] = addState(s[in], st.inst.next(), st.match) } case _CHAR: if c == st.inst.(*_Char).char { s[out] = addState(s[out], st.inst.next(), st.match) } case _CHARCLASS: if st.inst.(*_CharClass).matches(c) { s[out] = addState(s[out], st.inst.next(), st.match) } case _ANY: if c != endOfFile { s[out] = addState(s[out], st.inst.next(), st.match) } case _NOTNL: if c != endOfFile && c != '\n' { s[out] = addState(s[out], st.inst.next(), st.match) } case _BRA: n := st.inst.(*_Bra).n; st.match[2*n] = pos; s[in] = addState(s[in], st.inst.next(), st.match); case _EBRA: n := st.inst.(*_Ebra).n; st.match[2*n+1] = pos; s[in] = addState(s[in], st.inst.next(), st.match); case _ALT: s[in] = addState(s[in], st.inst.(*_Alt).left, st.match); // give other branch a copy of this match vector s1 := make([]int, 2*(re.nbra+1)); for i := 0; i < len(s1); i++ { s1[i] = st.match[i] } s[in] = addState(s[in], st.inst.next(), s1); case _END: // choose leftmost longest if !found || // first st.match[0] < final.match[0] || // leftmost (st.match[0] == final.match[0] && pos > final.match[1]) { // longest final = st; final.match[1] = pos; } found = true; default: st.inst.print(); panic("unknown instruction in execute"); } } pos += charwidth; } return final.match; } // ExecuteString matches the Regexp against the string s. // The return value is an array of integers, in pairs, identifying the positions of // substrings matched by the expression. // s[a[0]:a[1]] is the substring matched by the entire expression. // s[a[2*i]:a[2*i+1]] for i > 0 is the substring matched by the ith parenthesized subexpression. // A negative value means the subexpression did not match any element of the string. // An empty array means "no match". func (re *Regexp) ExecuteString(s string) (a []int) { return re.doExecute(s, nil, 0) } // Execute matches the Regexp against the byte slice b. // The return value is an array of integers, in pairs, identifying the positions of // subslices matched by the expression. // b[a[0]:a[1]] is the subslice matched by the entire expression. // b[a[2*i]:a[2*i+1]] for i > 0 is the subslice matched by the ith parenthesized subexpression. // A negative value means the subexpression did not match any element of the slice. // An empty array means "no match". func (re *Regexp) Execute(b []byte) (a []int) { return re.doExecute("", b, 0) } // MatchString returns whether the Regexp matches the string s. 
// The return value is a boolean: true for match, false for no match. func (re *Regexp) MatchString(s string) bool { return len(re.doExecute(s, nil, 0)) > 0 } // Match returns whether the Regexp matches the byte slice b. // The return value is a boolean: true for match, false for no match. func (re *Regexp) Match(b []byte) bool { return len(re.doExecute("", b, 0)) > 0 } // MatchStrings matches the Regexp against the string s. // The return value is an array of strings matched by the expression. // a[0] is the substring matched by the entire expression. // a[i] for i > 0 is the substring matched by the ith parenthesized subexpression. // An empty array means ``no match''. func (re *Regexp) MatchStrings(s string) (a []string) { r := re.doExecute(s, nil, 0); if r == nil { return nil } a = make([]string, len(r)/2); for i := 0; i < len(r); i += 2 { if r[i] != -1 { // -1 means no match for this subexpression a[i/2] = s[r[i]:r[i+1]] } } return; } // MatchSlices matches the Regexp against the byte slice b. // The return value is an array of subslices matched by the expression. // a[0] is the subslice matched by the entire expression. // a[i] for i > 0 is the subslice matched by the ith parenthesized subexpression. // An empty array means ``no match''. func (re *Regexp) MatchSlices(b []byte) (a [][]byte) { r := re.doExecute("", b, 0); if r == nil { return nil } a = make([][]byte, len(r)/2); for i := 0; i < len(r); i += 2 { if r[i] != -1 { // -1 means no match for this subexpression a[i/2] = b[r[i]:r[i+1]] } } return; } // MatchString checks whether a textual regular expression // matches a string. More complicated queries need // to use Compile and the full Regexp interface. func MatchString(pattern string, s string) (matched bool, error string) { re, err := CompileRegexp(pattern); if err != "" { return false, err } return re.MatchString(s), ""; } // Match checks whether a textual regular expression // matches a byte slice. More complicated queries need // to use Compile and the full Regexp interface. func Match(pattern string, b []byte) (matched bool, error string) { re, err := CompileRegexp(pattern); if err != "" { return false, err } return re.Match(b), ""; }
{ for i := 0; i < len(re.inst); i++ { inst := re.inst[i]; if inst.kind() == _END { continue } inst.setNext(unNop(inst.next())); if inst.kind() == _ALT { alt := inst.(*_Alt); alt.left = unNop(alt.left); } } }
identifier_body
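For an `identifier_body` row like this one, the prefix ends at a declaration's signature and the middle is its body. Reassembling this row's prefix tail and middle gives the function below, reflowed for readability with the row's pre-Go1 semicolon style kept verbatim.

```go
// The signature closes the prefix; the body is exactly this
// row's middle field.
func (re *Regexp) eliminateNops() {
	for i := 0; i < len(re.inst); i++ {
		inst := re.inst[i];
		if inst.kind() == _END {
			continue
		}
		inst.setNext(unNop(inst.next()));
		if inst.kind() == _ALT {
			alt := inst.(*_Alt);
			alt.left = unNop(alt.left);
		}
	}
}
```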
regexp.go
// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // The testing package implements a simple regular expression library. // It is a reduced version of the regular expression package suitable // for use in tests; it avoids many dependencies. // // The syntax of the regular expressions accepted is: // // regexp: // concatenation { '|' concatenation } // concatenation: // { closure } // closure: // term [ '*' | '+' | '?' ] // term: // '^' // '$' // '.' // character // '[' [ '^' ] character-ranges ']' // '(' regexp ')' // package testing import ( "utf8"; ) var debug = false // Error codes returned by failures to parse an expression. var ( ErrInternal = "internal error"; ErrUnmatchedLpar = "unmatched ''"; ErrUnmatchedRpar = "unmatched ''"; ErrUnmatchedLbkt = "unmatched '['"; ErrUnmatchedRbkt = "unmatched ']'"; ErrBadRange = "bad range in character class"; ErrExtraneousBackslash = "extraneous backslash"; ErrBadClosure = "repeated closure **, ++, etc."; ErrBareClosure = "closure applies to nothing"; ErrBadBackslash = "illegal backslash escape"; ) // An instruction executed by the NFA type instr interface { kind() int; // the type of this instruction: _CHAR, _ANY, etc. next() instr; // the instruction to execute after this one setNext(i instr); index() int; setIndex(i int); print(); } // Fields and methods common to all instructions type common struct { _next instr; _index int; } func (c *common) next() instr { return c._next } func (c *common) setNext(i instr) { c._next = i } func (c *common) index() int { return c._index } func (c *common) setIndex(i int) { c._index = i } // The representation of a compiled regular expression. // The public interface is entirely through methods. type Regexp struct { expr string; // the original expression inst []instr; start instr; nbra int; // number of brackets in expression, for subexpressions } const ( _START = // beginning of program iota; _END; // end of program: success _BOT; // '^' beginning of text _EOT; // '$' end of text _CHAR; // 'a' regular character _CHARCLASS; // [a-z] character class _ANY; // '.' any character including newline _NOTNL; // [^\n] special case: any character but newline _BRA; // '(' parenthesized expression _EBRA; // ')'; end of '(' parenthesized expression _ALT; // '|' alternation _NOP; // do nothing; makes it easy to link without patching ) // --- START start of program type _Start struct { common; } func (start *_Start) kind() int { return _START } func (start *_Start) print() { print("start") } // --- END end of program type _End struct { common; } func (end *_End) kind() int { return _END } func (end *_End) print() { print("end") } // --- BOT beginning of text type _Bot struct { common; } func (bot *_Bot) kind() int { return _BOT } func (bot *_Bot) print() { print("bot") } // --- EOT end of text type _Eot struct { common; } func (eot *_Eot) kind() int { return _EOT } func (eot *_Eot) print() { print("eot") } // --- CHAR a regular character type _Char struct { common; char int; } func (char *_Char) kind() int { return _CHAR } func (char *_Char) print() { print("char ", string(char.char)) } func newChar(char int) *_Char { c := new(_Char); c.char = char; return c; } // --- CHARCLASS [a-z] type _CharClass struct { common; char int; negate bool; // is character class negated? 
([^a-z]) // stored pairwise: [a-z] is (a,z); x is (x,x): ranges []int; } func (cclass *_CharClass) kind() int { return _CHARCLASS } func (cclass *_CharClass) print() { print("charclass"); if cclass.negate { print(" (negated)") } for i := 0; i < len(cclass.ranges); i += 2 { l := cclass.ranges[i]; r := cclass.ranges[i+1]; if l == r { print(" [", string(l), "]") } else { print(" [", string(l), "-", string(r), "]") } } } func (cclass *_CharClass) addRange(a, b int) { // range is a through b inclusive n := len(cclass.ranges); if n >= cap(cclass.ranges) { nr := make([]int, n, 2*n); for i, j := range nr { nr[i] = j } cclass.ranges = nr; } cclass.ranges = cclass.ranges[0 : n+2]; cclass.ranges[n] = a; n++; cclass.ranges[n] = b; n++; } func (cclass *_CharClass)
(c int) bool { for i := 0; i < len(cclass.ranges); i = i + 2 { min := cclass.ranges[i]; max := cclass.ranges[i+1]; if min <= c && c <= max { return !cclass.negate } } return cclass.negate; } func newCharClass() *_CharClass { c := new(_CharClass); c.ranges = make([]int, 0, 20); return c; } // --- ANY any character type _Any struct { common; } func (any *_Any) kind() int { return _ANY } func (any *_Any) print() { print("any") } // --- NOTNL any character but newline type _NotNl struct { common; } func (notnl *_NotNl) kind() int { return _NOTNL } func (notnl *_NotNl) print() { print("notnl") } // --- BRA parenthesized expression type _Bra struct { common; n int; // subexpression number } func (bra *_Bra) kind() int { return _BRA } func (bra *_Bra) print() { print("bra", bra.n) } // --- EBRA end of parenthesized expression type _Ebra struct { common; n int; // subexpression number } func (ebra *_Ebra) kind() int { return _EBRA } func (ebra *_Ebra) print() { print("ebra ", ebra.n) } // --- ALT alternation type _Alt struct { common; left instr; // other branch } func (alt *_Alt) kind() int { return _ALT } func (alt *_Alt) print() { print("alt(", alt.left.index(), ")") } // --- NOP no operation type _Nop struct { common; } func (nop *_Nop) kind() int { return _NOP } func (nop *_Nop) print() { print("nop") } func (re *Regexp) add(i instr) instr { n := len(re.inst); i.setIndex(len(re.inst)); if n >= cap(re.inst) { ni := make([]instr, n, 2*n); for i, j := range re.inst { ni[i] = j } re.inst = ni; } re.inst = re.inst[0 : n+1]; re.inst[n] = i; return i; } type parser struct { re *Regexp; error string; nlpar int; // number of unclosed lpars pos int; ch int; } const endOfFile = -1 func (p *parser) c() int { return p.ch } func (p *parser) nextc() int { if p.pos >= len(p.re.expr) { p.ch = endOfFile } else { c, w := utf8.DecodeRuneInString(p.re.expr[p.pos:len(p.re.expr)]); p.ch = c; p.pos += w; } return p.ch; } func newParser(re *Regexp) *parser { p := new(parser); p.re = re; p.nextc(); // load p.ch return p; } func special(c int) bool { s := `\.+*?()|[]^$`; for i := 0; i < len(s); i++ { if c == int(s[i]) { return true } } return false; } func specialcclass(c int) bool { s := `\-[]`; for i := 0; i < len(s); i++ { if c == int(s[i]) { return true } } return false; } func (p *parser) charClass() instr { cc := newCharClass(); if p.c() == '^' { cc.negate = true; p.nextc(); } left := -1; for { switch c := p.c(); c { case ']', endOfFile: if left >= 0 { p.error = ErrBadRange; return nil; } // Is it [^\n]? if cc.negate && len(cc.ranges) == 2 && cc.ranges[0] == '\n' && cc.ranges[1] == '\n' { nl := new(_NotNl); p.re.add(nl); return nl; } p.re.add(cc); return cc; case '-': // do this before backslash processing p.error = ErrBadRange; return nil; case '\\': c = p.nextc(); switch { case c == endOfFile: p.error = ErrExtraneousBackslash; return nil; case c == 'n': c = '\n' case specialcclass(c): // c is as delivered default: p.error = ErrBadBackslash; return nil; } fallthrough; default: p.nextc(); switch { case left < 0: // first of pair if p.c() == '-' { // range p.nextc(); left = c; } else { // single char cc.addRange(c, c) } case left <= c: // second of pair cc.addRange(left, c); left = -1; default: p.error = ErrBadRange; return nil; } } } return nil; } func (p *parser) term() (start, end instr) { // term() is the leaf of the recursion, so it's sufficient to pick off the // error state here for early exit. // The other functions (closure(), concatenation() etc.) assume // it's safe to recur to here. 
if p.error != "" { return } switch c := p.c(); c { case '|', endOfFile: return nil, nil case '*', '+': p.error = ErrBareClosure; return; case ')': if p.nlpar == 0 { p.error = ErrUnmatchedRpar; return; } return nil, nil; case ']': p.error = ErrUnmatchedRbkt; return; case '^': p.nextc(); start = p.re.add(new(_Bot)); return start, start; case '$': p.nextc(); start = p.re.add(new(_Eot)); return start, start; case '.': p.nextc(); start = p.re.add(new(_Any)); return start, start; case '[': p.nextc(); start = p.charClass(); if p.error != "" { return } if p.c() != ']' { p.error = ErrUnmatchedLbkt; return; } p.nextc(); return start, start; case '(': p.nextc(); p.nlpar++; p.re.nbra++; // increment first so first subexpr is \1 nbra := p.re.nbra; start, end = p.regexp(); if p.c() != ')' { p.error = ErrUnmatchedLpar; return; } p.nlpar--; p.nextc(); bra := new(_Bra); p.re.add(bra); ebra := new(_Ebra); p.re.add(ebra); bra.n = nbra; ebra.n = nbra; if start == nil { if end == nil { p.error = ErrInternal; return; } start = ebra; } else { end.setNext(ebra) } bra.setNext(start); return bra, ebra; case '\\': c = p.nextc(); switch { case c == endOfFile: p.error = ErrExtraneousBackslash; return; case c == 'n': c = '\n' case special(c): // c is as delivered default: p.error = ErrBadBackslash; return; } fallthrough; default: p.nextc(); start = newChar(c); p.re.add(start); return start, start; } panic("unreachable"); } func (p *parser) closure() (start, end instr) { start, end = p.term(); if start == nil || p.error != "" { return } switch p.c() { case '*': // (start,end)*: alt := new(_Alt); p.re.add(alt); end.setNext(alt); // after end, do alt alt.left = start; // alternate brach: return to start start = alt; // alt becomes new (start, end) end = alt; case '+': // (start,end)+: alt := new(_Alt); p.re.add(alt); end.setNext(alt); // after end, do alt alt.left = start; // alternate brach: return to start end = alt; // start is unchanged; end is alt case '?': // (start,end)?: alt := new(_Alt); p.re.add(alt); nop := new(_Nop); p.re.add(nop); alt.left = start; // alternate branch is start alt.setNext(nop); // follow on to nop end.setNext(nop); // after end, go to nop start = alt; // start is now alt end = nop; // end is nop pointed to by both branches default: return } switch p.nextc() { case '*', '+', '?': p.error = ErrBadClosure } return; } func (p *parser) concatenation() (start, end instr) { for { nstart, nend := p.closure(); if p.error != "" { return } switch { case nstart == nil: // end of this concatenation if start == nil { // this is the empty string nop := p.re.add(new(_Nop)); return nop, nop; } return; case start == nil: // this is first element of concatenation start, end = nstart, nend default: end.setNext(nstart); end = nend; } } panic("unreachable"); } func (p *parser) regexp() (start, end instr) { start, end = p.concatenation(); if p.error != "" { return } for { switch p.c() { default: return case '|': p.nextc(); nstart, nend := p.concatenation(); if p.error != "" { return } alt := new(_Alt); p.re.add(alt); alt.left = start; alt.setNext(nstart); nop := new(_Nop); p.re.add(nop); end.setNext(nop); nend.setNext(nop); start, end = alt, nop; } } panic("unreachable"); } func unNop(i instr) instr { for i.kind() == _NOP { i = i.next() } return i; } func (re *Regexp) eliminateNops() { for i := 0; i < len(re.inst); i++ { inst := re.inst[i]; if inst.kind() == _END { continue } inst.setNext(unNop(inst.next())); if inst.kind() == _ALT { alt := inst.(*_Alt); alt.left = unNop(alt.left); } } } func (re *Regexp) doParse() 
string { p := newParser(re); start := new(_Start); re.add(start); s, e := p.regexp(); if p.error != "" { return p.error } start.setNext(s); re.start = start; e.setNext(re.add(new(_End))); re.eliminateNops(); return p.error; } // CompileRegexp parses a regular expression and returns, if successful, a Regexp // object that can be used to match against text. func CompileRegexp(str string) (regexp *Regexp, error string) { regexp = new(Regexp); regexp.expr = str; regexp.inst = make([]instr, 0, 20); error = regexp.doParse(); return; } // MustCompileRegexp is like CompileRegexp but panics if the expression cannot be parsed. // It simplifies safe initialization of global variables holding compiled regular // expressions. func MustCompile(str string) *Regexp { regexp, error := CompileRegexp(str); if error != "" { panicln(`regexp: compiling "`, str, `": `, error) } return regexp; } type state struct { inst instr; // next instruction to execute match []int; // pairs of bracketing submatches. 0th is start,end } // Append new state to to-do list. Leftmost-longest wins so avoid // adding a state that's already active. func addState(s []state, inst instr, match []int) []state { index := inst.index(); l := len(s); pos := match[0]; // TODO: Once the state is a vector and we can do insert, have inputs always // go in order correctly and this "earlier" test is never necessary, for i := 0; i < l; i++ { if s[i].inst.index() == index && // same instruction s[i].match[0] < pos { // earlier match already going; lefmost wins return s } } if l == cap(s) { s1 := make([]state, 2*l)[0:l]; for i := 0; i < l; i++ { s1[i] = s[i] } s = s1; } s = s[0 : l+1]; s[l].inst = inst; s[l].match = match; return s; } // Accepts either string or bytes - the logic is identical either way. // If bytes == nil, scan str. 
func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int { var s [2][]state; // TODO: use a vector when state values (not ptrs) can be vector elements s[0] = make([]state, 10)[0:0]; s[1] = make([]state, 10)[0:0]; in, out := 0, 1; var final state; found := false; end := len(str); if bytes != nil { end = len(bytes) } for pos <= end { if !found { // prime the pump if we haven't seen a match yet match := make([]int, 2*(re.nbra+1)); for i := 0; i < len(match); i++ { match[i] = -1 // no match seen; catches cases like "a(b)?c" on "ac" } match[0] = pos; s[out] = addState(s[out], re.start.next(), match); } in, out = out, in; // old out state is new in state s[out] = s[out][0:0]; // clear out state if len(s[in]) == 0 { // machine has completed break } charwidth := 1; c := endOfFile; if pos < end { if bytes == nil { c, charwidth = utf8.DecodeRuneInString(str[pos:end]) } else { c, charwidth = utf8.DecodeRune(bytes[pos:end]) } } for i := 0; i < len(s[in]); i++ { st := s[in][i]; switch s[in][i].inst.kind() { case _BOT: if pos == 0 { s[in] = addState(s[in], st.inst.next(), st.match) } case _EOT: if pos == end { s[in] = addState(s[in], st.inst.next(), st.match) } case _CHAR: if c == st.inst.(*_Char).char { s[out] = addState(s[out], st.inst.next(), st.match) } case _CHARCLASS: if st.inst.(*_CharClass).matches(c) { s[out] = addState(s[out], st.inst.next(), st.match) } case _ANY: if c != endOfFile { s[out] = addState(s[out], st.inst.next(), st.match) } case _NOTNL: if c != endOfFile && c != '\n' { s[out] = addState(s[out], st.inst.next(), st.match) } case _BRA: n := st.inst.(*_Bra).n; st.match[2*n] = pos; s[in] = addState(s[in], st.inst.next(), st.match); case _EBRA: n := st.inst.(*_Ebra).n; st.match[2*n+1] = pos; s[in] = addState(s[in], st.inst.next(), st.match); case _ALT: s[in] = addState(s[in], st.inst.(*_Alt).left, st.match); // give other branch a copy of this match vector s1 := make([]int, 2*(re.nbra+1)); for i := 0; i < len(s1); i++ { s1[i] = st.match[i] } s[in] = addState(s[in], st.inst.next(), s1); case _END: // choose leftmost longest if !found || // first st.match[0] < final.match[0] || // leftmost (st.match[0] == final.match[0] && pos > final.match[1]) { // longest final = st; final.match[1] = pos; } found = true; default: st.inst.print(); panic("unknown instruction in execute"); } } pos += charwidth; } return final.match; } // ExecuteString matches the Regexp against the string s. // The return value is an array of integers, in pairs, identifying the positions of // substrings matched by the expression. // s[a[0]:a[1]] is the substring matched by the entire expression. // s[a[2*i]:a[2*i+1]] for i > 0 is the substring matched by the ith parenthesized subexpression. // A negative value means the subexpression did not match any element of the string. // An empty array means "no match". func (re *Regexp) ExecuteString(s string) (a []int) { return re.doExecute(s, nil, 0) } // Execute matches the Regexp against the byte slice b. // The return value is an array of integers, in pairs, identifying the positions of // subslices matched by the expression. // b[a[0]:a[1]] is the subslice matched by the entire expression. // b[a[2*i]:a[2*i+1]] for i > 0 is the subslice matched by the ith parenthesized subexpression. // A negative value means the subexpression did not match any element of the slice. // An empty array means "no match". func (re *Regexp) Execute(b []byte) (a []int) { return re.doExecute("", b, 0) } // MatchString returns whether the Regexp matches the string s. 
// The return value is a boolean: true for match, false for no match. func (re *Regexp) MatchString(s string) bool { return len(re.doExecute(s, nil, 0)) > 0 } // Match returns whether the Regexp matches the byte slice b. // The return value is a boolean: true for match, false for no match. func (re *Regexp) Match(b []byte) bool { return len(re.doExecute("", b, 0)) > 0 } // MatchStrings matches the Regexp against the string s. // The return value is an array of strings matched by the expression. // a[0] is the substring matched by the entire expression. // a[i] for i > 0 is the substring matched by the ith parenthesized subexpression. // An empty array means ``no match''. func (re *Regexp) MatchStrings(s string) (a []string) { r := re.doExecute(s, nil, 0); if r == nil { return nil } a = make([]string, len(r)/2); for i := 0; i < len(r); i += 2 { if r[i] != -1 { // -1 means no match for this subexpression a[i/2] = s[r[i]:r[i+1]] } } return; } // MatchSlices matches the Regexp against the byte slice b. // The return value is an array of subslices matched by the expression. // a[0] is the subslice matched by the entire expression. // a[i] for i > 0 is the subslice matched by the ith parenthesized subexpression. // An empty array means ``no match''. func (re *Regexp) MatchSlices(b []byte) (a [][]byte) { r := re.doExecute("", b, 0); if r == nil { return nil } a = make([][]byte, len(r)/2); for i := 0; i < len(r); i += 2 { if r[i] != -1 { // -1 means no match for this subexpression a[i/2] = b[r[i]:r[i+1]] } } return; } // MatchString checks whether a textual regular expression // matches a string. More complicated queries need // to use Compile and the full Regexp interface. func MatchString(pattern string, s string) (matched bool, error string) { re, err := CompileRegexp(pattern); if err != "" { return false, err } return re.MatchString(s), ""; } // Match checks whether a textual regular expression // matches a byte slice. More complicated queries need // to use Compile and the full Regexp interface. func Match(pattern string, b []byte) (matched bool, error string) { re, err := CompileRegexp(pattern); if err != "" { return false, err } return re.Match(b), ""; }
matches
identifier_name
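In an `identifier_name` row the middle is just the name being declared. Here the prefix ends with `func (cclass *_CharClass)`, the middle is `matches`, and the suffix opens with `(c int) bool`, so the three fields concatenate back to the declaration `func (cclass *_CharClass) matches(c int) bool`.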
mod.rs
use crate::domain; use crate::ops; use crate::prelude::*; use petgraph; use std::collections::{HashMap, HashSet}; use std::ops::{Deref, DerefMut}; mod process; #[cfg(test)] pub(crate) use self::process::materialize; pub mod special; mod ntype; pub use self::ntype::NodeType; // crate viz for tests mod debug; // NOTE(jfrg): the migration code should probably move into the dataflow crate... // it is the reason why so much stuff here is pub #[derive(Clone, Serialize, Deserialize)] pub struct Node { name: String, index: Option<IndexPair>, domain: Option<domain::Index>, fields: Vec<String>, parents: Vec<LocalNodeIndex>, children: Vec<LocalNodeIndex>, inner: NodeType, taken: bool, pub purge: bool, sharded_by: Sharding, } // constructors impl Node { pub fn new<S1, FS, S2, NT>(name: S1, fields: FS, inner: NT) -> Node where S1: ToString, S2: ToString, FS: IntoIterator<Item = S2>, NT: Into<NodeType>, { Node { name: name.to_string(), index: None, domain: None, fields: fields.into_iter().map(|s| s.to_string()).collect(), parents: Vec::new(), children: Vec::new(), inner: inner.into(), taken: false, purge: false, sharded_by: Sharding::None, } } pub fn mirror<NT: Into<NodeType>>(&self, n: NT) -> Node { Self::new(&*self.name, &self.fields, n) } pub fn named_mirror<NT: Into<NodeType>>(&self, n: NT, name: String) -> Node { Self::new(name, &self.fields, n) } } #[must_use] pub struct DanglingDomainNode(Node); impl DanglingDomainNode { pub fn finalize(self, graph: &Graph) -> Node { let mut n = self.0; let ni = n.global_addr(); let dm = n.domain(); n.children = graph .neighbors_directed(ni, petgraph::EdgeDirection::Outgoing) .filter(|&c| graph[c].domain() == dm) .map(|ni| graph[ni].local_addr()) .collect(); n.parents = graph .neighbors_directed(ni, petgraph::EdgeDirection::Incoming) .filter(|&c| !graph[c].is_source() && graph[c].domain() == dm) .map(|ni| graph[ni].local_addr()) .collect(); n } } // expternal parts of Ingredient impl Node { /// Called when a node is first connected to the graph. /// /// All its ancestors are present, but this node and its children may not have been connected /// yet. pub fn on_connected(&mut self, graph: &Graph) { Ingredient::on_connected(&mut **self, graph) } pub fn on_commit(&mut self, remap: &HashMap<NodeIndex, IndexPair>) { // this is *only* overwritten for these asserts. assert!(!self.taken); if let NodeType::Internal(ref mut i) = self.inner { i.on_commit(self.index.unwrap().as_global(), remap) } } /// May return a set of nodes such that *one* of the given ancestors *must* be the one to be /// replayed if this node's state is to be initialized. pub fn must_replay_among(&self) -> Option<HashSet<NodeIndex>> { Ingredient::must_replay_among(&**self) } /// Translate a column in this ingredient into the corresponding column(s) in /// parent ingredients. None for the column means that the parent doesn't /// have an associated column. Similar to resolve, but does not depend on /// materialization, and returns results even for computed columns. pub fn parent_columns(&self, column: usize) -> Vec<(NodeIndex, Option<usize>)> { Ingredient::parent_columns(&**self, column) } /// Resolve where the given field originates from. If the view is materialized, or the value is /// otherwise created by this view, None should be returned. 
pub fn resolve(&self, i: usize) -> Option<Vec<(NodeIndex, usize)>> { Ingredient::resolve(&**self, i) } /// Returns true if this operator requires a full materialization pub fn requires_full_materialization(&self) -> bool { Ingredient::requires_full_materialization(&**self) } pub fn can_query_through(&self) -> bool { Ingredient::can_query_through(&**self) } pub fn is_join(&self) -> bool { Ingredient::is_join(&**self) } pub fn ancestors(&self) -> Vec<NodeIndex> { Ingredient::ancestors(&**self) } /// Produce a compact, human-readable description of this node for Graphviz. /// /// If `detailed` is true, emit more info. /// /// Symbol Description /// --------|------------- /// B | Base /// || | Concat /// ⧖ | Latest /// γ | Group by /// |*| | Count /// 𝛴 | Sum /// ⋈ | Join /// ⋉ | Left join /// ⋃ | Union pub fn description(&self, detailed: bool) -> String { Ingredient::description(&**self, detailed) } } // publicly accessible attributes impl Node { pub fn name(&self) -> &str { &*self.name } pub fn fields(&self) -> &[String] { &self.fields[..] } pub fn sharded_by(&self) -> Sharding { self.sharded_by } /// Set this node's sharding property. pub fn shard_by(&mut self, s: Sharding) { self.sharded_by = s; } } // events impl Node { pub fn take(&mut self) -> DanglingDomainNode { assert!(!self.taken); assert!( (!self.is_internal() && !self.is_base()) || self.domain.is_some(), "tried to take unassigned node" ); let inner = self.inner.take(); let mut n = self.mirror(inner); n.index = self.index; n.domain = self.domain; n.purge = self.purge; self.taken = true; DanglingDomainNode(n) } pub fn remove(&mut self) { self.inner = NodeType::Dropped; } } // derefs impl Node { pub(crate) fn with_sharder_mut<F>(&mut self, f: F) where F: FnOnce(&mut special::Sharder), { match self.inner { NodeType::Sharder(ref mut s) => f(s), _ => unreachable!(), } } pub fn with_sharder<'a, F, R>(&'a self, f: F) -> Option<R> where F: FnOnce(&'a special::Sharder) -> R, R: 'a, { match self.inner { NodeType::Sharder(ref s) => Some(f(s)), _ => None, } } pub(crate) fn with_egress_mut<F>(&mut self, f: F) where F: FnOnce(&mut special::Egress), { match self.inner { NodeType::Egress(Some(ref mut e)) => f(e), _ => unreachable!(), } } pub fn with_reader_mut<'a, F, R>(&'a mut self, f: F) -> Result<R, ()> where F: FnOnce(&'a mut special::Reader) -> R, R: 'a, { match self.inner { NodeType::Reader(ref mut r) => Ok(f(r)), _ => Err(()), } } pub fn with_reader<'a, F, R>(&'a self, f: F) -> Result<R, ()> where F: FnOnce(&'a special::Reader) -> R, R: 'a, { match self.inner { NodeType::Reader(ref r) => Ok(f(r)), _ => Err(()), } } pub fn get_base(&self) -> Option<&special::Base> { if let NodeType::Base(ref b) = self.inner { Some(b) } else { None } } pub fn suggest_indexes(&self, n: NodeIndex) -> HashMap<NodeIndex, Vec<usize>> { match self.inner { NodeType::Internal(ref i) => i.suggest_indexes(n), NodeType::Base(ref b) => b.suggest_indexes(n), _ => HashMap::new(), } } } impl Deref for Node { type Target = ops::NodeOperator; fn deref(&self) -> &Self::Target { match self.inner { NodeType::Internal(ref i) => i, _ => unreachable!(), } } } impl DerefMut for Node { fn deref_mut(&mut self) -> &mut Self::Target { assert!(!self.taken); match self.inner { NodeType::Internal(ref mut i) => i, _ => unreachable!(), } } } // neighbors impl Node { pub(crate) fn children(&self) -> &[LocalNodeIndex] { &self.children } pub(crate) fn parents(&self) -> &[LocalNodeIndex] { &self.parents } } // attributes impl Node { pub(crate) fn beyond_mat_frontier(&self) -> bool { self.purge } 
pub(crate) fn add_child(&mut self, child: LocalNodeIndex) { self.children.push(child); } pub(crate) fn try_remove_child(&mut self, child: LocalNodeIndex) -> bool { for i in 0..self.children.len() { if self.children[i] == child { self.children.swap_remove(i); return true; } } false } pub fn add_column(&mut self, field: &str) -> usize { self.fields.push(field.to_string()); self.fields.len() - 1 } pub fn has_domain(&self) -> bool { self.domain.is_some() } pub fn domain(&self) -> domain::Index { match self.domain { Some(domain) => domain, None => { unreachable!( "asked for unset domain for {:?} {}", self, self.global_addr().index() ); } } } pub fn local_addr(&self) -> LocalNodeIndex { match self.index { Some(idx) if idx.has_local() => *idx, Some(_) | None => unreachable!("asked for unset addr for {:?}", self), } } pub fn global_addr(&self) -> NodeIndex { match self.index { Some(ref index) => index.as_global(), None => { unreachable!("asked for unset index for {:?}", self); } } } pub fn get_base_mut(&mut self) -> Option<&mut special::Base> { if let NodeType::Base(ref mut b) = self.inner { Some(b) } else { None } } pub fn add_to(&mut self, domain: domain::Index) { assert_eq!(self.domain, None); assert!(!self.is_dropped()); self.domain = Some(domain); } pub fn set_finalized_addr(&mut self, addr: IndexPair) { self.index = Some(addr); } } // is this or that? impl Node { pub fn is_dropped(&self) -> bool { if let NodeType::Dropped = self.inner { true } else { false } } pub fn is_egress(&self) -> bool { if let NodeType::Egress { .. } = self.inner { true } else { false } } pub fn is_reader(&self) -> bool { if let NodeType::Reader { .. } = self.inner { true } else { false } } pub fn is_ingress(&self) -> bool { if let NodeType::Ingress = self.inner { true } else { false } } pub fn is_sender(&self) -> bool { match self.inner { NodeType::Egress { .. } | NodeType::Sharder(..) => true, _ => false, } } pub fn is_internal(&self) -> bool { if let NodeType::Internal(..) = self.inner { true } else { false } } pub fn is_source(&self) -> bool { if let NodeType::Source { .. } = self.inner { true } else { false } } pub fn is_sharder(&self) -> bool { if let NodeType::Sharder { .. } = self.inner { true } else { false } } pub fn is_base(&sel
ool { if let NodeType::Base(..) = self.inner { true } else { false } } pub fn is_union(&self) -> bool { if let NodeType::Internal(NodeOperator::Union(_)) = self.inner { true } else { false } } pub fn is_shard_merger(&self) -> bool { if let NodeType::Internal(NodeOperator::Union(ref u)) = self.inner { u.is_shard_merger() } else { false } } }
f) -> b
identifier_name
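Note that the split need not fall on token boundaries: this row's prefix ends mid-identifier at `pub fn is_base(&sel`, the middle is `f) -> b`, and the suffix opens with `ool {`, which concatenate back to `pub fn is_base(&self) -> bool {`.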
mod.rs
use crate::domain; use crate::ops; use crate::prelude::*; use petgraph; use std::collections::{HashMap, HashSet}; use std::ops::{Deref, DerefMut}; mod process; #[cfg(test)] pub(crate) use self::process::materialize; pub mod special; mod ntype; pub use self::ntype::NodeType; // crate viz for tests mod debug; // NOTE(jfrg): the migration code should probably move into the dataflow crate... // it is the reason why so much stuff here is pub #[derive(Clone, Serialize, Deserialize)] pub struct Node { name: String, index: Option<IndexPair>, domain: Option<domain::Index>, fields: Vec<String>, parents: Vec<LocalNodeIndex>, children: Vec<LocalNodeIndex>, inner: NodeType, taken: bool, pub purge: bool, sharded_by: Sharding, } // constructors impl Node { pub fn new<S1, FS, S2, NT>(name: S1, fields: FS, inner: NT) -> Node where S1: ToString, S2: ToString, FS: IntoIterator<Item = S2>, NT: Into<NodeType>, { Node { name: name.to_string(), index: None, domain: None, fields: fields.into_iter().map(|s| s.to_string()).collect(), parents: Vec::new(), children: Vec::new(), inner: inner.into(), taken: false, purge: false, sharded_by: Sharding::None, } } pub fn mirror<NT: Into<NodeType>>(&self, n: NT) -> Node { Self::new(&*self.name, &self.fields, n) } pub fn named_mirror<NT: Into<NodeType>>(&self, n: NT, name: String) -> Node { Self::new(name, &self.fields, n) } } #[must_use] pub struct DanglingDomainNode(Node); impl DanglingDomainNode { pub fn finalize(self, graph: &Graph) -> Node { let mut n = self.0; let ni = n.global_addr(); let dm = n.domain(); n.children = graph .neighbors_directed(ni, petgraph::EdgeDirection::Outgoing) .filter(|&c| graph[c].domain() == dm) .map(|ni| graph[ni].local_addr()) .collect(); n.parents = graph .neighbors_directed(ni, petgraph::EdgeDirection::Incoming) .filter(|&c| !graph[c].is_source() && graph[c].domain() == dm) .map(|ni| graph[ni].local_addr()) .collect(); n } } // expternal parts of Ingredient impl Node { /// Called when a node is first connected to the graph. /// /// All its ancestors are present, but this node and its children may not have been connected /// yet. pub fn on_connected(&mut self, graph: &Graph) { Ingredient::on_connected(&mut **self, graph) } pub fn on_commit(&mut self, remap: &HashMap<NodeIndex, IndexPair>) { // this is *only* overwritten for these asserts. assert!(!self.taken); if let NodeType::Internal(ref mut i) = self.inner { i.on_commit(self.index.unwrap().as_global(), remap) } } /// May return a set of nodes such that *one* of the given ancestors *must* be the one to be /// replayed if this node's state is to be initialized. pub fn must_replay_among(&self) -> Option<HashSet<NodeIndex>> { Ingredient::must_replay_among(&**self) } /// Translate a column in this ingredient into the corresponding column(s) in /// parent ingredients. None for the column means that the parent doesn't /// have an associated column. Similar to resolve, but does not depend on /// materialization, and returns results even for computed columns. pub fn parent_columns(&self, column: usize) -> Vec<(NodeIndex, Option<usize>)> { Ingredient::parent_columns(&**self, column) } /// Resolve where the given field originates from. If the view is materialized, or the value is /// otherwise created by this view, None should be returned. 
pub fn resolve(&self, i: usize) -> Option<Vec<(NodeIndex, usize)>> { Ingredient::resolve(&**self, i) } /// Returns true if this operator requires a full materialization pub fn requires_full_materialization(&self) -> bool { Ingredient::requires_full_materialization(&**self) } pub fn can_query_through(&self) -> bool { Ingredient::can_query_through(&**self) } pub fn is_join(&self) -> bool { Ingredient::is_join(&**self) } pub fn ancestors(&self) -> Vec<NodeIndex> { Ingredient::ancestors(&**self) } /// Produce a compact, human-readable description of this node for Graphviz. /// /// If `detailed` is true, emit more info. /// /// Symbol Description /// --------|------------- /// B | Base /// || | Concat /// ⧖ | Latest /// γ | Group by /// |*| | Count /// 𝛴 | Sum /// ⋈ | Join /// ⋉ | Left join /// ⋃ | Union pub fn description(&self, detailed: bool) -> String { Ingredient::description(&**self, detailed) } } // publicly accessible attributes impl Node { pub fn name(&self) -> &str { &*self.name } pub fn fields(&self) -> &[String] { &self.fields[..] } pub fn sharded_by(&self) -> Sharding { self.sharded_by } /// Set this node's sharding property. pub fn shard_by(&mut self, s: Sharding) { self.sharded_by = s; } } // events impl Node { pub fn take(&mut self) -> DanglingDomainNode { assert!(!self.taken); assert!( (!self.is_internal() && !self.is_base()) || self.domain.is_some(), "tried to take unassigned node" ); let inner = self.inner.take(); let mut n = self.mirror(inner); n.index = self.index; n.domain = self.domain; n.purge = self.purge; self.taken = true; DanglingDomainNode(n) } pub fn remove(&mut self) { self.inner = NodeType::Dropped; } } // derefs impl Node { pub(crate) fn with_sharder_mut<F>(&mut self, f: F) where F: FnOnce(&mut special::Sharder), { match self.inner { NodeType::Sharder(ref mut s) => f(s), _ => unreachable!(), } } pub fn with_sharder<'a, F, R>(&'a self, f: F) -> Option<R> where F: FnOnce(&'a special::Sharder) -> R, R: 'a, { match self.inner { NodeType::Sharder(ref s) => Some(f(s)), _ => None, } } pub(crate) fn with_egress_mut<F>(&mut self, f: F) where F: FnOnce(&mut special::Egress), { match self.inner { NodeType::Egress(Some(ref mut e)) => f(e), _ => unreachable!(), } } pub fn with_reader_mut<'a, F, R>(&'a mut self, f: F) -> Result<R, ()> where F: FnOnce(&'a mut special::Reader) -> R, R: 'a, { match self.inner { NodeType::Reader(ref mut r) => Ok(f(r)), _ => Err(()), } } pub fn with_reader<'a, F, R>(&'a self, f: F) -> Result<R, ()> where F: FnOnce(&'a special::Reader) -> R, R: 'a, { match self.inner { NodeType::Reader(ref r) => Ok(f(r)), _ => Err(()), } } pub fn get_base(&self) -> Option<&special::Base> { if let NodeType::Base(ref b) = self.inner { Some(b) } else { None } } pub fn suggest_indexes(&self, n: NodeIndex) -> HashMap<NodeIndex, Vec<usize>> { match self.inner { NodeType::Internal(ref i) => i.suggest_indexes(n), NodeType::Base(ref b) => b.suggest_indexes(n), _ => HashMap::new(), } } } impl Deref for Node { type Target = ops::NodeOperator; fn deref(&self) -> &Self::Target { match self.inner { NodeType::Internal(ref i) => i, _ => unreachable!(), } } } impl DerefMut for Node { fn deref_mut(&mut self) -> &mut Self::Target { assert!(!self.taken); match self.inner { NodeType::Internal(ref mut i) => i, _ => unreachable!(), } } } // neighbors impl Node { pub(crate) fn children(&self) -> &[LocalNodeIndex] { &self.children } pub(crate) fn parents(&self) -> &[LocalNodeIndex] { &self.parents } } // attributes impl Node { pub(crate) fn beyond_mat_frontier(&self) -> bool { self.purge } 
pub(crate) fn add_child(&mut self, child: LocalNodeIndex) { self.children.push(child); } pub(crate) fn try_remove_child(&mut self, child: LocalNodeIndex) -> bool { for i in 0..self.children.len() { if self.children[i] == child { self.children.swap_remove(i); return true; } } false } pub fn add_column(&mut self, field: &str) -> usize { self.fields.push(field.to_string()); self.fields.len() - 1 } pub fn has_domain(&self) -> bool { self.domain.is_some() } pub fn domain(&self) -> domain::Index { match self.domain { Some(domain) => domain, None => { unreachable!( "asked for unset domain for {:?} {}", self, self.global_addr().index() ); } } } pub fn local_addr(&self) -> LocalNodeIndex { match self.index { Some(idx) if idx.has_local() => *idx, Some(_) | None => unreachable!("asked for unset addr for {:?}", self), } } pub fn global_addr(&self) -> NodeIndex { match self.index { Some(ref index) => index.as_global(), None => { unreachable!("asked for unset index for {:?}", self); } } } pub fn get_base_mut(&mut self) -> Option<&mut special::Base> { if let NodeType::Base(ref mut b) = self.inner { Some(b) } else { None } } pub fn add_to(&mut self, domain: domain::Index) { assert_eq!(self.domain, None); assert!(!self.is_dropped()); self.domain = Some(domain); } pub fn set_finalized_addr(&mut self, addr: IndexPair) { self.index = Some(addr); } } // is this or that? impl Node { pub fn is_dropped(&self) -> bool { if let NodeType::Dropped = self.inner { true } else { false } } pub fn is_egress(&self) -> bool { if let NodeType::Egress { .. } = self.inner { true } else { false } } pub fn is_reader(&self) -> bool { if let NodeType::Reader { .. } = self.inner { true } else { false } } pub fn is_ingress(&self) -> bool { if let NodeType::Ingress = self.inner { true } else { false } } pub fn is_sender(&self) -> bool { match self.inner { NodeType::Egress { .. } | NodeType::Sharder(..) => true, _ => false, } } pub fn is_internal(&self) -> bool { if let NodeType::Internal(..) = self.inner { true } else { false } } pub fn is_source(&self) -> bool { if let NodeType::Source { .. } = self.inner { true } else { false } } pub fn is_sharder(&self) -> bool { if let NodeType::Sharder { .. } = self.inner { true } else { false } } pub fn is_base(&self) -> bool { if let NodeType::Base(..) = self.inner { true } else { false } } pub fn is_union(&self) -> bool { if
is_shard_merger(&self) -> bool { if let NodeType::Internal(NodeOperator::Union(ref u)) = self.inner { u.is_shard_merger() } else { false } } }
let NodeType::Internal(NodeOperator::Union(_)) = self.inner { true } else { false } } pub fn
identifier_body
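Despite the `identifier_body` label, the middle in this row is not a clean function body: it carries the tail of `is_union`'s body (`let NodeType::Internal(NodeOperator::Union(_)) = self.inner { true } else { false } }`) plus the `pub fn` that begins the next declaration, whose name `is_shard_merger` opens the suffix.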
mod.rs
use crate::domain; use crate::ops; use crate::prelude::*; use petgraph; use std::collections::{HashMap, HashSet}; use std::ops::{Deref, DerefMut}; mod process; #[cfg(test)] pub(crate) use self::process::materialize; pub mod special; mod ntype; pub use self::ntype::NodeType; // crate viz for tests mod debug; // NOTE(jfrg): the migration code should probably move into the dataflow crate... // it is the reason why so much stuff here is pub #[derive(Clone, Serialize, Deserialize)] pub struct Node { name: String, index: Option<IndexPair>, domain: Option<domain::Index>, fields: Vec<String>, parents: Vec<LocalNodeIndex>, children: Vec<LocalNodeIndex>, inner: NodeType, taken: bool, pub purge: bool, sharded_by: Sharding, } // constructors impl Node { pub fn new<S1, FS, S2, NT>(name: S1, fields: FS, inner: NT) -> Node where S1: ToString, S2: ToString, FS: IntoIterator<Item = S2>, NT: Into<NodeType>, { Node { name: name.to_string(),
children: Vec::new(), inner: inner.into(), taken: false, purge: false, sharded_by: Sharding::None, } } pub fn mirror<NT: Into<NodeType>>(&self, n: NT) -> Node { Self::new(&*self.name, &self.fields, n) } pub fn named_mirror<NT: Into<NodeType>>(&self, n: NT, name: String) -> Node { Self::new(name, &self.fields, n) } } #[must_use] pub struct DanglingDomainNode(Node); impl DanglingDomainNode { pub fn finalize(self, graph: &Graph) -> Node { let mut n = self.0; let ni = n.global_addr(); let dm = n.domain(); n.children = graph .neighbors_directed(ni, petgraph::EdgeDirection::Outgoing) .filter(|&c| graph[c].domain() == dm) .map(|ni| graph[ni].local_addr()) .collect(); n.parents = graph .neighbors_directed(ni, petgraph::EdgeDirection::Incoming) .filter(|&c| !graph[c].is_source() && graph[c].domain() == dm) .map(|ni| graph[ni].local_addr()) .collect(); n } } // external parts of Ingredient impl Node { /// Called when a node is first connected to the graph. /// /// All its ancestors are present, but this node and its children may not have been connected /// yet. pub fn on_connected(&mut self, graph: &Graph) { Ingredient::on_connected(&mut **self, graph) } pub fn on_commit(&mut self, remap: &HashMap<NodeIndex, IndexPair>) { // this is *only* overwritten for these asserts. assert!(!self.taken); if let NodeType::Internal(ref mut i) = self.inner { i.on_commit(self.index.unwrap().as_global(), remap) } } /// May return a set of nodes such that *one* of the given ancestors *must* be the one to be /// replayed if this node's state is to be initialized. pub fn must_replay_among(&self) -> Option<HashSet<NodeIndex>> { Ingredient::must_replay_among(&**self) } /// Translate a column in this ingredient into the corresponding column(s) in /// parent ingredients. None for the column means that the parent doesn't /// have an associated column. Similar to resolve, but does not depend on /// materialization, and returns results even for computed columns. pub fn parent_columns(&self, column: usize) -> Vec<(NodeIndex, Option<usize>)> { Ingredient::parent_columns(&**self, column) } /// Resolve where the given field originates from. If the view is materialized, or the value is /// otherwise created by this view, None should be returned. pub fn resolve(&self, i: usize) -> Option<Vec<(NodeIndex, usize)>> { Ingredient::resolve(&**self, i) } /// Returns true if this operator requires a full materialization pub fn requires_full_materialization(&self) -> bool { Ingredient::requires_full_materialization(&**self) } pub fn can_query_through(&self) -> bool { Ingredient::can_query_through(&**self) } pub fn is_join(&self) -> bool { Ingredient::is_join(&**self) } pub fn ancestors(&self) -> Vec<NodeIndex> { Ingredient::ancestors(&**self) } /// Produce a compact, human-readable description of this node for Graphviz. /// /// If `detailed` is true, emit more info. /// /// Symbol Description /// --------|------------- /// B | Base /// || | Concat /// ⧖ | Latest /// γ | Group by /// |*| | Count /// 𝛴 | Sum /// ⋈ | Join /// ⋉ | Left join /// ⋃ | Union pub fn description(&self, detailed: bool) -> String { Ingredient::description(&**self, detailed) } } // publicly accessible attributes impl Node { pub fn name(&self) -> &str { &*self.name } pub fn fields(&self) -> &[String] { &self.fields[..] } pub fn sharded_by(&self) -> Sharding { self.sharded_by } /// Set this node's sharding property.
pub fn shard_by(&mut self, s: Sharding) { self.sharded_by = s; } } // events impl Node { pub fn take(&mut self) -> DanglingDomainNode { assert!(!self.taken); assert!( (!self.is_internal() && !self.is_base()) || self.domain.is_some(), "tried to take unassigned node" ); let inner = self.inner.take(); let mut n = self.mirror(inner); n.index = self.index; n.domain = self.domain; n.purge = self.purge; self.taken = true; DanglingDomainNode(n) } pub fn remove(&mut self) { self.inner = NodeType::Dropped; } } // derefs impl Node { pub(crate) fn with_sharder_mut<F>(&mut self, f: F) where F: FnOnce(&mut special::Sharder), { match self.inner { NodeType::Sharder(ref mut s) => f(s), _ => unreachable!(), } } pub fn with_sharder<'a, F, R>(&'a self, f: F) -> Option<R> where F: FnOnce(&'a special::Sharder) -> R, R: 'a, { match self.inner { NodeType::Sharder(ref s) => Some(f(s)), _ => None, } } pub(crate) fn with_egress_mut<F>(&mut self, f: F) where F: FnOnce(&mut special::Egress), { match self.inner { NodeType::Egress(Some(ref mut e)) => f(e), _ => unreachable!(), } } pub fn with_reader_mut<'a, F, R>(&'a mut self, f: F) -> Result<R, ()> where F: FnOnce(&'a mut special::Reader) -> R, R: 'a, { match self.inner { NodeType::Reader(ref mut r) => Ok(f(r)), _ => Err(()), } } pub fn with_reader<'a, F, R>(&'a self, f: F) -> Result<R, ()> where F: FnOnce(&'a special::Reader) -> R, R: 'a, { match self.inner { NodeType::Reader(ref r) => Ok(f(r)), _ => Err(()), } } pub fn get_base(&self) -> Option<&special::Base> { if let NodeType::Base(ref b) = self.inner { Some(b) } else { None } } pub fn suggest_indexes(&self, n: NodeIndex) -> HashMap<NodeIndex, Vec<usize>> { match self.inner { NodeType::Internal(ref i) => i.suggest_indexes(n), NodeType::Base(ref b) => b.suggest_indexes(n), _ => HashMap::new(), } } } impl Deref for Node { type Target = ops::NodeOperator; fn deref(&self) -> &Self::Target { match self.inner { NodeType::Internal(ref i) => i, _ => unreachable!(), } } } impl DerefMut for Node { fn deref_mut(&mut self) -> &mut Self::Target { assert!(!self.taken); match self.inner { NodeType::Internal(ref mut i) => i, _ => unreachable!(), } } } // neighbors impl Node { pub(crate) fn children(&self) -> &[LocalNodeIndex] { &self.children } pub(crate) fn parents(&self) -> &[LocalNodeIndex] { &self.parents } } // attributes impl Node { pub(crate) fn beyond_mat_frontier(&self) -> bool { self.purge } pub(crate) fn add_child(&mut self, child: LocalNodeIndex) { self.children.push(child); } pub(crate) fn try_remove_child(&mut self, child: LocalNodeIndex) -> bool { for i in 0..self.children.len() { if self.children[i] == child { self.children.swap_remove(i); return true; } } false } pub fn add_column(&mut self, field: &str) -> usize { self.fields.push(field.to_string()); self.fields.len() - 1 } pub fn has_domain(&self) -> bool { self.domain.is_some() } pub fn domain(&self) -> domain::Index { match self.domain { Some(domain) => domain, None => { unreachable!( "asked for unset domain for {:?} {}", self, self.global_addr().index() ); } } } pub fn local_addr(&self) -> LocalNodeIndex { match self.index { Some(idx) if idx.has_local() => *idx, Some(_) | None => unreachable!("asked for unset addr for {:?}", self), } } pub fn global_addr(&self) -> NodeIndex { match self.index { Some(ref index) => index.as_global(), None => { unreachable!("asked for unset index for {:?}", self); } } } pub fn get_base_mut(&mut self) -> Option<&mut special::Base> { if let NodeType::Base(ref mut b) = self.inner { Some(b) } else { None } } pub fn add_to(&mut self, 
domain: domain::Index) { assert_eq!(self.domain, None); assert!(!self.is_dropped()); self.domain = Some(domain); } pub fn set_finalized_addr(&mut self, addr: IndexPair) { self.index = Some(addr); } } // is this or that? impl Node { pub fn is_dropped(&self) -> bool { if let NodeType::Dropped = self.inner { true } else { false } } pub fn is_egress(&self) -> bool { if let NodeType::Egress { .. } = self.inner { true } else { false } } pub fn is_reader(&self) -> bool { if let NodeType::Reader { .. } = self.inner { true } else { false } } pub fn is_ingress(&self) -> bool { if let NodeType::Ingress = self.inner { true } else { false } } pub fn is_sender(&self) -> bool { match self.inner { NodeType::Egress { .. } | NodeType::Sharder(..) => true, _ => false, } } pub fn is_internal(&self) -> bool { if let NodeType::Internal(..) = self.inner { true } else { false } } pub fn is_source(&self) -> bool { if let NodeType::Source { .. } = self.inner { true } else { false } } pub fn is_sharder(&self) -> bool { if let NodeType::Sharder { .. } = self.inner { true } else { false } } pub fn is_base(&self) -> bool { if let NodeType::Base(..) = self.inner { true } else { false } } pub fn is_union(&self) -> bool { if let NodeType::Internal(NodeOperator::Union(_)) = self.inner { true } else { false } } pub fn is_shard_merger(&self) -> bool { if let NodeType::Internal(NodeOperator::Union(ref u)) = self.inner { u.is_shard_merger() } else { false } } }
index: None, domain: None, fields: fields.into_iter().map(|s| s.to_string()).collect(), parents: Vec::new(),
random_line_split
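Worth making explicit at this record boundary: each row here is a fill-in-the-middle sample, and the label that closes it (random_line_split, identifier_name, or identifier_body) describes how the held-out middle was chosen; concatenating prefix, middle, and suffix recovers the original file. A small sketch of that reassembly; the FimRecord struct is illustrative only, not an API from any of the files shown, and the sample data is abbreviated from the mod.rs record above:

// Illustrative record layout mirroring the cells of each row.
struct FimRecord {
    file_name: String,
    prefix: String,
    middle: String,
    suffix: String,
    fim_type: String, // "random_line_split", "identifier_name", or "identifier_body"
}

impl FimRecord {
    // The original source is prefix ++ middle ++ suffix.
    fn reassemble(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}

fn main() {
    // Cells abbreviated from the mod.rs record above.
    let rec = FimRecord {
        file_name: "mod.rs".to_string(),
        prefix: "Node { name: name.to_string(), ".to_string(),
        middle: "index: None, domain: None, ".to_string(),
        suffix: "children: Vec::new(), /* ... */ }".to_string(),
        fim_type: "random_line_split".to_string(),
    };
    println!("[{} / {}] {}", rec.file_name, rec.fim_type, rec.reassemble());
}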
gdb_stub.rs
use std::io::{self, Read, Write}; use std::net::{TcpListener, TcpStream, ToSocketAddrs}; use num_traits::PrimInt; use crate::log::LogKind::GDB; use std::fmt::Arguments; use crate::gba::Gba; use crate::bus::{BusPtr, Bus}; use crate::utils::OrderedSet; use crate::renderer::{Framebuffer, PHYS_WIDTH, PHYS_HEIGHT}; use std::time::{Instant, Duration}; use crate::arm7tdmi::REG_PC; use glutin::EventsLoop; use crate::gui::Gui; use std::thread; type GResult<T=()> = Result<T, failure::Error>; // From include/gdb/signals.h in GDB source code const SIGINT: u32 = 2; const SIGTRAP: u32 = 5; pub struct GdbStub { listener: Option<TcpListener>, stream: Option<TcpStream>, blocking: bool, no_ack_mode: bool, gba: Box<Gba>, bus_snooper: Box<BusDebugSnooper>, framebuffer: Framebuffer, run_state: RunState, events_loop: EventsLoop, gui: Gui, } #[derive(PartialEq)] enum RunState { Running, Paused, } impl GdbStub { pub fn new(mut gba: Box<Gba>) -> GdbStub { let mut bus_snooper = BusDebugSnooper::wrap(gba.arm.bus.clone()); gba.fixup_bus_ptrs(BusPtr::new(bus_snooper.as_mut() as *mut dyn Bus)); let events_loop = EventsLoop::new(); let gui = Gui::new(&events_loop); GdbStub { listener: None, stream: None, blocking: false, no_ack_mode: false, gba, bus_snooper, framebuffer: Framebuffer::new(PHYS_WIDTH, PHYS_HEIGHT), run_state: RunState::Paused, events_loop, gui, } } pub fn listen(&mut self, addr: impl ToSocketAddrs) -> GResult { let listener = TcpListener::bind(addr)?; listener.set_nonblocking(!self.blocking)?; self.listener = Some(listener); Ok(()) } pub fn run(&mut self) -> GResult { while self.gui.running { self.update()?; self.gui.update(&mut self.events_loop, &mut self.gba); if self.run_state == RunState::Running { let deadline = Instant::now() + Duration::from_millis(15); while Instant::now() < deadline { self.step_gba(); if let Some(stop_reason) = self.bus_snooper.stop_reason.take() { note!(GDB, "Stopped in debugger due to {:?}", stop_reason); self.run_state = RunState::Paused; self.send(&stop_reason.to_command())?; break; } } } else { thread::sleep(Duration::from_millis(1)); } } Ok(()) } pub fn update(&mut self) -> GResult { if self.listener.is_none() { return Ok(()); } let listener = self.listener.as_mut().unwrap(); if self.stream.is_none() { let (stream, addr) = if let Some(t) = transpose_would_block(listener.accept())? { t } else { return Ok(()); }; note!(GDB, "TcpListener accepted a connection from {}", addr); stream.set_nonblocking(!self.blocking)?; self.no_ack_mode = false; self.stream = Some(stream); } // Unwrapping because we ensured it's Some above let stream = self.stream.as_mut().unwrap(); let mut bytes = [0u8; 1200]; let mut msg: &[u8]; if let Some(amount) = transpose_would_block(stream.read(&mut bytes[..]))? { if amount == 0 { trace!(GDB, "Received 0 bytes, closing TcpStream.."); self.stream = None; return Ok(()); } else { let ascii_string = bytes_as_ascii(&bytes[..amount]); trace!(GDB, "Received {} bytes: {:?}", amount, ascii_string); msg = &bytes[..amount]; } } else { return Ok(()); } while !msg.is_empty() { let prev = msg; self.parse_message(&mut msg)?; // parse_message() must adjust `msg` to exclude the input it consumed. 
assert_ne!(prev, msg); } Ok(()) } fn parse_message(&mut self, msg: &mut &[u8]) -> GResult { match (*msg)[0] { b'+' => { // ack *msg = &(*msg)[1..]; return Ok(()); } b'-' => { // nak *msg = &(*msg)[1..]; return Ok(()); } b'$' => { // Continue on to process this command } 0x03 => { // Enter debugger *msg = &(*msg)[1..]; self.run_state = RunState::Paused; return self.send_fmt(format_args!("S{:02}", SIGINT)); } first => { // Skip this character, try parsing from the next character onwards *msg = &(*msg)[1..]; warn!(GDB, "packet error; first byte = '{:02X}'", first); return self.nak(); } } if !msg.contains(&b'#') { trace!(GDB, "request was missing '#' character"); return self.nak(); } let (message_body, mut their_checksum_str) = split_at(&msg[1..], b'#')?; if their_checksum_str.len() < 2 { trace!(GDB, "request had a checksum of less than 2 digits"); return self.nak(); } // Cut the checksum off at 2 characters, any input left after that might be another request. *msg = &their_checksum_str[2..]; their_checksum_str = &their_checksum_str[..2]; let our_checksum = checksum(message_body); let their_checksum = hex_to_int(their_checksum_str)?; if our_checksum != their_checksum { warn!(GDB, "incorrect checksum: our_checksum = {}, their_checksum = {}", our_checksum, their_checksum); return self.nak(); } // The input is syntactically well-formed, we'll ack it now, then we can respond with // an empty response if we don't actually understand the command we received. self.ack()?; let message_type = message_body[0]; let message_body = &message_body[1..]; match message_type { b'?' => { // Let's say we halted due to SIGINT self.send_fmt(format_args!("S{:02}", SIGINT))?; } b'c' => { self.do_continue(message_body)?; } b'D' => { self.process_detach_command()?; } b'g' => { self.read_gprs()?; } b'G' => { self.write_gprs(message_body)?; } b'H' => { // Sets the thread to use for subsequent invocations of a particular command. // We only have 1 thread, so acknowledge and do nothing. self.send(b"OK")?; } b'm' => { self.read_memory(message_body)?; } b'M' => { self.write_memory(message_body)?; } b'p' => { self.read_gpr(message_body)?; } b'P' => { self.write_gpr(message_body)?; } b'q' => { self.process_qread_command(message_body)?; } b'Q' => { self.process_qwrite_command(message_body)?; } b's' => { self.do_step(message_body)?; } b'z' => { self.process_z_command(message_body, false)?; } b'Z' => { self.process_z_command(message_body, true)?; } _ => { self.unrecognised_command()?; } } Ok(()) } fn process_qread_command(&mut self, msg: &[u8]) -> GResult { match msg { b"fThreadInfo" => { // First thread in list: thread ID 1 self.send(b"m1") } b"sThreadInfo" => { // End of list, thread ID 1 is the only thread self.send(b"l") } b"C" => { // The current thread is thread 1, we only have 1 thread.. 
self.send(b"QC1") } b"Attached" => { // We, the GDB server, are always already attached to a process self.send(b"1") } b"HostInfo" => { const MACH_O_ARM: u32 = 12; const MACH_O_ARM_V4T: u32 = 5; self.send_fmt(format_args!("cputype:{};cpusubtype:{};ostype:none;vendor:none;endian:little;ptrsize:4;", MACH_O_ARM, MACH_O_ARM_V4T)) } _ => { if let Some(tail) = strip_prefix(msg, b"Supported:") { self.process_qsupported_command(tail) } else { self.unrecognised_command() } } } } fn process_qsupported_command(&mut self, msg: &[u8]) -> GResult { let mut have_capabilities = Vec::new(); for requested_capability in msg.split(|&b| b == b';' || b == b',') { match requested_capability { b"swbreak+" | b"hwbreak+" => { have_capabilities.push(requested_capability); } b"arm" => { have_capabilities.push(b"arm+"); } // TODO: Support "vContSupported+"? _ => {} } } let capability_string = have_capabilities.join(&b';'); self.send(&capability_string) } fn process_qwrite_command(&mut self, msg: &[u8]) -> GResult { match msg { b"StartNoAckMode" => { self.no_ack_mode = true; self.send(b"OK") } _ => { self.unrecognised_command() } } } fn read_gprs(&mut self) -> GResult { let mut reg_string = Vec::with_capacity(16 * 8); for reg in self.gba.arm.regs[..REG_PC].iter() { reg_string.write(&int_to_hex_le(*reg))?; } reg_string.write(&int_to_hex_le(self.gba.arm.current_pc()))?; self.send(&reg_string) } fn write_gprs(&mut self, msg: &[u8]) -> GResult { for (i, value) in msg.chunks_exact(8).map(hex_to_int_le).enumerate() { self.gba.arm.set_reg(i, value?); } self.send(b"OK") } fn read_gpr(&mut self, msg: &[u8]) -> GResult { let reg_index: usize = hex_to_int(msg)?; let reg = if reg_index == 25 { self.gba.arm.cpsr.into() } else if reg_index == REG_PC { self.gba.arm.current_pc() } else if reg_index < 16 { self.gba.arm.regs[reg_index] } else { return self.send(b"E00"); }; self.send(&int_to_hex_le(reg)) } fn write_gpr(&mut self, msg: &[u8]) -> GResult { let (reg_index_str, value_str) = split_at(msg, b'=')?; let reg_index = hex_to_int(reg_index_str)?; let value = hex_to_int_le(value_str)?; self.gba.arm.set_reg(reg_index, value); self.send(b"OK") } fn read_memory(&mut self, msg: &[u8]) -> GResult { let (addr_str, len_str) = split_at(msg, b',')?; let addr: u32 = hex_to_int(addr_str)?; let len: u32 = hex_to_int(len_str)?; let mut result = Vec::<u8>::with_capacity(2 * len as usize); for i in addr..addr + len { let (_, byte) = self.gba.debug_read8(i); result.write_fmt(format_args!("{:02X}", byte))?; } self.send(&result) } fn write_memory(&mut self, msg: &[u8]) -> GResult { let (addr_str, len_str) = split_at(msg, b',')?; let (len_str, data_str) = split_at(len_str, b':')?; let start_addr: u32 = hex_to_int(addr_str)?; let len: u32 = hex_to_int(len_str)?; let data = data_str .chunks(2) .map(hex_to_int) .collect::<Result<Vec<u8>, failure::Error>>()?; for (addr, byte) in (start_addr..start_addr+len).zip(data) { self.gba.debug_write8(addr, byte); } self.send(b"OK") } fn process_z_command(&mut self, msg: &[u8], is_insert: bool) -> GResult { let (type_str, addr_str) = split_at(msg, b',')?; let (addr_str, kind_str) = split_at(addr_str, b',')?; let kind: u32 = hex_to_int(kind_str)?; let start_addr = hex_to_int(addr_str)?; let addr_set: &mut OrderedSet<u32> = match type_str { b"0" | b"1" if kind != 2 && kind != 4 => { return self.unrecognised_command(); } b"0" => { // software breakpoint // TODO: Does it matter that I'm just implementing this like a hardware breakpoint? 
&mut self.bus_snooper.breakpoints } b"1" => { // hardware breakpoint &mut self.bus_snooper.breakpoints } b"2" => { // write watchpoint &mut self.bus_snooper.write_watchpoints } b"3" => { // read watchpoint &mut self.bus_snooper.read_watchpoints } b"4" => { // access watchpoint &mut self.bus_snooper.access_watchpoints } _ => { return self.unrecognised_command(); } }; for addr in start_addr..start_addr+kind { if is_insert { addr_set.insert(addr); } else { addr_set.remove(addr); } } self.send(b"OK") } fn do_continue(&mut self, msg: &[u8]) -> GResult { if !msg.is_empty() { let addr = hex_to_int_le(msg)?; self.gba.arm.branch_to(addr); } self.run_state = RunState::Running; Ok(()) } fn do_step(&mut self, msg: &[u8]) -> GResult { if !msg.is_empty() { let addr = hex_to_int_le(msg)?; self.gba.arm.branch_to(addr); } self.step_gba(); let stop_reason = self.bus_snooper.stop_reason.take() .unwrap_or(StopReason::Step); self.send(&stop_reason.to_command()) } fn step_gba(&mut self) { let pc = self.gba.arm.regs[REG_PC] - self.gba.arm.get_op_size(); if self.bus_snooper.breakpoints.contains(pc) { self.bus_snooper.stop_reason = Some(StopReason::Breakpoint(pc)); } else { self.gba.step(&mut self.framebuffer); } } fn process_detach_command(&mut self) -> GResult { self.send(b"OK")?; // Just close the stream, we have no other bookkeeping to do for detaching. self.stream = None; Ok(()) } fn send_fmt(&mut self, args: Arguments) -> GResult { let mut bytes = Vec::<u8>::new(); bytes.write_fmt(args)?; self.send(&bytes) } fn send(&mut self, message: &[u8]) -> GResult { let mut response = Vec::new(); response.push(b'$'); response.extend_from_slice(message); response.push(b'#'); let checksum = checksum(message); write!(response, "{:02X}", checksum)?; self.send_raw(&response) } fn ack(&mut self) -> GResult { if self.no_ack_mode { return Ok(()); } self.send_raw(b"+") } fn nak(&mut self) -> GResult { if self.no_ack_mode { return Ok(()); } self.send_raw(b"-") } fn unrecognised_command(&mut self) -> GResult { // https://www-zeuthen.desy.de/unix/unixguide/infohtml/gdb/Overview.html // The empty response "$#00" indicates to the GDB client that the command is not supported self.send(&[]) } fn write_fmt(&mut self, args: Arguments) -> GResult { use std::io::Write; let mut v = Vec::new(); v.write_fmt(args)?; Ok(()) } fn send_raw(&mut self, bytes: &[u8]) -> GResult { if let Some(stream) = self.stream.as_mut() { let amount = stream.write(bytes); trace!(GDB, "wrote {:?} bytes of {} ({:?})", amount, bytes.len(), bytes_as_ascii(bytes)); amount?; } else { trace!(GDB, "tried to send {} bytes but stream was None", bytes.len()); } Ok(()) } }
WriteWatchpoint(u32), AccessWatchpoint(u32), Breakpoint(u32), Step, } impl StopReason { fn to_command(&self) -> Vec<u8> { let mut result = Vec::new(); match self { StopReason::ReadWatchpoint(addr) => write!(result, "T{:02}rwatch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::WriteWatchpoint(addr) => write!(result, "T{:02}watch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::AccessWatchpoint(addr) => write!(result, "T{:02}awatch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::Breakpoint(_) => write!(result, "T{:02}hwbreak:", SIGTRAP), StopReason::Step => write!(result, "S{:02}", SIGTRAP), }.unwrap(); result } } pub struct BusDebugSnooper { delegate: BusPtr, breakpoints: OrderedSet<u32>, read_watchpoints: OrderedSet<u32>, write_watchpoints: OrderedSet<u32>, access_watchpoints: OrderedSet<u32>, stop_reason: Option<StopReason>, } impl BusDebugSnooper { pub fn wrap(delegate: BusPtr) -> Box<BusDebugSnooper> { Box::new(BusDebugSnooper { delegate, breakpoints: OrderedSet::new(), read_watchpoints: OrderedSet::new(), write_watchpoints: OrderedSet::new(), access_watchpoints: OrderedSet::new(), stop_reason: None, }) } fn check_read(&mut self, addr: u32) { if self.read_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::ReadWatchpoint(addr)); } else if self.access_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::AccessWatchpoint(addr)); } } fn check_write(&mut self, addr: u32) { if self.write_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::WriteWatchpoint(addr)); } } } impl Bus for BusDebugSnooper { fn read8(&mut self, addr: u32) -> u8 { self.check_read(addr); self.delegate.read8(addr) } fn read16(&mut self, addr: u32) -> u16 { self.check_read(addr); self.delegate.read16(addr) } fn read32(&mut self, addr: u32) -> u32 { self.check_read(addr); self.delegate.read32(addr) } fn write8(&mut self, addr: u32, value: u8) { self.check_write(addr); self.delegate.write8(addr, value); } fn write16(&mut self, addr: u32, value: u16) { self.check_write(addr); self.delegate.write16(addr, value); } fn write32(&mut self, addr: u32, value: u32) { self.check_write(addr); self.delegate.write32(addr, value); } fn exec_thumb_slow(&mut self, addr: u32) -> u16 { self.delegate.exec_thumb_slow(addr) } fn exec_arm_slow(&mut self, addr: u32) -> u32 { self.delegate.exec_arm_slow(addr) } fn add_internal_cycles(&mut self, cycles: i64) { self.delegate.add_internal_cycles(cycles); } } fn hex_to_int<T: PrimInt>(bstr: &[u8]) -> Result<T, failure::Error> { // TODO: Error handle if let Ok(result) = T::from_str_radix(std::str::from_utf8(bstr)?, 16) { Ok(result) } else { Err(failure::err_msg("Failed to parse hex")) } } fn hex_to_int_le(bstr: &[u8]) -> Result<u32, failure::Error> { if bstr.len() != 8 { return Err(failure::err_msg("")) } let mut bytes = [0u8; 4]; for (i, value) in bstr.chunks(2).map(hex_to_int).enumerate() { bytes[i] = value?; } Ok(u32::from_le_bytes(bytes)) } fn int_to_hex_le(reg: u32) -> [u8; 8] { const CHARS: &[u8] = b"0123456789ABCDEF"; let mut result = [0u8; 8]; for (i, &b) in reg.to_le_bytes().iter().enumerate() { result[2*i + 0] = CHARS[(b >> 4) as usize]; result[2*i + 1] = CHARS[(b & 0xF) as usize]; } result } fn transpose_would_block<T>(r: Result<T, io::Error>) -> Result<Option<T>, io::Error> { match r { Ok(t) => Ok(Some(t)), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Ok(None), Err(e) => Err(e), } } fn strip_prefix<'a>(msg: &'a [u8], prefix: &[u8]) -> Option<&'a 
[u8]> { if msg.starts_with(prefix) { Some(&msg[prefix.len()..]) } else { None } } fn bytes_as_ascii(bytes: &[u8]) -> String { let ascii_chars = bytes .iter() .flat_map(|c| std::ascii::escape_default(*c)) .collect::<Vec<_>>(); String::from_utf8(ascii_chars).unwrap() } fn split_at(haystack: &[u8], needle: u8) -> Result<(&[u8], &[u8]), failure::Error> { let split_pos = haystack .iter() .position(|&c| c == needle) .ok_or_else(|| failure::err_msg(format!("missing '{}'", needle as char)))?; Ok((&haystack[..split_pos], &haystack[split_pos + 1..])) } fn checksum(bytes: &[u8]) -> u8 { bytes .iter() .map(|&c| c as u64) .sum::<u64>() as u8 }
#[derive(Debug)] enum StopReason { ReadWatchpoint(u32),
random_line_split
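The gdb_stub.rs record above revolves around GDB's remote serial protocol framing, implemented by its send(), ack()/nak(), and checksum() functions: a packet is '$', a payload, '#', and the payload's byte sum modulo 256 as two uppercase hex digits, with '+'/'-' serving as transport-level ack/nak. A standalone sketch of just that framing, assuming no emulator types:

// A GDB remote-protocol packet is $<payload>#<checksum>, where the
// checksum is the payload's byte sum modulo 256, as two hex digits.
fn checksum(bytes: &[u8]) -> u8 {
    bytes.iter().map(|&c| c as u64).sum::<u64>() as u8
}

fn frame(payload: &[u8]) -> Vec<u8> {
    let mut pkt = Vec::with_capacity(payload.len() + 4);
    pkt.push(b'$');
    pkt.extend_from_slice(payload);
    pkt.push(b'#');
    pkt.extend_from_slice(format!("{:02X}", checksum(payload)).as_bytes());
    pkt
}

fn main() {
    // 'g' (read general registers) is ASCII 0x67, so the framed packet is "$g#67".
    let pkt = frame(b"g");
    assert_eq!(pkt, b"$g#67".to_vec());
    println!("{}", String::from_utf8(pkt).unwrap());
}

This also explains why the stub acks before checking whether it understands a command: a checksum mismatch is a transport error answered with '-', while an unknown but well-formed command gets '+' followed by the empty packet "$#00".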
gdb_stub.rs
use std::io::{self, Read, Write}; use std::net::{TcpListener, TcpStream, ToSocketAddrs}; use num_traits::PrimInt; use crate::log::LogKind::GDB; use std::fmt::Arguments; use crate::gba::Gba; use crate::bus::{BusPtr, Bus}; use crate::utils::OrderedSet; use crate::renderer::{Framebuffer, PHYS_WIDTH, PHYS_HEIGHT}; use std::time::{Instant, Duration}; use crate::arm7tdmi::REG_PC; use glutin::EventsLoop; use crate::gui::Gui; use std::thread; type GResult<T=()> = Result<T, failure::Error>; // From include/gdb/signals.h in GDB source code const SIGINT: u32 = 2; const SIGTRAP: u32 = 5; pub struct GdbStub { listener: Option<TcpListener>, stream: Option<TcpStream>, blocking: bool, no_ack_mode: bool, gba: Box<Gba>, bus_snooper: Box<BusDebugSnooper>, framebuffer: Framebuffer, run_state: RunState, events_loop: EventsLoop, gui: Gui, } #[derive(PartialEq)] enum RunState { Running, Paused, } impl GdbStub { pub fn new(mut gba: Box<Gba>) -> GdbStub { let mut bus_snooper = BusDebugSnooper::wrap(gba.arm.bus.clone()); gba.fixup_bus_ptrs(BusPtr::new(bus_snooper.as_mut() as *mut dyn Bus)); let events_loop = EventsLoop::new(); let gui = Gui::new(&events_loop); GdbStub { listener: None, stream: None, blocking: false, no_ack_mode: false, gba, bus_snooper, framebuffer: Framebuffer::new(PHYS_WIDTH, PHYS_HEIGHT), run_state: RunState::Paused, events_loop, gui, } } pub fn listen(&mut self, addr: impl ToSocketAddrs) -> GResult { let listener = TcpListener::bind(addr)?; listener.set_nonblocking(!self.blocking)?; self.listener = Some(listener); Ok(()) } pub fn run(&mut self) -> GResult { while self.gui.running { self.update()?; self.gui.update(&mut self.events_loop, &mut self.gba); if self.run_state == RunState::Running { let deadline = Instant::now() + Duration::from_millis(15); while Instant::now() < deadline { self.step_gba(); if let Some(stop_reason) = self.bus_snooper.stop_reason.take() { note!(GDB, "Stopped in debugger due to {:?}", stop_reason); self.run_state = RunState::Paused; self.send(&stop_reason.to_command())?; break; } } } else { thread::sleep(Duration::from_millis(1)); } } Ok(()) } pub fn update(&mut self) -> GResult { if self.listener.is_none() { return Ok(()); } let listener = self.listener.as_mut().unwrap(); if self.stream.is_none() { let (stream, addr) = if let Some(t) = transpose_would_block(listener.accept())? { t } else { return Ok(()); }; note!(GDB, "TcpListener accepted a connection from {}", addr); stream.set_nonblocking(!self.blocking)?; self.no_ack_mode = false; self.stream = Some(stream); } // Unwrapping because we ensured it's Some above let stream = self.stream.as_mut().unwrap(); let mut bytes = [0u8; 1200]; let mut msg: &[u8]; if let Some(amount) = transpose_would_block(stream.read(&mut bytes[..]))? { if amount == 0 { trace!(GDB, "Received 0 bytes, closing TcpStream.."); self.stream = None; return Ok(()); } else { let ascii_string = bytes_as_ascii(&bytes[..amount]); trace!(GDB, "Received {} bytes: {:?}", amount, ascii_string); msg = &bytes[..amount]; } } else { return Ok(()); } while !msg.is_empty() { let prev = msg; self.parse_message(&mut msg)?; // parse_message() must adjust `msg` to exclude the input it consumed. 
assert_ne!(prev, msg); } Ok(()) } fn parse_message(&mut self, msg: &mut &[u8]) -> GResult { match (*msg)[0] { b'+' => { // ack *msg = &(*msg)[1..]; return Ok(()); } b'-' => { // nak *msg = &(*msg)[1..]; return Ok(()); } b'$' => { // Continue on to process this command } 0x03 => { // Enter debugger *msg = &(*msg)[1..]; self.run_state = RunState::Paused; return self.send_fmt(format_args!("S{:02}", SIGINT)); } first => { // Skip this character, try parsing from the next character onwards *msg = &(*msg)[1..]; warn!(GDB, "packet error; first byte = '{:02X}'", first); return self.nak(); } } if !msg.contains(&b'#') { trace!(GDB, "request was missing '#' character"); return self.nak(); } let (message_body, mut their_checksum_str) = split_at(&msg[1..], b'#')?; if their_checksum_str.len() < 2 { trace!(GDB, "request had a checksum of less than 2 digits"); return self.nak(); } // Cut the checksum off at 2 characters, any input left after that might be another request. *msg = &their_checksum_str[2..]; their_checksum_str = &their_checksum_str[..2]; let our_checksum = checksum(message_body); let their_checksum = hex_to_int(their_checksum_str)?; if our_checksum != their_checksum { warn!(GDB, "incorrect checksum: our_checksum = {}, their_checksum = {}", our_checksum, their_checksum); return self.nak(); } // The input is syntactically well-formed, we'll ack it now, then we can respond with // an empty response if we don't actually understand the command we received. self.ack()?; let message_type = message_body[0]; let message_body = &message_body[1..]; match message_type { b'?' => { // Let's say we halted due to SIGINT self.send_fmt(format_args!("S{:02}", SIGINT))?; } b'c' => { self.do_continue(message_body)?; } b'D' => { self.process_detach_command()?; } b'g' => { self.read_gprs()?; } b'G' => { self.write_gprs(message_body)?; } b'H' => { // Sets the thread to use for subsequent invocations of a particular command. // We only have 1 thread, so acknowledge and do nothing. self.send(b"OK")?; } b'm' => { self.read_memory(message_body)?; } b'M' => { self.write_memory(message_body)?; } b'p' => { self.read_gpr(message_body)?; } b'P' => { self.write_gpr(message_body)?; } b'q' => { self.process_qread_command(message_body)?; } b'Q' => { self.process_qwrite_command(message_body)?; } b's' => { self.do_step(message_body)?; } b'z' => { self.process_z_command(message_body, false)?; } b'Z' => { self.process_z_command(message_body, true)?; } _ => { self.unrecognised_command()?; } } Ok(()) } fn process_qread_command(&mut self, msg: &[u8]) -> GResult { match msg { b"fThreadInfo" => { // First thread in list: thread ID 1 self.send(b"m1") } b"sThreadInfo" => { // End of list, thread ID 1 is the only thread self.send(b"l") } b"C" => { // The current thread is thread 1, we only have 1 thread.. 
self.send(b"QC1") } b"Attached" => { // We, the GDB server, are always already attached to a process self.send(b"1") } b"HostInfo" => { const MACH_O_ARM: u32 = 12; const MACH_O_ARM_V4T: u32 = 5; self.send_fmt(format_args!("cputype:{};cpusubtype:{};ostype:none;vendor:none;endian:little;ptrsize:4;", MACH_O_ARM, MACH_O_ARM_V4T)) } _ => { if let Some(tail) = strip_prefix(msg, b"Supported:") { self.process_qsupported_command(tail) } else { self.unrecognised_command() } } } } fn process_qsupported_command(&mut self, msg: &[u8]) -> GResult { let mut have_capabilities = Vec::new(); for requested_capability in msg.split(|&b| b == b';' || b == b',') { match requested_capability { b"swbreak+" | b"hwbreak+" => { have_capabilities.push(requested_capability); } b"arm" => { have_capabilities.push(b"arm+"); } // TODO: Support "vContSupported+"? _ => {} } } let capability_string = have_capabilities.join(&b';'); self.send(&capability_string) } fn process_qwrite_command(&mut self, msg: &[u8]) -> GResult { match msg { b"StartNoAckMode" => { self.no_ack_mode = true; self.send(b"OK") } _ => { self.unrecognised_command() } } } fn read_gprs(&mut self) -> GResult { let mut reg_string = Vec::with_capacity(16 * 8); for reg in self.gba.arm.regs[..REG_PC].iter() { reg_string.write(&int_to_hex_le(*reg))?; } reg_string.write(&int_to_hex_le(self.gba.arm.current_pc()))?; self.send(&reg_string) } fn write_gprs(&mut self, msg: &[u8]) -> GResult { for (i, value) in msg.chunks_exact(8).map(hex_to_int_le).enumerate() { self.gba.arm.set_reg(i, value?); } self.send(b"OK") } fn read_gpr(&mut self, msg: &[u8]) -> GResult { let reg_index: usize = hex_to_int(msg)?; let reg = if reg_index == 25 { self.gba.arm.cpsr.into() } else if reg_index == REG_PC { self.gba.arm.current_pc() } else if reg_index < 16 { self.gba.arm.regs[reg_index] } else { return self.send(b"E00"); }; self.send(&int_to_hex_le(reg)) } fn write_gpr(&mut self, msg: &[u8]) -> GResult { let (reg_index_str, value_str) = split_at(msg, b'=')?; let reg_index = hex_to_int(reg_index_str)?; let value = hex_to_int_le(value_str)?; self.gba.arm.set_reg(reg_index, value); self.send(b"OK") } fn read_memory(&mut self, msg: &[u8]) -> GResult { let (addr_str, len_str) = split_at(msg, b',')?; let addr: u32 = hex_to_int(addr_str)?; let len: u32 = hex_to_int(len_str)?; let mut result = Vec::<u8>::with_capacity(2 * len as usize); for i in addr..addr + len { let (_, byte) = self.gba.debug_read8(i); result.write_fmt(format_args!("{:02X}", byte))?; } self.send(&result) } fn write_memory(&mut self, msg: &[u8]) -> GResult { let (addr_str, len_str) = split_at(msg, b',')?; let (len_str, data_str) = split_at(len_str, b':')?; let start_addr: u32 = hex_to_int(addr_str)?; let len: u32 = hex_to_int(len_str)?; let data = data_str .chunks(2) .map(hex_to_int) .collect::<Result<Vec<u8>, failure::Error>>()?; for (addr, byte) in (start_addr..start_addr+len).zip(data) { self.gba.debug_write8(addr, byte); } self.send(b"OK") } fn process_z_command(&mut self, msg: &[u8], is_insert: bool) -> GResult { let (type_str, addr_str) = split_at(msg, b',')?; let (addr_str, kind_str) = split_at(addr_str, b',')?; let kind: u32 = hex_to_int(kind_str)?; let start_addr = hex_to_int(addr_str)?; let addr_set: &mut OrderedSet<u32> = match type_str { b"0" | b"1" if kind != 2 && kind != 4 => { return self.unrecognised_command(); } b"0" => { // software breakpoint // TODO: Does it matter that I'm just implementing this like a hardware breakpoint? 
&mut self.bus_snooper.breakpoints } b"1" => { // hardware breakpoint &mut self.bus_snooper.breakpoints } b"2" => { // write watchpoint &mut self.bus_snooper.write_watchpoints } b"3" => { // read watchpoint &mut self.bus_snooper.read_watchpoints } b"4" => { // access watchpoint &mut self.bus_snooper.access_watchpoints } _ => { return self.unrecognised_command(); } }; for addr in start_addr..start_addr+kind { if is_insert { addr_set.insert(addr); } else { addr_set.remove(addr); } } self.send(b"OK") } fn do_continue(&mut self, msg: &[u8]) -> GResult { if !msg.is_empty() { let addr = hex_to_int_le(msg)?; self.gba.arm.branch_to(addr); } self.run_state = RunState::Running; Ok(()) } fn do_step(&mut self, msg: &[u8]) -> GResult { if !msg.is_empty() { let addr = hex_to_int_le(msg)?; self.gba.arm.branch_to(addr); } self.step_gba(); let stop_reason = self.bus_snooper.stop_reason.take() .unwrap_or(StopReason::Step); self.send(&stop_reason.to_command()) } fn step_gba(&mut self) { let pc = self.gba.arm.regs[REG_PC] - self.gba.arm.get_op_size(); if self.bus_snooper.breakpoints.contains(pc) { self.bus_snooper.stop_reason = Some(StopReason::Breakpoint(pc)); } else { self.gba.step(&mut self.framebuffer); } } fn process_detach_command(&mut self) -> GResult { self.send(b"OK")?; // Just close the stream, we have no other bookkeeping to do for detaching. self.stream = None; Ok(()) } fn send_fmt(&mut self, args: Arguments) -> GResult { let mut bytes = Vec::<u8>::new(); bytes.write_fmt(args)?; self.send(&bytes) } fn send(&mut self, message: &[u8]) -> GResult { let mut response = Vec::new(); response.push(b'$'); response.extend_from_slice(message); response.push(b'#'); let checksum = checksum(message); write!(response, "{:02X}", checksum)?; self.send_raw(&response) } fn ack(&mut self) -> GResult { if self.no_ack_mode { return Ok(()); } self.send_raw(b"+") } fn nak(&mut self) -> GResult { if self.no_ack_mode { return Ok(()); } self.send_raw(b"-") } fn unrecognised_command(&mut self) -> GResult { // https://www-zeuthen.desy.de/unix/unixguide/infohtml/gdb/Overview.html // The empty response "$#00" indicates to the GDB client that the command is not supported self.send(&[]) } fn write_fmt(&mut self, args: Arguments) -> GResult { use std::io::Write; let mut v = Vec::new(); v.write_fmt(args)?; Ok(()) } fn send_raw(&mut self, bytes: &[u8]) -> GResult { if let Some(stream) = self.stream.as_mut() { let amount = stream.write(bytes); trace!(GDB, "wrote {:?} bytes of {} ({:?})", amount, bytes.len(), bytes_as_ascii(bytes)); amount?; } else { trace!(GDB, "tried to send {} bytes but stream was None", bytes.len()); } Ok(()) } } #[derive(Debug)] enum StopReason { ReadWatchpoint(u32), WriteWatchpoint(u32), AccessWatchpoint(u32), Breakpoint(u32), Step, } impl StopReason { fn to_command(&self) -> Vec<u8> { let mut result = Vec::new(); match self { StopReason::ReadWatchpoint(addr) => write!(result, "T{:02}rwatch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::WriteWatchpoint(addr) => write!(result, "T{:02}watch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::AccessWatchpoint(addr) => write!(result, "T{:02}awatch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::Breakpoint(_) => write!(result, "T{:02}hwbreak:", SIGTRAP), StopReason::Step => write!(result, "S{:02}", SIGTRAP), }.unwrap(); result } } pub struct BusDebugSnooper { delegate: BusPtr, breakpoints: OrderedSet<u32>, read_watchpoints: OrderedSet<u32>, write_watchpoints: OrderedSet<u32>, 
access_watchpoints: OrderedSet<u32>, stop_reason: Option<StopReason>, } impl BusDebugSnooper { pub fn wrap(delegate: BusPtr) -> Box<BusDebugSnooper> { Box::new(BusDebugSnooper { delegate, breakpoints: OrderedSet::new(), read_watchpoints: OrderedSet::new(), write_watchpoints: OrderedSet::new(), access_watchpoints: OrderedSet::new(), stop_reason: None, }) } fn check_read(&mut self, addr: u32) { if self.read_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::ReadWatchpoint(addr)); } else if self.access_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::AccessWatchpoint(addr)); } } fn check_write(&mut self, addr: u32) { if self.write_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::WriteWatchpoint(addr)); } } } impl Bus for BusDebugSnooper { fn read8(&mut self, addr: u32) -> u8 { self.check_read(addr); self.delegate.read8(addr) } fn
(&mut self, addr: u32) -> u16 { self.check_read(addr); self.delegate.read16(addr) } fn read32(&mut self, addr: u32) -> u32 { self.check_read(addr); self.delegate.read32(addr) } fn write8(&mut self, addr: u32, value: u8) { self.check_write(addr); self.delegate.write8(addr, value); } fn write16(&mut self, addr: u32, value: u16) { self.check_write(addr); self.delegate.write16(addr, value); } fn write32(&mut self, addr: u32, value: u32) { self.check_write(addr); self.delegate.write32(addr, value); } fn exec_thumb_slow(&mut self, addr: u32) -> u16 { self.delegate.exec_thumb_slow(addr) } fn exec_arm_slow(&mut self, addr: u32) -> u32 { self.delegate.exec_arm_slow(addr) } fn add_internal_cycles(&mut self, cycles: i64) { self.delegate.add_internal_cycles(cycles); } } fn hex_to_int<T: PrimInt>(bstr: &[u8]) -> Result<T, failure::Error> { // TODO: Error handle if let Ok(result) = T::from_str_radix(std::str::from_utf8(bstr)?, 16) { Ok(result) } else { Err(failure::err_msg("Failed to parse hex")) } } fn hex_to_int_le(bstr: &[u8]) -> Result<u32, failure::Error> { if bstr.len() != 8 { return Err(failure::err_msg("")) } let mut bytes = [0u8; 4]; for (i, value) in bstr.chunks(2).map(hex_to_int).enumerate() { bytes[i] = value?; } Ok(u32::from_le_bytes(bytes)) } fn int_to_hex_le(reg: u32) -> [u8; 8] { const CHARS: &[u8] = b"0123456789ABCDEF"; let mut result = [0u8; 8]; for (i, &b) in reg.to_le_bytes().iter().enumerate() { result[2*i + 0] = CHARS[(b >> 4) as usize]; result[2*i + 1] = CHARS[(b & 0xF) as usize]; } result } fn transpose_would_block<T>(r: Result<T, io::Error>) -> Result<Option<T>, io::Error> { match r { Ok(t) => Ok(Some(t)), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Ok(None), Err(e) => Err(e), } } fn strip_prefix<'a>(msg: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> { if msg.starts_with(prefix) { Some(&msg[prefix.len()..]) } else { None } } fn bytes_as_ascii(bytes: &[u8]) -> String { let ascii_chars = bytes .iter() .flat_map(|c| std::ascii::escape_default(*c)) .collect::<Vec<_>>(); String::from_utf8(ascii_chars).unwrap() } fn split_at(haystack: &[u8], needle: u8) -> Result<(&[u8], &[u8]), failure::Error> { let split_pos = haystack .iter() .position(|&c| c == needle) .ok_or_else(|| failure::err_msg(format!("missing '{}'", needle as char)))?; Ok((&haystack[..split_pos], &haystack[split_pos + 1..])) } fn checksum(bytes: &[u8]) -> u8 { bytes .iter() .map(|&c| c as u64) .sum::<u64>() as u8 }
read16
identifier_name
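This identifier_name record's suffix also carries the hex helpers used for register transfer: GDB moves u32 register values as byte-wise little-endian hex, fixed at 8 digits, which is what int_to_hex_le and hex_to_int_le implement. A round-trip sketch of that encoding, rewritten standalone on String for brevity (the originals work on byte slices and a failure-style error type):

// GDB transfers u32 registers as byte-wise little-endian hex:
// 0x12345678 goes on the wire as "78563412".
fn int_to_hex_le(reg: u32) -> String {
    reg.to_le_bytes().iter().map(|b| format!("{:02X}", b)).collect()
}

fn hex_to_int_le(s: &str) -> Option<u32> {
    if s.len() != 8 {
        return None; // the original likewise rejects anything but exactly 8 digits
    }
    let mut bytes = [0u8; 4];
    for (i, chunk) in s.as_bytes().chunks(2).enumerate() {
        bytes[i] = u8::from_str_radix(std::str::from_utf8(chunk).ok()?, 16).ok()?;
    }
    Some(u32::from_le_bytes(bytes))
}

fn main() {
    let encoded = int_to_hex_le(0x1234_5678);
    assert_eq!(encoded, "78563412");
    assert_eq!(hex_to_int_le(&encoded), Some(0x1234_5678));
    println!("round-trip ok: {}", encoded);
}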
gdb_stub.rs
use std::io::{self, Read, Write}; use std::net::{TcpListener, TcpStream, ToSocketAddrs}; use num_traits::PrimInt; use crate::log::LogKind::GDB; use std::fmt::Arguments; use crate::gba::Gba; use crate::bus::{BusPtr, Bus}; use crate::utils::OrderedSet; use crate::renderer::{Framebuffer, PHYS_WIDTH, PHYS_HEIGHT}; use std::time::{Instant, Duration}; use crate::arm7tdmi::REG_PC; use glutin::EventsLoop; use crate::gui::Gui; use std::thread; type GResult<T=()> = Result<T, failure::Error>; // From include/gdb/signals.h in GDB source code const SIGINT: u32 = 2; const SIGTRAP: u32 = 5; pub struct GdbStub { listener: Option<TcpListener>, stream: Option<TcpStream>, blocking: bool, no_ack_mode: bool, gba: Box<Gba>, bus_snooper: Box<BusDebugSnooper>, framebuffer: Framebuffer, run_state: RunState, events_loop: EventsLoop, gui: Gui, } #[derive(PartialEq)] enum RunState { Running, Paused, } impl GdbStub { pub fn new(mut gba: Box<Gba>) -> GdbStub { let mut bus_snooper = BusDebugSnooper::wrap(gba.arm.bus.clone()); gba.fixup_bus_ptrs(BusPtr::new(bus_snooper.as_mut() as *mut dyn Bus)); let events_loop = EventsLoop::new(); let gui = Gui::new(&events_loop); GdbStub { listener: None, stream: None, blocking: false, no_ack_mode: false, gba, bus_snooper, framebuffer: Framebuffer::new(PHYS_WIDTH, PHYS_HEIGHT), run_state: RunState::Paused, events_loop, gui, } } pub fn listen(&mut self, addr: impl ToSocketAddrs) -> GResult { let listener = TcpListener::bind(addr)?; listener.set_nonblocking(!self.blocking)?; self.listener = Some(listener); Ok(()) } pub fn run(&mut self) -> GResult { while self.gui.running { self.update()?; self.gui.update(&mut self.events_loop, &mut self.gba); if self.run_state == RunState::Running { let deadline = Instant::now() + Duration::from_millis(15); while Instant::now() < deadline { self.step_gba(); if let Some(stop_reason) = self.bus_snooper.stop_reason.take() { note!(GDB, "Stopped in debugger due to {:?}", stop_reason); self.run_state = RunState::Paused; self.send(&stop_reason.to_command())?; break; } } } else { thread::sleep(Duration::from_millis(1)); } } Ok(()) } pub fn update(&mut self) -> GResult { if self.listener.is_none() { return Ok(()); } let listener = self.listener.as_mut().unwrap(); if self.stream.is_none() { let (stream, addr) = if let Some(t) = transpose_would_block(listener.accept())? { t } else { return Ok(()); }; note!(GDB, "TcpListener accepted a connection from {}", addr); stream.set_nonblocking(!self.blocking)?; self.no_ack_mode = false; self.stream = Some(stream); } // Unwrapping because we ensured it's Some above let stream = self.stream.as_mut().unwrap(); let mut bytes = [0u8; 1200]; let mut msg: &[u8]; if let Some(amount) = transpose_would_block(stream.read(&mut bytes[..]))? { if amount == 0 { trace!(GDB, "Received 0 bytes, closing TcpStream.."); self.stream = None; return Ok(()); } else { let ascii_string = bytes_as_ascii(&bytes[..amount]); trace!(GDB, "Received {} bytes: {:?}", amount, ascii_string); msg = &bytes[..amount]; } } else { return Ok(()); } while !msg.is_empty() { let prev = msg; self.parse_message(&mut msg)?; // parse_message() must adjust `msg` to exclude the input it consumed. 
assert_ne!(prev, msg); } Ok(()) } fn parse_message(&mut self, msg: &mut &[u8]) -> GResult { match (*msg)[0] { b'+' => { // ack *msg = &(*msg)[1..]; return Ok(()); } b'-' => { // nak *msg = &(*msg)[1..]; return Ok(()); } b'$' => { // Continue on to process this command } 0x03 => { // Enter debugger *msg = &(*msg)[1..]; self.run_state = RunState::Paused; return self.send_fmt(format_args!("S{:02}", SIGINT)); } first => { // Skip this character, try parsing from the next character onwards *msg = &(*msg)[1..]; warn!(GDB, "packet error; first byte = '{:02X}'", first); return self.nak(); } } if !msg.contains(&b'#') { trace!(GDB, "request was missing '#' character"); return self.nak(); } let (message_body, mut their_checksum_str) = split_at(&msg[1..], b'#')?; if their_checksum_str.len() < 2 { trace!(GDB, "request had a checksum of less than 2 digits"); return self.nak(); } // Cut the checksum off at 2 characters, any input left after that might be another request. *msg = &their_checksum_str[2..]; their_checksum_str = &their_checksum_str[..2]; let our_checksum = checksum(message_body); let their_checksum = hex_to_int(their_checksum_str)?; if our_checksum != their_checksum { warn!(GDB, "incorrect checksum: our_checksum = {}, their_checksum = {}", our_checksum, their_checksum); return self.nak(); } // The input is syntactically well-formed, we'll ack it now, then we can respond with // an empty response if we don't actually understand the command we received. self.ack()?; let message_type = message_body[0]; let message_body = &message_body[1..]; match message_type { b'?' => { // Let's say we halted due to SIGINT self.send_fmt(format_args!("S{:02}", SIGINT))?; } b'c' => { self.do_continue(message_body)?; } b'D' => { self.process_detach_command()?; } b'g' => { self.read_gprs()?; } b'G' => { self.write_gprs(message_body)?; } b'H' => { // Sets the thread to use for subsequent invocations of a particular command. // We only have 1 thread, so acknowledge and do nothing. self.send(b"OK")?; } b'm' => { self.read_memory(message_body)?; } b'M' => { self.write_memory(message_body)?; } b'p' => { self.read_gpr(message_body)?; } b'P' => { self.write_gpr(message_body)?; } b'q' => { self.process_qread_command(message_body)?; } b'Q' => { self.process_qwrite_command(message_body)?; } b's' => { self.do_step(message_body)?; } b'z' => { self.process_z_command(message_body, false)?; } b'Z' => { self.process_z_command(message_body, true)?; } _ => { self.unrecognised_command()?; } } Ok(()) } fn process_qread_command(&mut self, msg: &[u8]) -> GResult { match msg { b"fThreadInfo" => { // First thread in list: thread ID 1 self.send(b"m1") } b"sThreadInfo" => { // End of list, thread ID 1 is the only thread self.send(b"l") } b"C" => { // The current thread is thread 1, we only have 1 thread.. 
self.send(b"QC1") } b"Attached" => { // We, the GDB server, are always already attached to a process self.send(b"1") } b"HostInfo" => { const MACH_O_ARM: u32 = 12; const MACH_O_ARM_V4T: u32 = 5; self.send_fmt(format_args!("cputype:{};cpusubtype:{};ostype:none;vendor:none;endian:little;ptrsize:4;", MACH_O_ARM, MACH_O_ARM_V4T)) } _ => { if let Some(tail) = strip_prefix(msg, b"Supported:") { self.process_qsupported_command(tail) } else { self.unrecognised_command() } } } } fn process_qsupported_command(&mut self, msg: &[u8]) -> GResult { let mut have_capabilities = Vec::new(); for requested_capability in msg.split(|&b| b == b';' || b == b',') { match requested_capability { b"swbreak+" | b"hwbreak+" => { have_capabilities.push(requested_capability); } b"arm" => { have_capabilities.push(b"arm+"); } // TODO: Support "vContSupported+"? _ => {} } } let capability_string = have_capabilities.join(&b';'); self.send(&capability_string) } fn process_qwrite_command(&mut self, msg: &[u8]) -> GResult { match msg { b"StartNoAckMode" => { self.no_ack_mode = true; self.send(b"OK") } _ => { self.unrecognised_command() } } } fn read_gprs(&mut self) -> GResult { let mut reg_string = Vec::with_capacity(16 * 8); for reg in self.gba.arm.regs[..REG_PC].iter() { reg_string.write(&int_to_hex_le(*reg))?; } reg_string.write(&int_to_hex_le(self.gba.arm.current_pc()))?; self.send(&reg_string) } fn write_gprs(&mut self, msg: &[u8]) -> GResult { for (i, value) in msg.chunks_exact(8).map(hex_to_int_le).enumerate() { self.gba.arm.set_reg(i, value?); } self.send(b"OK") } fn read_gpr(&mut self, msg: &[u8]) -> GResult { let reg_index: usize = hex_to_int(msg)?; let reg = if reg_index == 25 { self.gba.arm.cpsr.into() } else if reg_index == REG_PC { self.gba.arm.current_pc() } else if reg_index < 16 { self.gba.arm.regs[reg_index] } else { return self.send(b"E00"); }; self.send(&int_to_hex_le(reg)) } fn write_gpr(&mut self, msg: &[u8]) -> GResult { let (reg_index_str, value_str) = split_at(msg, b'=')?; let reg_index = hex_to_int(reg_index_str)?; let value = hex_to_int_le(value_str)?; self.gba.arm.set_reg(reg_index, value); self.send(b"OK") } fn read_memory(&mut self, msg: &[u8]) -> GResult { let (addr_str, len_str) = split_at(msg, b',')?; let addr: u32 = hex_to_int(addr_str)?; let len: u32 = hex_to_int(len_str)?; let mut result = Vec::<u8>::with_capacity(2 * len as usize); for i in addr..addr + len { let (_, byte) = self.gba.debug_read8(i); result.write_fmt(format_args!("{:02X}", byte))?; } self.send(&result) } fn write_memory(&mut self, msg: &[u8]) -> GResult { let (addr_str, len_str) = split_at(msg, b',')?; let (len_str, data_str) = split_at(len_str, b':')?; let start_addr: u32 = hex_to_int(addr_str)?; let len: u32 = hex_to_int(len_str)?; let data = data_str .chunks(2) .map(hex_to_int) .collect::<Result<Vec<u8>, failure::Error>>()?; for (addr, byte) in (start_addr..start_addr+len).zip(data) { self.gba.debug_write8(addr, byte); } self.send(b"OK") } fn process_z_command(&mut self, msg: &[u8], is_insert: bool) -> GResult { let (type_str, addr_str) = split_at(msg, b',')?; let (addr_str, kind_str) = split_at(addr_str, b',')?; let kind: u32 = hex_to_int(kind_str)?; let start_addr = hex_to_int(addr_str)?; let addr_set: &mut OrderedSet<u32> = match type_str { b"0" | b"1" if kind != 2 && kind != 4 => { return self.unrecognised_command(); } b"0" => { // software breakpoint // TODO: Does it matter that I'm just implementing this like a hardware breakpoint? 
&mut self.bus_snooper.breakpoints } b"1" => { // hardware breakpoint &mut self.bus_snooper.breakpoints } b"2" => { // write watchpoint &mut self.bus_snooper.write_watchpoints } b"3" => { // read watchpoint &mut self.bus_snooper.read_watchpoints } b"4" => { // access watchpoint &mut self.bus_snooper.access_watchpoints } _ => { return self.unrecognised_command(); } }; for addr in start_addr..start_addr+kind { if is_insert { addr_set.insert(addr); } else { addr_set.remove(addr); } } self.send(b"OK") } fn do_continue(&mut self, msg: &[u8]) -> GResult { if !msg.is_empty() { let addr = hex_to_int_le(msg)?; self.gba.arm.branch_to(addr); } self.run_state = RunState::Running; Ok(()) } fn do_step(&mut self, msg: &[u8]) -> GResult { if !msg.is_empty() { let addr = hex_to_int_le(msg)?; self.gba.arm.branch_to(addr); } self.step_gba(); let stop_reason = self.bus_snooper.stop_reason.take() .unwrap_or(StopReason::Step); self.send(&stop_reason.to_command()) } fn step_gba(&mut self) { let pc = self.gba.arm.regs[REG_PC] - self.gba.arm.get_op_size(); if self.bus_snooper.breakpoints.contains(pc) { self.bus_snooper.stop_reason = Some(StopReason::Breakpoint(pc)); } else { self.gba.step(&mut self.framebuffer); } } fn process_detach_command(&mut self) -> GResult { self.send(b"OK")?; // Just close the stream, we have no other bookkeeping to do for detaching. self.stream = None; Ok(()) } fn send_fmt(&mut self, args: Arguments) -> GResult { let mut bytes = Vec::<u8>::new(); bytes.write_fmt(args)?; self.send(&bytes) } fn send(&mut self, message: &[u8]) -> GResult { let mut response = Vec::new(); response.push(b'$'); response.extend_from_slice(message); response.push(b'#'); let checksum = checksum(message); write!(response, "{:02X}", checksum)?; self.send_raw(&response) } fn ack(&mut self) -> GResult { if self.no_ack_mode { return Ok(()); } self.send_raw(b"+") } fn nak(&mut self) -> GResult { if self.no_ack_mode { return Ok(()); } self.send_raw(b"-") } fn unrecognised_command(&mut self) -> GResult { // https://www-zeuthen.desy.de/unix/unixguide/infohtml/gdb/Overview.html // The empty response "$#00" indicates to the GDB client that the command is not supported self.send(&[]) } fn write_fmt(&mut self, args: Arguments) -> GResult { use std::io::Write; let mut v = Vec::new(); v.write_fmt(args)?; Ok(()) } fn send_raw(&mut self, bytes: &[u8]) -> GResult { if let Some(stream) = self.stream.as_mut() { let amount = stream.write(bytes); trace!(GDB, "wrote {:?} bytes of {} ({:?})", amount, bytes.len(), bytes_as_ascii(bytes)); amount?; } else { trace!(GDB, "tried to send {} bytes but stream was None", bytes.len()); } Ok(()) } } #[derive(Debug)] enum StopReason { ReadWatchpoint(u32), WriteWatchpoint(u32), AccessWatchpoint(u32), Breakpoint(u32), Step, } impl StopReason { fn to_command(&self) -> Vec<u8> { let mut result = Vec::new(); match self { StopReason::ReadWatchpoint(addr) => write!(result, "T{:02}rwatch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::WriteWatchpoint(addr) => write!(result, "T{:02}watch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::AccessWatchpoint(addr) => write!(result, "T{:02}awatch:{}", SIGTRAP, std::str::from_utf8(&int_to_hex_le(*addr)).unwrap()), StopReason::Breakpoint(_) => write!(result, "T{:02}hwbreak:", SIGTRAP), StopReason::Step => write!(result, "S{:02}", SIGTRAP), }.unwrap(); result } } pub struct BusDebugSnooper { delegate: BusPtr, breakpoints: OrderedSet<u32>, read_watchpoints: OrderedSet<u32>, write_watchpoints: OrderedSet<u32>, 
access_watchpoints: OrderedSet<u32>, stop_reason: Option<StopReason>, } impl BusDebugSnooper { pub fn wrap(delegate: BusPtr) -> Box<BusDebugSnooper> { Box::new(BusDebugSnooper { delegate, breakpoints: OrderedSet::new(), read_watchpoints: OrderedSet::new(), write_watchpoints: OrderedSet::new(), access_watchpoints: OrderedSet::new(), stop_reason: None, }) } fn check_read(&mut self, addr: u32) { if self.read_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::ReadWatchpoint(addr)); } else if self.access_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::AccessWatchpoint(addr)); } } fn check_write(&mut self, addr: u32) { if self.write_watchpoints.contains(addr) { self.stop_reason = Some(StopReason::WriteWatchpoint(addr)); } } } impl Bus for BusDebugSnooper { fn read8(&mut self, addr: u32) -> u8 { self.check_read(addr); self.delegate.read8(addr) } fn read16(&mut self, addr: u32) -> u16
fn read32(&mut self, addr: u32) -> u32 { self.check_read(addr); self.delegate.read32(addr) } fn write8(&mut self, addr: u32, value: u8) { self.check_write(addr); self.delegate.write8(addr, value); } fn write16(&mut self, addr: u32, value: u16) { self.check_write(addr); self.delegate.write16(addr, value); } fn write32(&mut self, addr: u32, value: u32) { self.check_write(addr); self.delegate.write32(addr, value); } fn exec_thumb_slow(&mut self, addr: u32) -> u16 { self.delegate.exec_thumb_slow(addr) } fn exec_arm_slow(&mut self, addr: u32) -> u32 { self.delegate.exec_arm_slow(addr) } fn add_internal_cycles(&mut self, cycles: i64) { self.delegate.add_internal_cycles(cycles); } } fn hex_to_int<T: PrimInt>(bstr: &[u8]) -> Result<T, failure::Error> { // TODO: Error handle if let Ok(result) = T::from_str_radix(std::str::from_utf8(bstr)?, 16) { Ok(result) } else { Err(failure::err_msg("Failed to parse hex")) } } fn hex_to_int_le(bstr: &[u8]) -> Result<u32, failure::Error> { if bstr.len() != 8 { return Err(failure::err_msg("")) } let mut bytes = [0u8; 4]; for (i, value) in bstr.chunks(2).map(hex_to_int).enumerate() { bytes[i] = value?; } Ok(u32::from_le_bytes(bytes)) } fn int_to_hex_le(reg: u32) -> [u8; 8] { const CHARS: &[u8] = b"0123456789ABCDEF"; let mut result = [0u8; 8]; for (i, &b) in reg.to_le_bytes().iter().enumerate() { result[2*i + 0] = CHARS[(b >> 4) as usize]; result[2*i + 1] = CHARS[(b & 0xF) as usize]; } result } fn transpose_would_block<T>(r: Result<T, io::Error>) -> Result<Option<T>, io::Error> { match r { Ok(t) => Ok(Some(t)), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Ok(None), Err(e) => Err(e), } } fn strip_prefix<'a>(msg: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> { if msg.starts_with(prefix) { Some(&msg[prefix.len()..]) } else { None } } fn bytes_as_ascii(bytes: &[u8]) -> String { let ascii_chars = bytes .iter() .flat_map(|c| std::ascii::escape_default(*c)) .collect::<Vec<_>>(); String::from_utf8(ascii_chars).unwrap() } fn split_at(haystack: &[u8], needle: u8) -> Result<(&[u8], &[u8]), failure::Error> { let split_pos = haystack .iter() .position(|&c| c == needle) .ok_or_else(|| failure::err_msg(format!("missing '{}'", needle as char)))?; Ok((&haystack[..split_pos], &haystack[split_pos + 1..])) } fn checksum(bytes: &[u8]) -> u8 { bytes .iter() .map(|&c| c as u64) .sum::<u64>() as u8 }
{ self.check_read(addr); self.delegate.read16(addr) }
identifier_body
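Aside on the row above: the `send`/`checksum` pair implements standard GDB Remote Serial Protocol framing, i.e. `$<payload>#<two hex digits>` where the checksum is the payload bytes summed modulo 256. A minimal, self-contained sketch of the same rule (`frame_packet` is an invented name, not part of gdb.rs):

```rust
// Standalone sketch of RSP packet framing as done by `send`/`checksum`:
// "$" + payload + "#" + two uppercase hex digits of the byte sum mod 256.
fn frame_packet(payload: &[u8]) -> Vec<u8> {
    let checksum = payload.iter().fold(0u8, |acc, &b| acc.wrapping_add(b));
    let mut packet = Vec::with_capacity(payload.len() + 4);
    packet.push(b'$');
    packet.extend_from_slice(payload);
    packet.push(b'#');
    packet.extend_from_slice(format!("{:02X}", checksum).as_bytes());
    packet
}

fn main() {
    // 'O' (0x4F) + 'K' (0x4B) = 0x9A
    assert_eq!(frame_packet(b"OK"), b"$OK#9A");
}
```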
install.js
const cp = require('child_process'); const crypto = require('crypto'); const path = require('path'); const fs = require('fs'); const net = require('net'); let nmPath = path.join(__dirname, './node_modules/'); if (!fs.existsSync(nmPath)) { console.log('[info] installing dependencies...') let out = cp.execSync('npm install', { cwd: __dirname, }); console.log('[info] Please enter the command again. Sorry!'); process.exit(15); } const chalk = require('chalk'); const redis = require('ioredis'); const rl = require('prompt-sync')() const log = console.log; const info = (...args) => { log(chalk.white('[info]'), ...args) } const warn = (...args) => { log(chalk.yellow('[warning]'), ...args) } const err = (...args) => { log(chalk.red('[err]'), ...args) } /* if (process.platform !== 'linux') { log(chalk.red('[ERROR]'), 'Your current platform', chalk.yellow(process.platform), 'is not supported by this install script. Sorry!'); // process.exit(1); } */ // First, check if cookies file exists if (!fs.existsSync(path.join(__dirname, '../cookies.txt'))) { err('Please create a file called "cookies.txt" and paste in a list of .ROBLOSECURITY cookies, seperated by new lines.'); process.exit(1); } let wwwDir = path.join(__dirname, '../www/'); let confDir = path.join(wwwDir, './config.json'); const setupConfig = () => { info('Creating random keys...'); /** * @type {*} */ let conf = { cookiesKey: crypto.randomBytes(64).toString('base64').replace(/"/g, '\"'), csrfKey: crypto.randomBytes(64).toString('base64').replace(/"/g, '\"'), }; // check for default conf let add = '127.0.0.1'; let prefix = '2k12Roblox1'; let port = 6379; let pass = ''; const testRedis = () => { return new Promise((res, rej) => { info('Trying to connect to redis...'); let attempt = new redis(port, add, { keyPrefix: prefix, password: pass, enableOfflineQueue: false, }); attempt.on('ready', e => { attempt.setex('testing1234', 1, 'world', (rErr, ok) => { if (rErr) { // Ask for pass err(rErr) pass = rl('It looks like your redis server requires a password. Please enter it and press enter.\n'); if (!pass) { err('Exiting due to no pass.'); process.exit(1); } attempt.disconnect(); testRedis().then(ok => { res(); }).catch(e => { err(e); rej(e); }) } else { // Ok conf.redis = { host: add, port, keyPrefix: prefix, enableOfflineQueue: true, password: pass, } res() } }); }); attempt.on('error', (e) => { // Install try { attempt.disconnect(); } catch (e) { } let isInstalled = rl('Do you have redis installed? [y/n]').toLowerCase(); if (isInstalled !== 'y') { if (process.platform === 'win32') { err('Please install redis. Although you can download it from various websites, it is much easier to use Windows Subsystem for Linux.\n\nMore info here: https://docs.microsoft.com/en-us/windows/wsl/install-win10\n') } else { err('Please install redis. If you use ubuntu, this is as simple as:\n\n' + chalk.bold('sudo apt-get update && sudo apt-get install redis-server\n')); } process.exit(1); } else { let newAdd = rl('Please specify the address/IP/Hostname (excluding the port). Currently: "' + add + '": '); if (!newAdd) { info('Address is not being updated.'); } else { add = newAdd; } let newPort = parseInt(rl('Please specify the port of the redis server. Currently: "' + port + '": ').toLowerCase(), 10); if (!Number.isInteger(newPort)) { info('Port is not being updated.'); } else { port = newPort; } let newPass = rl('If your redis server has a password/Auth, please enter it below. 
Currently: "' + pass + '": '); if (!newPass && !pass) { info('Password is not being updated.'); } else { pass = newPass; } testRedis().then(() => { res(); }).catch(err => { rej(err); }) } }) }) } testRedis().then(ok => { log('Redis config OK. Continuing...'); let doRender = rl('Do you want to enable R6 avatar rendering? [y/n]').toLowerCase(); if (doRender === 'y') { // Enable render setup conf.avatarRender = { enabled: true, } let r = conf.avatarRender; let auth = crypto.randomBytes(32).toString('base64'); let port = 8196; let add = 'http://127.0.0.1'; let customAdd = rl('Do you have a custom render server address? Currently: ' + add + ':' + port.toString() + ': '); if (!customAdd) { info('No custom server is being used - it will be self hosted.'); } else { let newAdd = customAdd.slice(0, customAdd.indexOf(':')); if (newAdd) { add = newAdd; } let newPort = parseInt(customAdd.slice(customAdd.indexOf(':')), 10); if (Number.isInteger(newPort) && newPort <= 99999) { port = newPort; } } r.address = add + ':' + port; r.authorization = auth; let rulesToAdd = rl('Please type out any rules, seperated by a comma.\nSee here for more info: https://github.com/Pokemonjpups/2012-roblox/tree/master/docs/avatar-render/rules.md').split(','); r.rules = rulesToAdd; info('Writing config to disk...') fs.writeFileSync(confDir, JSON.stringify(conf)); info('Write OK. Writing avatar render config to disk...'); let renderPath = path.join(__dirname, '../avatar-render-node/config.json'); if (fs.existsSync(renderPath)) { warn('config.json file for avatar service already exists.'); let isOk = rl('Can the config.json file in avatar-render-node folder be deleted [y/n]?\n'); if (isOk.toLowerCase() === 'y') { fs.unlinkSync(renderPath); info('avatar-render-node/config.json file was deleted.'); fs.writeFileSync(renderPath, JSON.stringify({ port: port, authorization: auth, })); info('Avatar render config file created.'); installModules(); } else { err('Exiting due to config conflict with avatar-render-node.'); process.exit(1); } } else { fs.writeFileSync(renderPath, JSON.stringify({ port: port, authorization: auth, })); info('Avatar render config file created.'); installModules(); } } else { // Dont enable render setup info('Not setting up 3d avatar render service.'); info('Writing config to disk...') fs.writeFileSync(confDir, JSON.stringify(conf)); installModules(); } }); const installModules = () => { let renderPath = path.join(__dirname, '../avatar-render-node/'); let wwwPath = path.join(__dirname, '../www/'); let ss = 0; const startBuild = () => { info('Complete.\n\nTo run the program, enter this command:\n' + chalk.bold(`node ./utils/start.js`)); process.exit(0); } let join = '; '; if (process.platform === 'win32') { join = '&& '; } cp.exec(`npm i ${join} npm run build`, { cwd: renderPath, }, (e, out, stdErr) => { if (e) { err(e); } else { if (stdErr) { err(stdErr); } else { info('Node modules for render service installed.'); ss++; if (ss >= 2) { startBuild(); } } } }); cp.exec(`npm i ${join} npm run build`, { cwd: wwwPath, }, (e, out, stdErr) => { if (e) { err(e); } else { if (stdErr) { err(stdErr); } else { info('Node modules for www service installed.'); ss++; if (ss >= 2) { startBuild(); } } } }); } // Quick check if exists // Check if redis port is ok. If works, and no auth, then just go with default (and maybe give warning about pass). If can't connect, ask for redis info, try to connect, then continue. Also can try installing it with child process (sudo apt-get install redis) } if (fs.existsSync(confDir)) {
JSON.parse(file); } catch (e) { warn('config.json file for www service is invalid json. It should be deleted.'); let isOk = rl('Can the config.json file in www folder be deleted [y/n]?\n'); if (isOk.toLowerCase() === 'y') { fs.unlinkSync(confDir); info('www/config.json file was deleted.'); setupConfig(); } else { err('Exiting due to config conflict.'); process.exit(1); } } log('config.json for www service already exists, so it is not going to be set up. If you need to set up a new one (such as for a version upgrade), please delete or move the config.json file.'); } else { log('config.json for www service does not exist, so it is being created.'); setupConfig(); }
// Confirm it's valid let file = fs.readFileSync(confDir).toString(); try {
random_line_split
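For readers skimming the dump: each row is a fill-in-the-middle sample, and `random_line_split` means the held-out `middle` is an arbitrary span of source lines rather than a syntactic unit. A hedged sketch of how such a row recombines (the struct below is illustrative, mirroring the column names; it is not code from the dump):

```rust
// Illustrative row type; joining the parts in prefix -> middle -> suffix
// order must reproduce the original file.
struct FimRow {
    file_name: String,
    prefix: String,
    middle: String, // the span the model is asked to produce
    suffix: String,
    fim_type: String, // e.g. "random_line_split", "identifier_body", ...
}

fn reconstruct(row: &FimRow) -> String {
    format!("{}{}{}", row.prefix, row.middle, row.suffix)
}

fn main() {
    let row = FimRow {
        file_name: "install.js".into(),
        prefix: "let x = ".into(),
        middle: "1".into(),
        suffix: ";\n".into(),
        fim_type: "random_line_split".into(),
    };
    assert_eq!(reconstruct(&row), "let x = 1;\n");
}
```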
install.js
const cp = require('child_process'); const crypto = require('crypto'); const path = require('path'); const fs = require('fs'); const net = require('net'); let nmPath = path.join(__dirname, './node_modules/'); if (!fs.existsSync(nmPath)) { console.log('[info] installing dependencies...') let out = cp.execSync('npm install', { cwd: __dirname, }); console.log('[info] Please enter the command again. Sorry!'); process.exit(15); } const chalk = require('chalk'); const redis = require('ioredis'); const rl = require('prompt-sync')() const log = console.log; const info = (...args) => { log(chalk.white('[info]'), ...args) } const warn = (...args) => { log(chalk.yellow('[warning]'), ...args) } const err = (...args) => { log(chalk.red('[err]'), ...args) } /* if (process.platform !== 'linux') { log(chalk.red('[ERROR]'), 'Your current platform', chalk.yellow(process.platform), 'is not supported by this install script. Sorry!'); // process.exit(1); } */ // First, check if cookies file exists if (!fs.existsSync(path.join(__dirname, '../cookies.txt'))) { err('Please create a file called "cookies.txt" and paste in a list of .ROBLOSECURITY cookies, seperated by new lines.'); process.exit(1); } let wwwDir = path.join(__dirname, '../www/'); let confDir = path.join(wwwDir, './config.json'); const setupConfig = () => { info('Creating random keys...'); /** * @type {*} */ let conf = { cookiesKey: crypto.randomBytes(64).toString('base64').replace(/"/g, '\"'), csrfKey: crypto.randomBytes(64).toString('base64').replace(/"/g, '\"'), }; // check for default conf let add = '127.0.0.1'; let prefix = '2k12Roblox1'; let port = 6379; let pass = ''; const testRedis = () => { return new Promise((res, rej) => { info('Trying to connect to redis...'); let attempt = new redis(port, add, { keyPrefix: prefix, password: pass, enableOfflineQueue: false, }); attempt.on('ready', e => { attempt.setex('testing1234', 1, 'world', (rErr, ok) => { if (rErr)
else { // Ok conf.redis = { host: add, port, keyPrefix: prefix, enableOfflineQueue: true, password: pass, } res() } }); }); attempt.on('error', (e) => { // Install try { attempt.disconnect(); } catch (e) { } let isInstalled = rl('Do you have redis installed? [y/n]').toLowerCase(); if (isInstalled !== 'y') { if (process.platform === 'win32') { err('Please install redis. Although you can download it from various websites, it is much easier to use Windows Subsystem for Linux.\n\nMore info here: https://docs.microsoft.com/en-us/windows/wsl/install-win10\n') } else { err('Please install redis. If you use ubuntu, this is as simple as:\n\n' + chalk.bold('sudo apt-get update && sudo apt-get install redis-server\n')); } process.exit(1); } else { let newAdd = rl('Please specify the address/IP/Hostname (excluding the port). Currently: "' + add + '": '); if (!newAdd) { info('Address is not being updated.'); } else { add = newAdd; } let newPort = parseInt(rl('Please specify the port of the redis server. Currently: "' + port + '": ').toLowerCase(), 10); if (!Number.isInteger(newPort)) { info('Port is not being updated.'); } else { port = newPort; } let newPass = rl('If your redis server has a password/Auth, please enter it below. Currently: "' + pass + '": '); if (!newPass && !pass) { info('Password is not being updated.'); } else { pass = newPass; } testRedis().then(() => { res(); }).catch(err => { rej(err); }) } }) }) } testRedis().then(ok => { log('Redis config OK. Continuing...'); let doRender = rl('Do you want to enable R6 avatar rendering? [y/n]').toLowerCase(); if (doRender === 'y') { // Enable render setup conf.avatarRender = { enabled: true, } let r = conf.avatarRender; let auth = crypto.randomBytes(32).toString('base64'); let port = 8196; let add = 'http://127.0.0.1'; let customAdd = rl('Do you have a custom render server address? Currently: ' + add + ':' + port.toString() + ': '); if (!customAdd) { info('No custom server is being used - it will be self hosted.'); } else { let newAdd = customAdd.slice(0, customAdd.indexOf(':')); if (newAdd) { add = newAdd; } let newPort = parseInt(customAdd.slice(customAdd.indexOf(':')), 10); if (Number.isInteger(newPort) && newPort <= 99999) { port = newPort; } } r.address = add + ':' + port; r.authorization = auth; let rulesToAdd = rl('Please type out any rules, seperated by a comma.\nSee here for more info: https://github.com/Pokemonjpups/2012-roblox/tree/master/docs/avatar-render/rules.md').split(','); r.rules = rulesToAdd; info('Writing config to disk...') fs.writeFileSync(confDir, JSON.stringify(conf)); info('Write OK. 
Writing avatar render config to disk...'); let renderPath = path.join(__dirname, '../avatar-render-node/config.json'); if (fs.existsSync(renderPath)) { warn('config.json file for avatar service already exists.'); let isOk = rl('Can the config.json file in avatar-render-node folder be deleted [y/n]?\n'); if (isOk.toLowerCase() === 'y') { fs.unlinkSync(renderPath); info('avatar-render-node/config.json file was deleted.'); fs.writeFileSync(renderPath, JSON.stringify({ port: port, authorization: auth, })); info('Avatar render config file created.'); installModules(); } else { err('Exiting due to config conflict with avatar-render-node.'); process.exit(1); } } else { fs.writeFileSync(renderPath, JSON.stringify({ port: port, authorization: auth, })); info('Avatar render config file created.'); installModules(); } } else { // Dont enable render setup info('Not setting up 3d avatar render service.'); info('Writing config to disk...') fs.writeFileSync(confDir, JSON.stringify(conf)); installModules(); } }); const installModules = () => { let renderPath = path.join(__dirname, '../avatar-render-node/'); let wwwPath = path.join(__dirname, '../www/'); let ss = 0; const startBuild = () => { info('Complete.\n\nTo run the program, enter this command:\n' + chalk.bold(`node ./utils/start.js`)); process.exit(0); } let join = '; '; if (process.platform === 'win32') { join = '&& '; } cp.exec(`npm i ${join} npm run build`, { cwd: renderPath, }, (e, out, stdErr) => { if (e) { err(e); } else { if (stdErr) { err(stdErr); } else { info('Node modules for render service installed.'); ss++; if (ss >= 2) { startBuild(); } } } }); cp.exec(`npm i ${join} npm run build`, { cwd: wwwPath, }, (e, out, stdErr) => { if (e) { err(e); } else { if (stdErr) { err(stdErr); } else { info('Node modules for www service installed.'); ss++; if (ss >= 2) { startBuild(); } } } }); } // Quick check if exists // Check if redis port is ok. If works, and no auth, then just go with default (and maybe give warning about pass). If can't connect, ask for redis info, try to connect, then continue. Also can try installing it with child process (sudo apt-get install redis) } if (fs.existsSync(confDir)) { // Confirm it's valid let file = fs.readFileSync(confDir).toString(); try { JSON.parse(file); } catch (err) { warn('config.json file for www service is invalid json. It should be deleted.'); let isOk = rl('Can the config.json file in www folder be deleted [y/n]?\n'); if (isOk.toLowerCase() === 'y') { fs.unlinkSync(file); info('www/config.json file was deleted.'); setupConfig(); } else { err('Exiting due to config conflict.'); process.exit(1); } } log('config.json for www service already exists, so it is not going to be setup. If you need to a setup a new one (such as for a version upgrade), please delete or move the config.json file.'); } else { log('config.json for www service does not exist, so it is being created.'); setupConfig(); }
{ // Ask for pass err(rErr); pass = rl('It looks like your redis server requires a password. Please enter it and press enter.\n'); if (!pass) { err('Exiting due to no pass.'); process.exit(1); } attempt.disconnect(); testRedis().then(ok => { res(); }).catch(e => { err(e); rej(e); }) }
conditional_block
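In a `conditional_block` row like this one, the masked `middle` is a complete branch body: the `if (rErr)` in the prefix and the `else` opening the suffix bracket the hole. The same retry-on-auth-failure shape, sketched standalone in Rust (all names below, `try_connect` and `prompt`, are invented stand-ins, not part of install.js):

```rust
// Hedged Rust analogue of install.js's testRedis retry loop: on an auth
// failure, ask for a password and try again; give up if none is entered.
fn connect_with_retry(mut password: String) -> Result<(), String> {
    loop {
        match try_connect(&password) {
            Err(e) if e.contains("auth") => {
                password = prompt("redis password: ");
                if password.is_empty() {
                    return Err("exiting due to no pass".into());
                }
            }
            other => return other,
        }
    }
}

// Stubs so the sketch compiles on its own.
fn try_connect(_password: &str) -> Result<(), String> { Ok(()) }
fn prompt(_msg: &str) -> String { String::new() }

fn main() {
    assert!(connect_with_retry(String::new()).is_ok());
}
```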
declare.rs
/*! Functionality for declaring Objective-C classes. Classes can be declared using the `ClassDecl` struct. Instance variables and methods can then be added before the class is ultimately registered. # Example The following example demonstrates declaring a class named `MyNumber` that has one ivar, a `u32` named `_number` and a `number` method that returns it: ``` no_run # #[macro_use] extern crate makepad_objc_sys; # use makepad_objc_sys::declare::ClassDecl; # use makepad_objc_sys::runtime::{Class, Object, Sel}; # fn main() { let superclass = class!(NSObject); let mut decl = ClassDecl::new("MyNumber", superclass).unwrap(); // Add an instance variable decl.add_ivar::<u32>("_number"); // Add an ObjC method for getting the number extern fn my_number_get(this: &Object, _cmd: Sel) -> u32 { unsafe { *this.get_ivar("_number") } } unsafe { decl.add_method(sel!(number), my_number_get as extern fn(&Object, Sel) -> u32); } decl.register(); # } ``` */ use std::ffi::CString; use std::mem; use std::ptr; use runtime::{BOOL, Class, Imp, NO, Object, Protocol, Sel, self}; use {Encode, EncodeArguments, Encoding, Message}; /// Types that can be used as the implementation of an Objective-C method. pub trait MethodImplementation { /// The callee type of the method. type Callee: Message; /// The return type of the method. type Ret: Encode; /// The argument types of the method. type Args: EncodeArguments; /// Returns self as an `Imp` of a method. fn imp(self) -> Imp; } macro_rules! method_decl_impl { (-$s:ident, $r:ident, $f:ty, $($t:ident),*) => ( impl<$s, $r $(, $t)*> MethodImplementation for $f where $s: Message, $r: Encode $(, $t: Encode)* { type Callee = $s; type Ret = $r; type Args = ($($t,)*); fn imp(self) -> Imp { unsafe { mem::transmute(self) } } } ); ($($t:ident),*) => ( method_decl_impl!(-T, R, extern fn(&T, Sel $(, $t)*) -> R, $($t),*); method_decl_impl!(-T, R, extern fn(&mut T, Sel $(, $t)*) -> R, $($t),*); ); } method_decl_impl!(); method_decl_impl!(A); method_decl_impl!(A, B); method_decl_impl!(A, B, C); method_decl_impl!(A, B, C, D); method_decl_impl!(A, B, C, D, E); method_decl_impl!(A, B, C, D, E, F); method_decl_impl!(A, B, C, D, E, F, G); method_decl_impl!(A, B, C, D, E, F, G, H); method_decl_impl!(A, B, C, D, E, F, G, H, I); method_decl_impl!(A, B, C, D, E, F, G, H, I, J); method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K); method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K, L); fn count_args(sel: Sel) -> usize { sel.name().chars().filter(|&c| c == ':').count() } fn method_type_encoding(ret: &Encoding, args: &[Encoding]) -> CString { let mut types = ret.as_str().to_owned(); // First two arguments are always self and the selector types.push_str(<*mut Object>::encode().as_str()); types.push_str(Sel::encode().as_str()); types.extend(args.iter().map(|e| e.as_str())); CString::new(types).unwrap() } fn log2_align_of<T>() -> u8 { let align = mem::align_of::<T>(); // Alignments are required to be powers of 2 debug_assert!(align.count_ones() == 1); // log2 of a power of 2 is the number of trailing zeros align.trailing_zeros() as u8 } /// A type for declaring a new class and adding new methods and ivars to it /// before registering it. 
pub struct ClassDecl { cls: *mut Class, } impl ClassDecl { fn with_superclass(name: &str, superclass: Option<&Class>) -> Option<ClassDecl> { let name = CString::new(name).unwrap(); let super_ptr = superclass.map_or(ptr::null(), |c| c); let cls = unsafe { runtime::objc_allocateClassPair(super_ptr, name.as_ptr(), 0) }; if cls.is_null() { None } else { Some(ClassDecl { cls }) } } /// Constructs a `ClassDecl` with the given name and superclass. /// Returns `None` if the class couldn't be allocated. pub fn new(name: &str, superclass: &Class) -> Option<ClassDecl> { ClassDecl::with_superclass(name, Some(superclass)) } /** Constructs a `ClassDecl` declaring a new root class with the given name. Returns `None` if the class couldn't be allocated. An implementation for `+initialize` must also be given; the runtime calls this method for all classes, so it must be defined on root classes. Note that implementing a root class is not a simple endeavor. For example, your class probably cannot be passed to Cocoa code unless the entire `NSObject` protocol is implemented. Functionality it expects, like implementations of `-retain` and `-release` used by ARC, will not be present otherwise. */ pub fn root(name: &str, intitialize_fn: extern fn(&Class, Sel)) -> Option<ClassDecl> { let mut decl = ClassDecl::with_superclass(name, None); if let Some(ref mut decl) = decl { unsafe { decl.add_class_method(sel!(initialize), intitialize_fn); } } decl } /// Adds a method with the given name and implementation to self. /// Panics if the method wasn't sucessfully added /// or if the selector and function take different numbers of arguments. /// Unsafe because the caller must ensure that the types match those that /// are expected when the method is invoked from Objective-C. pub unsafe fn add_method<F>(&mut self, sel: Sel, func: F) where F: MethodImplementation<Callee=Object> { let encs = F::Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&F::Ret::encode(), encs); let success = runtime::class_addMethod(self.cls, sel, func.imp(), types.as_ptr()); assert!(success != NO, "Failed to add method {:?}", sel); } /// Adds a class method with the given name and implementation to self. /// Panics if the method wasn't sucessfully added /// or if the selector and function take different numbers of arguments. /// Unsafe because the caller must ensure that the types match those that /// are expected when the method is invoked from Objective-C. pub unsafe fn add_class_method<F>(&mut self, sel: Sel, func: F) where F: MethodImplementation<Callee=Class> { let encs = F::Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&F::Ret::encode(), encs); let metaclass = (*self.cls).metaclass() as *const _ as *mut _; let success = runtime::class_addMethod(metaclass, sel, func.imp(), types.as_ptr()); assert!(success != NO, "Failed to add class method {:?}", sel); } /// Adds an ivar with type `T` and the provided name to self. /// Panics if the ivar wasn't successfully added. 
pub fn add_ivar<T>(&mut self, name: &str) where T: Encode { let c_name = CString::new(name).unwrap(); let encoding = CString::new(T::encode().as_str()).unwrap(); let size = mem::size_of::<T>(); let align = log2_align_of::<T>(); let success = unsafe { runtime::class_addIvar(self.cls, c_name.as_ptr(), size, align, encoding.as_ptr()) }; assert!(success != NO, "Failed to add ivar {}", name); } /// Adds a protocol to self. Panics if the protocol wasn't successfully /// added pub fn add_protocol(&mut self, proto: &Protocol) { let success = unsafe { runtime::class_addProtocol(self.cls, proto) }; assert!(success != NO, "Failed to add protocol {:?}", proto); } /// Registers self, consuming it and returning a reference to the /// newly registered `Class`. pub fn register(self) -> &'static Class { unsafe { let cls = self.cls; runtime::objc_registerClassPair(cls); // Forget self otherwise the class will be disposed in drop mem::forget(self); &*cls } } } impl Drop for ClassDecl { fn drop(&mut self) { unsafe { runtime::objc_disposeClassPair(self.cls); } } } /// A type for declaring a new protocol and adding new methods to it /// before registering it. pub struct ProtocolDecl { proto: *mut Protocol } impl ProtocolDecl { /// Constructs a `ProtocolDecl` with the given name. Returns `None` if the /// protocol couldn't be allocated. pub fn new(name: &str) -> Option<ProtocolDecl> { let c_name = CString::new(name).unwrap(); let proto = unsafe { runtime::objc_allocateProtocol(c_name.as_ptr()) }; if proto.is_null() { None } else { Some(ProtocolDecl { proto }) } } fn add_method_description_common<Args, Ret>(&mut self, sel: Sel, is_required: bool, is_instance_method: bool) where Args: EncodeArguments, Ret: Encode { let encs = Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&Ret::encode(), encs); unsafe { runtime::protocol_addMethodDescription( self.proto, sel, types.as_ptr(), is_required as BOOL, is_instance_method as BOOL); } } /// Adds an instance method declaration with a given description to self. pub fn add_method_description<Args, Ret>(&mut self, sel: Sel, is_required: bool) where Args: EncodeArguments, Ret: Encode { self.add_method_description_common::<Args, Ret>(sel, is_required, true) } /// Adds a class method declaration with a given description to self. pub fn add_class_method_description<Args, Ret>(&mut self, sel: Sel, is_required: bool) where Args: EncodeArguments, Ret: Encode
/// Adds a requirement on another protocol. pub fn add_protocol(&mut self, proto: &Protocol) { unsafe { runtime::protocol_addProtocol(self.proto, proto); } } /// Registers self, consuming it and returning a reference to the /// newly registered `Protocol`. pub fn register(self) -> &'static Protocol { unsafe { runtime::objc_registerProtocol(self.proto); &*self.proto } } } /* #[cfg(test)] mod tests { use test_utils; #[test] fn test_custom_class() { // Registering the custom class is in test_utils let obj = test_utils::custom_object(); unsafe { let _: () = msg_send![obj, setFoo:13u32]; let result: u32 = msg_send![obj, foo]; assert!(result == 13); } } #[test] fn test_class_method() { let cls = test_utils::custom_class(); unsafe { let result: u32 = msg_send![cls, classFoo]; assert!(result == 7); } } }*/
{ self.add_method_description_common::<Args, Ret>(sel, is_required, false) }
identifier_body
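The masked body in this row is a one-line delegation: the class-method variant forwards to `add_method_description_common` with `is_instance_method = false`. For context, a hypothetical usage of the `ProtocolDecl` API defined above (the protocol and selector names are invented, and this assumes the same crate setup as the module docs, including that the empty tuple implements `EncodeArguments`):

```rust
#[macro_use] extern crate makepad_objc_sys;
use makepad_objc_sys::declare::ProtocolDecl;

fn main() {
    let mut decl = ProtocolDecl::new("MyProtocol").unwrap();
    // Declare a required instance method taking no extra arguments and
    // returning u32; `true` is the is_required flag.
    decl.add_method_description::<(), u32>(sel!(number), true);
    let _proto = decl.register();
}
```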
declare.rs
/*! Functionality for declaring Objective-C classes. Classes can be declared using the `ClassDecl` struct. Instance variables and methods can then be added before the class is ultimately registered. # Example The following example demonstrates declaring a class named `MyNumber` that has one ivar, a `u32` named `_number` and a `number` method that returns it: ``` no_run # #[macro_use] extern crate makepad_objc_sys; # use makepad_objc_sys::declare::ClassDecl; # use makepad_objc_sys::runtime::{Class, Object, Sel}; # fn main() { let superclass = class!(NSObject); let mut decl = ClassDecl::new("MyNumber", superclass).unwrap(); // Add an instance variable decl.add_ivar::<u32>("_number"); // Add an ObjC method for getting the number extern fn my_number_get(this: &Object, _cmd: Sel) -> u32 { unsafe { *this.get_ivar("_number") } } unsafe { decl.add_method(sel!(number), my_number_get as extern fn(&Object, Sel) -> u32); } decl.register(); # } ``` */ use std::ffi::CString; use std::mem; use std::ptr; use runtime::{BOOL, Class, Imp, NO, Object, Protocol, Sel, self}; use {Encode, EncodeArguments, Encoding, Message}; /// Types that can be used as the implementation of an Objective-C method. pub trait MethodImplementation { /// The callee type of the method. type Callee: Message; /// The return type of the method. type Ret: Encode; /// The argument types of the method. type Args: EncodeArguments; /// Returns self as an `Imp` of a method. fn imp(self) -> Imp; } macro_rules! method_decl_impl { (-$s:ident, $r:ident, $f:ty, $($t:ident),*) => ( impl<$s, $r $(, $t)*> MethodImplementation for $f where $s: Message, $r: Encode $(, $t: Encode)* { type Callee = $s; type Ret = $r; type Args = ($($t,)*); fn imp(self) -> Imp { unsafe { mem::transmute(self) } } } ); ($($t:ident),*) => ( method_decl_impl!(-T, R, extern fn(&T, Sel $(, $t)*) -> R, $($t),*); method_decl_impl!(-T, R, extern fn(&mut T, Sel $(, $t)*) -> R, $($t),*); ); } method_decl_impl!(); method_decl_impl!(A); method_decl_impl!(A, B); method_decl_impl!(A, B, C); method_decl_impl!(A, B, C, D); method_decl_impl!(A, B, C, D, E); method_decl_impl!(A, B, C, D, E, F); method_decl_impl!(A, B, C, D, E, F, G); method_decl_impl!(A, B, C, D, E, F, G, H); method_decl_impl!(A, B, C, D, E, F, G, H, I); method_decl_impl!(A, B, C, D, E, F, G, H, I, J); method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K); method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K, L); fn count_args(sel: Sel) -> usize { sel.name().chars().filter(|&c| c == ':').count() } fn method_type_encoding(ret: &Encoding, args: &[Encoding]) -> CString { let mut types = ret.as_str().to_owned(); // First two arguments are always self and the selector types.push_str(<*mut Object>::encode().as_str()); types.push_str(Sel::encode().as_str()); types.extend(args.iter().map(|e| e.as_str())); CString::new(types).unwrap() } fn log2_align_of<T>() -> u8 { let align = mem::align_of::<T>(); // Alignments are required to be powers of 2 debug_assert!(align.count_ones() == 1); // log2 of a power of 2 is the number of trailing zeros align.trailing_zeros() as u8 } /// A type for declaring a new class and adding new methods and ivars to it /// before registering it. pub struct ClassDecl { cls: *mut Class, } impl ClassDecl { fn with_superclass(name: &str, superclass: Option<&Class>) -> Option<ClassDecl> { let name = CString::new(name).unwrap(); let super_ptr = superclass.map_or(ptr::null(), |c| c); let cls = unsafe { runtime::objc_allocateClassPair(super_ptr, name.as_ptr(), 0) }; if cls.is_null()
else { Some(ClassDecl { cls }) } } /// Constructs a `ClassDecl` with the given name and superclass. /// Returns `None` if the class couldn't be allocated. pub fn new(name: &str, superclass: &Class) -> Option<ClassDecl> { ClassDecl::with_superclass(name, Some(superclass)) } /** Constructs a `ClassDecl` declaring a new root class with the given name. Returns `None` if the class couldn't be allocated. An implementation for `+initialize` must also be given; the runtime calls this method for all classes, so it must be defined on root classes. Note that implementing a root class is not a simple endeavor. For example, your class probably cannot be passed to Cocoa code unless the entire `NSObject` protocol is implemented. Functionality it expects, like implementations of `-retain` and `-release` used by ARC, will not be present otherwise. */ pub fn root(name: &str, intitialize_fn: extern fn(&Class, Sel)) -> Option<ClassDecl> { let mut decl = ClassDecl::with_superclass(name, None); if let Some(ref mut decl) = decl { unsafe { decl.add_class_method(sel!(initialize), intitialize_fn); } } decl } /// Adds a method with the given name and implementation to self. /// Panics if the method wasn't sucessfully added /// or if the selector and function take different numbers of arguments. /// Unsafe because the caller must ensure that the types match those that /// are expected when the method is invoked from Objective-C. pub unsafe fn add_method<F>(&mut self, sel: Sel, func: F) where F: MethodImplementation<Callee=Object> { let encs = F::Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&F::Ret::encode(), encs); let success = runtime::class_addMethod(self.cls, sel, func.imp(), types.as_ptr()); assert!(success != NO, "Failed to add method {:?}", sel); } /// Adds a class method with the given name and implementation to self. /// Panics if the method wasn't sucessfully added /// or if the selector and function take different numbers of arguments. /// Unsafe because the caller must ensure that the types match those that /// are expected when the method is invoked from Objective-C. pub unsafe fn add_class_method<F>(&mut self, sel: Sel, func: F) where F: MethodImplementation<Callee=Class> { let encs = F::Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&F::Ret::encode(), encs); let metaclass = (*self.cls).metaclass() as *const _ as *mut _; let success = runtime::class_addMethod(metaclass, sel, func.imp(), types.as_ptr()); assert!(success != NO, "Failed to add class method {:?}", sel); } /// Adds an ivar with type `T` and the provided name to self. /// Panics if the ivar wasn't successfully added. pub fn add_ivar<T>(&mut self, name: &str) where T: Encode { let c_name = CString::new(name).unwrap(); let encoding = CString::new(T::encode().as_str()).unwrap(); let size = mem::size_of::<T>(); let align = log2_align_of::<T>(); let success = unsafe { runtime::class_addIvar(self.cls, c_name.as_ptr(), size, align, encoding.as_ptr()) }; assert!(success != NO, "Failed to add ivar {}", name); } /// Adds a protocol to self. 
Panics if the protocol wasn't successfully /// added pub fn add_protocol(&mut self, proto: &Protocol) { let success = unsafe { runtime::class_addProtocol(self.cls, proto) }; assert!(success != NO, "Failed to add protocol {:?}", proto); } /// Registers self, consuming it and returning a reference to the /// newly registered `Class`. pub fn register(self) -> &'static Class { unsafe { let cls = self.cls; runtime::objc_registerClassPair(cls); // Forget self otherwise the class will be disposed in drop mem::forget(self); &*cls } } } impl Drop for ClassDecl { fn drop(&mut self) { unsafe { runtime::objc_disposeClassPair(self.cls); } } } /// A type for declaring a new protocol and adding new methods to it /// before registering it. pub struct ProtocolDecl { proto: *mut Protocol } impl ProtocolDecl { /// Constructs a `ProtocolDecl` with the given name. Returns `None` if the /// protocol couldn't be allocated. pub fn new(name: &str) -> Option<ProtocolDecl> { let c_name = CString::new(name).unwrap(); let proto = unsafe { runtime::objc_allocateProtocol(c_name.as_ptr()) }; if proto.is_null() { None } else { Some(ProtocolDecl { proto }) } } fn add_method_description_common<Args, Ret>(&mut self, sel: Sel, is_required: bool, is_instance_method: bool) where Args: EncodeArguments, Ret: Encode { let encs = Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&Ret::encode(), encs); unsafe { runtime::protocol_addMethodDescription( self.proto, sel, types.as_ptr(), is_required as BOOL, is_instance_method as BOOL); } } /// Adds an instance method declaration with a given description to self. pub fn add_method_description<Args, Ret>(&mut self, sel: Sel, is_required: bool) where Args: EncodeArguments, Ret: Encode { self.add_method_description_common::<Args, Ret>(sel, is_required, true) } /// Adds a class method declaration with a given description to self. pub fn add_class_method_description<Args, Ret>(&mut self, sel: Sel, is_required: bool) where Args: EncodeArguments, Ret: Encode { self.add_method_description_common::<Args, Ret>(sel, is_required, false) } /// Adds a requirement on another protocol. pub fn add_protocol(&mut self, proto: &Protocol) { unsafe { runtime::protocol_addProtocol(self.proto, proto); } } /// Registers self, consuming it and returning a reference to the /// newly registered `Protocol`. pub fn register(self) -> &'static Protocol { unsafe { runtime::objc_registerProtocol(self.proto); &*self.proto } } } /* #[cfg(test)] mod tests { use test_utils; #[test] fn test_custom_class() { // Registering the custom class is in test_utils let obj = test_utils::custom_object(); unsafe { let _: () = msg_send![obj, setFoo:13u32]; let result: u32 = msg_send![obj, foo]; assert!(result == 13); } } #[test] fn test_class_method() { let cls = test_utils::custom_class(); unsafe { let result: u32 = msg_send![cls, classFoo]; assert!(result == 7); } } }*/
{ None }
conditional_block
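The `conditional_block` target here is just `{ None }`: `objc_allocateClassPair` signals failure (for example, a duplicate class name) by returning a null pointer, which the wrapper maps into an `Option`. The same guard as a standalone sketch (`wrap_raw` is an invented name for illustration):

```rust
// Null-pointer-to-Option guard, as in ClassDecl::with_superclass.
fn wrap_raw(ptr: *mut u8) -> Option<*mut u8> {
    if ptr.is_null() { None } else { Some(ptr) }
}

fn main() {
    assert_eq!(wrap_raw(std::ptr::null_mut()), None);
    let mut x = 0u8;
    assert!(wrap_raw(&mut x).is_some());
}
```

In current Rust the idiomatic spelling is `std::ptr::NonNull::new(ptr)`, which performs exactly this check.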
declare.rs
/*! Functionality for declaring Objective-C classes. Classes can be declared using the `ClassDecl` struct. Instance variables and methods can then be added before the class is ultimately registered. # Example The following example demonstrates declaring a class named `MyNumber` that has one ivar, a `u32` named `_number` and a `number` method that returns it: ``` no_run # #[macro_use] extern crate makepad_objc_sys; # use makepad_objc_sys::declare::ClassDecl; # use makepad_objc_sys::runtime::{Class, Object, Sel}; # fn main() { let superclass = class!(NSObject); let mut decl = ClassDecl::new("MyNumber", superclass).unwrap(); // Add an instance variable decl.add_ivar::<u32>("_number"); // Add an ObjC method for getting the number extern fn my_number_get(this: &Object, _cmd: Sel) -> u32 { unsafe { *this.get_ivar("_number") } } unsafe { decl.add_method(sel!(number), my_number_get as extern fn(&Object, Sel) -> u32); } decl.register(); # } ``` */ use std::ffi::CString; use std::mem; use std::ptr; use runtime::{BOOL, Class, Imp, NO, Object, Protocol, Sel, self}; use {Encode, EncodeArguments, Encoding, Message}; /// Types that can be used as the implementation of an Objective-C method. pub trait MethodImplementation { /// The callee type of the method. type Callee: Message; /// The return type of the method. type Ret: Encode; /// The argument types of the method. type Args: EncodeArguments; /// Returns self as an `Imp` of a method. fn imp(self) -> Imp; } macro_rules! method_decl_impl { (-$s:ident, $r:ident, $f:ty, $($t:ident),*) => ( impl<$s, $r $(, $t)*> MethodImplementation for $f where $s: Message, $r: Encode $(, $t: Encode)* { type Callee = $s; type Ret = $r; type Args = ($($t,)*); fn imp(self) -> Imp { unsafe { mem::transmute(self) } } } ); ($($t:ident),*) => ( method_decl_impl!(-T, R, extern fn(&T, Sel $(, $t)*) -> R, $($t),*); method_decl_impl!(-T, R, extern fn(&mut T, Sel $(, $t)*) -> R, $($t),*); ); } method_decl_impl!(); method_decl_impl!(A); method_decl_impl!(A, B); method_decl_impl!(A, B, C); method_decl_impl!(A, B, C, D); method_decl_impl!(A, B, C, D, E); method_decl_impl!(A, B, C, D, E, F); method_decl_impl!(A, B, C, D, E, F, G); method_decl_impl!(A, B, C, D, E, F, G, H); method_decl_impl!(A, B, C, D, E, F, G, H, I); method_decl_impl!(A, B, C, D, E, F, G, H, I, J); method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K); method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K, L); fn count_args(sel: Sel) -> usize { sel.name().chars().filter(|&c| c == ':').count() } fn method_type_encoding(ret: &Encoding, args: &[Encoding]) -> CString { let mut types = ret.as_str().to_owned(); // First two arguments are always self and the selector types.push_str(<*mut Object>::encode().as_str()); types.push_str(Sel::encode().as_str()); types.extend(args.iter().map(|e| e.as_str())); CString::new(types).unwrap() } fn log2_align_of<T>() -> u8 { let align = mem::align_of::<T>(); // Alignments are required to be powers of 2 debug_assert!(align.count_ones() == 1); // log2 of a power of 2 is the number of trailing zeros align.trailing_zeros() as u8 } /// A type for declaring a new class and adding new methods and ivars to it /// before registering it. 
pub struct ClassDecl { cls: *mut Class, } impl ClassDecl { fn with_superclass(name: &str, superclass: Option<&Class>) -> Option<ClassDecl> { let name = CString::new(name).unwrap(); let super_ptr = superclass.map_or(ptr::null(), |c| c); let cls = unsafe { runtime::objc_allocateClassPair(super_ptr, name.as_ptr(), 0) }; if cls.is_null() { None } else { Some(ClassDecl { cls }) } } /// Constructs a `ClassDecl` with the given name and superclass. /// Returns `None` if the class couldn't be allocated. pub fn new(name: &str, superclass: &Class) -> Option<ClassDecl> { ClassDecl::with_superclass(name, Some(superclass)) } /** Constructs a `ClassDecl` declaring a new root class with the given name. Returns `None` if the class couldn't be allocated. An implementation for `+initialize` must also be given; the runtime calls this method for all classes, so it must be defined on root classes. Note that implementing a root class is not a simple endeavor. For example, your class probably cannot be passed to Cocoa code unless the entire `NSObject` protocol is implemented. Functionality it expects, like implementations of `-retain` and `-release` used by ARC, will not be present otherwise. */ pub fn root(name: &str, intitialize_fn: extern fn(&Class, Sel)) -> Option<ClassDecl> { let mut decl = ClassDecl::with_superclass(name, None); if let Some(ref mut decl) = decl { unsafe { decl.add_class_method(sel!(initialize), intitialize_fn); } } decl } /// Adds a method with the given name and implementation to self. /// Panics if the method wasn't sucessfully added /// or if the selector and function take different numbers of arguments. /// Unsafe because the caller must ensure that the types match those that /// are expected when the method is invoked from Objective-C. pub unsafe fn add_method<F>(&mut self, sel: Sel, func: F) where F: MethodImplementation<Callee=Object> { let encs = F::Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&F::Ret::encode(), encs); let success = runtime::class_addMethod(self.cls, sel, func.imp(), types.as_ptr()); assert!(success != NO, "Failed to add method {:?}", sel); } /// Adds a class method with the given name and implementation to self. /// Panics if the method wasn't sucessfully added /// or if the selector and function take different numbers of arguments. /// Unsafe because the caller must ensure that the types match those that /// are expected when the method is invoked from Objective-C. pub unsafe fn add_class_method<F>(&mut self, sel: Sel, func: F) where F: MethodImplementation<Callee=Class> { let encs = F::Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&F::Ret::encode(), encs); let metaclass = (*self.cls).metaclass() as *const _ as *mut _; let success = runtime::class_addMethod(metaclass, sel, func.imp(), types.as_ptr()); assert!(success != NO, "Failed to add class method {:?}", sel); } /// Adds an ivar with type `T` and the provided name to self. /// Panics if the ivar wasn't successfully added. pub fn add_ivar<T>(&mut self, name: &str) where T: Encode { let c_name = CString::new(name).unwrap();
let size = mem::size_of::<T>(); let align = log2_align_of::<T>(); let success = unsafe { runtime::class_addIvar(self.cls, c_name.as_ptr(), size, align, encoding.as_ptr()) }; assert!(success != NO, "Failed to add ivar {}", name); } /// Adds a protocol to self. Panics if the protocol wasn't successfully /// added pub fn add_protocol(&mut self, proto: &Protocol) { let success = unsafe { runtime::class_addProtocol(self.cls, proto) }; assert!(success != NO, "Failed to add protocol {:?}", proto); } /// Registers self, consuming it and returning a reference to the /// newly registered `Class`. pub fn register(self) -> &'static Class { unsafe { let cls = self.cls; runtime::objc_registerClassPair(cls); // Forget self otherwise the class will be disposed in drop mem::forget(self); &*cls } } } impl Drop for ClassDecl { fn drop(&mut self) { unsafe { runtime::objc_disposeClassPair(self.cls); } } } /// A type for declaring a new protocol and adding new methods to it /// before registering it. pub struct ProtocolDecl { proto: *mut Protocol } impl ProtocolDecl { /// Constructs a `ProtocolDecl` with the given name. Returns `None` if the /// protocol couldn't be allocated. pub fn new(name: &str) -> Option<ProtocolDecl> { let c_name = CString::new(name).unwrap(); let proto = unsafe { runtime::objc_allocateProtocol(c_name.as_ptr()) }; if proto.is_null() { None } else { Some(ProtocolDecl { proto }) } } fn add_method_description_common<Args, Ret>(&mut self, sel: Sel, is_required: bool, is_instance_method: bool) where Args: EncodeArguments, Ret: Encode { let encs = Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&Ret::encode(), encs); unsafe { runtime::protocol_addMethodDescription( self.proto, sel, types.as_ptr(), is_required as BOOL, is_instance_method as BOOL); } } /// Adds an instance method declaration with a given description to self. pub fn add_method_description<Args, Ret>(&mut self, sel: Sel, is_required: bool) where Args: EncodeArguments, Ret: Encode { self.add_method_description_common::<Args, Ret>(sel, is_required, true) } /// Adds a class method declaration with a given description to self. pub fn add_class_method_description<Args, Ret>(&mut self, sel: Sel, is_required: bool) where Args: EncodeArguments, Ret: Encode { self.add_method_description_common::<Args, Ret>(sel, is_required, false) } /// Adds a requirement on another protocol. pub fn add_protocol(&mut self, proto: &Protocol) { unsafe { runtime::protocol_addProtocol(self.proto, proto); } } /// Registers self, consuming it and returning a reference to the /// newly registered `Protocol`. pub fn register(self) -> &'static Protocol { unsafe { runtime::objc_registerProtocol(self.proto); &*self.proto } } } /* #[cfg(test)] mod tests { use test_utils; #[test] fn test_custom_class() { // Registering the custom class is in test_utils let obj = test_utils::custom_object(); unsafe { let _: () = msg_send![obj, setFoo:13u32]; let result: u32 = msg_send![obj, foo]; assert!(result == 13); } } #[test] fn test_class_method() { let cls = test_utils::custom_class(); unsafe { let result: u32 = msg_send![cls, classFoo]; assert!(result == 7); } } }*/
let encoding = CString::new(T::encode().as_str()).unwrap();
random_line_split
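The missing line in this `random_line_split` row builds the Objective-C type-encoding `CString` for the ivar; the surrounding code also passes `class_addIvar` the ivar's size and log2 alignment, which `log2_align_of` computes with `trailing_zeros`. A quick standalone check of that identity (not code from declare.rs):

```rust
// For a power of two, log2 equals the number of trailing zero bits —
// the identity log2_align_of relies on.
fn log2_pow2(x: usize) -> u32 {
    debug_assert_eq!(x.count_ones(), 1); // alignments are powers of two
    x.trailing_zeros()
}

fn main() {
    assert_eq!(log2_pow2(1), 0);
    assert_eq!(log2_pow2(8), 3);
    assert_eq!(log2_pow2(16), 4);
}
```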
declare.rs
/*! Functionality for declaring Objective-C classes. Classes can be declared using the `ClassDecl` struct. Instance variables and methods can then be added before the class is ultimately registered. # Example The following example demonstrates declaring a class named `MyNumber` that has one ivar, a `u32` named `_number` and a `number` method that returns it: ``` no_run # #[macro_use] extern crate makepad_objc_sys; # use makepad_objc_sys::declare::ClassDecl; # use makepad_objc_sys::runtime::{Class, Object, Sel}; # fn main() { let superclass = class!(NSObject); let mut decl = ClassDecl::new("MyNumber", superclass).unwrap(); // Add an instance variable decl.add_ivar::<u32>("_number"); // Add an ObjC method for getting the number extern fn my_number_get(this: &Object, _cmd: Sel) -> u32 { unsafe { *this.get_ivar("_number") } } unsafe { decl.add_method(sel!(number), my_number_get as extern fn(&Object, Sel) -> u32); } decl.register(); # } ``` */ use std::ffi::CString; use std::mem; use std::ptr; use runtime::{BOOL, Class, Imp, NO, Object, Protocol, Sel, self}; use {Encode, EncodeArguments, Encoding, Message}; /// Types that can be used as the implementation of an Objective-C method. pub trait MethodImplementation { /// The callee type of the method. type Callee: Message; /// The return type of the method. type Ret: Encode; /// The argument types of the method. type Args: EncodeArguments; /// Returns self as an `Imp` of a method. fn imp(self) -> Imp; } macro_rules! method_decl_impl { (-$s:ident, $r:ident, $f:ty, $($t:ident),*) => ( impl<$s, $r $(, $t)*> MethodImplementation for $f where $s: Message, $r: Encode $(, $t: Encode)* { type Callee = $s; type Ret = $r; type Args = ($($t,)*); fn imp(self) -> Imp { unsafe { mem::transmute(self) } } } ); ($($t:ident),*) => ( method_decl_impl!(-T, R, extern fn(&T, Sel $(, $t)*) -> R, $($t),*); method_decl_impl!(-T, R, extern fn(&mut T, Sel $(, $t)*) -> R, $($t),*); ); } method_decl_impl!(); method_decl_impl!(A); method_decl_impl!(A, B); method_decl_impl!(A, B, C); method_decl_impl!(A, B, C, D); method_decl_impl!(A, B, C, D, E); method_decl_impl!(A, B, C, D, E, F); method_decl_impl!(A, B, C, D, E, F, G); method_decl_impl!(A, B, C, D, E, F, G, H); method_decl_impl!(A, B, C, D, E, F, G, H, I); method_decl_impl!(A, B, C, D, E, F, G, H, I, J); method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K); method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K, L); fn count_args(sel: Sel) -> usize { sel.name().chars().filter(|&c| c == ':').count() } fn method_type_encoding(ret: &Encoding, args: &[Encoding]) -> CString { let mut types = ret.as_str().to_owned(); // First two arguments are always self and the selector types.push_str(<*mut Object>::encode().as_str()); types.push_str(Sel::encode().as_str()); types.extend(args.iter().map(|e| e.as_str())); CString::new(types).unwrap() } fn log2_align_of<T>() -> u8 { let align = mem::align_of::<T>(); // Alignments are required to be powers of 2 debug_assert!(align.count_ones() == 1); // log2 of a power of 2 is the number of trailing zeros align.trailing_zeros() as u8 } /// A type for declaring a new class and adding new methods and ivars to it /// before registering it. 
pub struct ClassDecl { cls: *mut Class, } impl ClassDecl { fn with_superclass(name: &str, superclass: Option<&Class>) -> Option<ClassDecl> { let name = CString::new(name).unwrap(); let super_ptr = superclass.map_or(ptr::null(), |c| c); let cls = unsafe { runtime::objc_allocateClassPair(super_ptr, name.as_ptr(), 0) }; if cls.is_null() { None } else { Some(ClassDecl { cls }) } } /// Constructs a `ClassDecl` with the given name and superclass. /// Returns `None` if the class couldn't be allocated. pub fn new(name: &str, superclass: &Class) -> Option<ClassDecl> { ClassDecl::with_superclass(name, Some(superclass)) } /** Constructs a `ClassDecl` declaring a new root class with the given name. Returns `None` if the class couldn't be allocated. An implementation for `+initialize` must also be given; the runtime calls this method for all classes, so it must be defined on root classes. Note that implementing a root class is not a simple endeavor. For example, your class probably cannot be passed to Cocoa code unless the entire `NSObject` protocol is implemented. Functionality it expects, like implementations of `-retain` and `-release` used by ARC, will not be present otherwise. */ pub fn root(name: &str, initialize_fn: extern fn(&Class, Sel)) -> Option<ClassDecl> { let mut decl = ClassDecl::with_superclass(name, None); if let Some(ref mut decl) = decl { unsafe { decl.add_class_method(sel!(initialize), initialize_fn); } } decl } /// Adds a method with the given name and implementation to self. /// Panics if the method wasn't successfully added /// or if the selector and function take different numbers of arguments. /// Unsafe because the caller must ensure that the types match those that /// are expected when the method is invoked from Objective-C. pub unsafe fn
<F>(&mut self, sel: Sel, func: F) where F: MethodImplementation<Callee=Object> { let encs = F::Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&F::Ret::encode(), encs); let success = runtime::class_addMethod(self.cls, sel, func.imp(), types.as_ptr()); assert!(success != NO, "Failed to add method {:?}", sel); } /// Adds a class method with the given name and implementation to self. /// Panics if the method wasn't sucessfully added /// or if the selector and function take different numbers of arguments. /// Unsafe because the caller must ensure that the types match those that /// are expected when the method is invoked from Objective-C. pub unsafe fn add_class_method<F>(&mut self, sel: Sel, func: F) where F: MethodImplementation<Callee=Class> { let encs = F::Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&F::Ret::encode(), encs); let metaclass = (*self.cls).metaclass() as *const _ as *mut _; let success = runtime::class_addMethod(metaclass, sel, func.imp(), types.as_ptr()); assert!(success != NO, "Failed to add class method {:?}", sel); } /// Adds an ivar with type `T` and the provided name to self. /// Panics if the ivar wasn't successfully added. pub fn add_ivar<T>(&mut self, name: &str) where T: Encode { let c_name = CString::new(name).unwrap(); let encoding = CString::new(T::encode().as_str()).unwrap(); let size = mem::size_of::<T>(); let align = log2_align_of::<T>(); let success = unsafe { runtime::class_addIvar(self.cls, c_name.as_ptr(), size, align, encoding.as_ptr()) }; assert!(success != NO, "Failed to add ivar {}", name); } /// Adds a protocol to self. Panics if the protocol wasn't successfully /// added pub fn add_protocol(&mut self, proto: &Protocol) { let success = unsafe { runtime::class_addProtocol(self.cls, proto) }; assert!(success != NO, "Failed to add protocol {:?}", proto); } /// Registers self, consuming it and returning a reference to the /// newly registered `Class`. pub fn register(self) -> &'static Class { unsafe { let cls = self.cls; runtime::objc_registerClassPair(cls); // Forget self otherwise the class will be disposed in drop mem::forget(self); &*cls } } } impl Drop for ClassDecl { fn drop(&mut self) { unsafe { runtime::objc_disposeClassPair(self.cls); } } } /// A type for declaring a new protocol and adding new methods to it /// before registering it. pub struct ProtocolDecl { proto: *mut Protocol } impl ProtocolDecl { /// Constructs a `ProtocolDecl` with the given name. Returns `None` if the /// protocol couldn't be allocated. 
pub fn new(name: &str) -> Option<ProtocolDecl> { let c_name = CString::new(name).unwrap(); let proto = unsafe { runtime::objc_allocateProtocol(c_name.as_ptr()) }; if proto.is_null() { None } else { Some(ProtocolDecl { proto }) } } fn add_method_description_common<Args, Ret>(&mut self, sel: Sel, is_required: bool, is_instance_method: bool) where Args: EncodeArguments, Ret: Encode { let encs = Args::encodings(); let encs = encs.as_ref(); let sel_args = count_args(sel); assert!(sel_args == encs.len(), "Selector accepts {} arguments, but function accepts {}", sel_args, encs.len(), ); let types = method_type_encoding(&Ret::encode(), encs); unsafe { runtime::protocol_addMethodDescription( self.proto, sel, types.as_ptr(), is_required as BOOL, is_instance_method as BOOL); } } /// Adds an instance method declaration with a given description to self. pub fn add_method_description<Args, Ret>(&mut self, sel: Sel, is_required: bool) where Args: EncodeArguments, Ret: Encode { self.add_method_description_common::<Args, Ret>(sel, is_required, true) } /// Adds a class method declaration with a given description to self. pub fn add_class_method_description<Args, Ret>(&mut self, sel: Sel, is_required: bool) where Args: EncodeArguments, Ret: Encode { self.add_method_description_common::<Args, Ret>(sel, is_required, false) } /// Adds a requirement on another protocol. pub fn add_protocol(&mut self, proto: &Protocol) { unsafe { runtime::protocol_addProtocol(self.proto, proto); } } /// Registers self, consuming it and returning a reference to the /// newly registered `Protocol`. pub fn register(self) -> &'static Protocol { unsafe { runtime::objc_registerProtocol(self.proto); &*self.proto } } } /* #[cfg(test)] mod tests { use test_utils; #[test] fn test_custom_class() { // Registering the custom class is in test_utils let obj = test_utils::custom_object(); unsafe { let _: () = msg_send![obj, setFoo:13u32]; let result: u32 = msg_send![obj, foo]; assert!(result == 13); } } #[test] fn test_class_method() { let cls = test_utils::custom_class(); unsafe { let result: u32 = msg_send![cls, classFoo]; assert!(result == 7); } } }*/
add_method
identifier_name
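`identifier_name` rows mask a single token: here the prefix stops at `pub unsafe fn`, the suffix resumes at `<F>(...)`, and the target `middle` is just `add_method`. A tiny reconstruction check (strings abbreviated for illustration):

```rust
fn main() {
    // Joining prefix + middle + suffix of an identifier_name row must
    // reproduce the original declaration text.
    let prefix = "pub unsafe fn ";
    let middle = "add_method";
    let suffix = "<F>(&mut self, sel: Sel, func: F)";
    let joined = format!("{}{}{}", prefix, middle, suffix);
    assert!(joined.starts_with("pub unsafe fn add_method<F>"));
}
```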
annealing3.rs
use crate::common::*; use geo::algorithm::coords_iter::CoordsIter; use rand::prelude::*; use rand::seq::SliceRandom; use std::collections::VecDeque; use std::time::{Duration, Instant}; static SEED: [u8; 32] = [ 0xfd, 0x00, 0xf1, 0x5c, 0xde, 0x01, 0x11, 0xc6, 0xc3, 0xea, 0xfb, 0xbf, 0xf3, 0xca, 0xd8, 0x32, 0x6a, 0xe3, 0x07, 0x99, 0xc5, 0xe0, 0x52, 0xe4, 0xaa, 0x35, 0x07, 0x99, 0xe3, 0x2b, 0x9d, 0xc6, ]; fn tscore(solution: &Vec<Point>, input: &Input) -> (f64, f64)
fn ascore(value: (f64, f64), progress: f64) -> f64 { value.0 * progress + (1.0 - progress) * value.1 } pub fn solve( input: &Input, mut solution: Vec<Point>, time_limit: Duration, fix_seed: bool, initial_temperature: f64, ) -> (Vec<Point>, f64) { let n = solution.len(); let mut rng = if fix_seed { SmallRng::from_seed(SEED) } else { SmallRng::from_entropy() }; let mut current_score = tscore(&solution, &input); let out_edges = make_out_edges(&input.figure.edges, n); let original_vertices = &input.figure.vertices; let mut orders = vec![vec![]; n]; for i in 0..n { orders[i] = make_determined_order(&out_edges, Some(i)); } let start_at = Instant::now(); let mut best_solution = solution.clone(); let mut best_score = current_score; let mut progress = 0.0; let mut temperature = initial_temperature; eprintln!("initial_temperature = {}", initial_temperature); let distance_sums = calc_distance_sums(&out_edges, original_vertices.len()); let distance_total: usize = distance_sums.iter().sum(); // eprintln!("{} {:?}", distance_total, distance_sums); let mut iter = 0; let mut move_count = 0; loop { // check time limit iter += 1; if iter % 100 == 0 { let elapsed = Instant::now() - start_at; if best_score.0 == 0.0 || elapsed >= time_limit { eprintln!("iter = {}, move_count = {}", iter, move_count); let dislike = calculate_dislike(&best_solution, &input.hole); return (best_solution, dislike); } // tweak temperature progress = elapsed.as_secs_f64() / time_limit.as_secs_f64(); temperature = initial_temperature * (1.0 - progress) * (-progress).exp2(); } // move to neighbor let r = rng.gen::<f64>(); if r > progress { let mut i = 0; { let r = rng.gen::<usize>() % distance_total; let mut sum = 0; for index in 0..n { sum += distance_sums[index]; if r < sum { i = index; break; } } } let w = rng.gen::<usize>() % 40 + 5; let next_solution = random_move_one_point(i, w, &solution, &input, &mut rng, &out_edges, &orders); if next_solution.is_none() { continue; } move_count += 1; let next_solution = next_solution.unwrap(); // calculate score. FIXME: slow let new_score = tscore(&next_solution, &input); let accept = { let current = ascore(current_score, progress); let new = ascore(new_score, progress); if new < current { true } else { // new_score >= current_score let delta = new - current; let accept_prob = (-delta / temperature).exp(); rng.gen::<f64>() < accept_prob } }; if accept { // accept candidate current_score = new_score; solution = next_solution; } } else { let i = rng.gen::<usize>() % n; let candidate = make_next_candidates( i, original_vertices, &input.hole, input.epsilon, &solution, &out_edges, &mut rng, ); if candidate != original_vertices[i] { move_count += 1; } // calculate score. 
FIXME: slow let old = solution[i]; solution[i] = candidate; let new_score = tscore(&solution, &input); let accept = { let current = ascore(current_score, progress); let new = ascore(new_score, progress); if new < current { true } else { // new_score >= current_score let delta = new - current; let accept_prob = (-delta / temperature).exp(); rng.gen::<f64>() < accept_prob } }; if accept { // accept candidate current_score = new_score; } else { // reject candidate solution[i] = old; } } if current_score < best_score { best_score = current_score; best_solution = solution.clone(); } } } fn make_next_candidates( i: usize, original_vertices: &[Point], hole: &Polygon, epsilon: i64, solution: &[Point], out_edges: &[Vec<usize>], rng: &mut SmallRng, ) -> Point { let some_neighbor = out_edges[i][0]; let original_squared_distance = squared_distance(&original_vertices[i], &original_vertices[some_neighbor]); if original_squared_distance < 100.0 || epsilon < 100000 { let ring = Ring::from_epsilon(solution[some_neighbor], epsilon, original_squared_distance); let mut points = ring_points(&ring); points.shuffle(rng); for &p in points.iter() { if !is_valid_point_move(i, &p, solution, original_vertices, out_edges, hole, epsilon) { continue; } return p; } } else { let od = original_squared_distance.sqrt(); let low = od * (1.0 - epsilon as f64 / 1000000.0).sqrt(); let high = od * (1.0 + epsilon as f64 / 1000000.0).sqrt(); for _iter in 0..100 { let d = low + (high - low) * rng.gen::<f64>(); let theta = 2.0 * std::f64::consts::PI * rng.gen::<f64>(); let vect = Point::new( (theta.cos() * d + 0.5).floor(), (theta.sin() * d + 0.5).floor(), ); let p = solution[some_neighbor] + vect; if !is_valid_point_move(i, &p, solution, original_vertices, out_edges, hole, epsilon) { continue; } return p; } return solution[i]; } unreachable!() } fn is_valid_point_move( index: usize, p: &Point, solution: &[Point], original_vertices: &[Point], out_edges: &[Vec<usize>], hole: &Polygon, epsilon: i64, ) -> bool { let ok1 = out_edges[index].iter().all(|&dst| { is_allowed_distance( &p, &solution[dst], &original_vertices[index], &original_vertices[dst], epsilon, false, ) }); if !ok1 { return false; } let ok2 = out_edges[index] .iter() .all(|&dst| does_line_fit_in_hole(&p, &solution[dst], hole)); if !ok2 { return false; } return true; } fn random_move_one_point( from: usize, w: usize, solution: &Vec<Point>, input: &Input, rng: &mut SmallRng, out_edges: &Vec<Vec<usize>>, orders: &Vec<Vec<usize>>, ) -> Option<Vec<Point>> { let mut gx: f64 = 0.0; let mut gy: f64 = 0.0; for p in solution.iter() { gx += p.x(); gy += p.y(); } gx /= solution.len() as f64; gy /= solution.len() as f64; let g = Point::new(gx, gy); let mut vect1 = solution[from] - g; vect1 = vect1 / squared_distance(&solution[from], &g).sqrt(); let mut np = solution[from]; for iter in 0..5 { let dx = (rng.gen::<usize>() % (w * 2 + 1)) as f64 - w as f64; let dy = (rng.gen::<usize>() % (w * 2 + 1)) as f64 - w as f64; let vect2 = Point::new(dx, dy) / (dx * dx + dy * dy).sqrt(); np = solution[from] + Point::new(dx, dy); if vect1.dot(vect2) > 0.4 - iter as f64 * 0.2 { break; } } if solution[from] == np { return None; } let mut solution = solution.clone(); let old = solution[from]; solution[from] = np; let next_solution = fix_allowed_distance_violation(from, &solution, &input, &out_edges, &orders); solution[from] = old; return next_solution; } fn calc_distance_sums(edges: &Vec<Vec<usize>>, n: usize) -> Vec<usize> { let mut ret = vec![0; n]; for start in 0..n { let mut visited = vec![false; 
n]; visited[start] = true; let mut que = VecDeque::new(); que.push_back((start, 0)); while let Some((from, dist)) = que.pop_front() { ret[start] += dist * dist; // ret[start] += dist; for &to in edges[from].iter() { if visited[to] { continue; } visited[to] = true; que.push_back((to, dist + 1)); } } } return ret; }
{ let dislike = calculate_dislike(&solution, &input.hole); let mut gx: f64 = 0.0; let mut gy: f64 = 0.0; for p in solution.iter() { gx += p.x(); gy += p.y(); } gx /= solution.len() as f64; gy /= solution.len() as f64; let mut vx: f64 = 0.0; let mut vy: f64 = 0.0; for p in solution.iter() { vx += pow2(p.x() - gx); vy += pow2(p.y() - gy); } vx /= solution.len() as f64; vy /= solution.len() as f64; ( dislike / (input.hole.exterior().coords_count() as f64), -(vx + vy), ) }
identifier_body
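The accept/reject logic repeated in both branches of solve above is the standard Metropolis criterion from simulated annealing; a condensed sketch of just that decision, under the same rand API the record uses:

    use rand::Rng;

    /// Always accept an improving candidate; accept a worse one with
    /// probability exp(-delta / temperature), which shrinks as the run cools.
    fn metropolis_accept<R: Rng>(current: f64, candidate: f64, temperature: f64, rng: &mut R) -> bool {
        if candidate < current {
            true
        } else {
            let delta = candidate - current;
            rng.gen::<f64>() < (-delta / temperature).exp()
        }
    }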
annealing3.rs
use crate::common::*; use geo::algorithm::coords_iter::CoordsIter; use rand::prelude::*; use rand::seq::SliceRandom; use std::collections::VecDeque; use std::time::{Duration, Instant}; static SEED: [u8; 32] = [ 0xfd, 0x00, 0xf1, 0x5c, 0xde, 0x01, 0x11, 0xc6, 0xc3, 0xea, 0xfb, 0xbf, 0xf3, 0xca, 0xd8, 0x32, 0x6a, 0xe3, 0x07, 0x99, 0xc5, 0xe0, 0x52, 0xe4, 0xaa, 0x35, 0x07, 0x99, 0xe3, 0x2b, 0x9d, 0xc6, ]; fn tscore(solution: &Vec<Point>, input: &Input) -> (f64, f64) { let dislike = calculate_dislike(&solution, &input.hole); let mut gx: f64 = 0.0; let mut gy: f64 = 0.0; for p in solution.iter() { gx += p.x(); gy += p.y(); } gx /= solution.len() as f64; gy /= solution.len() as f64; let mut vx: f64 = 0.0; let mut vy: f64 = 0.0; for p in solution.iter() { vx += pow2(p.x() - gx); vy += pow2(p.y() - gy); } vx /= solution.len() as f64; vy /= solution.len() as f64; ( dislike / (input.hole.exterior().coords_count() as f64), -(vx + vy), ) } fn ascore(value: (f64, f64), progress: f64) -> f64 { value.0 * progress + (1.0 - progress) * value.1 } pub fn solve( input: &Input, mut solution: Vec<Point>, time_limit: Duration, fix_seed: bool, initial_temperature: f64, ) -> (Vec<Point>, f64) { let n = solution.len(); let mut rng = if fix_seed { SmallRng::from_seed(SEED) } else { SmallRng::from_entropy() }; let mut current_score = tscore(&solution, &input); let out_edges = make_out_edges(&input.figure.edges, n); let original_vertices = &input.figure.vertices; let mut orders = vec![vec![]; n]; for i in 0..n { orders[i] = make_determined_order(&out_edges, Some(i)); } let start_at = Instant::now(); let mut best_solution = solution.clone(); let mut best_score = current_score; let mut progress = 0.0; let mut temperature = initial_temperature; eprintln!("initial_temperature = {}", initial_temperature); let distance_sums = calc_distance_sums(&out_edges, original_vertices.len()); let distance_total: usize = distance_sums.iter().sum(); // eprintln!("{} {:?}", distance_total, distance_sums); let mut iter = 0; let mut move_count = 0; loop { // check time limit iter += 1; if iter % 100 == 0 { let elapsed = Instant::now() - start_at; if best_score.0 == 0.0 || elapsed >= time_limit { eprintln!("iter = {}, move_count = {}", iter, move_count); let dislike = calculate_dislike(&best_solution, &input.hole); return (best_solution, dislike); }
temperature = initial_temperature * (1.0 - progress) * (-progress).exp2(); } // move to neighbor let r = rng.gen::<f64>(); if r > progress { let mut i = 0; { let r = rng.gen::<usize>() % distance_total; let mut sum = 0; for index in 0..n { sum += distance_sums[index]; if r < sum { i = index; break; } } } let w = rng.gen::<usize>() % 40 + 5; let next_solution = random_move_one_point(i, w, &solution, &input, &mut rng, &out_edges, &orders); if next_solution.is_none() { continue; } move_count += 1; let next_solution = next_solution.unwrap(); // calculate score. FIXME: slow let new_score = tscore(&next_solution, &input); let accept = { let current = ascore(current_score, progress); let new = ascore(new_score, progress); if new < current { true } else { // new_score >= current_score let delta = new - current; let accept_prob = (-delta / temperature).exp(); rng.gen::<f64>() < accept_prob } }; if accept { // accept candidate current_score = new_score; solution = next_solution; } } else { let i = rng.gen::<usize>() % n; let candidate = make_next_candidates( i, original_vertices, &input.hole, input.epsilon, &solution, &out_edges, &mut rng, ); if candidate != original_vertices[i] { move_count += 1; } // calculate score. FIXME: slow let old = solution[i]; solution[i] = candidate; let new_score = tscore(&solution, &input); let accept = { let current = ascore(current_score, progress); let new = ascore(new_score, progress); if new < current { true } else { // new_score >= current_score let delta = new - current; let accept_prob = (-delta / temperature).exp(); rng.gen::<f64>() < accept_prob } }; if accept { // accept candidate current_score = new_score; } else { // reject candidate solution[i] = old; } } if current_score < best_score { best_score = current_score; best_solution = solution.clone(); } } } fn make_next_candidates( i: usize, original_vertices: &[Point], hole: &Polygon, epsilon: i64, solution: &[Point], out_edges: &[Vec<usize>], rng: &mut SmallRng, ) -> Point { let some_neighbor = out_edges[i][0]; let original_squared_distance = squared_distance(&original_vertices[i], &original_vertices[some_neighbor]); if original_squared_distance < 100.0 || epsilon < 100000 { let ring = Ring::from_epsilon(solution[some_neighbor], epsilon, original_squared_distance); let mut points = ring_points(&ring); points.shuffle(rng); for &p in points.iter() { if !is_valid_point_move(i, &p, solution, original_vertices, out_edges, hole, epsilon) { continue; } return p; } } else { let od = original_squared_distance.sqrt(); let low = od * (1.0 - epsilon as f64 / 1000000.0).sqrt(); let high = od * (1.0 + epsilon as f64 / 1000000.0).sqrt(); for _iter in 0..100 { let d = low + (high - low) * rng.gen::<f64>(); let theta = 2.0 * std::f64::consts::PI * rng.gen::<f64>(); let vect = Point::new( (theta.cos() * d + 0.5).floor(), (theta.sin() * d + 0.5).floor(), ); let p = solution[some_neighbor] + vect; if !is_valid_point_move(i, &p, solution, original_vertices, out_edges, hole, epsilon) { continue; } return p; } return solution[i]; } unreachable!() } fn is_valid_point_move( index: usize, p: &Point, solution: &[Point], original_vertices: &[Point], out_edges: &[Vec<usize>], hole: &Polygon, epsilon: i64, ) -> bool { let ok1 = out_edges[index].iter().all(|&dst| { is_allowed_distance( &p, &solution[dst], &original_vertices[index], &original_vertices[dst], epsilon, false, ) }); if !ok1 { return false; } let ok2 = out_edges[index] .iter() .all(|&dst| does_line_fit_in_hole(&p, &solution[dst], hole)); if !ok2 { return false; } return true; } 
fn random_move_one_point( from: usize, w: usize, solution: &Vec<Point>, input: &Input, rng: &mut SmallRng, out_edges: &Vec<Vec<usize>>, orders: &Vec<Vec<usize>>, ) -> Option<Vec<Point>> { let mut gx: f64 = 0.0; let mut gy: f64 = 0.0; for p in solution.iter() { gx += p.x(); gy += p.y(); } gx /= solution.len() as f64; gy /= solution.len() as f64; let g = Point::new(gx, gy); let mut vect1 = solution[from] - g; vect1 = vect1 / squared_distance(&solution[from], &g).sqrt(); let mut np = solution[from]; for iter in 0..5 { let dx = (rng.gen::<usize>() % (w * 2 + 1)) as f64 - w as f64; let dy = (rng.gen::<usize>() % (w * 2 + 1)) as f64 - w as f64; let vect2 = Point::new(dx, dy) / (dx * dx + dy * dy).sqrt(); np = solution[from] + Point::new(dx, dy); if vect1.dot(vect2) > 0.4 - iter as f64 * 0.2 { break; } } if solution[from] == np { return None; } let mut solution = solution.clone(); let old = solution[from]; solution[from] = np; let next_solution = fix_allowed_distance_violation(from, &solution, &input, &out_edges, &orders); solution[from] = old; return next_solution; } fn calc_distance_sums(edges: &Vec<Vec<usize>>, n: usize) -> Vec<usize> { let mut ret = vec![0; n]; for start in 0..n { let mut visited = vec![false; n]; visited[start] = true; let mut que = VecDeque::new(); que.push_back((start, 0)); while let Some((from, dist)) = que.pop_front() { ret[start] += dist * dist; // ret[start] += dist; for &to in edges[from].iter() { if visited[to] { continue; } visited[to] = true; que.push_back((to, dist + 1)); } } } return ret; }
// tweak temperature progress = elapsed.as_secs_f64() / time_limit.as_secs_f64();
random_line_split
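This record splits solve exactly at its temperature update. Factored out, the cooling schedule is a linear decay further damped by exp2(-progress), so it reaches zero precisely at the deadline; a small sketch with spot checks:

    /// Cooling schedule used in `solve`.
    fn temperature_at(initial_temperature: f64, progress: f64) -> f64 {
        initial_temperature * (1.0 - progress) * (-progress).exp2()
    }

    // With initial_temperature = 100.0:
    //   progress 0.0 -> 100.0
    //   progress 0.5 -> ~35.36   (100 * 0.5 * 2^-0.5)
    //   progress 1.0 -> 0.0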
annealing3.rs
use crate::common::*; use geo::algorithm::coords_iter::CoordsIter; use rand::prelude::*; use rand::seq::SliceRandom; use std::collections::VecDeque; use std::time::{Duration, Instant}; static SEED: [u8; 32] = [ 0xfd, 0x00, 0xf1, 0x5c, 0xde, 0x01, 0x11, 0xc6, 0xc3, 0xea, 0xfb, 0xbf, 0xf3, 0xca, 0xd8, 0x32, 0x6a, 0xe3, 0x07, 0x99, 0xc5, 0xe0, 0x52, 0xe4, 0xaa, 0x35, 0x07, 0x99, 0xe3, 0x2b, 0x9d, 0xc6, ]; fn tscore(solution: &Vec<Point>, input: &Input) -> (f64, f64) { let dislike = calculate_dislike(&solution, &input.hole); let mut gx: f64 = 0.0; let mut gy: f64 = 0.0; for p in solution.iter() { gx += p.x(); gy += p.y(); } gx /= solution.len() as f64; gy /= solution.len() as f64; let mut vx: f64 = 0.0; let mut vy: f64 = 0.0; for p in solution.iter() { vx += pow2(p.x() - gx); vy += pow2(p.y() - gy); } vx /= solution.len() as f64; vy /= solution.len() as f64; ( dislike / (input.hole.exterior().coords_count() as f64), -(vx + vy), ) } fn ascore(value: (f64, f64), progress: f64) -> f64 { value.0 * progress + (1.0 - progress) * value.1 } pub fn solve( input: &Input, mut solution: Vec<Point>, time_limit: Duration, fix_seed: bool, initial_temperature: f64, ) -> (Vec<Point>, f64) { let n = solution.len(); let mut rng = if fix_seed { SmallRng::from_seed(SEED) } else { SmallRng::from_entropy() }; let mut current_score = tscore(&solution, &input); let out_edges = make_out_edges(&input.figure.edges, n); let original_vertices = &input.figure.vertices; let mut orders = vec![vec![]; n]; for i in 0..n { orders[i] = make_determined_order(&out_edges, Some(i)); } let start_at = Instant::now(); let mut best_solution = solution.clone(); let mut best_score = current_score; let mut progress = 0.0; let mut temperature = initial_temperature; eprintln!("initial_temperature = {}", initial_temperature); let distance_sums = calc_distance_sums(&out_edges, original_vertices.len()); let distance_total: usize = distance_sums.iter().sum(); // eprintln!("{} {:?}", distance_total, distance_sums); let mut iter = 0; let mut move_count = 0; loop { // check time limit iter += 1; if iter % 100 == 0 { let elapsed = Instant::now() - start_at; if best_score.0 == 0.0 || elapsed >= time_limit { eprintln!("iter = {}, move_count = {}", iter, move_count); let dislike = calculate_dislike(&best_solution, &input.hole); return (best_solution, dislike); } // tweak temperature progress = elapsed.as_secs_f64() / time_limit.as_secs_f64(); temperature = initial_temperature * (1.0 - progress) * (-progress).exp2(); } // move to neighbor let r = rng.gen::<f64>(); if r > progress { let mut i = 0; { let r = rng.gen::<usize>() % distance_total; let mut sum = 0; for index in 0..n { sum += distance_sums[index]; if r < sum { i = index; break; } } } let w = rng.gen::<usize>() % 40 + 5; let next_solution = random_move_one_point(i, w, &solution, &input, &mut rng, &out_edges, &orders); if next_solution.is_none() { continue; } move_count += 1; let next_solution = next_solution.unwrap(); // calculate score. 
FIXME: slow let new_score = tscore(&next_solution, &input); let accept = { let current = ascore(current_score, progress); let new = ascore(new_score, progress); if new < current { true } else { // new_score >= current_score let delta = new - current; let accept_prob = (-delta / temperature).exp(); rng.gen::<f64>() < accept_prob } }; if accept { // accept candidate current_score = new_score; solution = next_solution; } } else { let i = rng.gen::<usize>() % n; let candidate = make_next_candidates( i, original_vertices, &input.hole, input.epsilon, &solution, &out_edges, &mut rng, ); if candidate != original_vertices[i] { move_count += 1; } // calculate score. FIXME: slow let old = solution[i]; solution[i] = candidate; let new_score = tscore(&solution, &input); let accept = { let current = ascore(current_score, progress); let new = ascore(new_score, progress); if new < current { true } else { // new_score >= current_score let delta = new - current; let accept_prob = (-delta / temperature).exp(); rng.gen::<f64>() < accept_prob } }; if accept { // accept candidate current_score = new_score; } else { // reject candidate solution[i] = old; } } if current_score < best_score { best_score = current_score; best_solution = solution.clone(); } } } fn make_next_candidates( i: usize, original_vertices: &[Point], hole: &Polygon, epsilon: i64, solution: &[Point], out_edges: &[Vec<usize>], rng: &mut SmallRng, ) -> Point { let some_neighbor = out_edges[i][0]; let original_squared_distance = squared_distance(&original_vertices[i], &original_vertices[some_neighbor]); if original_squared_distance < 100.0 || epsilon < 100000 { let ring = Ring::from_epsilon(solution[some_neighbor], epsilon, original_squared_distance); let mut points = ring_points(&ring); points.shuffle(rng); for &p in points.iter() { if !is_valid_point_move(i, &p, solution, original_vertices, out_edges, hole, epsilon) { continue; } return p; } } else { let od = original_squared_distance.sqrt(); let low = od * (1.0 - epsilon as f64 / 1000000.0).sqrt(); let high = od * (1.0 + epsilon as f64 / 1000000.0).sqrt(); for _iter in 0..100 { let d = low + (high - low) * rng.gen::<f64>(); let theta = 2.0 * std::f64::consts::PI * rng.gen::<f64>(); let vect = Point::new( (theta.cos() * d + 0.5).floor(), (theta.sin() * d + 0.5).floor(), ); let p = solution[some_neighbor] + vect; if !is_valid_point_move(i, &p, solution, original_vertices, out_edges, hole, epsilon)
return p; } return solution[i]; } unreachable!() } fn is_valid_point_move( index: usize, p: &Point, solution: &[Point], original_vertices: &[Point], out_edges: &[Vec<usize>], hole: &Polygon, epsilon: i64, ) -> bool { let ok1 = out_edges[index].iter().all(|&dst| { is_allowed_distance( &p, &solution[dst], &original_vertices[index], &original_vertices[dst], epsilon, false, ) }); if !ok1 { return false; } let ok2 = out_edges[index] .iter() .all(|&dst| does_line_fit_in_hole(&p, &solution[dst], hole)); if !ok2 { return false; } return true; } fn random_move_one_point( from: usize, w: usize, solution: &Vec<Point>, input: &Input, rng: &mut SmallRng, out_edges: &Vec<Vec<usize>>, orders: &Vec<Vec<usize>>, ) -> Option<Vec<Point>> { let mut gx: f64 = 0.0; let mut gy: f64 = 0.0; for p in solution.iter() { gx += p.x(); gy += p.y(); } gx /= solution.len() as f64; gy /= solution.len() as f64; let g = Point::new(gx, gy); let mut vect1 = solution[from] - g; vect1 = vect1 / squared_distance(&solution[from], &g).sqrt(); let mut np = solution[from]; for iter in 0..5 { let dx = (rng.gen::<usize>() % (w * 2 + 1)) as f64 - w as f64; let dy = (rng.gen::<usize>() % (w * 2 + 1)) as f64 - w as f64; let vect2 = Point::new(dx, dy) / (dx * dx + dy * dy).sqrt(); np = solution[from] + Point::new(dx, dy); if vect1.dot(vect2) > 0.4 - iter as f64 * 0.2 { break; } } if solution[from] == np { return None; } let mut solution = solution.clone(); let old = solution[from]; solution[from] = np; let next_solution = fix_allowed_distance_violation(from, &solution, &input, &out_edges, &orders); solution[from] = old; return next_solution; } fn calc_distance_sums(edges: &Vec<Vec<usize>>, n: usize) -> Vec<usize> { let mut ret = vec![0; n]; for start in 0..n { let mut visited = vec![false; n]; visited[start] = true; let mut que = VecDeque::new(); que.push_back((start, 0)); while let Some((from, dist)) = que.pop_front() { ret[start] += dist * dist; // ret[start] += dist; for &to in edges[from].iter() { if visited[to] { continue; } visited[to] = true; que.push_back((to, dist + 1)); } } } return ret; }
{ continue; }
conditional_block
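The else branch of make_next_candidates samples new edge lengths from the band the epsilon tolerance allows: the squared length may deviate from the original by at most epsilon/1e6 relatively. The bounds as a standalone sketch:

    /// Allowed new edge lengths for an edge of original squared length `d2`
    /// under tolerance `epsilon` (in parts per million).
    fn allowed_length_band(d2: f64, epsilon: i64) -> (f64, f64) {
        let od = d2.sqrt();
        let ratio = epsilon as f64 / 1_000_000.0;
        (od * (1.0 - ratio).sqrt(), od * (1.0 + ratio).sqrt())
    }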
annealing3.rs
use crate::common::*; use geo::algorithm::coords_iter::CoordsIter; use rand::prelude::*; use rand::seq::SliceRandom; use std::collections::VecDeque; use std::time::{Duration, Instant}; static SEED: [u8; 32] = [ 0xfd, 0x00, 0xf1, 0x5c, 0xde, 0x01, 0x11, 0xc6, 0xc3, 0xea, 0xfb, 0xbf, 0xf3, 0xca, 0xd8, 0x32, 0x6a, 0xe3, 0x07, 0x99, 0xc5, 0xe0, 0x52, 0xe4, 0xaa, 0x35, 0x07, 0x99, 0xe3, 0x2b, 0x9d, 0xc6, ]; fn tscore(solution: &Vec<Point>, input: &Input) -> (f64, f64) { let dislike = calculate_dislike(&solution, &input.hole); let mut gx: f64 = 0.0; let mut gy: f64 = 0.0; for p in solution.iter() { gx += p.x(); gy += p.y(); } gx /= solution.len() as f64; gy /= solution.len() as f64; let mut vx: f64 = 0.0; let mut vy: f64 = 0.0; for p in solution.iter() { vx += pow2(p.x() - gx); vy += pow2(p.y() - gy); } vx /= solution.len() as f64; vy /= solution.len() as f64; ( dislike / (input.hole.exterior().coords_count() as f64), -(vx + vy), ) } fn ascore(value: (f64, f64), progress: f64) -> f64 { value.0 * progress + (1.0 - progress) * value.1 } pub fn solve( input: &Input, mut solution: Vec<Point>, time_limit: Duration, fix_seed: bool, initial_temperature: f64, ) -> (Vec<Point>, f64) { let n = solution.len(); let mut rng = if fix_seed { SmallRng::from_seed(SEED) } else { SmallRng::from_entropy() }; let mut current_score = tscore(&solution, &input); let out_edges = make_out_edges(&input.figure.edges, n); let original_vertices = &input.figure.vertices; let mut orders = vec![vec![]; n]; for i in 0..n { orders[i] = make_determined_order(&out_edges, Some(i)); } let start_at = Instant::now(); let mut best_solution = solution.clone(); let mut best_score = current_score; let mut progress = 0.0; let mut temperature = initial_temperature; eprintln!("initial_temperature = {}", initial_temperature); let distance_sums = calc_distance_sums(&out_edges, original_vertices.len()); let distance_total: usize = distance_sums.iter().sum(); // eprintln!("{} {:?}", distance_total, distance_sums); let mut iter = 0; let mut move_count = 0; loop { // check time limit iter += 1; if iter % 100 == 0 { let elapsed = Instant::now() - start_at; if best_score.0 == 0.0 || elapsed >= time_limit { eprintln!("iter = {}, move_count = {}", iter, move_count); let dislike = calculate_dislike(&best_solution, &input.hole); return (best_solution, dislike); } // tweak temperature progress = elapsed.as_secs_f64() / time_limit.as_secs_f64(); temperature = initial_temperature * (1.0 - progress) * (-progress).exp2(); } // move to neighbor let r = rng.gen::<f64>(); if r > progress { let mut i = 0; { let r = rng.gen::<usize>() % distance_total; let mut sum = 0; for index in 0..n { sum += distance_sums[index]; if r < sum { i = index; break; } } } let w = rng.gen::<usize>() % 40 + 5; let next_solution = random_move_one_point(i, w, &solution, &input, &mut rng, &out_edges, &orders); if next_solution.is_none() { continue; } move_count += 1; let next_solution = next_solution.unwrap(); // calculate score. 
FIXME: slow let new_score = tscore(&next_solution, &input); let accept = { let current = ascore(current_score, progress); let new = ascore(new_score, progress); if new < current { true } else { // new_score >= current_score let delta = new - current; let accept_prob = (-delta / temperature).exp(); rng.gen::<f64>() < accept_prob } }; if accept { // accept candidate current_score = new_score; solution = next_solution; } } else { let i = rng.gen::<usize>() % n; let candidate = make_next_candidates( i, original_vertices, &input.hole, input.epsilon, &solution, &out_edges, &mut rng, ); if candidate != original_vertices[i] { move_count += 1; } // calculate score. FIXME: slow let old = solution[i]; solution[i] = candidate; let new_score = tscore(&solution, &input); let accept = { let current = ascore(current_score, progress); let new = ascore(new_score, progress); if new < current { true } else { // new_score >= current_score let delta = new - current; let accept_prob = (-delta / temperature).exp(); rng.gen::<f64>() < accept_prob } }; if accept { // accept candidate current_score = new_score; } else { // reject candidate solution[i] = old; } } if current_score < best_score { best_score = current_score; best_solution = solution.clone(); } } } fn make_next_candidates( i: usize, original_vertices: &[Point], hole: &Polygon, epsilon: i64, solution: &[Point], out_edges: &[Vec<usize>], rng: &mut SmallRng, ) -> Point { let some_neighbor = out_edges[i][0]; let original_squared_distance = squared_distance(&original_vertices[i], &original_vertices[some_neighbor]); if original_squared_distance < 100.0 || epsilon < 100000 { let ring = Ring::from_epsilon(solution[some_neighbor], epsilon, original_squared_distance); let mut points = ring_points(&ring); points.shuffle(rng); for &p in points.iter() { if !is_valid_point_move(i, &p, solution, original_vertices, out_edges, hole, epsilon) { continue; } return p; } } else { let od = original_squared_distance.sqrt(); let low = od * (1.0 - epsilon as f64 / 1000000.0).sqrt(); let high = od * (1.0 + epsilon as f64 / 1000000.0).sqrt(); for _iter in 0..100 { let d = low + (high - low) * rng.gen::<f64>(); let theta = 2.0 * std::f64::consts::PI * rng.gen::<f64>(); let vect = Point::new( (theta.cos() * d + 0.5).floor(), (theta.sin() * d + 0.5).floor(), ); let p = solution[some_neighbor] + vect; if !is_valid_point_move(i, &p, solution, original_vertices, out_edges, hole, epsilon) { continue; } return p; } return solution[i]; } unreachable!() } fn is_valid_point_move( index: usize, p: &Point, solution: &[Point], original_vertices: &[Point], out_edges: &[Vec<usize>], hole: &Polygon, epsilon: i64, ) -> bool { let ok1 = out_edges[index].iter().all(|&dst| { is_allowed_distance( &p, &solution[dst], &original_vertices[index], &original_vertices[dst], epsilon, false, ) }); if !ok1 { return false; } let ok2 = out_edges[index] .iter() .all(|&dst| does_line_fit_in_hole(&p, &solution[dst], hole)); if !ok2 { return false; } return true; } fn
( from: usize, w: usize, solution: &Vec<Point>, input: &Input, rng: &mut SmallRng, out_edges: &Vec<Vec<usize>>, orders: &Vec<Vec<usize>>, ) -> Option<Vec<Point>> { let mut gx: f64 = 0.0; let mut gy: f64 = 0.0; for p in solution.iter() { gx += p.x(); gy += p.y(); } gx /= solution.len() as f64; gy /= solution.len() as f64; let g = Point::new(gx, gy); let mut vect1 = solution[from] - g; vect1 = vect1 / squared_distance(&solution[from], &g).sqrt(); let mut np = solution[from]; for iter in 0..5 { let dx = (rng.gen::<usize>() % (w * 2 + 1)) as f64 - w as f64; let dy = (rng.gen::<usize>() % (w * 2 + 1)) as f64 - w as f64; let vect2 = Point::new(dx, dy) / (dx * dx + dy * dy).sqrt(); np = solution[from] + Point::new(dx, dy); if vect1.dot(vect2) > 0.4 - iter as f64 * 0.2 { break; } } if solution[from] == np { return None; } let mut solution = solution.clone(); let old = solution[from]; solution[from] = np; let next_solution = fix_allowed_distance_violation(from, &solution, &input, &out_edges, &orders); solution[from] = old; return next_solution; } fn calc_distance_sums(edges: &Vec<Vec<usize>>, n: usize) -> Vec<usize> { let mut ret = vec![0; n]; for start in 0..n { let mut visited = vec![false; n]; visited[start] = true; let mut que = VecDeque::new(); que.push_back((start, 0)); while let Some((from, dist)) = que.pop_front() { ret[start] += dist * dist; // ret[start] += dist; for &to in edges[from].iter() { if visited[to] { continue; } visited[to] = true; que.push_back((to, dist + 1)); } } } return ret; }
random_move_one_point
identifier_name
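The vertex-selection block at the top of solve is a roulette-wheel draw over distance_sums, so peripheral vertices (those with a large sum of squared BFS distances) are proposed for large moves more often; the same idea as a standalone helper, assuming a non-empty weight vector with a non-zero total:

    use rand::Rng;

    /// Draws an index with probability proportional to its weight.
    fn pick_weighted<R: Rng>(weights: &[usize], rng: &mut R) -> usize {
        let total: usize = weights.iter().sum();
        let r = rng.gen::<usize>() % total; // panics if total == 0
        let mut acc = 0;
        for (i, &w) in weights.iter().enumerate() {
            acc += w;
            if r < acc {
                return i;
            }
        }
        unreachable!("r < total guarantees a hit")
    }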
mod.rs
use futures::prelude::*; use http::Uri; use slog::Logger; use std::{ iter::FromIterator, path::{Path, PathBuf}, str::FromStr, }; use crate::{ cache::{self, Cacheable, Cache as _}, download, error::prelude::*, }; use tokio::io::AsyncReadExt; mod hash_writer; use hash_writer::HashWriter; mod error{ use snafu::Snafu; #[derive(Debug,Snafu)] #[snafu(visibility(pub))] pub enum Error{ #[snafu(display("Invalid uri: {}", source))] BadUri{ source: http::uri::InvalidUri, }, #[snafu(display("Invalid url: {}", source))] BadUrl{ source: url::ParseError, }, } } #[derive(Debug)] pub enum VerifyResult { Good, Bad, NotInCache, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] pub struct Artifact { pub group: String, pub artifact: String, pub version: String, pub classifier: Option<String>, pub extension: Option<String>, } #[derive(Debug, Clone)] pub struct ResolvedArtifact { pub artifact: Artifact, pub repo: Uri, } pub struct Cache; impl Cacheable for ResolvedArtifact { type Cache = crate::cache::FileCache; fn cached_path(&self) -> PathBuf { let mut p = PathBuf::new(); p.push(app_dirs::app_dir(app_dirs::AppDataType::UserCache, crate::APP_INFO, "maven_cache").expect("Cache directory must be accessible")); p.push(&self.artifact.to_path()); p } fn uri(&self) -> crate::cache::Result<Uri> { crate::cache::ResultExt::erased(self.artifact.get_uri_on(&self.repo)) } } impl cache::Cache<ResolvedArtifact> for Cache { fn with( artifact: ResolvedArtifact, manager: download::Manager, log: Logger, ) -> crate::cache::BoxFuture<PathBuf> { let cached_path = artifact.cached_path(); let log = log.new( o!("artifact"=>artifact.artifact.to_string(),"repo"=>artifact.repo.to_string(),"cached_path"=>cached_path.as_path().to_string_lossy().into_owned()), ); Box::pin(async move{ info!(log, "caching maven artifact"); if !Self::is_cached(&artifact) { info!(log, "artifact is not cached, downloading now"); let uri = artifact.uri()?; manager .download(uri.clone(), cached_path.clone(), false, &log).await.context(crate::cache::error::Downloading{uri})?; } Ok(cached_path) }) } } impl Cache { pub async fn verify_cached( resolved: ResolvedArtifact, manager: download::Manager, ) -> download::Result<VerifyResult> { if Self::is_cached(&resolved) { let cached_path = resolved.cached_path(); let sha_url_res = resolved.sha_uri(); let mut cached_file = tokio::fs::File::open(cached_path).await.context(download::error::Io)?; let mut sha = HashWriter::new(); cached_file.copy(&mut sha).await.context(download::error::Io)?; let cached_sha = sha.digest(); let sha_uri = sha_url_res?; let (res,_) = manager.get(sha_uri)?.await?; let hash_str = res.into_body().map_ok(hyper::Chunk::into_bytes).try_concat().await.context(download::error::Hyper)?; if hash_str == format!("{}", cached_sha) { Ok(VerifyResult::Good) } else { Ok(VerifyResult::Bad) } } else { Ok(VerifyResult::NotInCache) } } } impl Artifact { fn to_path(&self) -> PathBuf { let mut p = PathBuf::new(); p.push(&self.group_path()); p.push(&self.artifact); p.push(&self.version); p.push(&self.artifact_filename()); p } pub fn get_uri_on(&self, base: &Uri) -> Result<Uri,error::Error> { let base = crate::util::uri_to_url(base).context(error::BadUrl)?; let path = self.to_path(); let url = base.join(path.to_str().expect("non unicode path encountered")).context(error::BadUrl)?; crate::util::url_to_uri(&url).context(error::BadUri) } fn group_path(&self) -> PathBuf { PathBuf::from_iter(self.group.split('.')) } fn artifact_filename(&self) -> String { let classifier_fmt = match self.classifier {
Some(ref class) => format!("-{classifier}", classifier = class), None => "".to_string(), }; let extension_fmt = match self.extension { Some(ref extension) => extension.clone(), None => "jar".to_string(), }; format!( "{artifact}-{version}{classifier}.{extension}", artifact = self.artifact, version = self.version, classifier = classifier_fmt, extension = extension_fmt ) } pub fn
(&self, repo_uri: Uri) -> ResolvedArtifact { ResolvedArtifact { artifact: self.clone(), repo: repo_uri, } } pub fn download_from( &self, location: &Path, repo_uri: Uri, manager: download::Manager, log: Logger, ) -> impl Future<Output=Result<(), crate::cache::Error>> + Send { Cache::install_at(self.resolve(repo_uri), location.to_owned(), manager, log) } } impl ResolvedArtifact { pub fn to_path(&self) -> PathBuf { self.artifact.to_path() } pub fn sha_uri(&self) -> crate::download::Result<Uri> { let mut url = crate::util::uri_to_url(&self.uri().context(download::error::Cached)?).context(download::error::BadUrl)?; let mut path = url.path().to_owned(); path.push_str(".sha1"); url.set_path(path.as_ref()); crate::util::url_to_uri(&url).context(download::error::BadUri) } pub fn install_at_no_classifier( self, location: PathBuf, manager: download::Manager, log: Logger, ) -> impl Future<Output=crate::cache::Result<()>> + Send { async move{ let cached_path_no_classifier = Self { artifact: Artifact { classifier: None, ..self.artifact.clone() }, repo: self.repo.clone(), }.cached_path(); let filename = cached_path_no_classifier.file_name().expect("Maven artifact should have a filename"); <Self as Cacheable>::install_at_custom_filename(self, location, filename.to_os_string(), manager, log).await } } } #[derive(Debug, PartialEq, Eq)] pub enum ArtifactParseError { BadNumberOfParts, } impl ToString for Artifact { fn to_string(&self) -> String { let mut strn = String::new(); strn.push_str(&self.group); strn.push(':'); strn.push_str(&self.artifact); strn.push(':'); strn.push_str(&self.version); if let Some(ref classifier) = self.classifier { strn.push(':'); strn.push_str(classifier); } if let Some(ref ext) = self.extension { strn.push('@'); strn.push_str(ext); } strn } } impl FromStr for Artifact { type Err = ArtifactParseError; fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> { let parts: Vec<&str> = s.split('@').collect(); let (s, ext): (&str, Option<String>) = match *parts.as_slice() { [s, ext] => (s, Some(ext.to_string())), _ => (s, None), }; let parts = s.split(':'); let parts: Vec<&str> = parts.collect(); match *parts.as_slice() { [grp, art, ver] => Ok(Self { group: grp.into(), artifact: art.into(), version: ver.into(), classifier: None, extension: ext, }), [grp, art, ver, class] => Ok(Self { group: grp.into(), artifact: art.into(), version: ver.into(), classifier: Some(class.into()), extension: ext, }), _ => Err(ArtifactParseError::BadNumberOfParts), } } } #[cfg(test)] mod test { use super::Artifact; #[test] fn parses_simple() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: None, extension: None, }) ) } #[test] fn parses_with_ext() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version@zip".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: None, extension: Some("zip".into()), }) ) } #[test] fn parses_with_classifier() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version:universal".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: Some("universal".into()), extension: None, }) ) } #[test] fn parses_with_ext_and_classifier() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version:universal@zip".parse(), Ok(Artifact { group: 
"net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: Some("universal".into()), extension: Some("zip".into()), }) ) } }
resolve
identifier_name
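A usage sketch for the FromStr implementation carried in this record; the coordinate string below is hypothetical but follows the group:artifact:version[:classifier][@extension] grammar it parses:

    fn parse_example() {
        let artifact: Artifact = "com.example:demo:1.0:sources@zip"
            .parse()
            .expect("well-formed coordinate");
        assert_eq!(artifact.classifier.as_deref(), Some("sources"));
        assert_eq!(artifact.extension.as_deref(), Some("zip"));
        // artifact_filename() would yield "demo-1.0-sources.zip".
    }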
mod.rs
use futures::prelude::*; use http::Uri; use slog::Logger; use std::{ iter::FromIterator, path::{Path, PathBuf}, str::FromStr, }; use crate::{ cache::{self, Cacheable, Cache as _}, download, error::prelude::*, }; use tokio::io::AsyncReadExt; mod hash_writer; use hash_writer::HashWriter; mod error{ use snafu::Snafu; #[derive(Debug,Snafu)] #[snafu(visibility(pub))] pub enum Error{ #[snafu(display("Invalid uri: {}", source))] BadUri{ source: http::uri::InvalidUri, }, #[snafu(display("Invalid url: {}", source))] BadUrl{ source: url::ParseError, }, } } #[derive(Debug)] pub enum VerifyResult { Good, Bad, NotInCache, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] pub struct Artifact { pub group: String, pub artifact: String, pub version: String, pub classifier: Option<String>, pub extension: Option<String>, } #[derive(Debug, Clone)] pub struct ResolvedArtifact { pub artifact: Artifact, pub repo: Uri, } pub struct Cache; impl Cacheable for ResolvedArtifact { type Cache = crate::cache::FileCache; fn cached_path(&self) -> PathBuf { let mut p = PathBuf::new(); p.push(app_dirs::app_dir(app_dirs::AppDataType::UserCache, crate::APP_INFO, "maven_cache").expect("Cache directory must be accessible")); p.push(&self.artifact.to_path()); p } fn uri(&self) -> crate::cache::Result<Uri> { crate::cache::ResultExt::erased(self.artifact.get_uri_on(&self.repo)) } } impl cache::Cache<ResolvedArtifact> for Cache { fn with( artifact: ResolvedArtifact, manager: download::Manager, log: Logger, ) -> crate::cache::BoxFuture<PathBuf> { let cached_path = artifact.cached_path(); let log = log.new( o!("artifact"=>artifact.artifact.to_string(),"repo"=>artifact.repo.to_string(),"cached_path"=>cached_path.as_path().to_string_lossy().into_owned()), ); Box::pin(async move{ info!(log, "caching maven artifact"); if !Self::is_cached(&artifact) { info!(log, "artifact is not cached, downloading now"); let uri = artifact.uri()?; manager .download(uri.clone(), cached_path.clone(), false, &log).await.context(crate::cache::error::Downloading{uri})?; } Ok(cached_path) }) } } impl Cache { pub async fn verify_cached( resolved: ResolvedArtifact, manager: download::Manager, ) -> download::Result<VerifyResult> { if Self::is_cached(&resolved) { let cached_path = resolved.cached_path(); let sha_url_res = resolved.sha_uri(); let mut cached_file = tokio::fs::File::open(cached_path).await.context(download::error::Io)?; let mut sha = HashWriter::new(); cached_file.copy(&mut sha).await.context(download::error::Io)?; let cached_sha = sha.digest(); let sha_uri = sha_url_res?; let (res,_) = manager.get(sha_uri)?.await?; let hash_str = res.into_body().map_ok(hyper::Chunk::into_bytes).try_concat().await.context(download::error::Hyper)?; if hash_str == format!("{}", cached_sha) { Ok(VerifyResult::Good) } else { Ok(VerifyResult::Bad) } } else { Ok(VerifyResult::NotInCache) } } } impl Artifact { fn to_path(&self) -> PathBuf { let mut p = PathBuf::new(); p.push(&self.group_path()); p.push(&self.artifact); p.push(&self.version); p.push(&self.artifact_filename()); p } pub fn get_uri_on(&self, base: &Uri) -> Result<Uri,error::Error> { let base = crate::util::uri_to_url(base).context(error::BadUrl)?; let path = self.to_path(); let url = base.join(path.to_str().expect("non unicode path encountered")).context(error::BadUrl)?; crate::util::url_to_uri(&url).context(error::BadUri) } fn group_path(&self) -> PathBuf { PathBuf::from_iter(self.group.split('.')) } fn artifact_filename(&self) -> String { let classifier_fmt = match self.classifier {
Some(ref class) => format!("-{classifier}", classifier = class), None => "".to_string(), }; let extension_fmt = match self.extension { Some(ref extension) => extension.clone(), None => "jar".to_string(), }; format!( "{artifact}-{version}{classifier}.{extension}", artifact = self.artifact, version = self.version, classifier = classifier_fmt, extension = extension_fmt ) } pub fn resolve(&self, repo_uri: Uri) -> ResolvedArtifact { ResolvedArtifact { artifact: self.clone(), repo: repo_uri, } } pub fn download_from( &self, location: &Path, repo_uri: Uri, manager: download::Manager, log: Logger, ) -> impl Future<Output=Result<(), crate::cache::Error>> + Send { Cache::install_at(self.resolve(repo_uri), location.to_owned(), manager, log) } } impl ResolvedArtifact { pub fn to_path(&self) -> PathBuf { self.artifact.to_path() } pub fn sha_uri(&self) -> crate::download::Result<Uri> { let mut url = crate::util::uri_to_url(&self.uri().context(download::error::Cached)?).context(download::error::BadUrl)?; let mut path = url.path().to_owned(); path.push_str(".sha1"); url.set_path(path.as_ref()); crate::util::url_to_uri(&url).context(download::error::BadUri) } pub fn install_at_no_classifier( self, location: PathBuf, manager: download::Manager, log: Logger, ) -> impl Future<Output=crate::cache::Result<()>> + Send { async move{ let cached_path_no_classifier = Self { artifact: Artifact { classifier: None, ..self.artifact.clone() }, repo: self.repo.clone(), }.cached_path(); let filename = cached_path_no_classifier.file_name().expect("Maven artifact should have a filename"); <Self as Cacheable>::install_at_custom_filename(self, location, filename.to_os_string(), manager, log).await } } } #[derive(Debug, PartialEq, Eq)] pub enum ArtifactParseError { BadNumberOfParts, } impl ToString for Artifact { fn to_string(&self) -> String { let mut strn = String::new(); strn.push_str(&self.group); strn.push(':'); strn.push_str(&self.artifact); strn.push(':'); strn.push_str(&self.version); if let Some(ref classifier) = self.classifier
if let Some(ref ext) = self.extension { strn.push('@'); strn.push_str(ext); } strn } } impl FromStr for Artifact { type Err = ArtifactParseError; fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> { let parts: Vec<&str> = s.split('@').collect(); let (s, ext): (&str, Option<String>) = match *parts.as_slice() { [s, ext] => (s, Some(ext.to_string())), _ => (s, None), }; let parts = s.split(':'); let parts: Vec<&str> = parts.collect(); match *parts.as_slice() { [grp, art, ver] => Ok(Self { group: grp.into(), artifact: art.into(), version: ver.into(), classifier: None, extension: ext, }), [grp, art, ver, class] => Ok(Self { group: grp.into(), artifact: art.into(), version: ver.into(), classifier: Some(class.into()), extension: ext, }), _ => Err(ArtifactParseError::BadNumberOfParts), } } } #[cfg(test)] mod test { use super::Artifact; #[test] fn parses_simple() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: None, extension: None, }) ) } #[test] fn parses_with_ext() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version@zip".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: None, extension: Some("zip".into()), }) ) } #[test] fn parses_with_classifier() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version:universal".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: Some("universal".into()), extension: None, }) ) } #[test] fn parses_with_ext_and_classifier() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version:universal@zip".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: Some("universal".into()), extension: Some("zip".into()), }) ) } }
{ strn.push(':'); strn.push_str(classifier); }
conditional_block
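The to_string body completed by this record re-emits the parts in the same order from_str consumes them, so parsing and printing round-trip; a property-style sketch:

    /// Holds for any coordinate `from_str` accepts.
    fn round_trips(coord: &str) -> bool {
        coord
            .parse::<Artifact>()
            .map(|a| a.to_string() == coord)
            .unwrap_or(false)
    }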
mod.rs
use futures::prelude::*; use http::Uri; use slog::Logger; use std::{ iter::FromIterator, path::{Path, PathBuf}, str::FromStr, }; use crate::{ cache::{self, Cacheable, Cache as _}, download, error::prelude::*, }; use tokio::io::AsyncReadExt; mod hash_writer; use hash_writer::HashWriter; mod error{ use snafu::Snafu; #[derive(Debug,Snafu)] #[snafu(visibility(pub))] pub enum Error{ #[snafu(display("Invalid uri: {}", source))] BadUri{ source: http::uri::InvalidUri, }, #[snafu(display("Invalid url: {}", source))] BadUrl{ source: url::ParseError, }, } } #[derive(Debug)] pub enum VerifyResult { Good, Bad, NotInCache, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] pub struct Artifact { pub group: String, pub artifact: String, pub version: String, pub classifier: Option<String>, pub extension: Option<String>, } #[derive(Debug, Clone)] pub struct ResolvedArtifact { pub artifact: Artifact, pub repo: Uri, } pub struct Cache; impl Cacheable for ResolvedArtifact { type Cache = crate::cache::FileCache; fn cached_path(&self) -> PathBuf { let mut p = PathBuf::new(); p.push(app_dirs::app_dir(app_dirs::AppDataType::UserCache, crate::APP_INFO, "maven_cache").expect("Cache directory must be accessible")); p.push(&self.artifact.to_path()); p } fn uri(&self) -> crate::cache::Result<Uri> { crate::cache::ResultExt::erased(self.artifact.get_uri_on(&self.repo)) } } impl cache::Cache<ResolvedArtifact> for Cache { fn with( artifact: ResolvedArtifact, manager: download::Manager, log: Logger, ) -> crate::cache::BoxFuture<PathBuf> { let cached_path = artifact.cached_path(); let log = log.new( o!("artifact"=>artifact.artifact.to_string(),"repo"=>artifact.repo.to_string(),"cached_path"=>cached_path.as_path().to_string_lossy().into_owned()), ); Box::pin(async move{ info!(log, "caching maven artifact"); if !Self::is_cached(&artifact) { info!(log, "artifact is not cached, downloading now"); let uri = artifact.uri()?; manager .download(uri.clone(), cached_path.clone(), false, &log).await.context(crate::cache::error::Downloading{uri})?; } Ok(cached_path) }) } } impl Cache { pub async fn verify_cached( resolved: ResolvedArtifact, manager: download::Manager, ) -> download::Result<VerifyResult> { if Self::is_cached(&resolved) { let cached_path = resolved.cached_path(); let sha_url_res = resolved.sha_uri(); let mut cached_file = tokio::fs::File::open(cached_path).await.context(download::error::Io)?; let mut sha = HashWriter::new(); cached_file.copy(&mut sha).await.context(download::error::Io)?; let cached_sha = sha.digest(); let sha_uri = sha_url_res?; let (res,_) = manager.get(sha_uri)?.await?; let hash_str = res.into_body().map_ok(hyper::Chunk::into_bytes).try_concat().await.context(download::error::Hyper)?; if hash_str == format!("{}", cached_sha) { Ok(VerifyResult::Good) } else { Ok(VerifyResult::Bad) } } else { Ok(VerifyResult::NotInCache) } } } impl Artifact { fn to_path(&self) -> PathBuf { let mut p = PathBuf::new(); p.push(&self.group_path()); p.push(&self.artifact); p.push(&self.version); p.push(&self.artifact_filename()); p } pub fn get_uri_on(&self, base: &Uri) -> Result<Uri,error::Error> { let base = crate::util::uri_to_url(base).context(error::BadUrl)?; let path = self.to_path(); let url = base.join(path.to_str().expect("non unicode path encountered")).context(error::BadUrl)?; crate::util::url_to_uri(&url).context(error::BadUri) } fn group_path(&self) -> PathBuf { PathBuf::from_iter(self.group.split('.')) } fn artifact_filename(&self) -> String { let classifier_fmt = match self.classifier {
Some(ref class) => format!("-{classifier}", classifier = class), None => "".to_string(), }; let extension_fmt = match self.extension { Some(ref extension) => extension.clone(), None => "jar".to_string(), }; format!( "{artifact}-{version}{classifier}.{extension}", artifact = self.artifact, version = self.version,
classifier = classifier_fmt, extension = extension_fmt ) } pub fn resolve(&self, repo_uri: Uri) -> ResolvedArtifact { ResolvedArtifact { artifact: self.clone(), repo: repo_uri, } } pub fn download_from( &self, location: &Path, repo_uri: Uri, manager: download::Manager, log: Logger, ) -> impl Future<Output=Result<(), crate::cache::Error>> + Send { Cache::install_at(self.resolve(repo_uri), location.to_owned(), manager, log) } } impl ResolvedArtifact { pub fn to_path(&self) -> PathBuf { self.artifact.to_path() } pub fn sha_uri(&self) -> crate::download::Result<Uri> { let mut url = crate::util::uri_to_url(&self.uri().context(download::error::Cached)?).context(download::error::BadUrl)?; let mut path = url.path().to_owned(); path.push_str(".sha1"); url.set_path(path.as_ref()); crate::util::url_to_uri(&url).context(download::error::BadUri) } pub fn install_at_no_classifier( self, location: PathBuf, manager: download::Manager, log: Logger, ) -> impl Future<Output=crate::cache::Result<()>> + Send { async move{ let cached_path_no_classifier = Self { artifact: Artifact { classifier: None, ..self.artifact.clone() }, repo: self.repo.clone(), }.cached_path(); let filename = cached_path_no_classifier.file_name().expect("Maven artifact should have a filename"); <Self as Cacheable>::install_at_custom_filename(self, location, filename.to_os_string(), manager, log).await } } } #[derive(Debug, PartialEq, Eq)] pub enum ArtifactParseError { BadNumberOfParts, } impl ToString for Artifact { fn to_string(&self) -> String { let mut strn = String::new(); strn.push_str(&self.group); strn.push(':'); strn.push_str(&self.artifact); strn.push(':'); strn.push_str(&self.version); if let Some(ref classifier) = self.classifier { strn.push(':'); strn.push_str(classifier); } if let Some(ref ext) = self.extension { strn.push('@'); strn.push_str(ext); } strn } } impl FromStr for Artifact { type Err = ArtifactParseError; fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> { let parts: Vec<&str> = s.split('@').collect(); let (s, ext): (&str, Option<String>) = match *parts.as_slice() { [s, ext] => (s, Some(ext.to_string())), _ => (s, None), }; let parts = s.split(':'); let parts: Vec<&str> = parts.collect(); match *parts.as_slice() { [grp, art, ver] => Ok(Self { group: grp.into(), artifact: art.into(), version: ver.into(), classifier: None, extension: ext, }), [grp, art, ver, class] => Ok(Self { group: grp.into(), artifact: art.into(), version: ver.into(), classifier: Some(class.into()), extension: ext, }), _ => Err(ArtifactParseError::BadNumberOfParts), } } } #[cfg(test)] mod test { use super::Artifact; #[test] fn parses_simple() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: None, extension: None, }) ) } #[test] fn parses_with_ext() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version@zip".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: None, extension: Some("zip".into()), }) ) } #[test] fn parses_with_classifier() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version:universal".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: Some("universal".into()), extension: None, }) ) } #[test] fn parses_with_ext_and_classifier() { assert_eq!( 
"net.minecraftforge.forge:some-jar:some-version:universal@zip".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: Some("universal".into()), extension: Some("zip".into()), }) ) } }
random_line_split
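The sha_uri helper in this record leans on the Maven repository convention that an artifact's SHA-1 digest is published at the artifact URL with a ".sha1" suffix; reduced to a sketch over plain strings:

    /// e.g. ".../demo-1.0.jar" -> ".../demo-1.0.jar.sha1"
    fn sha1_url_of(artifact_url: &str) -> String {
        format!("{}.sha1", artifact_url)
    }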
mod.rs
use futures::prelude::*; use http::Uri; use slog::Logger; use std::{ iter::FromIterator, path::{Path, PathBuf}, str::FromStr, }; use crate::{ cache::{self, Cacheable, Cache as _}, download, error::prelude::*, }; use tokio::io::AsyncReadExt; mod hash_writer; use hash_writer::HashWriter; mod error{ use snafu::Snafu; #[derive(Debug,Snafu)] #[snafu(visibility(pub))] pub enum Error{ #[snafu(display("Invalid uri: {}", source))] BadUri{ source: http::uri::InvalidUri, }, #[snafu(display("Invalid url: {}", source))] BadUrl{ source: url::ParseError, }, } } #[derive(Debug)] pub enum VerifyResult { Good, Bad, NotInCache, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] pub struct Artifact { pub group: String, pub artifact: String, pub version: String, pub classifier: Option<String>, pub extension: Option<String>, } #[derive(Debug, Clone)] pub struct ResolvedArtifact { pub artifact: Artifact, pub repo: Uri, } pub struct Cache; impl Cacheable for ResolvedArtifact { type Cache = crate::cache::FileCache; fn cached_path(&self) -> PathBuf { let mut p = PathBuf::new(); p.push(app_dirs::app_dir(app_dirs::AppDataType::UserCache, crate::APP_INFO, "maven_cache").expect("Cache directory must be accessible")); p.push(&self.artifact.to_path()); p } fn uri(&self) -> crate::cache::Result<Uri> { crate::cache::ResultExt::erased(self.artifact.get_uri_on(&self.repo)) } } impl cache::Cache<ResolvedArtifact> for Cache { fn with( artifact: ResolvedArtifact, manager: download::Manager, log: Logger, ) -> crate::cache::BoxFuture<PathBuf> { let cached_path = artifact.cached_path(); let log = log.new( o!("artifact"=>artifact.artifact.to_string(),"repo"=>artifact.repo.to_string(),"cached_path"=>cached_path.as_path().to_string_lossy().into_owned()), ); Box::pin(async move{ info!(log, "caching maven artifact"); if !Self::is_cached(&artifact) { info!(log, "artifact is not cached, downloading now"); let uri = artifact.uri()?; manager .download(uri.clone(), cached_path.clone(), false, &log).await.context(crate::cache::error::Downloading{uri})?; } Ok(cached_path) }) } } impl Cache { pub async fn verify_cached( resolved: ResolvedArtifact, manager: download::Manager, ) -> download::Result<VerifyResult> { if Self::is_cached(&resolved) { let cached_path = resolved.cached_path(); let sha_url_res = resolved.sha_uri(); let mut cached_file = tokio::fs::File::open(cached_path).await.context(download::error::Io)?; let mut sha = HashWriter::new(); cached_file.copy(&mut sha).await.context(download::error::Io)?; let cached_sha = sha.digest(); let sha_uri = sha_url_res?; let (res,_) = manager.get(sha_uri)?.await?; let hash_str = res.into_body().map_ok(hyper::Chunk::into_bytes).try_concat().await.context(download::error::Hyper)?; if hash_str == format!("{}", cached_sha) { Ok(VerifyResult::Good) } else { Ok(VerifyResult::Bad) } } else { Ok(VerifyResult::NotInCache) } } } impl Artifact { fn to_path(&self) -> PathBuf
pub fn get_uri_on(&self, base: &Uri) -> Result<Uri,error::Error> { let base = crate::util::uri_to_url(base).context(error::BadUrl)?; let path = self.to_path(); let url = base.join(path.to_str().expect("non unicode path encountered")).context(error::BadUrl)?; crate::util::url_to_uri(&url).context(error::BadUri) } fn group_path(&self) -> PathBuf { PathBuf::from_iter(self.group.split('.')) } fn artifact_filename(&self) -> String { let classifier_fmt = match self.classifier { Some(ref class) => format!("-{classifier}", classifier = class), None => "".to_string(), }; let extension_fmt = match self.extension { Some(ref extension) => extension.clone(), None => "jar".to_string(), }; format!( "{artifact}-{version}{classifier}.{extension}", artifact = self.artifact, version = self.version, classifier = classifier_fmt, extension = extension_fmt ) } pub fn resolve(&self, repo_uri: Uri) -> ResolvedArtifact { ResolvedArtifact { artifact: self.clone(), repo: repo_uri, } } pub fn download_from( &self, location: &Path, repo_uri: Uri, manager: download::Manager, log: Logger, ) -> impl Future<Output=Result<(), crate::cache::Error>> + Send { Cache::install_at(self.resolve(repo_uri), location.to_owned(), manager, log) } } impl ResolvedArtifact { pub fn to_path(&self) -> PathBuf { self.artifact.to_path() } pub fn sha_uri(&self) -> crate::download::Result<Uri> { let mut url = crate::util::uri_to_url(&self.uri().context(download::error::Cached)?).context(download::error::BadUrl)?; let mut path = url.path().to_owned(); path.push_str(".sha1"); url.set_path(path.as_ref()); crate::util::url_to_uri(&url).context(download::error::BadUri) } pub fn install_at_no_classifier( self, location: PathBuf, manager: download::Manager, log: Logger, ) -> impl Future<Output=crate::cache::Result<()>> + Send { async move{ let cached_path_no_classifier = Self { artifact: Artifact { classifier: None, ..self.artifact.clone() }, repo: self.repo.clone(), }.cached_path(); let filename = cached_path_no_classifier.file_name().expect("Maven artifact should have a filename"); <Self as Cacheable>::install_at_custom_filename(self, location, filename.to_os_string(), manager, log).await } } } #[derive(Debug, PartialEq, Eq)] pub enum ArtifactParseError { BadNumberOfParts, } impl ToString for Artifact { fn to_string(&self) -> String { let mut strn = String::new(); strn.push_str(&self.group); strn.push(':'); strn.push_str(&self.artifact); strn.push(':'); strn.push_str(&self.version); if let Some(ref classifier) = self.classifier { strn.push(':'); strn.push_str(classifier); } if let Some(ref ext) = self.extension { strn.push('@'); strn.push_str(ext); } strn } } impl FromStr for Artifact { type Err = ArtifactParseError; fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> { let parts: Vec<&str> = s.split('@').collect(); let (s, ext): (&str, Option<String>) = match *parts.as_slice() { [s, ext] => (s, Some(ext.to_string())), _ => (s, None), }; let parts = s.split(':'); let parts: Vec<&str> = parts.collect(); match *parts.as_slice() { [grp, art, ver] => Ok(Self { group: grp.into(), artifact: art.into(), version: ver.into(), classifier: None, extension: ext, }), [grp, art, ver, class] => Ok(Self { group: grp.into(), artifact: art.into(), version: ver.into(), classifier: Some(class.into()), extension: ext, }), _ => Err(ArtifactParseError::BadNumberOfParts), } } } #[cfg(test)] mod test { use super::Artifact; #[test] fn parses_simple() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version".parse(), Ok(Artifact { group: 
"net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: None, extension: None, }) ) } #[test] fn parses_with_ext() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version@zip".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: None, extension: Some("zip".into()), }) ) } #[test] fn parses_with_classifier() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version:universal".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: Some("universal".into()), extension: None, }) ) } #[test] fn parses_with_ext_and_classifier() { assert_eq!( "net.minecraftforge.forge:some-jar:some-version:universal@zip".parse(), Ok(Artifact { group: "net.minecraftforge.forge".into(), artifact: "some-jar".into(), version: "some-version".into(), classifier: Some("universal".into()), extension: Some("zip".into()), }) ) } }
{ let mut p = PathBuf::new(); p.push(&self.group_path()); p.push(&self.artifact); p.push(&self.version); p.push(&self.artifact_filename()); p }
identifier_body
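The `to_path`/`artifact_filename` logic in the Rust record above encodes the standard Maven repository layout: the group's dots become path segments, followed by the artifact and version directories, and a filename of the form "artifact-version[-classifier].extension" with "jar" as the default extension. A minimal sketch, not part of the crate above, restating those layout rules in Python; the helper name artifact_path is hypothetical:

from pathlib import PurePosixPath
from typing import Optional

def artifact_path(group: str, artifact: str, version: str,
                  classifier: Optional[str] = None,
                  extension: Optional[str] = None) -> PurePosixPath:
    # dots in the group become path segments; "jar" is the default extension
    classifier_fmt = f"-{classifier}" if classifier else ""
    filename = f"{artifact}-{version}{classifier_fmt}.{extension or 'jar'}"
    return PurePosixPath(*group.split('.'), artifact, version, filename)

# mirrors the "parses_with_ext_and_classifier" test case above
assert str(artifact_path("net.minecraftforge.forge", "some-jar", "some-version",
                         "universal", "zip")) == \
    "net/minecraftforge/forge/some-jar/some-version/some-jar-some-version-universal.zip"
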
plots.py
# coding=utf-8 # Copyright 2020 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities to plot the ROC and Calibration curves for survival models. This module has utility functions to generate ROC and Calibration plots for survival models at given horizons of time. Note that ideally both the ROC and Calibration curves need to be adjusted for censoring using IPCW estimates. Not designed to be called directly; these utilities are invoked when running a function from dcm.deep_cox_mixtures """ from dcm import baseline_models from dcm import models from dcm.calibration import calibration_curve import matplotlib as mpl from matplotlib import pyplot as plt from dcm.skmetrics import brier_score from dcm.skmetrics import cumulative_dynamic_auc from dcm.skmetrics import concordance_index_ipcw import numpy as np import logging logging.getLogger("matplotlib").setLevel(logging.CRITICAL) from sklearn.metrics import auc def plot_calibration_curve(ax, scores, e, t, a, folds, group, quant, strat='quantile', adj='IPCW', plot=True):
def plot_roc_curve(ax, scores, e, t, a, folds, groups, quant, plot=True): """Function to plot ROC at a specified time horizon. Accepts a matplotlib figure instance, risk scores from a trained survival analysis model, and quantiles of event interest and generates an IPCW adjusted ROC curve. Args: ax: a matplotlib subfigure object. scores: a dict of risk scores for each cv fold, issued by a trained survival analysis model. e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. groups: List of the demographics to adjust for. quant: the event time quantile at which the models are to be evaluated. Returns: Dicts of the AUC, time-dependent concordance (Ctd) and Brier score (BS) per group. """ fs = 16 fprs, tprs, tprs_std, ctds, brss = {}, {}, {}, {}, {} fprs['all'] = {} tprs['all'] = {} ctds['all'] = {} brss['all'] = {} for group in groups: fprs[group] = {} tprs[group] = {} ctds[group] = {} brss[group] = {} for fold in set(folds): ate = a[folds == fold] str_test = baseline_models.structure_for_eval_(t[folds == fold], e[folds == fold]) if len(set(folds)) == 1: atr = ate str_train = str_test else: atr = a[folds != fold] str_train = baseline_models.structure_for_eval_(t[folds != fold], e[folds != fold]) t_tr_max = np.max([t_[1] for t_ in str_train]) t_ = np.array([t_[1] for t_ in str_test]) clean = (t_<=t_tr_max) str_test = str_test[t_<=t_tr_max] ate = ate[t_<=t_tr_max] scores_f = scores[fold][clean] for group in groups: te_protg = (ate == group) tr_protg = (atr == group) try: roc_m = cumulative_dynamic_auc(str_train[tr_protg], str_test[te_protg], -scores_f[te_protg], [quant]) brs_m = brier_score(str_train[tr_protg], str_test[te_protg], scores_f[te_protg], quant) ctd_m = concordance_index_ipcw(str_train[tr_protg], str_test[te_protg], -scores_f[te_protg], quant)[0] except Exception: roc_m = cumulative_dynamic_auc(str_train, str_test[te_protg], -scores_f[te_protg], [quant]) brs_m = brier_score(str_train, str_test[te_protg], scores_f[te_protg], quant) ctd_m = concordance_index_ipcw(str_train, str_test[te_protg], -scores_f[te_protg], quant)[0] fprs[group][fold] = roc_m[0][0][1] tprs[group][fold] = roc_m[0][0][0] ctds[group][fold] = ctd_m brss[group][fold] = brs_m[1][0] roc_m = cumulative_dynamic_auc(str_train, str_test, -scores_f, [quant]) ctd_m = concordance_index_ipcw(str_train, str_test, -scores_f, quant)[0] brs_m = brier_score(str_train, str_test, scores_f, quant) fprs['all'][fold], tprs['all'][fold] = roc_m[0][0][1], roc_m[0][0][0] ctds['all'][fold] = ctd_m brss['all'][fold] = brs_m[1][0] cols = ['b', 'r', 'g'] roc_auc = {} ctds_mean = {} brss_mean = {} j = 0 for group in list(groups) + ['all']: all_fpr = np.unique(np.concatenate([fprs[group][i] for i in set(folds)])) # The ROC curves are interpolated at these points. mean_tprs = [] for i in set(folds): mean_tprs.append(np.interp(all_fpr, fprs[group][i], tprs[group][i])) # Finally the interpolated curves are averaged over to compute AUC. 
mean_tpr = np.mean(mean_tprs, axis=0) std_tpr = 1.96 * np.std(mean_tprs, axis=0) / np.sqrt(10) fprs[group]['macro'] = all_fpr tprs[group]['macro'] = mean_tpr tprs_std[group] = std_tpr roc_auc[group] = auc(fprs[group]['macro'], tprs[group]['macro']) ctds_mean[group] = np.mean([ctds[group][fold] for fold in folds]) brss_mean[group] = np.mean([brss[group][fold] for fold in folds]) lbl = str(group) lbl += ' AUC:' + str(round(roc_auc[group], 3)) lbl += ' Ctd:'+ str(round(ctds_mean[group], 3)) lbl += ' BS:'+ str(round(brss_mean[group], 3)) if plot: ax.plot( all_fpr, mean_tpr, c=cols[j], label=lbl) ax.fill_between( all_fpr, mean_tpr - std_tpr, mean_tpr + std_tpr, color=cols[j], alpha=0.25) j += 1 if plot: ax.set_xlabel('False Positive Rate', fontsize=fs) ax.set_ylabel('True Positive Rate', fontsize=fs) ax.legend(fontsize=fs) ax.set_xscale('log') return roc_auc, ctds_mean, brss_mean def plot_results(outputs, x, e, t, a, folds, groups, quantiles, strat='quantile', adj='KM', plot=True): """Function to plot the ROC and Calibration curves from a survival model. Accepts a trained survival analysis model, features and horizon of interest and generates the IPCW adjusted ROC curve and Calibration curve at pre-specified horizons of time. Args: outputs: a python dict with survival probabilities for each fold. x: a numpy array of input features. e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. groups: List of the demographics to adjust for. quantiles: a list of event time quantiles at which the models are to be evaluated. strat: Specifies how the bins are computed. One of: "quantile": Equal sized bins. "uniform": Uniformly stratified. adj: Adjustment strategy for the Expected Calibration Error. One of: "KM": Kaplan-Meier (Default) "IPCW": Inverse Propensity of Censoring Returns: a dict mapping each quantile to the metrics (AUC, Ctd, Brier score, ECE) computed at that horizon. """ if plot: mpl.rcParams['hatch.linewidth'] = 2.0 fig, big_axes = plt.subplots( figsize=(8 * (len(groups) + 2), 6 * len(quantiles)), nrows=len(quantiles), ncols=1) plt.subplots_adjust(hspace=0.4) i = 0 for _, big_ax in enumerate(big_axes, start=1): big_ax.set_title( 'Receiver Operating Characteristic and Calibration at t=' + str(quantiles[i]) + '\n', fontsize=16) big_ax.tick_params( labelcolor=(1., 1., 1., 0.0), top='off', bottom='off', left='off', right='off') i += 1 eces = {} metrics = {} for quant in quantiles: eces[quant] = {} for i in range(len(quantiles)): scores = outputs[quantiles[i]] for j in range(len(groups) + 2): pt = (i * (len(groups) + 2) + j + 1) if plot: ax = fig.add_subplot(len(quantiles), len(groups) + 2, pt) else: ax = None if (j==1): eces[quantiles[i]]['all'] = plot_calibration_curve(ax, scores, e, t, a, folds, None, quantiles[i], strat=strat, adj=adj, plot=plot) if (j>1): eces[quantiles[i]][groups[j - 2]] = plot_calibration_curve(ax, scores, e, t, a, folds, groups[j - 2], quantiles[i], strat=strat, adj=adj, plot=plot) if (j==0): metrics[quantiles[i]] = plot_roc_curve(ax, scores, e, t, a, folds, groups, quantiles[i], plot=plot) for quant in quantiles: metrics[quant] = metrics[quant] + (eces[quant], ) if plot: plt.show() return metrics
"""Function to plot Calibration Curve at a specified time horizon. Accepts a matplotlib figure instance, risk scores from a trained survival analysis model, and quantiles of event interest and generates an IPCW adjusted calibration curve. Args: ax: a matplotlib subfigure object. scores: risk scores P(T>t) issued by a trained survival analysis model (output of deep_cox_mixtures.models.predict_survival). e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. group: List of the demogrpahics to adjust for. quant: a list of event time quantiles at which the models are to be evaluated. strat: Specifies how the bins are computed. One of: "quantile": Equal sized bins. "uniform": Uniformly stratified. adj (str): Determines if IPCW adjustment is carried out on a population or subgroup level. One of "IPCWpop", "IPCWcon" (not implemented). Returns: A plotted matplotlib calibration curve. """ allscores = np.ones_like(t).astype('float') for fold in set(folds): allscores[folds == fold] = scores[fold] scores = allscores b_fc = (0, 0, 1, .4) r_fc = (1, 0, 0, .2) b_ec = (0, 0, 1, .8) r_ec = (1, 0, 0, .8) n_bins = 20 hatch = '//' fs = 16 prob_true_n, _, outbins, ece = calibration_curve( scores, e, t, a, group, quant, typ=adj, ret_bins=True, strat=strat, n_bins=n_bins) for d in range(len(prob_true_n)): binsize = outbins[d + 1] - outbins[d] binloc = (outbins[d + 1] + outbins[d]) / 2 gap = (prob_true_n[d] - binloc) if gap < 0: bottom = prob_true_n[d] else: bottom = prob_true_n[d] - abs(gap) if d == len(prob_true_n) - 1: lbl1 = 'Score' lbl2 = 'Gap' else: lbl1 = None lbl2 = None if plot: ax.bar( binloc, prob_true_n[d], width=binsize, facecolor=b_fc, edgecolor=b_ec, linewidth=2.5, label=lbl1) ax.bar( binloc, abs(gap), bottom=bottom, width=binsize, facecolor=r_fc, edgecolor=r_ec, linewidth=2.5, hatch=hatch, label=lbl2) d += 1 if plot: ax.plot([0, 1], [0, 1], c='k', ls='--', lw=2, zorder=100) ax.set_xlabel('Predicted Score', fontsize=fs) ax.set_ylabel('True Score', fontsize=fs) ax.legend(fontsize=fs) ax.set_title(str(group), fontsize=fs) ax.set_xlim(0, 1) ax.set_ylim(0, 1) ax.grid(ls=':', lw=2, zorder=-100, color='grey') ax.set_axisbelow(True) ax.text( x=0.030, y=.7, s='ECE=' + str(round(ece, 3)), size=fs, bbox=dict(boxstyle='round', fc='white', ec='grey', pad=0.2)) return ece
identifier_body
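The body above draws a reliability diagram: scores are bucketed into bins, each bar shows the observed event frequency at the bin midpoint, and the ECE summarizes the bin-weighted gaps to the diagonal. A censoring-free sketch of that binning with plain binary outcomes; dcm.calibration.calibration_curve additionally applies the KM/IPCW reweighting, which is omitted here, and simple_calibration is our own name:

import numpy as np

def simple_calibration(scores, outcomes, n_bins=20):
    # quantile binning -> roughly equal-sized bins (the 'quantile' strategy above)
    edges = np.quantile(scores, np.linspace(0, 1, n_bins + 1))
    idx = np.clip(np.searchsorted(edges, scores, side='right') - 1, 0, n_bins - 1)
    prob_true = np.array([outcomes[idx == b].mean() if np.any(idx == b) else np.nan
                          for b in range(n_bins)])
    prob_pred = (edges[:-1] + edges[1:]) / 2      # bin midpoints, the bar x-locations
    weights = np.bincount(idx, minlength=n_bins) / len(scores)
    ece = np.nansum(weights * np.abs(prob_true - prob_pred))
    return prob_true, edges, ece

rng = np.random.default_rng(0)
s = rng.uniform(size=5000)
y = (rng.uniform(size=5000) < s).astype(float)    # perfectly calibrated scores
print(simple_calibration(s, y)[2])                # ECE close to 0
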
plots.py
# coding=utf-8 # Copyright 2020 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities to plot the ROC and Calibration curves for survival models. This module has utility functions to generate ROC and Calibration plots for survival models at given horizons of time. Note that ideally both the ROC and Calibration curves need to be adjusted for censoring using IPCW estimates. Not designed to be called directly; these utilities are invoked when running a function from dcm.deep_cox_mixtures """ from dcm import baseline_models from dcm import models from dcm.calibration import calibration_curve import matplotlib as mpl from matplotlib import pyplot as plt from dcm.skmetrics import brier_score from dcm.skmetrics import cumulative_dynamic_auc from dcm.skmetrics import concordance_index_ipcw import numpy as np import logging logging.getLogger("matplotlib").setLevel(logging.CRITICAL) from sklearn.metrics import auc def plot_calibration_curve(ax, scores, e, t, a, folds, group, quant, strat='quantile', adj='IPCW', plot=True): """Function to plot Calibration Curve at a specified time horizon. Accepts a matplotlib figure instance, risk scores from a trained survival analysis model, and quantiles of event interest and generates an IPCW adjusted calibration curve. Args: ax: a matplotlib subfigure object. scores: risk scores P(T>t) issued by a trained survival analysis model (output of deep_cox_mixtures.models.predict_survival). e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. group: the demographic group to adjust for. quant: the event time quantile at which the model is to be evaluated. strat: Specifies how the bins are computed. One of: "quantile": Equal sized bins. "uniform": Uniformly stratified. adj (str): Determines if IPCW adjustment is carried out on a population or subgroup level. One of "IPCWpop", "IPCWcon" (not implemented). Returns: The Expected Calibration Error (ECE) at the horizon "quant". 
""" allscores = np.ones_like(t).astype('float') for fold in set(folds): allscores[folds == fold] = scores[fold] scores = allscores b_fc = (0, 0, 1, .4) r_fc = (1, 0, 0, .2) b_ec = (0, 0, 1, .8) r_ec = (1, 0, 0, .8) n_bins = 20 hatch = '//' fs = 16 prob_true_n, _, outbins, ece = calibration_curve( scores, e, t, a, group, quant, typ=adj, ret_bins=True, strat=strat, n_bins=n_bins) for d in range(len(prob_true_n)): binsize = outbins[d + 1] - outbins[d] binloc = (outbins[d + 1] + outbins[d]) / 2 gap = (prob_true_n[d] - binloc) if gap < 0: bottom = prob_true_n[d] else: bottom = prob_true_n[d] - abs(gap) if d == len(prob_true_n) - 1: lbl1 = 'Score' lbl2 = 'Gap' else: lbl1 = None lbl2 = None if plot: ax.bar( binloc, prob_true_n[d], width=binsize, facecolor=b_fc, edgecolor=b_ec, linewidth=2.5, label=lbl1) ax.bar( binloc, abs(gap), bottom=bottom, width=binsize, facecolor=r_fc, edgecolor=r_ec, linewidth=2.5, hatch=hatch, label=lbl2) d += 1 if plot: ax.plot([0, 1], [0, 1], c='k', ls='--', lw=2, zorder=100) ax.set_xlabel('Predicted Score', fontsize=fs) ax.set_ylabel('True Score', fontsize=fs) ax.legend(fontsize=fs) ax.set_title(str(group), fontsize=fs) ax.set_xlim(0, 1) ax.set_ylim(0, 1) ax.grid(ls=':', lw=2, zorder=-100, color='grey') ax.set_axisbelow(True) ax.text( x=0.030, y=.7, s='ECE=' + str(round(ece, 3)), size=fs, bbox=dict(boxstyle='round', fc='white', ec='grey', pad=0.2)) return ece def plot_roc_curve(ax, scores, e, t, a, folds, groups, quant, plot=True): """Function to plot ROC at a specified time horizon. Accepts a matplotlib figure instance, risk scores from a trained survival analysis model, and quantiles of event interest and generates an IPCW adjusted ROC curve. Args: ax: a matplotlib subfigure object. scores: choice of model. One of "coupled_deep_cph", "coupled_deep_cph_vae". e: a numpy array of input features. t: a numpy array of input features. a: a numpy vector of protected attributes. folds: a numpy vector of cv fold. groups: List of the demogrpahics to adjust for. quant: a list of event time quantiles at which the models are to be evaluated. Returns: A plotted matplotlib ROC curve. 
""" fs = 16 fprs, tprs, tprs_std, ctds, brss = {}, {}, {}, {}, {} fprs['all'] = {} tprs['all'] = {} ctds['all'] = {} brss['all'] = {} for group in groups: fprs[group] = {} tprs[group] = {} ctds[group] = {} brss[group] = {} for fold in set(folds): ate = a[folds == fold] str_test = baseline_models.structure_for_eval_(t[folds == fold], e[folds == fold]) if len(set(folds)) == 1: atr = ate str_train = str_test else: atr = a[folds != fold] str_train = baseline_models.structure_for_eval_(t[folds != fold], e[folds != fold]) t_tr_max = np.max([t_[1] for t_ in str_train]) t_ = np.array([t_[1] for t_ in str_test]) clean = (t_<=t_tr_max) str_test = str_test[t_<=t_tr_max] ate = ate[t_<=t_tr_max] scores_f = scores[fold][clean] for group in groups: te_protg = (ate == group) tr_protg = (atr == group) try: roc_m = cumulative_dynamic_auc(str_train[tr_protg], str_test[te_protg], -scores_f[te_protg], [quant]) brs_m = brier_score(str_train[tr_protg], str_test[te_protg], scores_f[te_protg], quant) ctd_m = concordance_index_ipcw(str_train[tr_protg], str_test[te_protg], -scores_f[te_protg], quant)[0] except: roc_m = cumulative_dynamic_auc(str_train, str_test[te_protg], -scores_f[te_protg], [quant]) brs_m = brier_score(str_train, str_test[te_protg], scores_f[te_protg], quant) ctd_m = concordance_index_ipcw(str_train, str_test[te_protg], -scores_f[te_protg], quant)[0] fprs[group][fold] = roc_m[0][0][1] tprs[group][fold] = roc_m[0][0][0] ctds[group][fold] = ctd_m brss[group][fold] = brs_m[1][0] roc_m = cumulative_dynamic_auc(str_train, str_test, -scores_f, [quant]) ctd_m = concordance_index_ipcw(str_train, str_test, -scores_f, quant)[0] brs_m = brier_score(str_train, str_test, scores_f, quant) fprs['all'][fold], tprs['all'][fold] = roc_m[0][0][1], roc_m[0][0][0] ctds['all'][fold] = ctd_m brss['all'][fold] = brs_m[1][0] cols = ['b', 'r', 'g'] roc_auc = {} ctds_mean = {} brss_mean = {} j = 0 for group in list(groups) + ['all']: all_fpr = np.unique(np.concatenate([fprs[group][i] for i in set(folds)])) # The ROC curves are interpolated at these points. mean_tprs = [] for i in set(folds):
# Finally the interpolated curves are averaged over to compute AUC. mean_tpr = np.mean(mean_tprs, axis=0) std_tpr = 1.96 * np.std(mean_tprs, axis=0) / np.sqrt(10) fprs[group]['macro'] = all_fpr tprs[group]['macro'] = mean_tpr tprs_std[group] = std_tpr roc_auc[group] = auc(fprs[group]['macro'], tprs[group]['macro']) ctds_mean[group] = np.mean([ctds[group][fold] for fold in folds]) brss_mean[group] = np.mean([brss[group][fold] for fold in folds]) lbl = str(group) lbl += ' AUC:' + str(round(roc_auc[group], 3)) lbl += ' Ctd:'+ str(round(ctds_mean[group], 3)) lbl += ' BS:'+ str(round(brss_mean[group], 3)) if plot: ax.plot( all_fpr, mean_tpr, c=cols[j], label=lbl) ax.fill_between( all_fpr, mean_tpr - std_tpr, mean_tpr + std_tpr, color=cols[j], alpha=0.25) j += 1 if plot: ax.set_xlabel('False Positive Rate', fontsize=fs) ax.set_ylabel('True Positive Rate', fontsize=fs) ax.legend(fontsize=fs) ax.set_xscale('log') return roc_auc, ctds_mean, brss_mean def plot_results(outputs, x, e, t, a, folds, groups, quantiles, strat='quantile', adj='KM', plot=True): """Function to plot the ROC and Calibration curves from a survival model. Accepts a trained survival analysis model, features and horizon of interest and generates the IPCW adjusted ROC curve and Calibration curve at pre-specified horizons of time. Args: outputs: a python dict with survival probabilities for each fold. x: a numpy array of input features. e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. groups: List of the demographics to adjust for. quantiles: a list of event time quantiles at which the models are to be evaluated. strat: Specifies how the bins are computed. One of: "quantile": Equal sized bins. "uniform": Uniformly stratified. adj: Adjustment strategy for the Expected Calibration Error. One of: "KM": Kaplan-Meier (Default) "IPCW": Inverse Propensity of Censoring Returns: a dict mapping each quantile to the metrics (AUC, Ctd, Brier score, ECE) computed at that horizon. """ if plot: mpl.rcParams['hatch.linewidth'] = 2.0 fig, big_axes = plt.subplots( figsize=(8 * (len(groups) + 2), 6 * len(quantiles)), nrows=len(quantiles), ncols=1) plt.subplots_adjust(hspace=0.4) i = 0 for _, big_ax in enumerate(big_axes, start=1): big_ax.set_title( 'Receiver Operating Characteristic and Calibration at t=' + str(quantiles[i]) + '\n', fontsize=16) big_ax.tick_params( labelcolor=(1., 1., 1., 0.0), top='off', bottom='off', left='off', right='off') i += 1 eces = {} metrics = {} for quant in quantiles: eces[quant] = {} for i in range(len(quantiles)): scores = outputs[quantiles[i]] for j in range(len(groups) + 2): pt = (i * (len(groups) + 2) + j + 1) if plot: ax = fig.add_subplot(len(quantiles), len(groups) + 2, pt) else: ax = None if (j==1): eces[quantiles[i]]['all'] = plot_calibration_curve(ax, scores, e, t, a, folds, None, quantiles[i], strat=strat, adj=adj, plot=plot) if (j>1): eces[quantiles[i]][groups[j - 2]] = plot_calibration_curve(ax, scores, e, t, a, folds, groups[j - 2], quantiles[i], strat=strat, adj=adj, plot=plot) if (j==0): metrics[quantiles[i]] = plot_roc_curve(ax, scores, e, t, a, folds, groups, quantiles[i], plot=plot) for quant in quantiles: metrics[quant] = metrics[quant] + (eces[quant], ) if plot: plt.show() return metrics
mean_tprs.append(np.interp(all_fpr, fprs[group][i], tprs[group][i]))
conditional_block
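This record's middle is the np.interp call that resamples each fold's ROC curve onto the shared FPR grid before averaging, exactly as the two comments around it describe. A self-contained sketch of that macro-averaging, assuming a hypothetical {fold: (fpr, tpr)} dict in place of the fprs/tprs bookkeeping above:

import numpy as np
from sklearn.metrics import auc

def macro_average_roc(fold_curves):
    # union of all FPR knots across folds; np.unique also sorts them
    all_fpr = np.unique(np.concatenate([fpr for fpr, _ in fold_curves.values()]))
    # resample every fold's TPR onto the shared grid, then average pointwise
    mean_tpr = np.mean(
        [np.interp(all_fpr, fpr, tpr) for fpr, tpr in fold_curves.values()], axis=0)
    return all_fpr, mean_tpr, auc(all_fpr, mean_tpr)

curves = {0: (np.array([0., .5, 1.]), np.array([0., .7, 1.])),
          1: (np.array([0., .25, 1.]), np.array([0., .6, 1.]))}
fpr, tpr, macro_auc = macro_average_roc(curves)
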
plots.py
# coding=utf-8 # Copyright 2020 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities to plot the ROC and Calibration curves for survival models. This module has utility functions to generate ROC and Calibration plots for survival models at given horizons of time. Note that ideally both the ROC and Calibration curves need to be adjusted for censoring using IPCW estimates. Not designed to be called directly; these utilities are invoked when running a function from dcm.deep_cox_mixtures """ from dcm import baseline_models from dcm import models from dcm.calibration import calibration_curve import matplotlib as mpl from matplotlib import pyplot as plt from dcm.skmetrics import brier_score from dcm.skmetrics import cumulative_dynamic_auc from dcm.skmetrics import concordance_index_ipcw import numpy as np import logging logging.getLogger("matplotlib").setLevel(logging.CRITICAL) from sklearn.metrics import auc def plot_calibration_curve(ax, scores, e, t, a, folds, group, quant, strat='quantile', adj='IPCW', plot=True): """Function to plot Calibration Curve at a specified time horizon. Accepts a matplotlib figure instance, risk scores from a trained survival analysis model, and quantiles of event interest and generates an IPCW adjusted calibration curve. Args: ax: a matplotlib subfigure object. scores: risk scores P(T>t) issued by a trained survival analysis model (output of deep_cox_mixtures.models.predict_survival). e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. group: the demographic group to adjust for. quant: the event time quantile at which the model is to be evaluated. strat: Specifies how the bins are computed. One of: "quantile": Equal sized bins. "uniform": Uniformly stratified. adj (str): Determines if IPCW adjustment is carried out on a population or subgroup level. One of "IPCWpop", "IPCWcon" (not implemented). Returns: The Expected Calibration Error (ECE) at the horizon "quant". 
""" allscores = np.ones_like(t).astype('float') for fold in set(folds): allscores[folds == fold] = scores[fold] scores = allscores b_fc = (0, 0, 1, .4) r_fc = (1, 0, 0, .2) b_ec = (0, 0, 1, .8) r_ec = (1, 0, 0, .8) n_bins = 20 hatch = '//' fs = 16 prob_true_n, _, outbins, ece = calibration_curve( scores, e, t, a, group, quant, typ=adj, ret_bins=True, strat=strat, n_bins=n_bins) for d in range(len(prob_true_n)): binsize = outbins[d + 1] - outbins[d] binloc = (outbins[d + 1] + outbins[d]) / 2 gap = (prob_true_n[d] - binloc) if gap < 0: bottom = prob_true_n[d] else: bottom = prob_true_n[d] - abs(gap) if d == len(prob_true_n) - 1: lbl1 = 'Score' lbl2 = 'Gap' else: lbl1 = None lbl2 = None if plot: ax.bar( binloc, prob_true_n[d], width=binsize, facecolor=b_fc, edgecolor=b_ec, linewidth=2.5, label=lbl1) ax.bar( binloc, abs(gap), bottom=bottom, width=binsize, facecolor=r_fc, edgecolor=r_ec, linewidth=2.5, hatch=hatch, label=lbl2) d += 1 if plot: ax.plot([0, 1], [0, 1], c='k', ls='--', lw=2, zorder=100) ax.set_xlabel('Predicted Score', fontsize=fs) ax.set_ylabel('True Score', fontsize=fs) ax.legend(fontsize=fs) ax.set_title(str(group), fontsize=fs) ax.set_xlim(0, 1) ax.set_ylim(0, 1) ax.grid(ls=':', lw=2, zorder=-100, color='grey') ax.set_axisbelow(True) ax.text( x=0.030, y=.7, s='ECE=' + str(round(ece, 3)), size=fs, bbox=dict(boxstyle='round', fc='white', ec='grey', pad=0.2)) return ece def plot_roc_curve(ax, scores, e, t, a, folds, groups, quant, plot=True): """Function to plot ROC at a specified time horizon. Accepts a matplotlib figure instance, risk scores from a trained survival analysis model, and quantiles of event interest and generates an IPCW adjusted ROC curve. Args: ax: a matplotlib subfigure object. scores: choice of model. One of "coupled_deep_cph", "coupled_deep_cph_vae". e: a numpy array of input features. t: a numpy array of input features. a: a numpy vector of protected attributes. folds: a numpy vector of cv fold. groups: List of the demogrpahics to adjust for. quant: a list of event time quantiles at which the models are to be evaluated. Returns: A plotted matplotlib ROC curve. 
""" fs = 16 fprs, tprs, tprs_std, ctds, brss = {}, {}, {}, {}, {} fprs['all'] = {} tprs['all'] = {} ctds['all'] = {} brss['all'] = {} for group in groups: fprs[group] = {} tprs[group] = {} ctds[group] = {} brss[group] = {} for fold in set(folds): ate = a[folds == fold] str_test = baseline_models.structure_for_eval_(t[folds == fold], e[folds == fold]) if len(set(folds)) == 1: atr = ate str_train = str_test else: atr = a[folds != fold] str_train = baseline_models.structure_for_eval_(t[folds != fold], e[folds != fold]) t_tr_max = np.max([t_[1] for t_ in str_train]) t_ = np.array([t_[1] for t_ in str_test]) clean = (t_<=t_tr_max) str_test = str_test[t_<=t_tr_max] ate = ate[t_<=t_tr_max] scores_f = scores[fold][clean] for group in groups: te_protg = (ate == group) tr_protg = (atr == group) try: roc_m = cumulative_dynamic_auc(str_train[tr_protg], str_test[te_protg], -scores_f[te_protg], [quant]) brs_m = brier_score(str_train[tr_protg], str_test[te_protg], scores_f[te_protg], quant) ctd_m = concordance_index_ipcw(str_train[tr_protg], str_test[te_protg], -scores_f[te_protg], quant)[0] except: roc_m = cumulative_dynamic_auc(str_train, str_test[te_protg], -scores_f[te_protg], [quant]) brs_m = brier_score(str_train, str_test[te_protg], scores_f[te_protg], quant) ctd_m = concordance_index_ipcw(str_train, str_test[te_protg], -scores_f[te_protg], quant)[0] fprs[group][fold] = roc_m[0][0][1] tprs[group][fold] = roc_m[0][0][0] ctds[group][fold] = ctd_m brss[group][fold] = brs_m[1][0] roc_m = cumulative_dynamic_auc(str_train, str_test, -scores_f, [quant]) ctd_m = concordance_index_ipcw(str_train, str_test, -scores_f, quant)[0] brs_m = brier_score(str_train, str_test, scores_f, quant) fprs['all'][fold], tprs['all'][fold] = roc_m[0][0][1], roc_m[0][0][0] ctds['all'][fold] = ctd_m brss['all'][fold] = brs_m[1][0] cols = ['b', 'r', 'g'] roc_auc = {} ctds_mean = {} brss_mean = {} j = 0 for group in list(groups) + ['all']: all_fpr = np.unique(np.concatenate([fprs[group][i] for i in set(folds)])) # The ROC curves are interpolated at these points.
mean_tpr = np.mean(mean_tprs, axis=0) std_tpr = 1.96 * np.std(mean_tprs, axis=0) / np.sqrt(10) fprs[group]['macro'] = all_fpr tprs[group]['macro'] = mean_tpr tprs_std[group] = std_tpr roc_auc[group] = auc(fprs[group]['macro'], tprs[group]['macro']) ctds_mean[group] = np.mean([ctds[group][fold] for fold in folds]) brss_mean[group] = np.mean([brss[group][fold] for fold in folds]) lbl = str(group) lbl += ' AUC:' + str(round(roc_auc[group], 3)) lbl += ' Ctd:'+ str(round(ctds_mean[group], 3)) lbl += ' BS:'+ str(round(brss_mean[group], 3)) if plot: ax.plot( all_fpr, mean_tpr, c=cols[j], label=lbl) ax.fill_between( all_fpr, mean_tpr - std_tpr, mean_tpr + std_tpr, color=cols[j], alpha=0.25) j += 1 if plot: ax.set_xlabel('False Positive Rate', fontsize=fs) ax.set_ylabel('True Positive Rate', fontsize=fs) ax.legend(fontsize=fs) ax.set_xscale('log') return roc_auc, ctds_mean, brss_mean def plot_results(outputs, x, e, t, a, folds, groups, quantiles, strat='quantile', adj='KM', plot=True): """Function to plot the ROC and Calibration curves from a survival model. Accepts a trained survival analysis model, features and horizon of interest and generates the IPCW adjusted ROC curve and Calibration curve at pre-specified horizons of time. Args: outputs: a python dict with survival probabilities for each fold. x: a numpy array of input features. e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. groups: List of the demographics to adjust for. quantiles: a list of event time quantiles at which the models are to be evaluated. strat: Specifies how the bins are computed. One of: "quantile": Equal sized bins. "uniform": Uniformly stratified. adj: Adjustment strategy for the Expected Calibration Error. One of: "KM": Kaplan-Meier (Default) "IPCW": Inverse Propensity of Censoring Returns: a dict mapping each quantile to the metrics (AUC, Ctd, Brier score, ECE) computed at that horizon. """ if plot: mpl.rcParams['hatch.linewidth'] = 2.0 fig, big_axes = plt.subplots( figsize=(8 * (len(groups) + 2), 6 * len(quantiles)), nrows=len(quantiles), ncols=1) plt.subplots_adjust(hspace=0.4) i = 0 for _, big_ax in enumerate(big_axes, start=1): big_ax.set_title( 'Receiver Operating Characteristic and Calibration at t=' + str(quantiles[i]) + '\n', fontsize=16) big_ax.tick_params( labelcolor=(1., 1., 1., 0.0), top='off', bottom='off', left='off', right='off') i += 1 eces = {} metrics = {} for quant in quantiles: eces[quant] = {} for i in range(len(quantiles)): scores = outputs[quantiles[i]] for j in range(len(groups) + 2): pt = (i * (len(groups) + 2) + j + 1) if plot: ax = fig.add_subplot(len(quantiles), len(groups) + 2, pt) else: ax = None if (j==1): eces[quantiles[i]]['all'] = plot_calibration_curve(ax, scores, e, t, a, folds, None, quantiles[i], strat=strat, adj=adj, plot=plot) if (j>1): eces[quantiles[i]][groups[j - 2]] = plot_calibration_curve(ax, scores, e, t, a, folds, groups[j - 2], quantiles[i], strat=strat, adj=adj, plot=plot) if (j==0): metrics[quantiles[i]] = plot_roc_curve(ax, scores, e, t, a, folds, groups, quantiles[i], plot=plot) for quant in quantiles: metrics[quant] = metrics[quant] + (eces[quant], ) if plot: plt.show() return metrics
mean_tprs = [] for i in set(folds): mean_tprs.append(np.interp(all_fpr, fprs[group][i], tprs[group][i])) # Finally the interpolated curves are averaged over to compute AUC.
random_line_split
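The suffix above shades mean_tpr ± std_tpr, where std_tpr = 1.96 · std / √10 is a normal-approximation 95% confidence band for the mean TPR across folds; note that the hard-coded √10 silently assumes 10 cv folds. A small sketch that derives the fold count from the data instead (mean_tpr_band is our own name):

import numpy as np

def mean_tpr_band(interp_tprs):
    # interp_tprs: (n_folds, n_points) array of TPRs on a shared FPR grid
    interp_tprs = np.asarray(interp_tprs)
    n_folds = interp_tprs.shape[0]
    mean_tpr = interp_tprs.mean(axis=0)
    half_width = 1.96 * interp_tprs.std(axis=0) / np.sqrt(n_folds)  # 95% CI
    return mean_tpr, mean_tpr - half_width, mean_tpr + half_width
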
plots.py
# coding=utf-8 # Copyright 2020 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities to plot the ROC and Calibration curves for survival models. This module has utility functions to generate ROC and Calibration plots for survival models at given horizons of time. Note that ideally both the ROC and Calibration curves need to be adjusted for censoring using IPCW estimates. Not designed to be called directly; these utilities are invoked when running a function from dcm.deep_cox_mixtures """ from dcm import baseline_models from dcm import models from dcm.calibration import calibration_curve import matplotlib as mpl from matplotlib import pyplot as plt from dcm.skmetrics import brier_score from dcm.skmetrics import cumulative_dynamic_auc from dcm.skmetrics import concordance_index_ipcw import numpy as np import logging logging.getLogger("matplotlib").setLevel(logging.CRITICAL) from sklearn.metrics import auc def plot_calibration_curve(ax, scores, e, t, a, folds, group, quant, strat='quantile', adj='IPCW', plot=True): """Function to plot Calibration Curve at a specified time horizon. Accepts a matplotlib figure instance, risk scores from a trained survival analysis model, and quantiles of event interest and generates an IPCW adjusted calibration curve. Args: ax: a matplotlib subfigure object. scores: risk scores P(T>t) issued by a trained survival analysis model (output of deep_cox_mixtures.models.predict_survival). e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. group: the demographic group to adjust for. quant: the event time quantile at which the model is to be evaluated. strat: Specifies how the bins are computed. One of: "quantile": Equal sized bins. "uniform": Uniformly stratified. adj (str): Determines if IPCW adjustment is carried out on a population or subgroup level. One of "IPCWpop", "IPCWcon" (not implemented). Returns: The Expected Calibration Error (ECE) at the horizon "quant". 
""" allscores = np.ones_like(t).astype('float') for fold in set(folds): allscores[folds == fold] = scores[fold] scores = allscores b_fc = (0, 0, 1, .4) r_fc = (1, 0, 0, .2) b_ec = (0, 0, 1, .8) r_ec = (1, 0, 0, .8) n_bins = 20 hatch = '//' fs = 16 prob_true_n, _, outbins, ece = calibration_curve( scores, e, t, a, group, quant, typ=adj, ret_bins=True, strat=strat, n_bins=n_bins) for d in range(len(prob_true_n)): binsize = outbins[d + 1] - outbins[d] binloc = (outbins[d + 1] + outbins[d]) / 2 gap = (prob_true_n[d] - binloc) if gap < 0: bottom = prob_true_n[d] else: bottom = prob_true_n[d] - abs(gap) if d == len(prob_true_n) - 1: lbl1 = 'Score' lbl2 = 'Gap' else: lbl1 = None lbl2 = None if plot: ax.bar( binloc, prob_true_n[d], width=binsize, facecolor=b_fc, edgecolor=b_ec, linewidth=2.5, label=lbl1) ax.bar( binloc, abs(gap), bottom=bottom, width=binsize, facecolor=r_fc, edgecolor=r_ec, linewidth=2.5, hatch=hatch, label=lbl2) d += 1 if plot: ax.plot([0, 1], [0, 1], c='k', ls='--', lw=2, zorder=100) ax.set_xlabel('Predicted Score', fontsize=fs) ax.set_ylabel('True Score', fontsize=fs) ax.legend(fontsize=fs) ax.set_title(str(group), fontsize=fs) ax.set_xlim(0, 1) ax.set_ylim(0, 1) ax.grid(ls=':', lw=2, zorder=-100, color='grey') ax.set_axisbelow(True) ax.text( x=0.030, y=.7, s='ECE=' + str(round(ece, 3)), size=fs, bbox=dict(boxstyle='round', fc='white', ec='grey', pad=0.2)) return ece def
(ax, scores, e, t, a, folds, groups, quant, plot=True): """Function to plot ROC at a specified time horizon. Accepts a matplotlib figure instance, risk scores from a trained survival analysis model, and quantiles of event interest and generates an IPCW adjusted ROC curve. Args: ax: a matplotlib subfigure object. scores: a dict of risk scores for each cv fold, issued by a trained survival analysis model. e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. groups: List of the demographics to adjust for. quant: the event time quantile at which the models are to be evaluated. Returns: Dicts of the AUC, time-dependent concordance (Ctd) and Brier score (BS) per group. """ fs = 16 fprs, tprs, tprs_std, ctds, brss = {}, {}, {}, {}, {} fprs['all'] = {} tprs['all'] = {} ctds['all'] = {} brss['all'] = {} for group in groups: fprs[group] = {} tprs[group] = {} ctds[group] = {} brss[group] = {} for fold in set(folds): ate = a[folds == fold] str_test = baseline_models.structure_for_eval_(t[folds == fold], e[folds == fold]) if len(set(folds)) == 1: atr = ate str_train = str_test else: atr = a[folds != fold] str_train = baseline_models.structure_for_eval_(t[folds != fold], e[folds != fold]) t_tr_max = np.max([t_[1] for t_ in str_train]) t_ = np.array([t_[1] for t_ in str_test]) clean = (t_<=t_tr_max) str_test = str_test[t_<=t_tr_max] ate = ate[t_<=t_tr_max] scores_f = scores[fold][clean] for group in groups: te_protg = (ate == group) tr_protg = (atr == group) try: roc_m = cumulative_dynamic_auc(str_train[tr_protg], str_test[te_protg], -scores_f[te_protg], [quant]) brs_m = brier_score(str_train[tr_protg], str_test[te_protg], scores_f[te_protg], quant) ctd_m = concordance_index_ipcw(str_train[tr_protg], str_test[te_protg], -scores_f[te_protg], quant)[0] except Exception: roc_m = cumulative_dynamic_auc(str_train, str_test[te_protg], -scores_f[te_protg], [quant]) brs_m = brier_score(str_train, str_test[te_protg], scores_f[te_protg], quant) ctd_m = concordance_index_ipcw(str_train, str_test[te_protg], -scores_f[te_protg], quant)[0] fprs[group][fold] = roc_m[0][0][1] tprs[group][fold] = roc_m[0][0][0] ctds[group][fold] = ctd_m brss[group][fold] = brs_m[1][0] roc_m = cumulative_dynamic_auc(str_train, str_test, -scores_f, [quant]) ctd_m = concordance_index_ipcw(str_train, str_test, -scores_f, quant)[0] brs_m = brier_score(str_train, str_test, scores_f, quant) fprs['all'][fold], tprs['all'][fold] = roc_m[0][0][1], roc_m[0][0][0] ctds['all'][fold] = ctd_m brss['all'][fold] = brs_m[1][0] cols = ['b', 'r', 'g'] roc_auc = {} ctds_mean = {} brss_mean = {} j = 0 for group in list(groups) + ['all']: all_fpr = np.unique(np.concatenate([fprs[group][i] for i in set(folds)])) # The ROC curves are interpolated at these points. mean_tprs = [] for i in set(folds): mean_tprs.append(np.interp(all_fpr, fprs[group][i], tprs[group][i])) # Finally the interpolated curves are averaged over to compute AUC. 
mean_tpr = np.mean(mean_tprs, axis=0) std_tpr = 1.96 * np.std(mean_tprs, axis=0) / np.sqrt(10) fprs[group]['macro'] = all_fpr tprs[group]['macro'] = mean_tpr tprs_std[group] = std_tpr roc_auc[group] = auc(fprs[group]['macro'], tprs[group]['macro']) ctds_mean[group] = np.mean([ctds[group][fold] for fold in folds]) brss_mean[group] = np.mean([brss[group][fold] for fold in folds]) lbl = str(group) lbl += ' AUC:' + str(round(roc_auc[group], 3)) lbl += ' Ctd:'+ str(round(ctds_mean[group], 3)) lbl += ' BS:'+ str(round(brss_mean[group], 3)) if plot: ax.plot( all_fpr, mean_tpr, c=cols[j], label=lbl) ax.fill_between( all_fpr, mean_tpr - std_tpr, mean_tpr + std_tpr, color=cols[j], alpha=0.25) j += 1 if plot: ax.set_xlabel('False Positive Rate', fontsize=fs) ax.set_ylabel('True Positive Rate', fontsize=fs) ax.legend(fontsize=fs) ax.set_xscale('log') return roc_auc, ctds_mean, brss_mean def plot_results(outputs, x, e, t, a, folds, groups, quantiles, strat='quantile', adj='KM', plot=True): """Function to plot the ROC and Calibration curves from a survival model. Accepts a trained survival analysis model, features and horizon of interest and generates the IPCW adjusted ROC curve and Calibration curve at pre-specified horizons of time. Args: outputs: a python dict with survival probabilities for each fold. x: a numpy array of input features. e: a numpy array of event indicators. t: a numpy array of event/censoring times. a: a numpy vector of protected attributes. folds: a numpy vector of cv folds. groups: List of the demographics to adjust for. quantiles: a list of event time quantiles at which the models are to be evaluated. strat: Specifies how the bins are computed. One of: "quantile": Equal sized bins. "uniform": Uniformly stratified. adj: Adjustment strategy for the Expected Calibration Error. One of: "KM": Kaplan-Meier (Default) "IPCW": Inverse Propensity of Censoring Returns: a dict mapping each quantile to the metrics (AUC, Ctd, Brier score, ECE) computed at that horizon. """ if plot: mpl.rcParams['hatch.linewidth'] = 2.0 fig, big_axes = plt.subplots( figsize=(8 * (len(groups) + 2), 6 * len(quantiles)), nrows=len(quantiles), ncols=1) plt.subplots_adjust(hspace=0.4) i = 0 for _, big_ax in enumerate(big_axes, start=1): big_ax.set_title( 'Receiver Operating Characteristic and Calibration at t=' + str(quantiles[i]) + '\n', fontsize=16) big_ax.tick_params( labelcolor=(1., 1., 1., 0.0), top='off', bottom='off', left='off', right='off') i += 1 eces = {} metrics = {} for quant in quantiles: eces[quant] = {} for i in range(len(quantiles)): scores = outputs[quantiles[i]] for j in range(len(groups) + 2): pt = (i * (len(groups) + 2) + j + 1) if plot: ax = fig.add_subplot(len(quantiles), len(groups) + 2, pt) else: ax = None if (j==1): eces[quantiles[i]]['all'] = plot_calibration_curve(ax, scores, e, t, a, folds, None, quantiles[i], strat=strat, adj=adj, plot=plot) if (j>1): eces[quantiles[i]][groups[j - 2]] = plot_calibration_curve(ax, scores, e, t, a, folds, groups[j - 2], quantiles[i], strat=strat, adj=adj, plot=plot) if (j==0): metrics[quantiles[i]] = plot_roc_curve(ax, scores, e, t, a, folds, groups, quantiles[i], plot=plot) for quant in quantiles: metrics[quant] = metrics[quant] + (eces[quant], ) if plot: plt.show() return metrics
plot_roc_curve
identifier_name
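plot_calibration_curve above begins by stitching the per-fold score arrays back into a single vector aligned with the full-length t/e/a arrays, using boolean masks over the fold labels. A toy sketch of that pattern; the arrays here are made up for illustration:

import numpy as np

folds = np.array([0, 1, 0, 1, 0])                            # toy cv-fold labels
scores = {0: np.array([.2, .4, .6]), 1: np.array([.1, .3])}  # per-fold scores

# scatter each fold's scores back to the positions that fold occupied
allscores = np.empty_like(folds, dtype=float)
for fold in set(folds):
    allscores[folds == fold] = scores[fold]
print(allscores)  # [0.2 0.1 0.4 0.3 0.6]
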
main.rs
use std::{io, thread}; use std::num::ParseIntError; use std::time::Duration; fn main() { memory(); } struct Punto { _x: i8, _y: i8 } fn memory() { let _punto = Punto { _x: 1, _y: 2 }; println!("Inicio address: {:p}", &_punto); _address(&_punto); _copy_value(_punto); println!("------------------",); let caja = Box::new(Punto { _x: 1, _y: 2 }); println!("Inicio address: {:p}", caja); _address(&caja); _copy_value(*caja); println!("------------------",); let otra_caja = Box::new(Punto { _x: 1, _y: 2 }); println!("Inicio address: {:p}", otra_caja); //* Dereferencing performs a copy: *otra_caja; let otro_contenido = *otra_caja; println!("Contenido address: {:p}", &otro_contenido); } fn _address(p: &Punto) { println!("Puntero address: {:p}", p); } fn _copy_value(p: Punto) { println!("Valor address: {:p}", &p); } fn _vectores() { let x = vec!["Hola", "mundo"]; let _y = x[0]; } fn _input() { println!("Introduce data ..."); let mut data = String::new(); io::stdin().read_line(&mut data) .ok() .expect("Fallo al leer linea"); print!("Data: {}", data); } fn _parseo() { let num = "3p"; let result = num.parse(); let number = match result { Ok(n) => n, Err(_) => 0 }; assert_eq!(0, number); } fn _result() { let num1: Result<i32, ParseIntError> = "2".parse(); let num2: Result<i32, ParseIntError> = "3".parse(); let result = num1 .and_then(|x| num2 .map(|y| x + y)); let end = match result { Ok(n) => n.to_string(), Err(e) => e.to_string() }; assert_eq!("5", end); } fn _result_match() { let result: Result<i32, &str> = Ok(5); let number = match result { Ok(x) => x, Err(_e) => 0, }; assert_eq!(5, number) } fn _threads() { let handles: Vec<_> = (0..10).map(|x| { thread::spawn(move|| { println!("{}", x) }) }).collect(); for h in handles { h.join().ok().expect("No se pudo unir un hilo!"); } } fn _threads_join() { thread::spawn(|| { thread::sleep(Duration::from_millis(3000)); println!("Hola desde 1"); }); let thread2 = thread::spawn(|| { thread::sleep(Duration::from_millis(3000)); println!("Hola desde 2"); }); println!("end..."); thread2.join().expect("Error"); //thread::sleep(Duration::from_millis(4000)); } fn _iteradores() { let mut rango = 0..10; loop { match rango.next() { Some(x) => { println!("{}", x); }, None => { break } } } let nums = vec![1, 2, 3]; for num in nums.iter() { println!("{}", num); } } fn _consumidores() { let _uno_hasta_cien = (1..101).collect::<Vec<i32>>(); let _uno_hasta_cien = (1..101).collect::<Vec<_>>(); let mayores_a_cuarenta_y_dos = (0..100) .find(|x| *x > 42); match mayores_a_cuarenta_y_dos { Some(_) => println!("Tenemos algunos números!"), None => println!("No se encontraron números :("), } let suma = (1..4).fold(0, |suma, x| suma + x); //6 assert_eq!(6, suma); } fn _adaptadores_de_iterador() { let _nums = (1..100).map(|x| x + 1).collect::<Vec<_>>(); let _nums = (1..30) .filter(|&x| x % 2 == 0) .filter(|&x| x % 3 == 0) .take(5) .collect::<Vec<i32>>(); for x in (1..11).map(|x| x + 1).collect::<Vec<_>>() { println!("{}", x); } } fn _hilos() { thread::spawn(|| { println!("Hola desde un hilo!"); }); thread::sleep(Duration::from_millis(10)); } fn _thread_handle() { let handle = thread::spawn(|| { "Hola desde un hilo!" }); //unwrap() will panic ( panic!
) if the Result is Err assert_eq!("Hola desde un hilo!", handle.join().unwrap()); } fn _panico_hilo() { let valor = 1; let result = thread::spawn(move || { if valor % 2 == 0 { panic!("ups!"); } 1 }).join(); let resultado = match result { Ok(n) => n, Err(_e) => 0 }; assert_eq!(1, resultado); } fn _panico_unreachable() { enum Estado { _Activo, _Inactivo, Desconocido } use Estado::{_Activo, _Inactivo, Desconocido}; let estado = Desconocido; let _numero = match estado { _Activo => 1, _Inactivo => 0, _ => unreachable!() }; println!("Linea no alcanzable") } fn _option() { let s = "foo"; assert_eq!(s.find('f'), Some(0)); assert_eq!(s.find('z'), None); assert_eq!(s.find('f').map(|p| p + 1), Some(1)); assert_eq!(s.find('z').map(|p| p + 1), None); } fn _option_match() { let option = Some(5); let number = match option { Some(x) => x, None => 0, }; assert_eq!(5, number); } fn _result_funciones() { enum Error { Tecnico } let f: fn(i32) -> Result<i32, Error> = |num: i32| match num { 1 => Ok(num + 1), _ => Err(Error::Tecnico) }; /*fn f(num: i32) -> Result<i32, Error> { match num { 1 => Ok(num + 1), _ => Err(Error::Tecnico) } }*/ assert!(f(1).is_ok()); assert!(f(2).is_err()); let result: Result<i32, &str> = f(2) .map(|ok| ok) .map_err(|_err| "Error =("); match result { Ok(n) => println!("{}", n), Err(e) => println!("{}", e) }; } fn _panic_result() { let result: Result<i32, &str> = Ok(1); //let result: Result<i32, &str> = Err("Error =("); let valor = result.ok().expect("Error!"); assert_eq!(1, valor) } fn _try() { fn _parser(num: &str) -> Result<i32, ParseIntError> { num.parse() } fn f(x: &str, y: &str) -> Result<i32, ParseIntError> { let num1 = _parser(x); let num2 = _parser(y); //let resultado = _parser(x) ? + _parser(y)?; let resultado = num1? + num2?; Ok(resultado) } assert!(f("1", "2").is_ok()); assert!(f("1P", "2").is_err()); match f("1P", "2") { Ok(n) => println!("Ok: {}", n), Err(e) => println!("Error: {}", e) } } fn _try_azucar_sintactico() { fn foo(n: i32) -> Result<i32, String> { if n % 2 == 0 { Ok(1) } else { Err(String::from("Error")) } } fn bar() -> Result<i32, String> { Ok(2) } fn foo_bar() -> Result<i32, String> { let res = foo(2)?
+ bar()?; Ok(res) } let fb = foo_bar(); assert!(fb.is_ok()); } fn _apuntadores_a_funcion() { fn mas_uno(i: i32) -> i32 { i + 1 } let f: fn(i32) -> i32 = mas_uno; assert_eq!(2, f(1)); } fn _primitivos() { let _a: bool = false; let _b: char = 'x'; let _c: i32 = 42; //i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64 } fn _arreglos() { let mut m: [i32; 3] = [1, 2, 3]; m[2] = 5; assert_eq!(5, m[2]); } fn _slices() { let a: [i32; 5] = [0, 1, 2, 3, 4]; let middle: &[i32] = &a[1..4]; assert_eq!(1, middle[0]); } fn _tuplas() { let (x, y) = (1, "Hello"); assert_eq!(1, x); assert_eq!("Hello", y); let z = (1, "Hello"); assert_eq!(1, z.0); } fn _expresiones() { let x = 5; let y = if x == 5 { 10 } else { 15 }; assert_eq!(10, y) } fn _while() { let mut x = 0; while x < 10 { x += 1; } assert_eq!(10, x) } fn _for() { for x in 0..10 { println!("{}", x); } } fn _loop() { let mut x = 0; loop { x += 1; if x >= 10 { break } } assert_eq!(10, x) } fn _etiquetas_loop() { 'exterior: for x in 0..10 { 'interior: for y in 0..10 { if x % 2 == 0 { continue 'exterior; } // continues the loop over x if y % 2 == 0 { continue 'interior; } // continues the loop over y println!("x: {}, y: {}", x, y); } } } fn _enumerate() { for (i,j) in (5..10).enumerate() { println!("i = {} y j = {}", i, j); } let lineas = "hola\nmundo".lines(); for (numero_linea, linea) in lineas.enumerate() { println!("{}: {}", numero_linea, linea); } } fn _pertenencia() { let v = vec![1, 2, 3]; let v2 = v; println!("v2[0] es: {}", v2[0]); //println!("v[0] es: {}", v[0]); // Error borrow of moved value: `v` } fn _pertenencia_funcion() { fn tomar(_v: Vec<i32>) { // Something } let v = vec![1, 2, 3]; tomar(v); //println!("v[0] es: {}", v[0]); // Error borrow of moved value: `v` } fn _copy() { // i32: all primitive types implement the Copy trait // A copy is made and ownership is not moved let v: i32 = 1; let _v2 = v; println!("v es: {}", v); // =) } fn _devolver_pertenencia() { fn _foo(v: Vec<i32>) -> Vec<i32> { v } fn foo(v1: Vec<i32>, v2: Vec<i32>) -> (Vec<i32>, Vec<i32>, i32) { (v1, v2, 42) } let v1 = vec![1, 2, 3]; let v2 = vec![1, 2, 3]; let (v1, _v2, _r) = foo(v1, v2); assert_eq!(1, v1[0]); } fn _prestamo() { fn foo(_v1: &Vec<i32>, _v2: &Vec<i32>) -> i32 { 42 } let v1 = vec![1, 2, 3]; let _v2 = vec![1, 2, 3]; let _r = foo(&v1, &_v2); // we can use v1 and v2 here assert_eq!(1, v1[0]); } fn _mutabilidad() { let mut x = 5; assert_eq!(5, x); x = 6; assert_eq!(6, x); } fn _estructuras() { struct Punto { x: i32, y: i32, } let origen = Punto { x: 0, y: 0 }; assert_eq!(0, origen.x); assert_eq!(0, origen.y); } fn _sintaxis_de_actualizacion() { struct Punto3d { _x: i32, _y: i32, _z: i32, } let origen = Punto3d { _x: 1, _y: 2, _z: 3 }; let punto = Punto3d { _y: 1, ..
origen }; assert_eq!(3, punto._z); } fn _estructuras_pertenencia() { struct Punto { x: i32, y: i32, } fn foo(punto: Punto) -> i32 { punto.x + punto.y } let origen = Punto { x: 1, y: 2 }; let suma = foo(origen); println!("{}", suma); //println!("Punto x {}", origen.x); // Error borrow of moved value: `origen` } fn _estructuras_prestamo() { struct Punto { x: i32, y: i32, } fn foo(punto: &Punto) -> i32 { punto.x + punto.y } let origen = Punto { x: 1, y: 2 }; let suma = foo(&origen); assert_eq!(3, suma); assert_eq!(1, origen.x); } fn _tupla_estructuras() { struct Color(i32, i32, i32); let azul = Color(0, 0, 255); assert_eq!(255, azul.2); } fn _estructuras_tipo_unitario() { struct Electron; let _e = Electron; } fn _enumeraciones() { enum Mensaje { Salir, CambiarColor(i32, i32, i32), Mover { _x: i32, _y: i32 }, Escribir(String), } let _salir = Mensaje::Salir; let _cambiar_color = Mensaje::CambiarColor(0, 0, 255); use Mensaje::{Mover}; let _mover = Mover {_x: 0, _y: 2}; let _escribir = Mensaje::Escribir("Hello".to_string()); } fn _match_en_enums() { enum _Mensaje { Salir, CambiarColor(i32, i32, i32), Mover { x: i32, _y: i32 }, Escribir(String), } fn _salir() { /* ... */ } fn _cambiar_color(_r: i32, _g: i32, _b: i32) { /* ... */ } fn _mover_cursor(_x: i32, _y: i32) { /* ... */ } fn _procesar_mensaje(msj: _Mensaje) { match msj { _Mensaje::Salir => _salir(), _Mensaje::CambiarColor(r, g, b) => _cambiar_color(r, g, b), _Mensaje::Mover { x, _y: y } => _mover_cursor(x, y), _Mensaje::Escribir(s) => println!("{}", s),
let x = 2; let num = match x { 1 | 2 => "1, 2", 3 => "3", _ => "...", }; assert_eq!("1, 2", num); } fn _match_rangos() { let x = 3; let resultado = match x { 1 ..= 5 => "uno al cinco", _ => "cualquier cosa", }; assert_eq!("uno al cinco", resultado); let y = 's'; let letra = match y { 'a' ..= 'j' => "letra temprana", 'k' ..= 'z' => "letra tardia", _ => "algo mas" }; assert_eq!("letra tardia", letra); } fn _destructuracion() { struct Punto { x: i32, y: i32, } let origen = Punto { x: 0, y: 2 }; match origen { Punto { x, y } => println!("({},{})", x, y), } match origen { Punto { x, .. } => println!("x es {}", x) } } fn _enlaces_a_variable() { let x = 1; match x { e @ 1 ..= 5 => println!("valor de rango {} obtenido", e), _ => println!("lo que sea"), } } fn _guardias() { enum EnteroOpcional { Valor(i32), _Faltante, } let x = EnteroOpcional::Valor(5); match x { EnteroOpcional::Valor(i) if i > 5 => println!("Entero mayor a cinco obtenido!"), EnteroOpcional::Valor(..) => println!("Entero obtenido!"), EnteroOpcional::_Faltante => println!("Sin suerte."), } } fn _multiples_patrones_y_guardias() { let x = 4; let y = false; let resultado = match x { 4 | 5 if y => "si", _ => "no" }; assert_eq!("no", resultado); } fn _llamadas_a_metodos() { struct Circulo { _x: f64, _y: f64, radio: f64, } impl Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo { _x: 0.0, _y: 0.0, radio: 2.0 }; println!("{}", c.area()); } fn _metodos_en_cadena() { struct Circulo { x: f64, y: f64, radio: f64, } impl Circulo { fn agrandar(&self, incremento: f64) -> Circulo { Circulo { x: self.x, y: self.y, radio: self.radio + incremento } } fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo { x: 0.0, y: 0.0, radio: 2.0 }; println!("{}", c.area()); let d = c.agrandar(2.0).area(); println!("{}", d); } fn _funciones_asociadas() { struct Circulo { _x: f64, _y: f64, radio: f64, } impl Circulo { fn new(x: f64, y: f64, radio: f64) -> Circulo { Circulo { _x: x, _y: y, radio: radio, } } } let c = Circulo::new(0.0, 0.0, 2.0); assert_eq!(2.0, c.radio); } fn _builder() { struct Circulo { x: f64, y: f64, radio: f64 } impl Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } struct CirculoBuilder { x: f64, y: f64, radio: f64 } impl CirculoBuilder { fn new() -> CirculoBuilder { CirculoBuilder { x: 0.0, y: 0.0, radio: 1.0, } } fn x(&mut self, coordenada: f64) -> &mut CirculoBuilder { self.x = coordenada; self } fn y(&mut self, coordenada: f64) -> &mut CirculoBuilder { self.y = coordenada; self } fn radio(&mut self, radio: f64) -> &mut CirculoBuilder { self.radio = radio; self } fn build(&self) -> Circulo { Circulo { x: self.x, y: self.y, radio: self.radio } } } let c = CirculoBuilder::new() .x(1.0) .y(2.0) .radio(2.0) .build(); println!("area: {}", c.area()); println!("x: {}", c.x); println!("y: {}", c.y); assert_eq!(2.0, c.y); } fn _cadenas_de_caracteres() { let _saludo: &str = "Hola."; let mut s: String = "Hola".to_string(); s.push_str(", mundo."); assert_eq!("Hola, mundo.", s); } fn _genericos() { enum _Option<T> { _Some(T), _None, } let _x: _Option<i32> = _Option::_Some(5); } fn _funciones_genericas() { fn foo<T>(x: T) -> T { x } let num = foo(1); assert_eq!(1, num); } fn _structs_genericos() { struct Info<T1, T2> { x: T1, y: T2, } impl<T1, T2> Info<T1, T2> { fn foo(&self) { // } } let info = Info { x: 1, y: "=)" }; info.foo(); assert_eq!(1, info.x); assert_eq!("=)", info.y); } fn _traits() { trait Area { fn area(&self) -> 
f64; } struct Circulo { _x: f64, _y: f64, radio: f64 } impl Area for Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo{ _x:0.0, _y:0.0, radio: 2.0 }; let a = c.area(); println!("{}", a); // Generics fn imprimir_area<T: Area>(figura: T) { println!("Esta figura tiene un area de {}", figura.area()); } imprimir_area(c) } fn _multiples_limites_de_trait() { use std::fmt::Display; fn foo<T: Clone, K: Clone + Display>(x: T, y: K) -> String { let _x_clone = x.clone(); let y_clone = y.clone(); format!("{}", y_clone) } fn bar<T, K>(x: T, y: K) -> String where T: Clone, K: Clone + Display { let _x_clone = x.clone(); let y_clone = y.clone(); format!("{}", y_clone) } let r_foo = foo("Hola", "mundo"); let r_bar = bar("Hola", "mundo"); assert_eq!(r_foo, r_bar); } fn _metodos_por_defecto() { trait Foo { fn es_valido(&self) -> bool; fn es_invalido(&self) -> bool { !self.es_valido() } } struct Default; impl Foo for Default { fn es_valido(&self) -> bool { true } } let default = Default; assert!(default.es_valido()); assert!(!default.es_invalido()); } fn _metodos_por_defecto_bar() { trait Bar { fn plus_one(x: i32) -> i32 { x + 1 } } struct ImplBar; impl Bar for ImplBar {} let sum = ImplBar::plus_one(2); assert_eq!(3, sum); } fn _herencia() { trait Foo { fn foo(&self); } trait FooBar : Foo { fn foobar(&self); } struct Baz; impl Foo for Baz { fn foo(&self) { println!("foo"); } } impl FooBar for Baz { fn foobar(&self) { println!("foobar"); } } let baz = Baz; baz.foo(); baz.foobar(); } fn _drop() { struct HasDrop; impl Drop for HasDrop { fn drop(&mut self) { println!("Dropeando!"); } } let _x = HasDrop; } fn _if_let() { fn foo(x: i32) { println!("Number: {}", x); } let option = Some(5); if let Some(x) = option { foo(x); } } fn _closures() { let plus_one = |x: i32| x + 1; assert_eq!(2, plus_one(1)); //let plus_one: fn(i32) -> i32 = |x: i32| x + 1; /* fn suma_uno_v1 (x: i32) -> i32 { x + 1 } let suma_uno_v2 = |x: i32| -> i32 { x + 1 }; let suma_uno_v3 = |x: i32| x + 1 ; */ } fn _closures_como_argumentos() { fn llamar_con_uno<F>(closure: F) -> i32 where F : Fn(i32) -> i32 { closure(1) } let respuesta = llamar_con_uno(|x| x + 2); assert_eq!(3, respuesta); } fn _retornando_closures() { fn factory() -> Box<dyn Fn(i32) -> i32> { let num = 5; Box::new(move |x| x + num) } let f = factory(); let respuesta = f(1); assert_eq!(6, respuesta); } fn _futures() { use futures::executor::block_on; async fn hello_world() { println!("hello, world!"); } let future = hello_world(); block_on(future); } fn _await() { async fn first_function() -> u32 { thread::sleep(Duration::from_millis(2000)); println!("1"); 1 } async fn second_function() -> u32 { thread::sleep(Duration::from_millis(2000)); println!("2"); 2 } async fn another_function() { let first = first_function().await; let second = second_function().await; let sum = first + second; println!("{}", sum); } use futures::executor::block_on; block_on(another_function()); println!("end..."); } fn _futures_join() { use futures::join; use futures::executor::block_on; async fn get_book() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("get_book"); 1 } async fn get_music() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("get_music"); 1 } async fn get_book_and_music() -> (u32, u32) { let book_fut = get_book(); let music_fut = get_music(); join!(book_fut, music_fut) } block_on(get_book_and_music()); } fn _futures_join_2() { use futures::executor::block_on; async fn learn_song() -> u32 { 
thread::sleep(Duration::from_millis(3000)); println!("learn_song"); 1 } async fn sing_song(_song: u32) -> u32 { thread::sleep(Duration::from_millis(3000)); println!("sing_song"); 1 } async fn dance() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("dance"); 1 } async fn learn_and_sing() { let song = learn_song().await; sing_song(song).await; } async fn async_main() { let f1 = learn_and_sing(); let f2 = dance(); futures::join!(f1, f2); } block_on(async_main()); }
}; } } fn _multiples_patrones() {
random_line_split
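This record's fim_type is random_line_split: the file is cut at an arbitrary point, and the original main.rs is recovered by concatenating prefix, middle, and suffix. Below is a minimal Rust sketch of that reconstruction invariant, assuming each record is a simple (prefix, middle, suffix) string triple; the FimRecord type and its field values are hypothetical illustration, not part of the dataset itself.

// Editorial sketch (hypothetical): a FIM record reconstructs its
// source file by plain concatenation, whatever the fim_type is.
struct FimRecord {
    file_name: String,
    prefix: String,
    middle: String,
    suffix: String,
    fim_type: String,
}

impl FimRecord {
    // The original file is recovered by concatenation.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}

fn main() {
    let record = FimRecord {
        file_name: "main.rs".to_string(),
        prefix: "fn main() { let x".to_string(),
        middle: " = 2;".to_string(),
        suffix: " println!(\"{}\", x); }".to_string(),
        fim_type: "random_line_split".to_string(),
    };
    assert_eq!(record.reconstruct(), "fn main() { let x = 2; println!(\"{}\", x); }");
    println!("{} ({})", record.file_name, record.fim_type);
}

The same concatenation invariant holds for the other fim_type records below; only the choice of split point differs.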
main.rs
use std::{io, thread}; use std::num::ParseIntError; use std::time::Duration; fn main() { memory(); } struct Punto { _x: i8, _y: i8 } fn memory() { let _punto = Punto { _x: 1, _y: 2 }; println!("Inicio address: {:p}", &_punto); _address(&_punto); _copy_value(_punto); println!("------------------",); let caja = Box::new(Punto { _x: 1, _y: 2 }); println!("Inicio address: {:p}", caja); _address(&caja); _copy_value(*caja); println!("------------------",); let otra_caja = Box::new(Punto { _x: 1, _y: 2 }); println!("Inicio address: {:p}", otra_caja); //* Realiza una copia *otra_caja; let otro_contenido = *otra_caja; println!("Contenido address: {:p}", &otro_contenido); } fn _address(p: &Punto) { println!("Puntero address: {:p}", p); } fn _copy_value(p: Punto) { println!("Valor address: {:p}", &p); } fn _vectores() { let x = vec!["Hola", "mundo"]; let _y = x[0]; } fn _input() { println!("Introduce data ..."); let mut data = String::new(); io::stdin().read_line(&mut data) .ok() .expect("Fallo al leer linea"); print!("Data: {}", data); } fn _parseo() { let num = "3p"; let result = num.parse(); let number = match result { Ok(n) => n, Err(_) => 0 }; assert_eq!(0, number); } fn _result() { let num1: Result<i32, ParseIntError> = "2".parse(); let num2: Result<i32, ParseIntError> = "3".parse(); let result = num1 .and_then(|x| num2 .map(|y| x + y)); let end = match result { Ok(n) => n.to_string(), Err(e) => e.to_string() }; assert_eq!("5", end); } fn _result_match() { let result: Result<i32, &str> = Ok(5); let number = match result { Ok(x) => x, Err(_e) => 0, }; assert_eq!(5, number) } fn _threads() { let handles: Vec<_> = (0..10).map(|x| { thread::spawn(move|| { println!("{}", x) }) }).collect(); for h in handles { h.join().ok().expect("No se pudo unir un hilo!"); } } fn _threads_join() { thread::spawn(|| { thread::sleep(Duration::from_millis(3000)); println!("Hola desde 1"); }); let thread2 = thread::spawn(|| { thread::sleep(Duration::from_millis(3000)); println!("Hola desde 2"); }); println!("end..."); thread2.join().expect("Error"); //thread::sleep(Duration::from_millis(4000)); } fn _iteradores() { let mut rango = 0..10; loop { match rango.next() { Some(x) => { println!("{}", x); }, None => { break } } } let nums = vec![1, 2, 3]; for num in nums.iter() { println!("{}", num); } } fn _consumidores() { let _uno_hasta_cien = (1..101).collect::<Vec<i32>>(); let _uno_hasta_cien = (1..101).collect::<Vec<_>>(); let mayores_a_cuarenta_y_dos = (0..100) .find(|x| *x > 42); match mayores_a_cuarenta_y_dos { Some(_) => println!("Tenemos algunos números!"), None => println!("No se encontraron números :("), } let suma = (1..4).fold(0, |suma, x| suma + x); //6 assert_eq!(6, suma); } fn _adaptadores_de_iterador() { let _nums = (1..100).map(|x| x + 1).collect::<Vec<_>>(); let _nums = (1..30) .filter(|&x| x % 2 == 0) .filter(|&x| x % 3 == 0) .take(5) .collect::<Vec<i32>>(); for x in (1..11).map(|x| x + 1).collect::<Vec<_>>() { println!("{}", x); } } fn _hilos() { thread::spawn(|| { println!("Hola desde un hilo!"); }); thread::sleep(Duration::from_millis(10)); } fn _thread_handle() { let handle = thread::spawn(|| { "Hola desde un hilo!" }); //unwrap() hará un pánico ( panic! 
) si el Result es Err assert_eq!("Hola desde un hilo!", handle.join().unwrap()); } fn _panico_hilo() { let valor = 1; let result = thread::spawn(move || { if valor % 2 == 0 { panic!("ups!"); } 1 }).join(); let resultado = match result { Ok(n) => n, Err(_e) => 0 }; assert_eq!(1, resultado); } fn _panico_unreachable() { enum Estado { _Activo, _Inactivo, Desconocido } use Estado::{_Activo, _Inactivo, Desconocido}; let estado = Desconocido; let _numero = match estado { _Activo => 1, _Inactivo => 0, _ => unreachable!() }; println!("Linea no alcanzable") } fn _option() { let s = "foo"; assert_eq!(s.find('f'), Some(0)); assert_eq!(s.find('z'), None); assert_eq!(s.find('f').map(|p| p + 1), Some(1)); assert_eq!(s.find('z').map(|p| p + 1), None); } fn _option_match() { let option = Some(5); let number = match option { Some(x) => x, None => 0, }; assert_eq!(5, number); } fn _result_funciones() { enum Error { Tecnico } let f: fn(i32) -> Result<i32, Error> = |num: i32| match num { 1 => Ok(num + 1), _ => Err(Error::Tecnico) }; /*fn f(num: i32) -> Result<i32, Error> { match num { 1 => Ok(num + 1), _ => Err(Error::Tecnico) } }*/ assert!(f(1).is_ok()); assert!(f(2).is_err()); let result: Result<i32, &str> = f(2) .map(|ok| ok) .map_err(|_err| "Error =("); match result { Ok(n) => println!("{}", n), Err(e) => println!("{}", e) }; } fn _panic_result() { let result: Result<i32, &str> = Ok(1); //let result: Result<i32, &str> = Err("Error =("); let valor = result.ok().expect("Error!"); assert_eq!(1, valor) } fn _try() { fn _parser(num: &str) -> Result<i32, ParseIntError> { num.parse() } fn f(x: &str, y: &str) -> Result<i32, ParseIntError> { let num1 = _parser(x); let num2 = _parser(y); //let resultado = _parser(x) ? + _parser(y)?; let resultado = num1? + num2?; Ok(resultado) } assert!(f("1", "2").is_ok()); assert!(f("1P", "2").is_err()); match f("1P", "2") { Ok(n) => println!("Ok: {}", n), Err(e) => println!("Error: {}", e) } } fn _try_azucar_sintactico() { fn foo(n: i32) -> Result<i32, String> { if n % 2 == 0 { Ok(1) } else { Err(String::from("Error")) } } fn bar() -> Result<i32, String> { Ok(2) } fn foo_bar() -> Result<i32, String> { let res = foo(2)? 
+ bar()?; Ok(res) } let fb = foo_bar(); assert!(fb.is_ok()); } fn _apuntadores_a_funcion() { fn mas_uno(i: i32) -> i32 { i + 1 } let f: fn(i32) -> i32 = mas_uno; assert_eq!(2, f(1)); } fn _primitivos() { let _a: bool = false; let _b: char = 'x'; let _c: i32 = 42; //i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64 } fn _arreglos() { let mut m: [i32; 3] = [1, 2, 3]; m[2] = 5; assert_eq!(5, m[2]); } fn _slices() { let a: [i32; 5] = [0, 1, 2, 3, 4]; let middle: &[i32] = &a[1..4]; assert_eq!(1, middle[0]); } fn _tuplas() { let (x, y) = (1, "Hello"); assert_eq!(1, x); assert_eq!("Hello", y); let z = (1, "Hello"); assert_eq!(1, z.0); } fn _expresiones() { let x = 5; let y = if x == 5 { 10 } else { 15 }; assert_eq!(10, y) } fn _while() { let mut x = 0; while x < 10 { x += 1; } assert_eq!(10, x) } fn _for() { for x in 0..10 { println!("{}", x); } } fn _loop() { let mut x = 0; loop { x += 1; if x >= 10 { break } } assert_eq!(10, x) } fn _etiquetas_loop() { 'exterior: for x in 0..10 { 'interior: for y in 0..10 { if x % 2 == 0 { continue 'exterior; } // continua el ciclo por encima de x if y % 2 == 0 { continue 'interior; } // continua el ciclo por encima de y println!("x: {}, y: {}", x, y); } } } fn _enumerate() { for (i,j) in (5..10).enumerate() { println!("i = {} y j = {}", i, j); } let lineas = "hola\nmundo".lines(); for (numero_linea, linea) in lineas.enumerate() { println!("{}: {}", numero_linea, linea); } } fn _pertenencia() { let v = vec![1, 2, 3]; let v2 = v; println!("v2[0] es: {}", v2[0]); //println!("v[0] es: {}", v[0]); // Error borrow of moved value: `v` } fn _pertenencia_funcion() { fn tomar(_v: Vec<i32>) { // Algo } let v = vec![1, 2, 3]; tomar(v); //println!("v[0] es: {}", v[0]); // Error borrow of moved value: `v` } fn _copy() { // i32 , Todos los tipos primitivos implementan el trait Copy // Se realiza una copia y su pertenencia no es movida let v: i32 = 1; let _v2 = v; println!("v es: {}", v); // =) } fn _devolver_pertenencia() { fn _foo(v: Vec<i32>) -> Vec<i32> { v } fn foo(v1: Vec<i32>, v2: Vec<i32>) -> (Vec<i32>, Vec<i32>, i32) { (v1, v2, 42) } let v1 = vec![1, 2, 3]; let v2 = vec![1, 2, 3]; let (v1, _v2, _r) = foo(v1, v2); assert_eq!(1, v1[0]); } fn _prestamo() { fn foo(_v1: &Vec<i32>, _v2: &Vec<i32>) -> i32 { 42 } let v1 = vec![1, 2, 3]; let _v2 = vec![1, 2, 3]; let _r = foo(&v1, &_v2); // podemos usar a v1 y v2 aqui assert_eq!(1, v1[0]); } fn _mutabilidad() { let mut x = 5; assert_eq!(5, x); x = 6; assert_eq!(6, x); } fn _estructuras() { struct Punto { x: i32, y: i32, } let origen = Punto { x: 0, y: 0 }; assert_eq!(0, origen.x); assert_eq!(0, origen.y); } fn _sintaxis_de_actualizacion() { struct Punto3d { _x: i32, _y: i32, _z: i32, } let origen = Punto3d { _x: 1, _y: 2, _z: 3 }; let punto = Punto3d { _y: 1, .. origen }; assert_eq!(3, punto._z); } fn _estructuras_pertenencia() { struct Punto { x: i32, y: i32, } fn foo(punto: Punto) -> i32 { punto.x + punto.y } let origen = Punto { x: 1, y: 2 }; let suma = foo(origen); println!("{}", suma); //println!("Punto x {}", origen.x); // Error borrow of moved value: `origen` } fn _estructuras_prestamo() { struct Punto { x: i32, y: i32, } fn foo(punto: &Punto) -> i32 {
let origen = Punto { x: 1, y: 2 }; let suma = foo(&origen); assert_eq!(3, suma); assert_eq!(1, origen.x); } fn _tupla_estructuras() { struct Color(i32, i32, i32); let azul = Color(0, 0, 255); assert_eq!(255, azul.2); } fn _estructuras_tipo_unitario() { struct Electron; let _e = Electron; } fn _enumeraciones() { enum Mensaje { Salir, CambiarColor(i32, i32, i32), Mover { _x: i32, _y: i32 }, Escribir(String), } let _salir = Mensaje::Salir; let _cambiar_color = Mensaje::CambiarColor(0, 0, 255); use Mensaje::{Mover}; let _mover = Mover {_x: 0, _y: 2}; let _escribir = Mensaje::Escribir("Hello".to_string()); } fn _match_en_enums() { enum _Mensaje { Salir, CambiarColor(i32, i32, i32), Mover { x: i32, _y: i32 }, Escribir(String), } fn _salir() { /* ... */ } fn _cambiar_color(_r: i32, _g: i32, _b: i32) { /* ... */ } fn _mover_cursor(_x: i32, _y: i32) { /* ... */ } fn _procesar_mensaje(msj: _Mensaje) { match msj { _Mensaje::Salir => _salir(), _Mensaje::CambiarColor(r, g, b) => _cambiar_color(r, g, b), _Mensaje::Mover { x, _y: y } => _mover_cursor(x, y), _Mensaje::Escribir(s) => println!("{}", s), }; } } fn _multiples_patrones() { let x = 2; let num = match x { 1 | 2 => "1, 2", 3 => "3", _ => "...", }; assert_eq!("1, 2", num); } fn _match_rangos() { let x = 3; let resultado = match x { 1 ..= 5 => "uno al cinco", _ => "cualquier cosa", }; assert_eq!("uno al cinco", resultado); let y = 's'; let letra = match y { 'a' ..= 'j' => "letra temprana", 'k' ..= 'z' => "letra tardia", _ => "algo mas" }; assert_eq!("letra tardia", letra); } fn _destructuracion() { struct Punto { x: i32, y: i32, } let origen = Punto { x: 0, y: 2 }; match origen { Punto { x, y } => println!("({},{})", x, y), } match origen { Punto { x, .. } => println!("x es {}", x) } } fn _enlaces_a_variable() { let x = 1; match x { e @ 1 ..= 5 => println!("valor de rango {} obtenido", e), _ => println!("lo que sea"), } } fn _guardias() { enum EnteroOpcional { Valor(i32), _Faltante, } let x = EnteroOpcional::Valor(5); match x { EnteroOpcional::Valor(i) if i > 5 => println!("Entero mayor a cinco obtenido!"), EnteroOpcional::Valor(..) 
=> println!("Entero obtenido!"), EnteroOpcional::_Faltante => println!("Sin suerte."), } } fn _multiples_patrones_y_guardias() { let x = 4; let y = false; let resultado = match x { 4 | 5 if y => "si", _ => "no" }; assert_eq!("no", resultado); } fn _llamadas_a_metodos() { struct Circulo { _x: f64, _y: f64, radio: f64, } impl Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo { _x: 0.0, _y: 0.0, radio: 2.0 }; println!("{}", c.area()); } fn _metodos_en_cadena() { struct Circulo { x: f64, y: f64, radio: f64, } impl Circulo { fn agrandar(&self, incremento: f64) -> Circulo { Circulo { x: self.x, y: self.y, radio: self.radio + incremento } } fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo { x: 0.0, y: 0.0, radio: 2.0 }; println!("{}", c.area()); let d = c.agrandar(2.0).area(); println!("{}", d); } fn _funciones_asociadas() { struct Circulo { _x: f64, _y: f64, radio: f64, } impl Circulo { fn new(x: f64, y: f64, radio: f64) -> Circulo { Circulo { _x: x, _y: y, radio: radio, } } } let c = Circulo::new(0.0, 0.0, 2.0); assert_eq!(2.0, c.radio); } fn _builder() { struct Circulo { x: f64, y: f64, radio: f64 } impl Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } struct CirculoBuilder { x: f64, y: f64, radio: f64 } impl CirculoBuilder { fn new() -> CirculoBuilder { CirculoBuilder { x: 0.0, y: 0.0, radio: 1.0, } } fn x(&mut self, coordenada: f64) -> &mut CirculoBuilder { self.x = coordenada; self } fn y(&mut self, coordenada: f64) -> &mut CirculoBuilder { self.y = coordenada; self } fn radio(&mut self, radio: f64) -> &mut CirculoBuilder { self.radio = radio; self } fn build(&self) -> Circulo { Circulo { x: self.x, y: self.y, radio: self.radio } } } let c = CirculoBuilder::new() .x(1.0) .y(2.0) .radio(2.0) .build(); println!("area: {}", c.area()); println!("x: {}", c.x); println!("y: {}", c.y); assert_eq!(2.0, c.y); } fn _cadenas_de_caracteres() { let _saludo: &str = "Hola."; let mut s: String = "Hola".to_string(); s.push_str(", mundo."); assert_eq!("Hola, mundo.", s); } fn _genericos() { enum _Option<T> { _Some(T), _None, } let _x: _Option<i32> = _Option::_Some(5); } fn _funciones_genericas() { fn foo<T>(x: T) -> T { x } let num = foo(1); assert_eq!(1, num); } fn _structs_genericos() { struct Info<T1, T2> { x: T1, y: T2, } impl<T1, T2> Info<T1, T2> { fn foo(&self) { // } } let info = Info { x: 1, y: "=)" }; info.foo(); assert_eq!(1, info.x); assert_eq!("=)", info.y); } fn _traits() { trait Area { fn area(&self) -> f64; } struct Circulo { _x: f64, _y: f64, radio: f64 } impl Area for Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo{ _x:0.0, _y:0.0, radio: 2.0 }; let a = c.area(); println!("{}", a); //Genericos fn imrimir_area<T: Area>(figura: T) { println!("Esta figura tiene un area de {}", figura.area()); } imrimir_area(c) } fn _multiples_limites_de_trait() { use std::fmt::Display; fn foo<T: Clone, K: Clone + Display>(x: T, y: K) -> String { let _x_clone = x.clone(); let y_clone = y.clone(); format!("{}", y_clone) } fn bar<T, K>(x: T, y: K) -> String where T: Clone, K: Clone + Display { let _x_clone = x.clone(); let y_clone = y.clone(); format!("{}", y_clone) } let r_foo = foo("Hola", "mundo"); let r_bar = bar("Hola", "mundo"); assert_eq!(r_foo, r_bar); } fn _metodos_por_defecto() { trait Foo { fn es_valido(&self) -> bool; fn es_invalido(&self) -> bool { !self.es_valido() } } struct Default; impl Foo for Default { fn 
es_valido(&self) -> bool { true } } let default = Default; assert!(default.es_valido()); assert!(!default.es_invalido()); } fn _metodos_por_defecto_bar() { trait Bar { fn plus_one(x: i32) -> i32 { x + 1} } struct ImplBar; impl Bar for ImplBar{}; let sum = ImplBar::plus_one(2); assert_eq!(3, sum); } fn _herencia() { trait Foo { fn foo(&self); } trait FooBar : Foo { fn foobar(&self); } struct Baz; impl Foo for Baz { fn foo(&self) { println!("foo"); } } impl FooBar for Baz { fn foobar(&self) { println!("foobar"); } } let baz = Baz; baz.foo(); baz.foobar(); } fn _drop() { struct HasDrop; impl Drop for HasDrop { fn drop(&mut self) { println!("Dropeando!"); } } let _x = HasDrop; } fn _if_let() { fn foo(x: i32) { println!("Number: {}", x); } let option = Some(5); if let Some(x) = option { foo(x); } } fn _closures() { let plus_one = |x: i32| x + 1; assert_eq!(2, plus_one(1)); //let plus_one: fn(i32) -> i32 = |x: i32| x + 1; /* fn suma_uno_v1 (x: i32) -> i32 { x + 1 } let suma_uno_v2 = |x: i32| -> i32 { x + 1 }; let suma_uno_v3 = |x: i32| x + 1 ; */ } fn _closures_como_argumentos() { fn llamar_con_uno<F>(closure: F) -> i32 where F : Fn(i32) -> i32 { closure(1) } let respuesta = llamar_con_uno(|x| x + 2); assert_eq!(3, respuesta); } fn _retornando_closures() { fn factory() -> Box<dyn Fn(i32) -> i32> { let num = 5; Box::new(move |x| x + num) } let f = factory(); let respuesta = f(1); assert_eq!(6, respuesta); } fn _futures() { use futures::executor::block_on; async fn hello_world() { println!("hello, world!"); } let future = hello_world(); block_on(future); } fn _await() { async fn first_function() -> u32 { thread::sleep(Duration::from_millis(2000)); println!("1"); 1 } async fn second_function() -> u32 { thread::sleep(Duration::from_millis(2000)); println!("2"); 2 } async fn another_function() { let first = first_function().await; let second = second_function().await; let sum = first + second; println!("{}", sum); } use futures::executor::block_on; block_on(another_function()); println!("end..."); } fn _futures_join() { use futures::join; use futures::executor::block_on; async fn get_book() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("get_book"); 1 } async fn get_music() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("get_music"); 1 } async fn get_book_and_music() -> (u32, u32) { let book_fut = get_book(); let music_fut = get_music(); join!(book_fut, music_fut) } block_on(get_book_and_music()); } fn _futures_join_2() { use futures::executor::block_on; async fn learn_song() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("learn_song"); 1 } async fn sing_song(_song: u32) -> u32 { thread::sleep(Duration::from_millis(3000)); println!("sing_song"); 1 } async fn dance() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("dance"); 1 } async fn learn_and_sing() { let song = learn_song().await; sing_song(song).await; } async fn async_main() { let f1 = learn_and_sing(); let f2 = dance(); futures::join!(f1, f2); } block_on(async_main()); }
punto.x + punto.y }
identifier_body
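In this identifier_body record, the masked middle is a function body plus its closing brace ("punto.x + punto.y }"), with the prefix ending just after the opening brace of fn foo. A rough sketch of how such a split could be produced follows, using naive brace matching that ignores strings and comments; split_on_body is a hypothetical helper for illustration, not the dataset's actual extraction code.

// Editorial sketch (hypothetical): split a source string into
// (prefix, middle, suffix) where the middle is one function's body.
fn split_on_body(source: &str, fn_name: &str) -> Option<(String, String, String)> {
    // Find the function header, then its opening brace.
    let start = source.find(&format!("fn {}", fn_name))?;
    let open = start + source[start..].find('{')?;
    let mut depth: i32 = 0;
    for (i, c) in source[open..].char_indices() {
        match c {
            '{' => depth += 1,
            '}' => {
                depth -= 1;
                if depth == 0 {
                    // Prefix ends after '{'; middle is the body plus '}'.
                    let close = open + i;
                    return Some((
                        source[..=open].to_string(),
                        source[open + 1..=close].to_string(),
                        source[close + 1..].to_string(),
                    ));
                }
            }
            _ => {}
        }
    }
    None
}

fn main() {
    let src = "fn foo(punto: &Punto) -> i32 { punto.x + punto.y } fn bar() {}";
    let (prefix, middle, suffix) = split_on_body(src, "foo").unwrap();
    assert_eq!(middle, " punto.x + punto.y }");
    assert_eq!(format!("{}{}{}", prefix, middle, suffix), src);
}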
main.rs
use std::{io, thread}; use std::num::ParseIntError; use std::time::Duration; fn main() { memory(); } struct Punto { _x: i8, _y: i8 } fn memory() { let _punto = Punto { _x: 1, _y: 2 }; println!("Inicio address: {:p}", &_punto); _address(&_punto); _copy_value(_punto); println!("------------------",); let caja = Box::new(Punto { _x: 1, _y: 2 }); println!("Inicio address: {:p}", caja); _address(&caja); _copy_value(*caja); println!("------------------",); let otra_caja = Box::new(Punto { _x: 1, _y: 2 }); println!("Inicio address: {:p}", otra_caja); //* Realiza una copia *otra_caja; let otro_contenido = *otra_caja; println!("Contenido address: {:p}", &otro_contenido); } fn _address(p: &Punto) { println!("Puntero address: {:p}", p); } fn _copy_value(p: Punto) { println!("Valor address: {:p}", &p); } fn _vectores() { let x = vec!["Hola", "mundo"]; let _y = x[0]; } fn _input() { println!("Introduce data ..."); let mut data = String::new(); io::stdin().read_line(&mut data) .ok() .expect("Fallo al leer linea"); print!("Data: {}", data); } fn _parseo() { let num = "3p"; let result = num.parse(); let number = match result { Ok(n) => n, Err(_) => 0 }; assert_eq!(0, number); } fn _result() { let num1: Result<i32, ParseIntError> = "2".parse(); let num2: Result<i32, ParseIntError> = "3".parse(); let result = num1 .and_then(|x| num2 .map(|y| x + y)); let end = match result { Ok(n) => n.to_string(), Err(e) => e.to_string() }; assert_eq!("5", end); } fn _result_match() { let result: Result<i32, &str> = Ok(5); let number = match result { Ok(x) => x, Err(_e) => 0, }; assert_eq!(5, number) } fn _threads() { let handles: Vec<_> = (0..10).map(|x| { thread::spawn(move|| { println!("{}", x) }) }).collect(); for h in handles { h.join().ok().expect("No se pudo unir un hilo!"); } } fn _threads_join() { thread::spawn(|| { thread::sleep(Duration::from_millis(3000)); println!("Hola desde 1"); }); let thread2 = thread::spawn(|| { thread::sleep(Duration::from_millis(3000)); println!("Hola desde 2"); }); println!("end..."); thread2.join().expect("Error"); //thread::sleep(Duration::from_millis(4000)); } fn _iteradores() { let mut rango = 0..10; loop { match rango.next() { Some(x) => { println!("{}", x); }, None => { break } } } let nums = vec![1, 2, 3]; for num in nums.iter() { println!("{}", num); } } fn _consumidores() { let _uno_hasta_cien = (1..101).collect::<Vec<i32>>(); let _uno_hasta_cien = (1..101).collect::<Vec<_>>(); let mayores_a_cuarenta_y_dos = (0..100) .find(|x| *x > 42); match mayores_a_cuarenta_y_dos { Some(_) => println!("Tenemos algunos números!"), None => println!("No se encontraron números :("), } let suma = (1..4).fold(0, |suma, x| suma + x); //6 assert_eq!(6, suma); } fn _adaptadores_de_iterador() { let _nums = (1..100).map(|x| x + 1).collect::<Vec<_>>(); let _nums = (1..30) .filter(|&x| x % 2 == 0) .filter(|&x| x % 3 == 0) .take(5) .collect::<Vec<i32>>(); for x in (1..11).map(|x| x + 1).collect::<Vec<_>>() { println!("{}", x); } } fn _hilos() { thread::spawn(|| { println!("Hola desde un hilo!"); }); thread::sleep(Duration::from_millis(10)); } fn _thread_handle() { let handle = thread::spawn(|| { "Hola desde un hilo!" }); //unwrap() hará un pánico ( panic! 
) si el Result es Err assert_eq!("Hola desde un hilo!", handle.join().unwrap()); } fn _panico_hilo() { let valor = 1; let result = thread::spawn(move || { if valor % 2 == 0 { panic!("ups!"); } 1 }).join(); let resultado = match result { Ok(n) => n, Err(_e) => 0 }; assert_eq!(1, resultado); } fn _panico_unreachable() { enum Estado { _Activo, _Inactivo, Desconocido } use Estado::{_Activo, _Inactivo, Desconocido}; let estado = Desconocido; let _numero = match estado { _Activo => 1, _Inactivo => 0, _ => unreachable!() }; println!("Linea no alcanzable") } fn _option() { let s = "foo"; assert_eq!(s.find('f'), Some(0)); assert_eq!(s.find('z'), None); assert_eq!(s.find('f').map(|p| p + 1), Some(1)); assert_eq!(s.find('z').map(|p| p + 1), None); } fn _option_match() { let option = Some(5); let number = match option { Some(x) => x, None => 0, }; assert_eq!(5, number); } fn _result_funciones() { enum Error { Tecnico } let f: fn(i32) -> Result<i32, Error> = |num: i32| match num { 1 => Ok(num + 1), _ => Err(Error::Tecnico) }; /*fn f(num: i32) -> Result<i32, Error> { match num { 1 => Ok(num + 1), _ => Err(Error::Tecnico) } }*/ assert!(f(1).is_ok()); assert!(f(2).is_err()); let result: Result<i32, &str> = f(2) .map(|ok| ok) .map_err(|_err| "Error =("); match result { Ok(n) => println!("{}", n), Err(e) => println!("{}", e) }; } fn _panic_result() { let result: Result<i32, &str> = Ok(1); //let result: Result<i32, &str> = Err("Error =("); let valor = result.ok().expect("Error!"); assert_eq!(1, valor) } fn _try() { fn _parser(num: &str) -> Result<i32, ParseIntError> { num.parse() } fn f(x: &str, y: &str) -> Result<i32, ParseIntError> { let num1 = _parser(x); let num2 = _parser(y); //let resultado = _parser(x) ? + _parser(y)?; let resultado = num1? + num2?; Ok(resultado) } assert!(f("1", "2").is_ok()); assert!(f("1P", "2").is_err()); match f("1P", "2") { Ok(n) => println!("Ok: {}", n), Err(e) => println!("Error: {}", e) } } fn _try_azucar_sintactico() { fn foo(n: i32) -> Result<i32, String> { if n % 2 == 0 { Ok(1) } else { Err(String::from("Error")) } } fn bar() -> Result<i32, String> { Ok(2) } fn foo_bar() -> Result<i32, String> { let res = foo(2)? 
+ bar()?; Ok(res) } let fb = foo_bar(); assert!(fb.is_ok()); } fn _apuntadores_a_funcion() { fn mas_uno(i: i32) -> i32 { i + 1 } let f: fn(i32) -> i32 = mas_uno; assert_eq!(2, f(1)); } fn _primitivos() { let _a: bool = false; let _b: char = 'x'; let _c: i32 = 42; //i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64 } fn _arreglos() { let mut m: [i32; 3] = [1, 2, 3]; m[2] = 5; assert_eq!(5, m[2]); } fn _slices() { let a: [i32; 5] = [0, 1, 2, 3, 4]; let middle: &[i32] = &a[1..4]; assert_eq!(1, middle[0]); } fn _tuplas() { let (x, y) = (1, "Hello"); assert_eq!(1, x); assert_eq!("Hello", y); let z = (1, "Hello"); assert_eq!(1, z.0); } fn _expresiones() { let x = 5; let y = if x == 5 { 10 } else { 15 }; assert_eq!(10, y) } fn _while() { let mut x = 0; while x < 10 { x += 1; } assert_eq!(10, x) } fn _for() { for x in 0..10 { println!("{}", x); } } fn _loop() { let mut x = 0; loop { x += 1; if x >= 10 { break } } assert_eq!(10, x) } fn _etiquetas_loop() { 'exterior: for x in 0..10 { 'interior: for y in 0..10 { if x % 2 == 0 { continue 'exterior; } // continua el ciclo por encima de x if y % 2 == 0 { continue 'interior; } // continua el ciclo por encima de y println!("x: {}, y: {}", x, y); } } } fn _enumerate() { for (i,j) in (5..10).enumerate() { println!("i = {} y j = {}", i, j); } let lineas = "hola\nmundo".lines(); for (numero_linea, linea) in lineas.enumerate() { println!("{}: {}", numero_linea, linea); } } fn _pertenencia() { let v = vec![1, 2, 3]; let v2 = v; println!("v2[0] es: {}", v2[0]); //println!("v[0] es: {}", v[0]); // Error borrow of moved value: `v` } fn _pertenencia_funcion() { fn tomar(_v: Vec<i32>) { // Algo } let v = vec![1, 2, 3]; tomar(v); //println!("v[0] es: {}", v[0]); // Error borrow of moved value: `v` } fn _copy() { // i32 , Todos los tipos primitivos implementan el trait Copy // Se realiza una copia y su pertenencia no es movida let v: i32 = 1; let _v2 = v; println!("v es: {}", v); // =) } fn _devolver_pertenencia() { fn _foo(v: Vec<i32>) -> Vec<i32> { v } fn foo(v1: Vec<i32>, v2: Vec<i32>) -> (Vec<i32>, Vec<i32>, i32) { (v1, v2, 42) } let v1 = vec![1, 2, 3]; let v2 = vec![1, 2, 3]; let (v1, _v2, _r) = foo(v1, v2); assert_eq!(1, v1[0]); } fn _prestamo() { fn foo(_v1: &Vec<i32>, _v2: &Vec<i32>) -> i32 { 42 } let v1 = vec![1, 2, 3]; let _v2 = vec![1, 2, 3]; let _r = foo(&v1, &_v2); // podemos usar a v1 y v2 aqui assert_eq!(1, v1[0]); } fn _mutabilidad() { let mut x = 5; assert_eq!(5, x); x = 6; assert_eq!(6, x); } fn _estructuras() { struct Punto { x: i32, y: i32, } let origen = Punto { x: 0, y: 0 }; assert_eq!(0, origen.x); assert_eq!(0, origen.y); } fn _sintaxis_de_actualizacion() { struct Punto3d { _x: i32, _y: i32, _z: i32, } let origen = Punto3d { _x: 1, _y: 2, _z: 3 }; let punto = Punto3d { _y: 1, .. 
origen }; assert_eq!(3, punto._z); } fn _estructuras_pertenencia() { struct Punto { x: i32, y: i32, } fn foo(punto: Punto) -> i32 { punto.x + punto.y } let origen = Punto { x: 1, y: 2 }; let suma = foo(origen); println!("{}", suma); //println!("Punto x {}", origen.x); // Error borrow of moved value: `origen` } fn _estructuras_prestamo() { struct Punto { x: i32, y: i32, } fn foo(punto: &Punto) -> i32 { punto.x + punto.y } let origen = Punto { x: 1, y: 2 }; let suma = foo(&origen); assert_eq!(3, suma); assert_eq!(1, origen.x); } fn _tupla_estructuras() { struct Color(i32, i32, i32); let azul = Color(0, 0, 255); assert_eq!(255, azul.2); } fn _estructuras_tipo_unitario() { struct Electron; let _e = Electron; } fn _enumeraciones() { enum Mensaje { Salir, CambiarColor(i32, i32, i32), Mover { _x: i32, _y: i32 }, Escribir(String), } let _salir = Mensaje::Salir; let _cambiar_color = Mensaje::CambiarColor(0, 0, 255); use Mensaje::{Mover}; let _mover = Mover {_x: 0, _y: 2}; let _escribir = Mensaje::Escribir("Hello".to_string()); } fn _match_en_enums() { enum _Mensaje { Salir, CambiarColor(i32, i32, i32), Mover { x: i32, _y: i32 }, Escribir(String), } fn _salir() { /* ... */ } fn _cambiar_color(_r: i32, _g: i32, _b: i32) { /* ... */ } fn _mover_cursor(_x: i32, _y: i32) { /* ... */ } fn _procesar_mensaje(msj: _Mensaje) { match msj { _Mensaje::Salir => _salir(), _Mensaje::CambiarColor(r, g, b) => _cambiar_color(r, g, b), _Mensaje::Mover { x, _y: y } => _mover_cursor(x, y), _Mensaje::Escribir(s) => println!("{}", s), }; } } fn _mul
let x = 2; let num = match x { 1 | 2 => "1, 2", 3 => "3", _ => "...", }; assert_eq!("1, 2", num); } fn _match_rangos() { let x = 3; let resultado = match x { 1 ..= 5 => "uno al cinco", _ => "cualquier cosa", }; assert_eq!("uno al cinco", resultado); let y = 's'; let letra = match y { 'a' ..= 'j' => "letra temprana", 'k' ..= 'z' => "letra tardia", _ => "algo mas" }; assert_eq!("letra tardia", letra); } fn _destructuracion() { struct Punto { x: i32, y: i32, } let origen = Punto { x: 0, y: 2 }; match origen { Punto { x, y } => println!("({},{})", x, y), } match origen { Punto { x, .. } => println!("x es {}", x) } } fn _enlaces_a_variable() { let x = 1; match x { e @ 1 ..= 5 => println!("valor de rango {} obtenido", e), _ => println!("lo que sea"), } } fn _guardias() { enum EnteroOpcional { Valor(i32), _Faltante, } let x = EnteroOpcional::Valor(5); match x { EnteroOpcional::Valor(i) if i > 5 => println!("Entero mayor a cinco obtenido!"), EnteroOpcional::Valor(..) => println!("Entero obtenido!"), EnteroOpcional::_Faltante => println!("Sin suerte."), } } fn _multiples_patrones_y_guardias() { let x = 4; let y = false; let resultado = match x { 4 | 5 if y => "si", _ => "no" }; assert_eq!("no", resultado); } fn _llamadas_a_metodos() { struct Circulo { _x: f64, _y: f64, radio: f64, } impl Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo { _x: 0.0, _y: 0.0, radio: 2.0 }; println!("{}", c.area()); } fn _metodos_en_cadena() { struct Circulo { x: f64, y: f64, radio: f64, } impl Circulo { fn agrandar(&self, incremento: f64) -> Circulo { Circulo { x: self.x, y: self.y, radio: self.radio + incremento } } fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo { x: 0.0, y: 0.0, radio: 2.0 }; println!("{}", c.area()); let d = c.agrandar(2.0).area(); println!("{}", d); } fn _funciones_asociadas() { struct Circulo { _x: f64, _y: f64, radio: f64, } impl Circulo { fn new(x: f64, y: f64, radio: f64) -> Circulo { Circulo { _x: x, _y: y, radio: radio, } } } let c = Circulo::new(0.0, 0.0, 2.0); assert_eq!(2.0, c.radio); } fn _builder() { struct Circulo { x: f64, y: f64, radio: f64 } impl Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } struct CirculoBuilder { x: f64, y: f64, radio: f64 } impl CirculoBuilder { fn new() -> CirculoBuilder { CirculoBuilder { x: 0.0, y: 0.0, radio: 1.0, } } fn x(&mut self, coordenada: f64) -> &mut CirculoBuilder { self.x = coordenada; self } fn y(&mut self, coordenada: f64) -> &mut CirculoBuilder { self.y = coordenada; self } fn radio(&mut self, radio: f64) -> &mut CirculoBuilder { self.radio = radio; self } fn build(&self) -> Circulo { Circulo { x: self.x, y: self.y, radio: self.radio } } } let c = CirculoBuilder::new() .x(1.0) .y(2.0) .radio(2.0) .build(); println!("area: {}", c.area()); println!("x: {}", c.x); println!("y: {}", c.y); assert_eq!(2.0, c.y); } fn _cadenas_de_caracteres() { let _saludo: &str = "Hola."; let mut s: String = "Hola".to_string(); s.push_str(", mundo."); assert_eq!("Hola, mundo.", s); } fn _genericos() { enum _Option<T> { _Some(T), _None, } let _x: _Option<i32> = _Option::_Some(5); } fn _funciones_genericas() { fn foo<T>(x: T) -> T { x } let num = foo(1); assert_eq!(1, num); } fn _structs_genericos() { struct Info<T1, T2> { x: T1, y: T2, } impl<T1, T2> Info<T1, T2> { fn foo(&self) { // } } let info = Info { x: 1, y: "=)" }; info.foo(); assert_eq!(1, info.x); assert_eq!("=)", info.y); } fn _traits() { trait Area { fn area(&self) -> 
f64; } struct Circulo { _x: f64, _y: f64, radio: f64 } impl Area for Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo{ _x:0.0, _y:0.0, radio: 2.0 }; let a = c.area(); println!("{}", a); // Generics fn imprimir_area<T: Area>(figura: T) { println!("Esta figura tiene un area de {}", figura.area()); } imprimir_area(c) } fn _multiples_limites_de_trait() { use std::fmt::Display; fn foo<T: Clone, K: Clone + Display>(x: T, y: K) -> String { let _x_clone = x.clone(); let y_clone = y.clone(); format!("{}", y_clone) } fn bar<T, K>(x: T, y: K) -> String where T: Clone, K: Clone + Display { let _x_clone = x.clone(); let y_clone = y.clone(); format!("{}", y_clone) } let r_foo = foo("Hola", "mundo"); let r_bar = bar("Hola", "mundo"); assert_eq!(r_foo, r_bar); } fn _metodos_por_defecto() { trait Foo { fn es_valido(&self) -> bool; fn es_invalido(&self) -> bool { !self.es_valido() } } struct Default; impl Foo for Default { fn es_valido(&self) -> bool { true } } let default = Default; assert!(default.es_valido()); assert!(!default.es_invalido()); } fn _metodos_por_defecto_bar() { trait Bar { fn plus_one(x: i32) -> i32 { x + 1} } struct ImplBar; impl Bar for ImplBar{}; let sum = ImplBar::plus_one(2); assert_eq!(3, sum); } fn _herencia() { trait Foo { fn foo(&self); } trait FooBar : Foo { fn foobar(&self); } struct Baz; impl Foo for Baz { fn foo(&self) { println!("foo"); } } impl FooBar for Baz { fn foobar(&self) { println!("foobar"); } } let baz = Baz; baz.foo(); baz.foobar(); } fn _drop() { struct HasDrop; impl Drop for HasDrop { fn drop(&mut self) { println!("Dropeando!"); } } let _x = HasDrop; } fn _if_let() { fn foo(x: i32) { println!("Number: {}", x); } let option = Some(5); if let Some(x) = option { foo(x); } } fn _closures() { let plus_one = |x: i32| x + 1; assert_eq!(2, plus_one(1)); //let plus_one: fn(i32) -> i32 = |x: i32| x + 1; /* fn suma_uno_v1 (x: i32) -> i32 { x + 1 } let suma_uno_v2 = |x: i32| -> i32 { x + 1 }; let suma_uno_v3 = |x: i32| x + 1 ; */ } fn _closures_como_argumentos() { fn llamar_con_uno<F>(closure: F) -> i32 where F : Fn(i32) -> i32 { closure(1) } let respuesta = llamar_con_uno(|x| x + 2); assert_eq!(3, respuesta); } fn _retornando_closures() { fn factory() -> Box<dyn Fn(i32) -> i32> { let num = 5; Box::new(move |x| x + num) } let f = factory(); let respuesta = f(1); assert_eq!(6, respuesta); } fn _futures() { use futures::executor::block_on; async fn hello_world() { println!("hello, world!"); } let future = hello_world(); block_on(future); } fn _await() { async fn first_function() -> u32 { thread::sleep(Duration::from_millis(2000)); println!("1"); 1 } async fn second_function() -> u32 { thread::sleep(Duration::from_millis(2000)); println!("2"); 2 } async fn another_function() { let first = first_function().await; let second = second_function().await; let sum = first + second; println!("{}", sum); } use futures::executor::block_on; block_on(another_function()); println!("end..."); } fn _futures_join() { use futures::join; use futures::executor::block_on; async fn get_book() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("get_book"); 1 } async fn get_music() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("get_music"); 1 } async fn get_book_and_music() -> (u32, u32) { let book_fut = get_book(); let music_fut = get_music(); join!(book_fut, music_fut) } block_on(get_book_and_music()); } fn _futures_join_2() { use futures::executor::block_on; async fn learn_song() -> u32 {
thread::sleep(Duration::from_millis(3000)); println!("learn_song"); 1 } async fn sing_song(_song: u32) -> u32 { thread::sleep(Duration::from_millis(3000)); println!("sing_song"); 1 } async fn dance() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("dance"); 1 } async fn learn_and_sing() { let song = learn_song().await; sing_song(song).await; } async fn async_main() { let f1 = learn_and_sing(); let f2 = dance(); futures::join!(f1, f2); } block_on(async_main()); }
tiples_patrones() {
identifier_name
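The identifier_name record above cuts inside a name: the prefix ends with "fn _mul" and the middle "tiples_patrones() {" completes the identifier (plus the parameter list and opening brace), so the model has to finish the name itself. A small sketch of cutting at a fixed offset inside a known identifier, assuming that is roughly how this fim_type is built; split_inside and its offset are hypothetical.

// Editorial sketch (hypothetical): cut the source mid-identifier so
// the remainder of the name falls into the masked span.
fn split_inside(source: &str, ident: &str, keep: usize) -> Option<(String, String)> {
    let at = source.find(ident)?;
    let cut = at + keep.min(ident.len());
    Some((source[..cut].to_string(), source[cut..].to_string()))
}

fn main() {
    let src = "fn _multiples_patrones() { /* ... */ }";
    let (prefix, rest) = split_inside(src, "_multiples_patrones", 4).unwrap();
    assert_eq!(prefix, "fn _mul");
    // The rest would itself be split further into middle and suffix.
    assert!(rest.starts_with("tiples_patrones() {"));
}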
main.rs
use std::{io, thread}; use std::num::ParseIntError; use std::time::Duration; fn main() { memory(); } struct Punto { _x: i8, _y: i8 } fn memory() { let _punto = Punto { _x: 1, _y: 2 }; println!("Inicio address: {:p}", &_punto); _address(&_punto); _copy_value(_punto); println!("------------------",); let caja = Box::new(Punto { _x: 1, _y: 2 }); println!("Inicio address: {:p}", caja); _address(&caja); _copy_value(*caja); println!("------------------",); let otra_caja = Box::new(Punto { _x: 1, _y: 2 }); println!("Inicio address: {:p}", otra_caja); //* Realiza una copia *otra_caja; let otro_contenido = *otra_caja; println!("Contenido address: {:p}", &otro_contenido); } fn _address(p: &Punto) { println!("Puntero address: {:p}", p); } fn _copy_value(p: Punto) { println!("Valor address: {:p}", &p); } fn _vectores() { let x = vec!["Hola", "mundo"]; let _y = x[0]; } fn _input() { println!("Introduce data ..."); let mut data = String::new(); io::stdin().read_line(&mut data) .ok() .expect("Fallo al leer linea"); print!("Data: {}", data); } fn _parseo() { let num = "3p"; let result = num.parse(); let number = match result { Ok(n) => n, Err(_) => 0 }; assert_eq!(0, number); } fn _result() { let num1: Result<i32, ParseIntError> = "2".parse(); let num2: Result<i32, ParseIntError> = "3".parse(); let result = num1 .and_then(|x| num2 .map(|y| x + y)); let end = match result { Ok(n) => n.to_string(), Err(e) => e.to_string() }; assert_eq!("5", end); } fn _result_match() { let result: Result<i32, &str> = Ok(5); let number = match result { Ok(x) => x, Err(_e) => 0, }; assert_eq!(5, number) } fn _threads() { let handles: Vec<_> = (0..10).map(|x| { thread::spawn(move|| { println!("{}", x) }) }).collect(); for h in handles { h.join().ok().expect("No se pudo unir un hilo!"); } } fn _threads_join() { thread::spawn(|| { thread::sleep(Duration::from_millis(3000)); println!("Hola desde 1"); }); let thread2 = thread::spawn(|| { thread::sleep(Duration::from_millis(3000)); println!("Hola desde 2"); }); println!("end..."); thread2.join().expect("Error"); //thread::sleep(Duration::from_millis(4000)); } fn _iteradores() { let mut rango = 0..10; loop { match rango.next() { Some(x) => { println!("{}", x); }, None => { break } } } let nums = vec![1, 2, 3]; for num in nums.iter() { println!("{}", num); } } fn _consumidores() { let _uno_hasta_cien = (1..101).collect::<Vec<i32>>(); let _uno_hasta_cien = (1..101).collect::<Vec<_>>(); let mayores_a_cuarenta_y_dos = (0..100) .find(|x| *x > 42); match mayores_a_cuarenta_y_dos { Some(_) => println!("Tenemos algunos números!"), None => println!("No se encontraron números :("), } let suma = (1..4).fold(0, |suma, x| suma + x); //6 assert_eq!(6, suma); } fn _adaptadores_de_iterador() { let _nums = (1..100).map(|x| x + 1).collect::<Vec<_>>(); let _nums = (1..30) .filter(|&x| x % 2 == 0) .filter(|&x| x % 3 == 0) .take(5) .collect::<Vec<i32>>(); for x in (1..11).map(|x| x + 1).collect::<Vec<_>>() { println!("{}", x); } } fn _hilos() { thread::spawn(|| { println!("Hola desde un hilo!"); }); thread::sleep(Duration::from_millis(10)); } fn _thread_handle() { let handle = thread::spawn(|| { "Hola desde un hilo!" }); //unwrap() hará un pánico ( panic! 
) si el Result es Err assert_eq!("Hola desde un hilo!", handle.join().unwrap()); } fn _panico_hilo() { let valor = 1; let result = thread::spawn(move || { if valor % 2 == 0 { panic!("ups!"); } 1 }).join(); let resultado = match result { Ok(n) => n, Err(_e) => 0 }; assert_eq!(1, resultado); } fn _panico_unreachable() { enum Estado { _Activo, _Inactivo, Desconocido } use Estado::{_Activo, _Inactivo, Desconocido}; let estado = Desconocido; let _numero = match estado { _Activo => 1, _Inactivo => 0, _ => unreachable!() }; println!("Linea no alcanzable") } fn _option() { let s = "foo"; assert_eq!(s.find('f'), Some(0)); assert_eq!(s.find('z'), None); assert_eq!(s.find('f').map(|p| p + 1), Some(1)); assert_eq!(s.find('z').map(|p| p + 1), None); } fn _option_match() { let option = Some(5); let number = match option { Some(x) => x, None => 0, }; assert_eq!(5, number); } fn _result_funciones() { enum Error { Tecnico } let f: fn(i32) -> Result<i32, Error> = |num: i32| match num { 1 => Ok(num + 1), _ => Err(Error::Tecnico) }; /*fn f(num: i32) -> Result<i32, Error> { match num { 1 => Ok(num + 1), _ => Err(Error::Tecnico) } }*/ assert!(f(1).is_ok()); assert!(f(2).is_err()); let result: Result<i32, &str> = f(2) .map(|ok| ok) .map_err(|_err| "Error =("); match result { Ok(n) => println!("{}", n), Err(e) => println!("{}", e) }; } fn _panic_result() { let result: Result<i32, &str> = Ok(1); //let result: Result<i32, &str> = Err("Error =("); let valor = result.ok().expect("Error!"); assert_eq!(1, valor) } fn _try() { fn _parser(num: &str) -> Result<i32, ParseIntError> { num.parse() } fn f(x: &str, y: &str) -> Result<i32, ParseIntError> { let num1 = _parser(x); let num2 = _parser(y); //let resultado = _parser(x) ? + _parser(y)?; let resultado = num1? + num2?; Ok(resultado) } assert!(f("1", "2").is_ok()); assert!(f("1P", "2").is_err()); match f("1P", "2") { Ok(n) => println!("Ok: {}", n), Err(e) => println!("Error: {}", e) } } fn _try_azucar_sintactico() { fn foo(n: i32) -> Result<i32, String> { if n % 2 == 0 { Ok(1) } else { Err(String::from("Error")) } } fn bar() -> Result<i32, String> { Ok(2) } fn foo_bar() -> Result<i32, String> { let res = foo(2)? + bar()?; Ok(res) } let fb = foo_bar(); assert!(fb.is_ok()); } fn _apuntadores_a_funcion() { fn mas_uno(i: i32) -> i32 { i + 1 } let f: fn(i32) -> i32 = mas_uno; assert_eq!(2, f(1)); } fn _primitivos() { let _a: bool = false; let _b: char = 'x'; let _c: i32 = 42; //i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64 } fn _arreglos() { let mut m: [i32; 3] = [1, 2, 3]; m[2] = 5; assert_eq!(5, m[2]); } fn _slices() { let a: [i32; 5] = [0, 1, 2, 3, 4]; let middle: &[i32] = &a[1..4]; assert_eq!(1, middle[0]); } fn _tuplas() { let (x, y) = (1, "Hello"); assert_eq!(1, x); assert_eq!("Hello", y); let z = (1, "Hello"); assert_eq!(1, z.0); } fn _expresiones() { let x = 5; let y = if x == 5 { 10 } else { 15 }; assert_eq!(10, y) } fn _while() { let mut x = 0; while x < 10 { x += 1; } assert_eq!(10, x) } fn _for() { for x in 0..10 { println!("{}", x); } } fn _loop() { let mut x = 0; loop { x += 1; if x >= 10 { break } } assert_eq!(10, x) } fn _etiquetas_loop() { 'exterior: for x in 0..10 { 'interior: for y in 0..10 { if x % 2 == 0 { continue 'exterior; } // continua el ciclo por encima de x if y % 2 == 0 { co
continua el ciclo por encima de y println!("x: {}, y: {}", x, y); } } } fn _enumerate() { for (i,j) in (5..10).enumerate() { println!("i = {} y j = {}", i, j); } let lineas = "hola\nmundo".lines(); for (numero_linea, linea) in lineas.enumerate() { println!("{}: {}", numero_linea, linea); } } fn _pertenencia() { let v = vec![1, 2, 3]; let v2 = v; println!("v2[0] es: {}", v2[0]); //println!("v[0] es: {}", v[0]); // Error borrow of moved value: `v` } fn _pertenencia_funcion() { fn tomar(_v: Vec<i32>) { // Algo } let v = vec![1, 2, 3]; tomar(v); //println!("v[0] es: {}", v[0]); // Error borrow of moved value: `v` } fn _copy() { // i32 , Todos los tipos primitivos implementan el trait Copy // Se realiza una copia y su pertenencia no es movida let v: i32 = 1; let _v2 = v; println!("v es: {}", v); // =) } fn _devolver_pertenencia() { fn _foo(v: Vec<i32>) -> Vec<i32> { v } fn foo(v1: Vec<i32>, v2: Vec<i32>) -> (Vec<i32>, Vec<i32>, i32) { (v1, v2, 42) } let v1 = vec![1, 2, 3]; let v2 = vec![1, 2, 3]; let (v1, _v2, _r) = foo(v1, v2); assert_eq!(1, v1[0]); } fn _prestamo() { fn foo(_v1: &Vec<i32>, _v2: &Vec<i32>) -> i32 { 42 } let v1 = vec![1, 2, 3]; let _v2 = vec![1, 2, 3]; let _r = foo(&v1, &_v2); // podemos usar a v1 y v2 aqui assert_eq!(1, v1[0]); } fn _mutabilidad() { let mut x = 5; assert_eq!(5, x); x = 6; assert_eq!(6, x); } fn _estructuras() { struct Punto { x: i32, y: i32, } let origen = Punto { x: 0, y: 0 }; assert_eq!(0, origen.x); assert_eq!(0, origen.y); } fn _sintaxis_de_actualizacion() { struct Punto3d { _x: i32, _y: i32, _z: i32, } let origen = Punto3d { _x: 1, _y: 2, _z: 3 }; let punto = Punto3d { _y: 1, .. origen }; assert_eq!(3, punto._z); } fn _estructuras_pertenencia() { struct Punto { x: i32, y: i32, } fn foo(punto: Punto) -> i32 { punto.x + punto.y } let origen = Punto { x: 1, y: 2 }; let suma = foo(origen); println!("{}", suma); //println!("Punto x {}", origen.x); // Error borrow of moved value: `origen` } fn _estructuras_prestamo() { struct Punto { x: i32, y: i32, } fn foo(punto: &Punto) -> i32 { punto.x + punto.y } let origen = Punto { x: 1, y: 2 }; let suma = foo(&origen); assert_eq!(3, suma); assert_eq!(1, origen.x); } fn _tupla_estructuras() { struct Color(i32, i32, i32); let azul = Color(0, 0, 255); assert_eq!(255, azul.2); } fn _estructuras_tipo_unitario() { struct Electron; let _e = Electron; } fn _enumeraciones() { enum Mensaje { Salir, CambiarColor(i32, i32, i32), Mover { _x: i32, _y: i32 }, Escribir(String), } let _salir = Mensaje::Salir; let _cambiar_color = Mensaje::CambiarColor(0, 0, 255); use Mensaje::{Mover}; let _mover = Mover {_x: 0, _y: 2}; let _escribir = Mensaje::Escribir("Hello".to_string()); } fn _match_en_enums() { enum _Mensaje { Salir, CambiarColor(i32, i32, i32), Mover { x: i32, _y: i32 }, Escribir(String), } fn _salir() { /* ... */ } fn _cambiar_color(_r: i32, _g: i32, _b: i32) { /* ... */ } fn _mover_cursor(_x: i32, _y: i32) { /* ... 
*/ } fn _procesar_mensaje(msj: _Mensaje) { match msj { _Mensaje::Salir => _salir(), _Mensaje::CambiarColor(r, g, b) => _cambiar_color(r, g, b), _Mensaje::Mover { x, _y: y } => _mover_cursor(x, y), _Mensaje::Escribir(s) => println!("{}", s), }; } } fn _multiples_patrones() { let x = 2; let num = match x { 1 | 2 => "1, 2", 3 => "3", _ => "...", }; assert_eq!("1, 2", num); } fn _match_rangos() { let x = 3; let resultado = match x { 1 ..= 5 => "uno al cinco", _ => "cualquier cosa", }; assert_eq!("uno al cinco", resultado); let y = 's'; let letra = match y { 'a' ..= 'j' => "letra temprana", 'k' ..= 'z' => "letra tardia", _ => "algo mas" }; assert_eq!("letra tardia", letra); } fn _destructuracion() { struct Punto { x: i32, y: i32, } let origen = Punto { x: 0, y: 2 }; match origen { Punto { x, y } => println!("({},{})", x, y), } match origen { Punto { x, .. } => println!("x es {}", x) } } fn _enlaces_a_variable() { let x = 1; match x { e @ 1 ..= 5 => println!("valor de rango {} obtenido", e), _ => println!("lo que sea"), } } fn _guardias() { enum EnteroOpcional { Valor(i32), _Faltante, } let x = EnteroOpcional::Valor(5); match x { EnteroOpcional::Valor(i) if i > 5 => println!("Entero mayor a cinco obtenido!"), EnteroOpcional::Valor(..) => println!("Entero obtenido!"), EnteroOpcional::_Faltante => println!("Sin suerte."), } } fn _multiples_patrones_y_guardias() { let x = 4; let y = false; let resultado = match x { 4 | 5 if y => "si", _ => "no" }; assert_eq!("no", resultado); } fn _llamadas_a_metodos() { struct Circulo { _x: f64, _y: f64, radio: f64, } impl Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo { _x: 0.0, _y: 0.0, radio: 2.0 }; println!("{}", c.area()); } fn _metodos_en_cadena() { struct Circulo { x: f64, y: f64, radio: f64, } impl Circulo { fn agrandar(&self, incremento: f64) -> Circulo { Circulo { x: self.x, y: self.y, radio: self.radio + incremento } } fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo { x: 0.0, y: 0.0, radio: 2.0 }; println!("{}", c.area()); let d = c.agrandar(2.0).area(); println!("{}", d); } fn _funciones_asociadas() { struct Circulo { _x: f64, _y: f64, radio: f64, } impl Circulo { fn new(x: f64, y: f64, radio: f64) -> Circulo { Circulo { _x: x, _y: y, radio: radio, } } } let c = Circulo::new(0.0, 0.0, 2.0); assert_eq!(2.0, c.radio); } fn _builder() { struct Circulo { x: f64, y: f64, radio: f64 } impl Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } struct CirculoBuilder { x: f64, y: f64, radio: f64 } impl CirculoBuilder { fn new() -> CirculoBuilder { CirculoBuilder { x: 0.0, y: 0.0, radio: 1.0, } } fn x(&mut self, coordenada: f64) -> &mut CirculoBuilder { self.x = coordenada; self } fn y(&mut self, coordenada: f64) -> &mut CirculoBuilder { self.y = coordenada; self } fn radio(&mut self, radio: f64) -> &mut CirculoBuilder { self.radio = radio; self } fn build(&self) -> Circulo { Circulo { x: self.x, y: self.y, radio: self.radio } } } let c = CirculoBuilder::new() .x(1.0) .y(2.0) .radio(2.0) .build(); println!("area: {}", c.area()); println!("x: {}", c.x); println!("y: {}", c.y); assert_eq!(2.0, c.y); } fn _cadenas_de_caracteres() { let _saludo: &str = "Hola."; let mut s: String = "Hola".to_string(); s.push_str(", mundo."); assert_eq!("Hola, mundo.", s); } fn _genericos() { enum _Option<T> { _Some(T), _None, } let _x: _Option<i32> = _Option::_Some(5); } fn _funciones_genericas() { fn foo<T>(x: T) -> T { x } let num = foo(1); 
assert_eq!(1, num); } fn _structs_genericos() { struct Info<T1, T2> { x: T1, y: T2, } impl<T1, T2> Info<T1, T2> { fn foo(&self) { // } } let info = Info { x: 1, y: "=)" }; info.foo(); assert_eq!(1, info.x); assert_eq!("=)", info.y); } fn _traits() { trait Area { fn area(&self) -> f64; } struct Circulo { _x: f64, _y: f64, radio: f64 } impl Area for Circulo { fn area(&self) -> f64 { std::f64::consts::PI * (self.radio * self.radio) } } let c = Circulo{ _x:0.0, _y:0.0, radio: 2.0 }; let a = c.area(); println!("{}", a); // Generics fn imprimir_area<T: Area>(figura: T) { println!("Esta figura tiene un area de {}", figura.area()); } imprimir_area(c) } fn _multiples_limites_de_trait() { use std::fmt::Display; fn foo<T: Clone, K: Clone + Display>(x: T, y: K) -> String { let _x_clone = x.clone(); let y_clone = y.clone(); format!("{}", y_clone) } fn bar<T, K>(x: T, y: K) -> String where T: Clone, K: Clone + Display { let _x_clone = x.clone(); let y_clone = y.clone(); format!("{}", y_clone) } let r_foo = foo("Hola", "mundo"); let r_bar = bar("Hola", "mundo"); assert_eq!(r_foo, r_bar); } fn _metodos_por_defecto() { trait Foo { fn es_valido(&self) -> bool; fn es_invalido(&self) -> bool { !self.es_valido() } } struct Default; impl Foo for Default { fn es_valido(&self) -> bool { true } } let default = Default; assert!(default.es_valido()); assert!(!default.es_invalido()); } fn _metodos_por_defecto_bar() { trait Bar { fn plus_one(x: i32) -> i32 { x + 1} } struct ImplBar; impl Bar for ImplBar{}; let sum = ImplBar::plus_one(2); assert_eq!(3, sum); } fn _herencia() { trait Foo { fn foo(&self); } trait FooBar : Foo { fn foobar(&self); } struct Baz; impl Foo for Baz { fn foo(&self) { println!("foo"); } } impl FooBar for Baz { fn foobar(&self) { println!("foobar"); } } let baz = Baz; baz.foo(); baz.foobar(); } fn _drop() { struct HasDrop; impl Drop for HasDrop { fn drop(&mut self) { println!("Dropeando!"); } } let _x = HasDrop; } fn _if_let() { fn foo(x: i32) { println!("Number: {}", x); } let option = Some(5); if let Some(x) = option { foo(x); } } fn _closures() { let plus_one = |x: i32| x + 1; assert_eq!(2, plus_one(1)); //let plus_one: fn(i32) -> i32 = |x: i32| x + 1; /* fn suma_uno_v1 (x: i32) -> i32 { x + 1 } let suma_uno_v2 = |x: i32| -> i32 { x + 1 }; let suma_uno_v3 = |x: i32| x + 1 ; */ } fn _closures_como_argumentos() { fn llamar_con_uno<F>(closure: F) -> i32 where F : Fn(i32) -> i32 { closure(1) } let respuesta = llamar_con_uno(|x| x + 2); assert_eq!(3, respuesta); } fn _retornando_closures() { fn factory() -> Box<dyn Fn(i32) -> i32> { let num = 5; Box::new(move |x| x + num) } let f = factory(); let respuesta = f(1); assert_eq!(6, respuesta); } fn _futures() { use futures::executor::block_on; async fn hello_world() { println!("hello, world!"); } let future = hello_world(); block_on(future); } fn _await() { async fn first_function() -> u32 { thread::sleep(Duration::from_millis(2000)); println!("1"); 1 } async fn second_function() -> u32 { thread::sleep(Duration::from_millis(2000)); println!("2"); 2 } async fn another_function() { let first = first_function().await; let second = second_function().await; let sum = first + second; println!("{}", sum); } use futures::executor::block_on; block_on(another_function()); println!("end..."); } fn _futures_join() { use futures::join; use futures::executor::block_on; async fn get_book() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("get_book"); 1 } async fn get_music() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("get_music"); 1 }
async fn get_book_and_music() -> (u32, u32) { let book_fut = get_book(); let music_fut = get_music(); join!(book_fut, music_fut) } block_on(get_book_and_music()); } fn _futures_join_2() { use futures::executor::block_on; async fn learn_song() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("learn_song"); 1 } async fn sing_song(_song: u32) -> u32 { thread::sleep(Duration::from_millis(3000)); println!("sing_song"); 1 } async fn dance() -> u32 { thread::sleep(Duration::from_millis(3000)); println!("dance"); 1 } async fn learn_and_sing() { let song = learn_song().await; sing_song(song).await; } async fn async_main() { let f1 = learn_and_sing(); let f2 = dance(); futures::join!(f1, f2); } block_on(async_main()); }
ntinue 'interior; } //
conditional_block
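The conditional_block record above masks a span inside an if body, and the cut even lands mid-keyword: the prefix ends with "{ co" and the middle "ntinue 'interior; } //" restores the continue statement and the block's closing brace. A quick Rust check of that reconstruction, using the record's own three fragments as plain string data:

// Editorial sketch: the three fragments of the conditional_block
// record above, concatenated back into the original source span.
fn main() {
    let prefix = "if y % 2 == 0 { co";
    let middle = "ntinue 'interior; } //";
    let suffix = " continua el ciclo por encima de y";
    let original = format!("{}{}{}", prefix, middle, suffix);
    assert!(original.contains("continue 'interior;"));
    println!("{}", original);
}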
main.go
package main import ( "fmt" "math/rand" "strconv" "time" ) const ( // direction up = iota down right left stuck ) const ( // entity in map entity_path = iota + 1 entity_treasure entity_player entity_obstacle ) const ( // axis axis_x = iota axis_y ) const ( // render terminal unix = iota + 1 playground ) const ( delay_time = 400 // time used to display a step taken for each movement before continuing pause_time = 4000 // time used to display gimmick before full run exploration render_interface = unix // planned to be able to run in golang playground but got TIMEOUT instead! ) var ( mapSize = [2]int{8, 6} // define maximum size of the map [X,Y] playerStartPosition = [2]int{2, 2} // define default location of the player listCustomObstacle = [][2]int{ {3, 2}, {3, 4}, {4, 4}, {5, 4}, {5, 3}, {7, 3}, } ) type Player struct { Position [2]int DirectionTaken int FoundTreasure bool Range map[int]int } type Treasure struct { Position [2]int } type TreasureMap struct { Size [2]int OriginalMapping map[[2]int]int Mapping map[[2]int]int ListPossibleTreasureLocation map[[2]int]bool TreasureLocation [2]int } func
() { player := NewPlayer() treasure := NewTreasure() treasureMap := NewTreasureMap(mapSize) treasureMap.createMap(listCustomObstacle) treasureMap.setEntity(entity_player, player.Position) for { treasure.randomizePosition(mapSize[0], mapSize[1]) if treasureMap.setEntity(entity_treasure, treasure.Position) { break } } treasureMap.render() // display the initial condition with the treasure fmt.Println("Initial Condition, treasure hidden in:", treasure.Position, "Wait for it..") time.Sleep(pause_time * time.Millisecond) treasureMap.setPossibleTreasure() treasureMap.render() // display the map with possible treasure locations fmt.Println("Now it's hidden! Let's go find it!") time.Sleep(pause_time * time.Millisecond) for !player.FoundTreasure { // the player sees the unobstructed paths and determines which is treasure and which is path treasurePositionXY, listPathPosition := player.see(treasureMap) for _, pathPosition := range listPathPosition { treasureMap.setEntity(entity_path, pathPosition) treasureMap.updatePossibleTreasureLocation(listPathPosition) } if !player.FoundTreasure { // keep moving until the treasure is found newPosition, _ := player.move(treasureMap.Mapping) oldPosition := player.Position // stop when the player cannot move any longer if newPosition == oldPosition { break } // move the player to the new position, put a path on the old position treasureMap.setEntity(entity_path, oldPosition) treasureMap.setEntity(entity_player, newPosition) treasureMap.render() // update the player position player.setPosition(newPosition) } else { treasureMap.clearPossibleTreasureLocation() treasureMap.setTreasureLocation(treasurePositionXY) treasureMap.revealMap(treasurePositionXY) treasureMap.render() break } } } // NewPlayer creates a new player at the initial position func NewPlayer() Player { return Player{ Position: playerStartPosition, Range: make(map[int]int), } } // setPosition updates the player position func (p *Player) setPosition(newPosition [2]int) { p.Position = newPosition } // move updates the entity_player coordinate, limited to the predefined directions func (p *Player) move(treasureMap map[[2]int]int) ([2]int, bool) { if p.DirectionTaken == up { newPlayerPositionXY := [2]int{p.Position[0], p.Position[1] + 1} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = right } else { return newPlayerPositionXY, true } } if p.DirectionTaken == right { newPlayerPositionXY := [2]int{p.Position[0] + 1, p.Position[1]} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = down } else { return newPlayerPositionXY, true } } if p.DirectionTaken == down { newPlayerPositionXY := [2]int{p.Position[0], p.Position[1] - 1} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = stuck } else { return newPlayerPositionXY, true } } return p.Position, false } // see checks all unobstructed lines along the X and Y axes from the entity_player position func (p *Player) see(treasureMap TreasureMap) ([2]int, [][2]int) { var ( startX, startY = p.Position[0], p.Position[1] treasurePosition, treasureFound [2]int listPathPosition, pathFound [][2]int ) // see all entity in x axis with same y axis / right direction -> treasurePosition, pathFound = checkMap(treasureMap, startX+1, startY, 1, axis_x) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...)
p.Range[right] = len(pathFound) // see all entity in -x axis with same y axis / left direction <- treasurePosition, pathFound = checkMap(treasureMap, startX-1, startY, -1, axis_x) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[left] = len(pathFound) // see all entity in y axis with same x axis / up direction ^ treasurePosition, pathFound = checkMap(treasureMap, startY+1, startX, 1, axis_y) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[up] = len(pathFound) // see all entity in -y axis with same x axis / down direction v treasurePosition, pathFound = checkMap(treasureMap, startY-1, startX, -1, axis_y) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[down] = len(pathFound) if treasureMap.OriginalMapping[treasureFound] == entity_treasure { p.FoundTreasure = true } // check possibility of path intersection with best probability to get the most explored map if p.DirectionTaken == up && p.Range[right] > p.Range[up] { p.DirectionTaken = right } else if p.DirectionTaken == right && p.Range[down] > p.Range[right] { p.DirectionTaken = down } return treasureFound, listPathPosition } // checkMap is a shorthand to validate an unobstructed line of sight in the original mapping; it returns the treasure location and the list of clear paths in sight func checkMap(treasureMap TreasureMap, startAxis int, staticAxis int, addValue int, typeAxis int) ([2]int, [][2]int) { var ( check = true treasurePosition [2]int pathPosition [][2]int currentPosition [2]int ) for check { if typeAxis == axis_x { currentPosition = [2]int{startAxis, staticAxis} } else { currentPosition = [2]int{staticAxis, startAxis} } if check { switch treasureMap.OriginalMapping[currentPosition] { case entity_path: pathPosition = append(pathPosition, currentPosition) case entity_treasure: treasurePosition = currentPosition case entity_obstacle: check = false default: check = false } } startAxis += addValue } return treasurePosition, pathPosition } // NewTreasure creating a new blank treasure func NewTreasure() Treasure { return Treasure{} } // randomizePosition put the entity_treasure in the map randomly. It requires several loops to ensure the entity_treasure is located on a clear entity_path func (t *Treasure) randomizePosition(sizeX, sizeY int) { var ( xMin, xMax = 1, sizeX yMin, yMax = 1, sizeY treasurePositionX, treasurePositionY int treasurePositionXY [2]int ) rand.Seed(time.Now().UnixNano()) treasurePositionX = rand.Intn(xMax-xMin) + xMin treasurePositionY = rand.Intn(yMax-yMin) + yMin treasurePositionXY = [2]int{treasurePositionX, treasurePositionY} t.Position = treasurePositionXY } // NewTreasureMap creating a new blank treasure map func NewTreasureMap(size [2]int) TreasureMap { return TreasureMap{ Size: size, Mapping: make(map[[2]int]int), OriginalMapping: make(map[[2]int]int), ListPossibleTreasureLocation: make(map[[2]int]bool), } } // render display of the mapping, not the original mapping. It also prints the list of possible treasure locations.
func (tm *TreasureMap) render() { var ( treasureMapDrawPerLine, treasureMapDrawComplete, treasureMapAdditional string ) for y := 1; y <= tm.Size[1]; y++ { treasureMapDrawPerLine = "" if y < tm.Size[1] { treasureMapDrawPerLine = "\n" } for x := 1; x <= tm.Size[0]; x++ { treasureMapDrawPerLine = treasureMapDrawPerLine + convertIntToEntity(tm.Mapping[[2]int{x, y}]) } treasureMapDrawComplete = treasureMapDrawPerLine + treasureMapDrawComplete } if len(tm.ListPossibleTreasureLocation) > 0 { for coordinate, possibleLocation := range tm.ListPossibleTreasureLocation { coordinateString := strconv.Itoa(coordinate[0]) + "," + strconv.Itoa(coordinate[1]) if possibleLocation { treasureMapAdditional = treasureMapAdditional + fmt.Sprintf("{%s},", coordinateString) } } treasureMapDrawComplete = treasureMapDrawComplete + fmt.Sprintf("\nPossible treasure location: %s", treasureMapAdditional) } if tm.TreasureLocation != [2]int{} { coordinateString := strconv.Itoa(tm.TreasureLocation[0]) + "," + strconv.Itoa(tm.TreasureLocation[1]) treasureMapDrawComplete = treasureMapDrawComplete + fmt.Sprintf("\nTreasure found at location: {%s}! Congratulations!", coordinateString) } renderToTerminal(treasureMapDrawComplete) } // generate putting the entity_obstacle on the outer sandbox boundary func (tm *TreasureMap) generate() { for y := 1; y <= tm.Size[1]; y++ { for x := 1; x <= tm.Size[0]; x++ { switch true { case x == 1, y == 1, x == tm.Size[0], y == tm.Size[1]: tm.Mapping[[2]int{x, y}] = entity_obstacle default: tm.Mapping[[2]int{x, y}] = entity_path } } } } // addObstacle putting the entity_obstacle by predefined location func (tm *TreasureMap) addObstacle(listCustomObstacle [][2]int) { for _, customObstacle := range listCustomObstacle { tm.Mapping[customObstacle] = entity_obstacle } } // createMap generate a sandbox, obstacles on its boundaries, and custom obstacles inside func (tm *TreasureMap) createMap(obstacle [][2]int) { tm.generate() tm.addObstacle(obstacle) } // setEntity put the entity in a position within the map func (tm *TreasureMap) setEntity(entity int, position [2]int) bool { switch tm.Mapping[position] { case entity_obstacle: return false case entity_path: if entity == entity_treasure || entity == entity_path || entity == entity_player { tm.Mapping[position] = entity return true } case entity_treasure: if tm.OriginalMapping[position] == entity_treasure { return false } else { tm.Mapping[position] = entity return true } case entity_player: if entity == entity_path { tm.Mapping[position] = entity return true } default: return false } return false } // setPossibleTreasure hide the actual entity_treasure coordinate to be exposed later func (tm *TreasureMap) setPossibleTreasure() { for coordinate := range tm.Mapping { tm.OriginalMapping[coordinate] = tm.Mapping[coordinate] if tm.Mapping[coordinate] == entity_path { tm.ListPossibleTreasureLocation[coordinate] = true tm.Mapping[coordinate] = entity_treasure } } } // revealMap unhide all unexplored possible treasure func (tm *TreasureMap) revealMap(treasurePositionXY [2]int) { for coordinate := range tm.Mapping { if tm.Mapping[coordinate] == entity_treasure && coordinate != treasurePositionXY { tm.Mapping[coordinate] = entity_path } } } // setTreasureLocation mark the found treasure location func (tm *TreasureMap) setTreasureLocation(treasurePositionXY [2]int) { tm.TreasureLocation = treasurePositionXY } // updatePossibleTreasureLocation keeping record of all possible treasure locations func (tm *TreasureMap)
updatePossibleTreasureLocation(listPathPosition [][2]int) { // remove the possible treasure location if it's a path for _, pathPosition := range listPathPosition { tm.ListPossibleTreasureLocation[pathPosition] = false } } // clearPossibleTreasureLocation empty the list of possible treasure locations, usually used once the treasure is found func (tm *TreasureMap) clearPossibleTreasureLocation() { tm.ListPossibleTreasureLocation = make(map[[2]int]bool) } // renderToTerminal performing animated rendering to the terminal. // If you want to run in golang playground, change constant render_interface from unix to playground. // But sadly still unable to run in playground func renderToTerminal(output string) { switch render_interface { case unix: fmt.Println("\033[2J") fmt.Println(output) case playground: fmt.Printf("\x0c %s", output) } time.Sleep(delay_time * time.Millisecond) } // convertIntToEntity convert an entity code constant to a map-drawn entity func convertIntToEntity(code int) string { switch code { case entity_path: return "." case entity_obstacle: return "#" case entity_player: return "X" case entity_treasure: return "$" default: return "." } }
main
identifier_name
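The row above is an identifier_name example: only the function name between func and () { is masked, so the target is the single token main. The surrounding file is built around axis-aligned line-of-sight scans; the sketch below distills that walk-until-obstacle loop into a standalone program. It is an illustrative rewrite, not the file's own checkMap: the scanLine helper, the grid literal, and the simplified entity constants are all hypothetical.

package main

import "fmt"

const (
	path = iota + 1
	treasure
	obstacle
)

// scanLine walks from start in steps of (dx, dy), collecting clear cells
// and noting a treasure cell if one is visible, until something blocks it.
func scanLine(grid map[[2]int]int, start [2]int, dx, dy int) ([2]int, [][2]int) {
	var treasureAt [2]int
	var clear [][2]int
	for pos := start; ; pos = [2]int{pos[0] + dx, pos[1] + dy} {
		switch grid[pos] {
		case path:
			clear = append(clear, pos)
		case treasure:
			treasureAt = pos
		default: // obstacle, or the map's zero value beyond the grid
			return treasureAt, clear
		}
	}
}

func main() {
	grid := map[[2]int]int{
		{2, 1}: path, {3, 1}: treasure, {4, 1}: path, {5, 1}: obstacle,
	}
	t, c := scanLine(grid, [2]int{2, 1}, 1, 0)
	fmt.Println(t, c) // [3 1] [[2 1] [4 1]]
}

Like checkMap, the scan deliberately continues past a sighted treasure so that the clear cells behind it are still reported.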
main.go
package main import ( "fmt" "math/rand" "strconv" "time" ) const ( // direction up = iota down right left stuck ) const ( // entity in map entity_path = iota + 1 entity_treasure entity_player entity_obstacle ) const ( // axis axis_x = iota axis_y ) const ( // render terminal unix = iota + 1 playground ) const ( delay_time = 400 // time used to display a step taken for each movement before continuing pause_time = 4000 // time used to display gimmick before full run exploration render_interface = unix // planned to be able to run in golang playground but got TIMEOUT instead! ) var ( mapSize = [2]int{8, 6} // define maximum size of the map [X,Y] playerStartPosition = [2]int{2, 2} // define default location of the player listCustomObstacle = [][2]int{ {3, 2}, {3, 4}, {4, 4}, {5, 4}, {5, 3}, {7, 3}, } ) type Player struct { Position [2]int DirectionTaken int FoundTreasure bool Range map[int]int } type Treasure struct { Position [2]int } type TreasureMap struct { Size [2]int OriginalMapping map[[2]int]int Mapping map[[2]int]int ListPossibleTreasureLocation map[[2]int]bool TreasureLocation [2]int } func main() { player := NewPlayer() treasure := NewTreasure() treasureMap := NewTreasureMap(mapSize) treasureMap.createMap(listCustomObstacle) treasureMap.setEntity(entity_player, player.Position) for true { treasure.randomizePosition(mapSize[0], mapSize[1]) if treasureMap.setEntity(entity_treasure, treasure.Position) { break } } treasureMap.render() // display initial condition with treasure fmt.Println("Initial Condition, treasure hid in:", treasure.Position, "Wait for it..") time.Sleep(pause_time * time.Millisecond) treasureMap.setPossibleTreasure() treasureMap.render() // display map with possible treasure location fmt.Println("Now it's hidden! Let's go find it!") time.Sleep(pause_time * time.Millisecond) for !player.FoundTreasure { // player see unobstructed path, and determine which is treasure and which is path treasurePositionXY, listPathPosition := player.see(treasureMap) for _, pathPosition := range listPathPosition { treasureMap.setEntity(entity_path, pathPosition) treasureMap.updatePossibleTreasureLocation(listPathPosition) } if !player.FoundTreasure { // keep moving until found the treasure newPosition, _ := player.move(treasureMap.Mapping) oldPosition := player.Position // stop, when player cannot move any longer if newPosition == oldPosition { break } // move the player into new position, put path on the older position treasureMap.setEntity(entity_path, oldPosition) treasureMap.setEntity(entity_player, newPosition) treasureMap.render() // update player position player.setPosition(newPosition) } else { treasureMap.clearPossibleTreasureLocation() treasureMap.setTreasureLocation(treasurePositionXY) treasureMap.revealMap(treasurePositionXY) treasureMap.render() break } } } // NewPlayer creating a new player with initial position func NewPlayer() Player { return Player{ Position: playerStartPosition, Range: make(map[int]int), } } // setPosition update the value of player position func (p *Player) setPosition(newPosition [2]int) { p.Position = newPosition } // move update the coordinate of the entity_player limited by predefined direction func (p *Player) move(treasureMap map[[2]int]int) ([2]int, bool) { if p.DirectionTaken == up { newPlayerPositionXY := [2]int{p.Position[0], p.Position[1] + 1} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = right } else { return newPlayerPositionXY, true } } if p.DirectionTaken == right { newPlayerPositionXY := [2]int{p.Position[0] 
+ 1, p.Position[1]} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = down } else { return newPlayerPositionXY, true } } if p.DirectionTaken == down { newPlayerPositionXY := [2]int{p.Position[0], p.Position[1] - 1} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = stuck } else { return newPlayerPositionXY, true } } return p.Position, false } // see check all unobstructed line of X & Y from entity_player position func (p *Player) see(treasureMap TreasureMap) ([2]int, [][2]int) { var ( startX, startY = p.Position[0], p.Position[1] treasurePosition, treasureFound [2]int listPathPosition, pathFound [][2]int ) // see all entity in x axis with same y axis / right direction -> treasurePosition, pathFound = checkMap(treasureMap, startX+1, startY, 1, axis_x) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[right] = len(pathFound) // see all entity in -x axis with same y axis / left direction <- treasurePosition, pathFound = checkMap(treasureMap, startX-1, startY, -1, axis_x) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[left] = len(pathFound) // see all entity in y axis with same x axis / up direction ^ treasurePosition, pathFound = checkMap(treasureMap, startY+1, startX, 1, axis_y) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[up] = len(pathFound) // see all entity in -y axis with same x axis / down direction v treasurePosition, pathFound = checkMap(treasureMap, startY-1, startX, -1, axis_y) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[down] = len(pathFound)
p.FoundTreasure = true } // check possibility of path intersection with best probability to get the most explored map if p.DirectionTaken == up && p.Range[right] > p.Range[up] { p.DirectionTaken = right } else if p.DirectionTaken == right && p.Range[down] > p.Range[right] { p.DirectionTaken = down } return treasureFound, listPathPosition } // checkMap is a shorthand to validate an unobstructed line of sight in the original mapping; it returns the treasure location and the list of clear paths in sight func checkMap(treasureMap TreasureMap, startAxis int, staticAxis int, addValue int, typeAxis int) ([2]int, [][2]int) { var ( check = true treasurePosition [2]int pathPosition [][2]int currentPosition [2]int ) for check { if typeAxis == axis_x { currentPosition = [2]int{startAxis, staticAxis} } else { currentPosition = [2]int{staticAxis, startAxis} } if check { switch treasureMap.OriginalMapping[currentPosition] { case entity_path: pathPosition = append(pathPosition, currentPosition) case entity_treasure: treasurePosition = currentPosition case entity_obstacle: check = false default: check = false } } startAxis += addValue } return treasurePosition, pathPosition } // NewTreasure creating a new blank treasure func NewTreasure() Treasure { return Treasure{} } // randomizePosition put the entity_treasure in the map randomly. It requires several loops to ensure the entity_treasure is located on a clear entity_path func (t *Treasure) randomizePosition(sizeX, sizeY int) { var ( xMin, xMax = 1, sizeX yMin, yMax = 1, sizeY treasurePositionX, treasurePositionY int treasurePositionXY [2]int ) rand.Seed(time.Now().UnixNano()) treasurePositionX = rand.Intn(xMax-xMin) + xMin treasurePositionY = rand.Intn(yMax-yMin) + yMin treasurePositionXY = [2]int{treasurePositionX, treasurePositionY} t.Position = treasurePositionXY } // NewTreasureMap creating a new blank treasure map func NewTreasureMap(size [2]int) TreasureMap { return TreasureMap{ Size: size, Mapping: make(map[[2]int]int), OriginalMapping: make(map[[2]int]int), ListPossibleTreasureLocation: make(map[[2]int]bool), } } // render display of the mapping, not the original mapping. It also prints the list of possible treasure locations. func (tm *TreasureMap) render() { var ( treasureMapDrawPerLine, treasureMapDrawComplete, treasureMapAdditional string ) for y := 1; y <= tm.Size[1]; y++ { treasureMapDrawPerLine = "" if y < tm.Size[1] { treasureMapDrawPerLine = "\n" } for x := 1; x <= tm.Size[0]; x++ { treasureMapDrawPerLine = treasureMapDrawPerLine + convertIntToEntity(tm.Mapping[[2]int{x, y}]) } treasureMapDrawComplete = treasureMapDrawPerLine + treasureMapDrawComplete } if len(tm.ListPossibleTreasureLocation) > 0 { for coordinate, possibleLocation := range tm.ListPossibleTreasureLocation { coordinateString := strconv.Itoa(coordinate[0]) + "," + strconv.Itoa(coordinate[1]) if possibleLocation { treasureMapAdditional = treasureMapAdditional + fmt.Sprintf("{%s},", coordinateString) } } treasureMapDrawComplete = treasureMapDrawComplete + fmt.Sprintf("\nPossible treasure location: %s", treasureMapAdditional) } if tm.TreasureLocation != [2]int{} { coordinateString := strconv.Itoa(tm.TreasureLocation[0]) + "," + strconv.Itoa(tm.TreasureLocation[1]) treasureMapDrawComplete = treasureMapDrawComplete + fmt.Sprintf("\nTreasure found at location: {%s}!
Congratulations!", coordinateString) } renderToTerminal(treasureMapDrawComplete) } // generate putting the entity_obstacle on the outer sandbox boundary func (tm *TreasureMap) generate() { for y := 1; y <= tm.Size[1]; y++ { for x := 1; x <= tm.Size[0]; x++ { switch true { case x == 1, y == 1, x == tm.Size[0], y == tm.Size[1]: tm.Mapping[[2]int{x, y}] = entity_obstacle default: tm.Mapping[[2]int{x, y}] = entity_path } } } } // addObstacle putting the entity_obstacle by predefined location func (tm *TreasureMap) addObstacle(listCustomObstacle [][2]int) { for _, customObstacle := range listCustomObstacle { tm.Mapping[customObstacle] = entity_obstacle } } // createMap generate a sandbox, obstacles on its boundaries, and custom obstacles inside func (tm *TreasureMap) createMap(obstacle [][2]int) { tm.generate() tm.addObstacle(obstacle) } // setEntity put the entity in a position within the map func (tm *TreasureMap) setEntity(entity int, position [2]int) bool { switch tm.Mapping[position] { case entity_obstacle: return false case entity_path: if entity == entity_treasure || entity == entity_path || entity == entity_player { tm.Mapping[position] = entity return true } case entity_treasure: if tm.OriginalMapping[position] == entity_treasure { return false } else { tm.Mapping[position] = entity return true } case entity_player: if entity == entity_path { tm.Mapping[position] = entity return true } default: return false } return false } // setPossibleTreasure hide the actual entity_treasure coordinate to be exposed later func (tm *TreasureMap) setPossibleTreasure() { for coordinate := range tm.Mapping { tm.OriginalMapping[coordinate] = tm.Mapping[coordinate] if tm.Mapping[coordinate] == entity_path { tm.ListPossibleTreasureLocation[coordinate] = true tm.Mapping[coordinate] = entity_treasure } } } // revealMap unhide all unexplored possible treasure func (tm *TreasureMap) revealMap(treasurePositionXY [2]int) { for coordinate := range tm.Mapping { if tm.Mapping[coordinate] == entity_treasure && coordinate != treasurePositionXY { tm.Mapping[coordinate] = entity_path } } } // setTreasureLocation mark the found treasure location func (tm *TreasureMap) setTreasureLocation(treasurePositionXY [2]int) { tm.TreasureLocation = treasurePositionXY } // updatePossibleTreasureLocation keeping record of all possible treasure locations func (tm *TreasureMap) updatePossibleTreasureLocation(listPathPosition [][2]int) { // remove the possible treasure location if it's a path for _, pathPosition := range listPathPosition { tm.ListPossibleTreasureLocation[pathPosition] = false } } // clearPossibleTreasureLocation empty the list of possible treasure locations, usually used once the treasure is found func (tm *TreasureMap) clearPossibleTreasureLocation() { tm.ListPossibleTreasureLocation = make(map[[2]int]bool) } // renderToTerminal performing animated rendering to the terminal. // If you want to run in golang playground, change constant render_interface from unix to playground. // But sadly still unable to run in playground func renderToTerminal(output string) { switch render_interface { case unix: fmt.Println("\033[2J") fmt.Println(output) case playground: fmt.Printf("\x0c %s", output) } time.Sleep(delay_time * time.Millisecond) } // convertIntToEntity convert an entity code constant to a map-drawn entity func convertIntToEntity(code int) string { switch code { case entity_path: return "." case entity_obstacle: return "#" case entity_player: return "X" case entity_treasure: return "$" default: return "." } }
if treasureMap.OriginalMapping[treasureFound] == entity_treasure {
random_line_split
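One detail of randomizePosition, as used by the retry loop in main, is easy to misread: rand.Intn(n) draws from the half-open range [0, n), so rand.Intn(xMax-xMin) + xMin can never return xMax. With mapSize = {8, 6}, the treasure is only ever drawn from x in [1, 7] and y in [1, 5], and the border cells it can still land on (x == 1 or y == 1) are obstacles, which is why main keeps looping until setEntity accepts the position. A small check of those bounds (the constants here just mirror mapSize[0]):

package main

import (
	"fmt"
	"math/rand"
)

func main() {
	const xMin, xMax = 1, 8 // mirrors mapSize[0]
	lo, hi := xMax, xMin
	for i := 0; i < 100000; i++ {
		v := rand.Intn(xMax-xMin) + xMin // uniform over [1, 7]; 8 is unreachable
		if v < lo {
			lo = v
		}
		if v > hi {
			hi = v
		}
	}
	fmt.Println(lo, hi) // prints "1 7" (with overwhelming probability)
}

Note also that rand.Seed inside randomizePosition reseeds the global generator on every call; since Go 1.20 that generator is seeded automatically and rand.Seed is deprecated, so a single seed at startup (or none at all) would be the more current idiom.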
main.go
package main import ( "fmt" "math/rand" "strconv" "time" ) const ( // direction up = iota down right left stuck ) const ( // entity in map entity_path = iota + 1 entity_treasure entity_player entity_obstacle ) const ( // axis axis_x = iota axis_y ) const ( // render terminal unix = iota + 1 playground ) const ( delay_time = 400 // time used to display a step taken for each movement before continuing pause_time = 4000 // time used to display gimmick before full run exploration render_interface = unix // planned to be able to run in golang playground but got TIMEOUT instead! ) var ( mapSize = [2]int{8, 6} // define maximum size of the map [X,Y] playerStartPosition = [2]int{2, 2} // define default location of the player listCustomObstacle = [][2]int{ {3, 2}, {3, 4}, {4, 4}, {5, 4}, {5, 3}, {7, 3}, } ) type Player struct { Position [2]int DirectionTaken int FoundTreasure bool Range map[int]int } type Treasure struct { Position [2]int } type TreasureMap struct { Size [2]int OriginalMapping map[[2]int]int Mapping map[[2]int]int ListPossibleTreasureLocation map[[2]int]bool TreasureLocation [2]int } func main() { player := NewPlayer() treasure := NewTreasure() treasureMap := NewTreasureMap(mapSize) treasureMap.createMap(listCustomObstacle) treasureMap.setEntity(entity_player, player.Position) for true { treasure.randomizePosition(mapSize[0], mapSize[1]) if treasureMap.setEntity(entity_treasure, treasure.Position) { break } } treasureMap.render() // display initial condition with treasure fmt.Println("Initial Condition, treasure hid in:", treasure.Position, "Wait for it..") time.Sleep(pause_time * time.Millisecond) treasureMap.setPossibleTreasure() treasureMap.render() // display map with possible treasure location fmt.Println("Now it's hidden! Let's go find it!") time.Sleep(pause_time * time.Millisecond) for !player.FoundTreasure { // player see unobstructed path, and determine which is treasure and which is path treasurePositionXY, listPathPosition := player.see(treasureMap) for _, pathPosition := range listPathPosition { treasureMap.setEntity(entity_path, pathPosition) treasureMap.updatePossibleTreasureLocation(listPathPosition) } if !player.FoundTreasure { // keep moving until found the treasure newPosition, _ := player.move(treasureMap.Mapping) oldPosition := player.Position // stop, when player cannot move any longer if newPosition == oldPosition
// move the player into new position, put path on the older position treasureMap.setEntity(entity_path, oldPosition) treasureMap.setEntity(entity_player, newPosition) treasureMap.render() // update player position player.setPosition(newPosition) } else { treasureMap.clearPossibleTreasureLocation() treasureMap.setTreasureLocation(treasurePositionXY) treasureMap.revealMap(treasurePositionXY) treasureMap.render() break } } } // NewPlayer creating a new player with initial position func NewPlayer() Player { return Player{ Position: playerStartPosition, Range: make(map[int]int), } } // setPosition update the value of player position func (p *Player) setPosition(newPosition [2]int) { p.Position = newPosition } // move update the coordinate of the entity_player limited by predefined direction func (p *Player) move(treasureMap map[[2]int]int) ([2]int, bool) { if p.DirectionTaken == up { newPlayerPositionXY := [2]int{p.Position[0], p.Position[1] + 1} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = right } else { return newPlayerPositionXY, true } } if p.DirectionTaken == right { newPlayerPositionXY := [2]int{p.Position[0] + 1, p.Position[1]} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = down } else { return newPlayerPositionXY, true } } if p.DirectionTaken == down { newPlayerPositionXY := [2]int{p.Position[0], p.Position[1] - 1} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = stuck } else { return newPlayerPositionXY, true } } return p.Position, false } // see check all unobstructed line of X & Y from entity_player position func (p *Player) see(treasureMap TreasureMap) ([2]int, [][2]int) { var ( startX, startY = p.Position[0], p.Position[1] treasurePosition, treasureFound [2]int listPathPosition, pathFound [][2]int ) // see all entity in x axis with same y axis / right direction -> treasurePosition, pathFound = checkMap(treasureMap, startX+1, startY, 1, axis_x) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[right] = len(pathFound) // see all entity in -x axis with same y axis / left direction <- treasurePosition, pathFound = checkMap(treasureMap, startX-1, startY, -1, axis_x) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[left] = len(pathFound) // see all entity in y axis with same x axis / up direction ^ treasurePosition, pathFound = checkMap(treasureMap, startY+1, startX, 1, axis_y) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[up] = len(pathFound) // see all entity in -y axis with same x axis / down direction v treasurePosition, pathFound = checkMap(treasureMap, startY-1, startX, -1, axis_y) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) 
p.Range[down] = len(pathFound) if treasureMap.OriginalMapping[treasureFound] == entity_treasure { p.FoundTreasure = true } // check possibility of path intersection with best probability to get the most explored map if p.DirectionTaken == up && p.Range[right] > p.Range[up] { p.DirectionTaken = right } else if p.DirectionTaken == right && p.Range[down] > p.Range[right] { p.DirectionTaken = down } return treasureFound, listPathPosition } // checkMap is a shorthand to validate an unobstructed line of sight in the original mapping; it returns the treasure location and the list of clear paths in sight func checkMap(treasureMap TreasureMap, startAxis int, staticAxis int, addValue int, typeAxis int) ([2]int, [][2]int) { var ( check = true treasurePosition [2]int pathPosition [][2]int currentPosition [2]int ) for check { if typeAxis == axis_x { currentPosition = [2]int{startAxis, staticAxis} } else { currentPosition = [2]int{staticAxis, startAxis} } if check { switch treasureMap.OriginalMapping[currentPosition] { case entity_path: pathPosition = append(pathPosition, currentPosition) case entity_treasure: treasurePosition = currentPosition case entity_obstacle: check = false default: check = false } } startAxis += addValue } return treasurePosition, pathPosition } // NewTreasure creating a new blank treasure func NewTreasure() Treasure { return Treasure{} } // randomizePosition put the entity_treasure in the map randomly. It requires several loops to ensure the entity_treasure is located on a clear entity_path func (t *Treasure) randomizePosition(sizeX, sizeY int) { var ( xMin, xMax = 1, sizeX yMin, yMax = 1, sizeY treasurePositionX, treasurePositionY int treasurePositionXY [2]int ) rand.Seed(time.Now().UnixNano()) treasurePositionX = rand.Intn(xMax-xMin) + xMin treasurePositionY = rand.Intn(yMax-yMin) + yMin treasurePositionXY = [2]int{treasurePositionX, treasurePositionY} t.Position = treasurePositionXY } // NewTreasureMap creating a new blank treasure map func NewTreasureMap(size [2]int) TreasureMap { return TreasureMap{ Size: size, Mapping: make(map[[2]int]int), OriginalMapping: make(map[[2]int]int), ListPossibleTreasureLocation: make(map[[2]int]bool), } } // render display of the mapping, not the original mapping. It also prints the list of possible treasure locations. func (tm *TreasureMap) render() { var ( treasureMapDrawPerLine, treasureMapDrawComplete, treasureMapAdditional string ) for y := 1; y <= tm.Size[1]; y++ { treasureMapDrawPerLine = "" if y < tm.Size[1] { treasureMapDrawPerLine = "\n" } for x := 1; x <= tm.Size[0]; x++ { treasureMapDrawPerLine = treasureMapDrawPerLine + convertIntToEntity(tm.Mapping[[2]int{x, y}]) } treasureMapDrawComplete = treasureMapDrawPerLine + treasureMapDrawComplete } if len(tm.ListPossibleTreasureLocation) > 0 { for coordinate, possibleLocation := range tm.ListPossibleTreasureLocation { coordinateString := strconv.Itoa(coordinate[0]) + "," + strconv.Itoa(coordinate[1]) if possibleLocation { treasureMapAdditional = treasureMapAdditional + fmt.Sprintf("{%s},", coordinateString) } } treasureMapDrawComplete = treasureMapDrawComplete + fmt.Sprintf("\nPossible treasure location: %s", treasureMapAdditional) } if tm.TreasureLocation != [2]int{} { coordinateString := strconv.Itoa(tm.TreasureLocation[0]) + "," + strconv.Itoa(tm.TreasureLocation[1]) treasureMapDrawComplete = treasureMapDrawComplete + fmt.Sprintf("\nTreasure found at location: {%s}!
Congratulations!", coordinateString) } renderToTerminal(treasureMapDrawComplete) } // generate putting the entity_obstacle on the outer sandbox boundary func (tm *TreasureMap) generate() { for y := 1; y <= tm.Size[1]; y++ { for x := 1; x <= tm.Size[0]; x++ { switch true { case x == 1, y == 1, x == tm.Size[0], y == tm.Size[1]: tm.Mapping[[2]int{x, y}] = entity_obstacle default: tm.Mapping[[2]int{x, y}] = entity_path } } } } // addObstacle putting the entity_obstacle by predefined location func (tm *TreasureMap) addObstacle(listCustomObstacle [][2]int) { for _, customObstacle := range listCustomObstacle { tm.Mapping[customObstacle] = entity_obstacle } } // createMap generate a sandbox, obstacles on its boundaries, and custom obstacles inside func (tm *TreasureMap) createMap(obstacle [][2]int) { tm.generate() tm.addObstacle(obstacle) } // setEntity put the entity in a position within the map func (tm *TreasureMap) setEntity(entity int, position [2]int) bool { switch tm.Mapping[position] { case entity_obstacle: return false case entity_path: if entity == entity_treasure || entity == entity_path || entity == entity_player { tm.Mapping[position] = entity return true } case entity_treasure: if tm.OriginalMapping[position] == entity_treasure { return false } else { tm.Mapping[position] = entity return true } case entity_player: if entity == entity_path { tm.Mapping[position] = entity return true } default: return false } return false } // setPossibleTreasure hide the actual entity_treasure coordinate to be exposed later func (tm *TreasureMap) setPossibleTreasure() { for coordinate := range tm.Mapping { tm.OriginalMapping[coordinate] = tm.Mapping[coordinate] if tm.Mapping[coordinate] == entity_path { tm.ListPossibleTreasureLocation[coordinate] = true tm.Mapping[coordinate] = entity_treasure } } } // revealMap unhide all unexplored possible treasure func (tm *TreasureMap) revealMap(treasurePositionXY [2]int) { for coordinate := range tm.Mapping { if tm.Mapping[coordinate] == entity_treasure && coordinate != treasurePositionXY { tm.Mapping[coordinate] = entity_path } } } // setTreasureLocation mark the found treasure location func (tm *TreasureMap) setTreasureLocation(treasurePositionXY [2]int) { tm.TreasureLocation = treasurePositionXY } // updatePossibleTreasureLocation keeping record of all possible treasure locations func (tm *TreasureMap) updatePossibleTreasureLocation(listPathPosition [][2]int) { // remove the possible treasure location if it's a path for _, pathPosition := range listPathPosition { tm.ListPossibleTreasureLocation[pathPosition] = false } } // clearPossibleTreasureLocation empty the list of possible treasure locations, usually used once the treasure is found func (tm *TreasureMap) clearPossibleTreasureLocation() { tm.ListPossibleTreasureLocation = make(map[[2]int]bool) } // renderToTerminal performing animated rendering to the terminal. // If you want to run in golang playground, change constant render_interface from unix to playground. // But sadly still unable to run in playground func renderToTerminal(output string) { switch render_interface { case unix: fmt.Println("\033[2J") fmt.Println(output) case playground: fmt.Printf("\x0c %s", output) } time.Sleep(delay_time * time.Millisecond) } // convertIntToEntity convert an entity code constant to a map-drawn entity func convertIntToEntity(code int) string { switch code { case entity_path: return "." case entity_obstacle: return "#" case entity_player: return "X" case entity_treasure: return "$" default: return "." } }
{ break }
conditional_block
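The masked conditional_block above is the { break } that ends the game loop once move reports no progress. move itself encodes a fixed fallback chain: try up, demote to right on an obstacle, then to down, then to stuck, finally returning the old position with false. A compact, table-driven restatement of that chain (the delta/next tables and the blocked constant are inventions of this sketch, not part of the original file):

package main

import "fmt"

const (
	up = iota
	down
	right
	left
	stuck
	blocked = 9 // stands in for entity_obstacle
)

// delta maps each direction to its coordinate step; next maps it to the
// direction tried when that step is blocked, ending in stuck.
var (
	delta = map[int][2]int{up: {0, 1}, right: {1, 0}, down: {0, -1}}
	next  = map[int]int{up: right, right: down, down: stuck}
)

// move mirrors Player.move: step in the current direction if clear,
// otherwise demote, and report (oldPos, stuck, false) when exhausted.
func move(grid map[[2]int]int, pos [2]int, dir int) ([2]int, int, bool) {
	for dir != stuck {
		d := delta[dir]
		cand := [2]int{pos[0] + d[0], pos[1] + d[1]}
		if grid[cand] != blocked {
			return cand, dir, true
		}
		dir = next[dir]
	}
	return pos, dir, false
}

func main() {
	grid := map[[2]int]int{{2, 3}: blocked, {3, 2}: blocked} // up and right are walled off
	pos, dir, ok := move(grid, [2]int{2, 2}, up)
	fmt.Println(pos, dir, ok) // [2 1] 1 true — fell through to down
}

Because the chain never promotes back upward, newPosition == oldPosition is a reliable stop test: once stuck, every later call returns the same position.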
main.go
package main import ( "fmt" "math/rand" "strconv" "time" ) const ( // direction up = iota down right left stuck ) const ( // entity in map entity_path = iota + 1 entity_treasure entity_player entity_obstacle ) const ( // axis axis_x = iota axis_y ) const ( // render terminal unix = iota + 1 playground ) const ( delay_time = 400 // time used to display a step taken for each movement before continuing pause_time = 4000 // time used to display gimmick before full run exploration render_interface = unix // planned to be able to run in golang playground but got TIMEOUT instead! ) var ( mapSize = [2]int{8, 6} // define maximum size of the map [X,Y] playerStartPosition = [2]int{2, 2} // define default location of the player listCustomObstacle = [][2]int{ {3, 2}, {3, 4}, {4, 4}, {5, 4}, {5, 3}, {7, 3}, } ) type Player struct { Position [2]int DirectionTaken int FoundTreasure bool Range map[int]int } type Treasure struct { Position [2]int } type TreasureMap struct { Size [2]int OriginalMapping map[[2]int]int Mapping map[[2]int]int ListPossibleTreasureLocation map[[2]int]bool TreasureLocation [2]int } func main() { player := NewPlayer() treasure := NewTreasure() treasureMap := NewTreasureMap(mapSize) treasureMap.createMap(listCustomObstacle) treasureMap.setEntity(entity_player, player.Position) for true { treasure.randomizePosition(mapSize[0], mapSize[1]) if treasureMap.setEntity(entity_treasure, treasure.Position) { break } } treasureMap.render() // display initial condition with treasure fmt.Println("Initial Condition, treasure hid in:", treasure.Position, "Wait for it..") time.Sleep(pause_time * time.Millisecond) treasureMap.setPossibleTreasure() treasureMap.render() // display map with possible treasure location fmt.Println("Now it's hidden! Let's go find it!") time.Sleep(pause_time * time.Millisecond) for !player.FoundTreasure { // player see unobstructed path, and determine which is treasure and which is path treasurePositionXY, listPathPosition := player.see(treasureMap) for _, pathPosition := range listPathPosition { treasureMap.setEntity(entity_path, pathPosition) treasureMap.updatePossibleTreasureLocation(listPathPosition) } if !player.FoundTreasure { // keep moving until found the treasure newPosition, _ := player.move(treasureMap.Mapping) oldPosition := player.Position // stop, when player cannot move any longer if newPosition == oldPosition { break } // move the player into new position, put path on the older position treasureMap.setEntity(entity_path, oldPosition) treasureMap.setEntity(entity_player, newPosition) treasureMap.render() // update player position player.setPosition(newPosition) } else { treasureMap.clearPossibleTreasureLocation() treasureMap.setTreasureLocation(treasurePositionXY) treasureMap.revealMap(treasurePositionXY) treasureMap.render() break } } } // NewPlayer creating a new player with initial position func NewPlayer() Player { return Player{ Position: playerStartPosition, Range: make(map[int]int), } } // setPosition update the value of player position func (p *Player) setPosition(newPosition [2]int) { p.Position = newPosition } // move update the coordinate of the entity_player limited by predefined direction func (p *Player) move(treasureMap map[[2]int]int) ([2]int, bool) { if p.DirectionTaken == up { newPlayerPositionXY := [2]int{p.Position[0], p.Position[1] + 1} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = right } else { return newPlayerPositionXY, true } } if p.DirectionTaken == right { newPlayerPositionXY := [2]int{p.Position[0] 
+ 1, p.Position[1]} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = down } else { return newPlayerPositionXY, true } } if p.DirectionTaken == down { newPlayerPositionXY := [2]int{p.Position[0], p.Position[1] - 1} if treasureMap[newPlayerPositionXY] == entity_obstacle { p.DirectionTaken = stuck } else { return newPlayerPositionXY, true } } return p.Position, false } // see check all unobstructed line of X & Y from entity_player position func (p *Player) see(treasureMap TreasureMap) ([2]int, [][2]int)
// checkMap is a shorthand to validate an unobstructed line of sight in the original mapping; it returns the treasure location and the list of clear paths in sight func checkMap(treasureMap TreasureMap, startAxis int, staticAxis int, addValue int, typeAxis int) ([2]int, [][2]int) { var ( check = true treasurePosition [2]int pathPosition [][2]int currentPosition [2]int ) for check { if typeAxis == axis_x { currentPosition = [2]int{startAxis, staticAxis} } else { currentPosition = [2]int{staticAxis, startAxis} } if check { switch treasureMap.OriginalMapping[currentPosition] { case entity_path: pathPosition = append(pathPosition, currentPosition) case entity_treasure: treasurePosition = currentPosition case entity_obstacle: check = false default: check = false } } startAxis += addValue } return treasurePosition, pathPosition } // NewTreasure creating a new blank treasure func NewTreasure() Treasure { return Treasure{} } // randomizePosition put the entity_treasure in the map randomly. It requires several loops to ensure the entity_treasure is located on a clear entity_path func (t *Treasure) randomizePosition(sizeX, sizeY int) { var ( xMin, xMax = 1, sizeX yMin, yMax = 1, sizeY treasurePositionX, treasurePositionY int treasurePositionXY [2]int ) rand.Seed(time.Now().UnixNano()) treasurePositionX = rand.Intn(xMax-xMin) + xMin treasurePositionY = rand.Intn(yMax-yMin) + yMin treasurePositionXY = [2]int{treasurePositionX, treasurePositionY} t.Position = treasurePositionXY } // NewTreasureMap creating a new blank treasure map func NewTreasureMap(size [2]int) TreasureMap { return TreasureMap{ Size: size, Mapping: make(map[[2]int]int), OriginalMapping: make(map[[2]int]int), ListPossibleTreasureLocation: make(map[[2]int]bool), } } // render display of the mapping, not the original mapping. It also prints the list of possible treasure locations. func (tm *TreasureMap) render() { var ( treasureMapDrawPerLine, treasureMapDrawComplete, treasureMapAdditional string ) for y := 1; y <= tm.Size[1]; y++ { treasureMapDrawPerLine = "" if y < tm.Size[1] { treasureMapDrawPerLine = "\n" } for x := 1; x <= tm.Size[0]; x++ { treasureMapDrawPerLine = treasureMapDrawPerLine + convertIntToEntity(tm.Mapping[[2]int{x, y}]) } treasureMapDrawComplete = treasureMapDrawPerLine + treasureMapDrawComplete } if len(tm.ListPossibleTreasureLocation) > 0 { for coordinate, possibleLocation := range tm.ListPossibleTreasureLocation { coordinateString := strconv.Itoa(coordinate[0]) + "," + strconv.Itoa(coordinate[1]) if possibleLocation { treasureMapAdditional = treasureMapAdditional + fmt.Sprintf("{%s},", coordinateString) } } treasureMapDrawComplete = treasureMapDrawComplete + fmt.Sprintf("\nPossible treasure location: %s", treasureMapAdditional) } if tm.TreasureLocation != [2]int{} { coordinateString := strconv.Itoa(tm.TreasureLocation[0]) + "," + strconv.Itoa(tm.TreasureLocation[1]) treasureMapDrawComplete = treasureMapDrawComplete + fmt.Sprintf("\nTreasure found at location: {%s}!
Congratulations!", coordinateString) } renderToTerminal(treasureMapDrawComplete) } // generate putting the entity_obstacle on the outer sandbox boundary func (tm *TreasureMap) generate() { for y := 1; y <= tm.Size[1]; y++ { for x := 1; x <= tm.Size[0]; x++ { switch true { case x == 1, y == 1, x == tm.Size[0], y == tm.Size[1]: tm.Mapping[[2]int{x, y}] = entity_obstacle default: tm.Mapping[[2]int{x, y}] = entity_path } } } } // addObstacle putting the entity_obstacle by predefined location func (tm *TreasureMap) addObstacle(listCustomObstacle [][2]int) { for _, customObstacle := range listCustomObstacle { tm.Mapping[customObstacle] = entity_obstacle } } // createMap generate a sandbox, obstacles on its boundaries, and custom obstacles inside func (tm *TreasureMap) createMap(obstacle [][2]int) { tm.generate() tm.addObstacle(obstacle) } // setEntity put the entity in a position within the map func (tm *TreasureMap) setEntity(entity int, position [2]int) bool { switch tm.Mapping[position] { case entity_obstacle: return false case entity_path: if entity == entity_treasure || entity == entity_path || entity == entity_player { tm.Mapping[position] = entity return true } case entity_treasure: if tm.OriginalMapping[position] == entity_treasure { return false } else { tm.Mapping[position] = entity return true } case entity_player: if entity == entity_path { tm.Mapping[position] = entity return true } default: return false } return false } // setPossibleTreasure hide the actual entity_treasure coordinate to be exposed later func (tm *TreasureMap) setPossibleTreasure() { for coordinate := range tm.Mapping { tm.OriginalMapping[coordinate] = tm.Mapping[coordinate] if tm.Mapping[coordinate] == entity_path { tm.ListPossibleTreasureLocation[coordinate] = true tm.Mapping[coordinate] = entity_treasure } } } // revealMap unhide all unexplored possible treasure func (tm *TreasureMap) revealMap(treasurePositionXY [2]int) { for coordinate := range tm.Mapping { if tm.Mapping[coordinate] == entity_treasure && coordinate != treasurePositionXY { tm.Mapping[coordinate] = entity_path } } } // setTreasureLocation mark the found treasure location func (tm *TreasureMap) setTreasureLocation(treasurePositionXY [2]int) { tm.TreasureLocation = treasurePositionXY } // updatePossibleTreasureLocation keeping record of all possible treasure locations func (tm *TreasureMap) updatePossibleTreasureLocation(listPathPosition [][2]int) { // remove the possible treasure location if it's a path for _, pathPosition := range listPathPosition { tm.ListPossibleTreasureLocation[pathPosition] = false } } // clearPossibleTreasureLocation empty the list of possible treasure locations, usually used once the treasure is found func (tm *TreasureMap) clearPossibleTreasureLocation() { tm.ListPossibleTreasureLocation = make(map[[2]int]bool) } // renderToTerminal performing animated rendering to the terminal. // If you want to run in golang playground, change constant render_interface from unix to playground. // But sadly still unable to run in playground func renderToTerminal(output string) { switch render_interface { case unix: fmt.Println("\033[2J") fmt.Println(output) case playground: fmt.Printf("\x0c %s", output) } time.Sleep(delay_time * time.Millisecond) } // convertIntToEntity convert an entity code constant to a map-drawn entity func convertIntToEntity(code int) string { switch code { case entity_path: return "." case entity_obstacle: return "#" case entity_player: return "X" case entity_treasure: return "$" default: return "." } }
{ var ( startX, startY = p.Position[0], p.Position[1] treasurePosition, treasureFound [2]int listPathPosition, pathFound [][2]int ) // see all entity in x axis with same y axis / right direction -> treasurePosition, pathFound = checkMap(treasureMap, startX+1, startY, 1, axis_x) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[right] = len(pathFound) // see all entity in -x axis with same y axis / left direction <- treasurePosition, pathFound = checkMap(treasureMap, startX-1, startY, -1, axis_x) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[left] = len(pathFound) // see all entity in y axis with same x axis / up direction ^ treasurePosition, pathFound = checkMap(treasureMap, startY+1, startX, 1, axis_y) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[up] = len(pathFound) // see all entity in -y axis with same x axis / down direction v treasurePosition, pathFound = checkMap(treasureMap, startY-1, startX, -1, axis_y) if treasureMap.OriginalMapping[treasurePosition] == entity_treasure { treasureFound = treasurePosition } listPathPosition = append(listPathPosition, pathFound...) p.Range[down] = len(pathFound) if treasureMap.OriginalMapping[treasureFound] == entity_treasure { p.FoundTreasure = true } // check possibility of path intersection with best probability to get the most explored map if p.DirectionTaken == up && p.Range[right] > p.Range[up] { p.DirectionTaken = right } else if p.DirectionTaken == right && p.Range[down] > p.Range[right] { p.DirectionTaken = down } return treasureFound, listPathPosition }
identifier_body
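The identifier_body target above is the whole body of see. Elsewhere in the same file, render has a subtlety worth calling out: it assembles each row left-to-right but prepends the finished row to the accumulated output, so row y == 1 prints last, at the bottom of the screen. That is what makes move's up (y + 1) actually point up in the rendered map. A toy demonstration of the flip (the row strings are made up for illustration):

package main

import "fmt"

func main() {
	// Prepending each completed row reverses the vertical order:
	// y == 1 is emitted last, i.e. on the bottom line.
	rows := []string{"y=1 ....", "y=2 .X..", "y=3 ####"}
	out := ""
	for _, r := range rows { // iterate y = 1..3, as render does
		out = r + "\n" + out
	}
	fmt.Print(out)
	// y=3 ####
	// y=2 .X..
	// y=1 ....
}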
vessel.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: proto/vessel/vessel.proto /* Package vessel is a generated protocol buffer package. It is generated from these files: proto/vessel/vessel.proto It has these top-level messages: Vessel Specification Response */ package vessel import ( fmt "fmt" proto "github.com/golang/protobuf/proto" math "math" ) import ( client "github.com/micro/go-micro/client" server "github.com/micro/go-micro/server" context "golang.org/x/net/context" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Vessel struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` Capacity int32 `protobuf:"varint,2,opt,name=capacity,proto3" json:"capacity,omitempty"` MaxWeight int32 `protobuf:"varint,3,opt,name=max_weight,json=maxWeight,proto3" json:"max_weight,omitempty"` Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` Available bool `protobuf:"varint,5,opt,name=available,proto3" json:"available,omitempty"` OwnerId string `protobuf:"bytes,6,opt,name=owner_id,json=ownerId,proto3" json:"owner_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Vessel) Reset() { *m = Vessel{} } func (m *Vessel) String() string { return proto.CompactTextString(m) } func (*Vessel) ProtoMessage() {} func (*Vessel) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{0} } func (m *Vessel) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Vessel.Unmarshal(m, b) } func (m *Vessel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Vessel.Marshal(b, m, deterministic) } func (m *Vessel) XXX_Merge(src proto.Message) { xxx_messageInfo_Vessel.Merge(m, src) } func (m *Vessel) XXX_Size() int { return xxx_messageInfo_Vessel.Size(m) } func (m *Vessel) XXX_DiscardUnknown() { xxx_messageInfo_Vessel.DiscardUnknown(m) } var xxx_messageInfo_Vessel proto.InternalMessageInfo func (m *Vessel) GetId() string { if m != nil { return m.Id } return "" } func (m *Vessel) GetCapacity() int32 { if m != nil { return m.Capacity } return 0 } func (m *Vessel) GetMaxWeight() int32 { if m != nil { return m.MaxWeight } return 0 } func (m *Vessel) GetName() string { if m != nil { return m.Name } return "" } func (m *Vessel) GetAvailable() bool { if m != nil { return m.Available } return false } func (m *Vessel) GetOwnerId() string { if m != nil { return m.OwnerId } return "" } type Specification struct { Capacity int32 `protobuf:"varint,1,opt,name=capacity,proto3" json:"capacity,omitempty"` MaxWeight int32 `protobuf:"varint,2,opt,name=max_weight,json=maxWeight,proto3" json:"max_weight,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Specification) Reset() { *m = Specification{} } func (m *Specification) String() string { return proto.CompactTextString(m) } func (*Specification) ProtoMessage() {} func (*Specification) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{1} } func (m *Specification) XXX_Unmarshal(b []byte)
error { return xxx_messageInfo_Specification.Unmarshal(m, b) } func (m *Specification) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Specification.Marshal(b, m, deterministic) } func (m *Specification) XXX_Merge(src proto.Message) { xxx_messageInfo_Specification.Merge(m, src) } func (m *Specification) XXX_Size() int { return xxx_messageInfo_Specification.Size(m) } func (m *Specification) XXX_DiscardUnknown() { xxx_messageInfo_Specification.DiscardUnknown(m) } var xxx_messageInfo_Specification proto.InternalMessageInfo func (m *Specification) GetCapacity() int32 { if m != nil { return m.Capacity } return 0 } func (m *Specification) GetMaxWeight() int32 { if m != nil { return m.MaxWeight } return 0 } type Response struct { Vessel *Vessel `protobuf:"bytes,1,opt,name=vessel,proto3" json:"vessel,omitempty"` Vessels []*Vessel `protobuf:"bytes,2,rep,name=vessels,proto3" json:"vessels,omitempty"` Created bool `protobuf:"varint,3,opt,name=created,proto3" json:"created,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Response) Reset() { *m = Response{} } func (m *Response) String() string { return proto.CompactTextString(m) } func (*Response) ProtoMessage() {} func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{2} } func (m *Response) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Response.Unmarshal(m, b) } func (m *Response) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Response.Marshal(b, m, deterministic) } func (m *Response) XXX_Merge(src proto.Message) { xxx_messageInfo_Response.Merge(m, src) } func (m *Response) XXX_Size() int { return xxx_messageInfo_Response.Size(m) } func (m *Response) XXX_DiscardUnknown() { xxx_messageInfo_Response.DiscardUnknown(m) } var xxx_messageInfo_Response proto.InternalMessageInfo func (m *Response) GetVessel() *Vessel { if m != nil { return m.Vessel } return nil } func (m *Response) GetVessels() []*Vessel { if m != nil { return m.Vessels } return nil } func (m *Response) GetCreated() bool { if m != nil { return m.Created } return false } func init() { proto.RegisterType((*Vessel)(nil), "vessel.Vessel") proto.RegisterType((*Specification)(nil), "vessel.Specification") proto.RegisterType((*Response)(nil), "vessel.Response") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ client.Option var _ server.Option // Client API for VesselService service type VesselServiceClient interface { FindAvailable(ctx context.Context, in *Specification, opts ...client.CallOption) (*Response, error) Create(ctx context.Context, in *Vessel, opts ...client.CallOption) (*Response, error) } type vesselServiceClient struct { c client.Client serviceName string } func NewVesselServiceClient(serviceName string, c client.Client) VesselServiceClient {
serviceName = "vessel" } return &vesselServiceClient{ c: c, serviceName: serviceName, } } func (c *vesselServiceClient) FindAvailable(ctx context.Context, in *Specification, opts ...client.CallOption) (*Response, error) { req := c.c.NewRequest(c.serviceName, "VesselService.FindAvailable", in) out := new(Response) err := c.c.Call(ctx, req, out, opts...) if err != nil { return nil, err } return out, nil } func (c *vesselServiceClient) Create(ctx context.Context, in *Vessel, opts ...client.CallOption) (*Response, error) { req := c.c.NewRequest(c.serviceName, "VesselService.Create", in) out := new(Response) err := c.c.Call(ctx, req, out, opts...) if err != nil { return nil, err } return out, nil } // Server API for VesselService service type VesselServiceHandler interface { FindAvailable(context.Context, *Specification, *Response) error Create(context.Context, *Vessel, *Response) error } func RegisterVesselServiceHandler(s server.Server, hdlr VesselServiceHandler, opts ...server.HandlerOption) { s.Handle(s.NewHandler(&VesselService{hdlr}, opts...)) } type VesselService struct { VesselServiceHandler } func (h *VesselService) FindAvailable(ctx context.Context, in *Specification, out *Response) error { return h.VesselServiceHandler.FindAvailable(ctx, in, out) } func (h *VesselService) Create(ctx context.Context, in *Vessel, out *Response) error { return h.VesselServiceHandler.Create(ctx, in, out) } func init() { proto.RegisterFile("proto/vessel/vessel.proto", fileDescriptor_04ef66862bb50716) } var fileDescriptor_04ef66862bb50716 = []byte{ // 300 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x4d, 0x4b, 0xc3, 0x40, 0x10, 0x75, 0xd3, 0x36, 0x4d, 0x47, 0x5a, 0x64, 0x40, 0xd8, 0x16, 0x85, 0x90, 0x83, 0xe4, 0x20, 0x15, 0xea, 0xc5, 0xab, 0x08, 0x82, 0x1e, 0xb7, 0xa0, 0xc7, 0xb2, 0xdd, 0x1d, 0x75, 0xa1, 0x4d, 0x42, 0x12, 0xd2, 0xfa, 0x6f, 0xfc, 0xa9, 0xc2, 0xe4, 0x43, 0x5a, 0xc5, 0xd3, 0xce, 0x7b, 0xf3, 0xf6, 0xf1, 0xf6, 0x2d, 0x4c, 0xb3, 0x3c, 0x2d, 0xd3, 0x9b, 0x8a, 0x8a, 0x82, 0x36, 0xcd, 0x31, 0x67, 0x0e, 0xfd, 0x1a, 0x45, 0x5f, 0x02, 0xfc, 0x17, 0x1e, 0x71, 0x02, 0x9e, 0xb3, 0x52, 0x84, 0x22, 0x1e, 0x29, 0xcf, 0x59, 0x9c, 0x41, 0x60, 0x74, 0xa6, 0x8d, 0x2b, 0x3f, 0xa5, 0x17, 0x8a, 0x78, 0xa0, 0x3a, 0x8c, 0x97, 0x00, 0x5b, 0xbd, 0x5f, 0xed, 0xc8, 0xbd, 0x7f, 0x94, 0xb2, 0xc7, 0xdb, 0xd1, 0x56, 0xef, 0x5f, 0x99, 0x40, 0x84, 0x7e, 0xa2, 0xb7, 0x24, 0xfb, 0x6c, 0xc6, 0x33, 0x5e, 0xc0, 0x48, 0x57, 0xda, 0x6d, 0xf4, 0x7a, 0x43, 0x72, 0x10, 0x8a, 0x38, 0x50, 0x3f, 0x04, 0x4e, 0x21, 0x48, 0x77, 0x09, 0xe5, 0x2b, 0x67, 0xa5, 0xcf, 0xb7, 0x86, 0x8c, 0x9f, 0x6c, 0xf4, 0x0c, 0xe3, 0x65, 0x46, 0xc6, 0xbd, 0x39, 0xa3, 0x4b, 0x97, 0x26, 0x07, 0xc1, 0xc4, 0xbf, 0xc1, 0xbc, 0xa3, 0x60, 0x51, 0x05, 0x81, 0xa2, 0x22, 0x4b, 0x93, 0x82, 0xf0, 0x0a, 0x9a, 0x12, 0xd8, 0xe4, 0x74, 0x31, 0x99, 0x37, 0x0d, 0xd5, 0x7d, 0xa8, 0x66, 0x8b, 0x31, 0x0c, 0xeb, 0xa9, 0x90, 0x5e, 0xd8, 0xfb, 0x43, 0xd8, 0xae, 0x51, 0xc2, 0xd0, 0xe4, 0xa4, 0x4b, 0xb2, 0x5c, 0x49, 0xa0, 0x5a, 0xb8, 0xd8, 0xc1, 0xb8, 0x16, 0x2f, 0x29, 0xaf, 0x9c, 0x21, 0xbc, 0x83, 0xf1, 0xa3, 0x4b, 0xec, 0x7d, 0x57, 0xc0, 0x79, 0x6b, 0x7a, 0xf0, 0xd6, 0xd9, 0x59, 0x4b, 0x77, 0xb1, 0xaf, 0xc1, 0x7f, 0x60, 0x57, 0x3c, 0xca, 0xf1, 0x5b, 0x1b, 0x9d, 0xac, 0x7d, 0xfe, 0xee, 0xdb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x23, 0x2f, 0x31, 0xf4, 0x0b, 0x02, 0x00, 0x00, }
if c == nil { c = client.NewClient() } if len(serviceName) == 0 {
random_line_split
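The masked lines here are the defaulting logic at the top of NewVesselServiceClient: a nil client falls back to client.NewClient() and an empty service name falls back to "vessel". A typical call site leans on exactly those defaults. The sketch below assumes a reachable go-micro service registered under that name; the import path is hypothetical, while everything else uses only the generated API shown above.

package main

import (
	"fmt"

	"golang.org/x/net/context"

	pb "example.com/shippy/proto/vessel" // hypothetical import path for this package
)

func main() {
	// "" and nil exercise the defaults filled in by the masked span:
	// service name "vessel" and client.NewClient().
	vesselClient := pb.NewVesselServiceClient("", nil)

	spec := &pb.Specification{Capacity: 500, MaxWeight: 100000}
	resp, err := vesselClient.FindAvailable(context.Background(), spec)
	if err != nil {
		fmt.Println("no vessel available:", err)
		return
	}
	// GetName is nil-safe: the generated getters check for a nil receiver.
	fmt.Println("found vessel:", resp.Vessel.GetName())
}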
vessel.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: proto/vessel/vessel.proto /* Package vessel is a generated protocol buffer package. It is generated from these files: proto/vessel/vessel.proto It has these top-level messages: Vessel Specification Response */ package vessel import ( fmt "fmt" proto "github.com/golang/protobuf/proto" math "math" ) import ( client "github.com/micro/go-micro/client" server "github.com/micro/go-micro/server" context "golang.org/x/net/context" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Vessel struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` Capacity int32 `protobuf:"varint,2,opt,name=capacity,proto3" json:"capacity,omitempty"` MaxWeight int32 `protobuf:"varint,3,opt,name=max_weight,json=maxWeight,proto3" json:"max_weight,omitempty"` Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` Available bool `protobuf:"varint,5,opt,name=available,proto3" json:"available,omitempty"` OwnerId string `protobuf:"bytes,6,opt,name=owner_id,json=ownerId,proto3" json:"owner_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Vessel) Reset() { *m = Vessel{} } func (m *Vessel) String() string { return proto.CompactTextString(m) } func (*Vessel) ProtoMessage() {} func (*Vessel) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{0} } func (m *Vessel) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Vessel.Unmarshal(m, b) } func (m *Vessel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Vessel.Marshal(b, m, deterministic) } func (m *Vessel) XXX_Merge(src proto.Message) { xxx_messageInfo_Vessel.Merge(m, src) } func (m *Vessel) XXX_Size() int { return xxx_messageInfo_Vessel.Size(m) } func (m *Vessel) XXX_DiscardUnknown() { xxx_messageInfo_Vessel.DiscardUnknown(m) } var xxx_messageInfo_Vessel proto.InternalMessageInfo func (m *Vessel) GetId() string { if m != nil { return m.Id } return "" } func (m *Vessel) GetCapacity() int32 { if m != nil { return m.Capacity } return 0 } func (m *Vessel) GetMaxWeight() int32 { if m != nil { return m.MaxWeight } return 0 } func (m *Vessel) GetName() string { if m != nil { return m.Name } return "" } func (m *Vessel) GetAvailable() bool { if m != nil { return m.Available } return false } func (m *Vessel) GetOwnerId() string { if m != nil { return m.OwnerId } return "" } type Specification struct { Capacity int32 `protobuf:"varint,1,opt,name=capacity,proto3" json:"capacity,omitempty"` MaxWeight int32 `protobuf:"varint,2,opt,name=max_weight,json=maxWeight,proto3" json:"max_weight,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Specification) Reset() { *m = Specification{} } func (m *Specification) String() string { return proto.CompactTextString(m) } func (*Specification) ProtoMessage() {} func (*Specification) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{1} } func (m *Specification) XXX_Unmarshal(b []byte)
error { return xxx_messageInfo_Specification.Unmarshal(m, b) } func (m *Specification) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Specification.Marshal(b, m, deterministic) } func (m *Specification) XXX_Merge(src proto.Message)
func (m *Specification) XXX_Size() int { return xxx_messageInfo_Specification.Size(m) } func (m *Specification) XXX_DiscardUnknown() { xxx_messageInfo_Specification.DiscardUnknown(m) } var xxx_messageInfo_Specification proto.InternalMessageInfo func (m *Specification) GetCapacity() int32 { if m != nil { return m.Capacity } return 0 } func (m *Specification) GetMaxWeight() int32 { if m != nil { return m.MaxWeight } return 0 } type Response struct { Vessel *Vessel `protobuf:"bytes,1,opt,name=vessel,proto3" json:"vessel,omitempty"` Vessels []*Vessel `protobuf:"bytes,2,rep,name=vessels,proto3" json:"vessels,omitempty"` Created bool `protobuf:"varint,3,opt,name=created,proto3" json:"created,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Response) Reset() { *m = Response{} } func (m *Response) String() string { return proto.CompactTextString(m) } func (*Response) ProtoMessage() {} func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{2} } func (m *Response) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Response.Unmarshal(m, b) } func (m *Response) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Response.Marshal(b, m, deterministic) } func (m *Response) XXX_Merge(src proto.Message) { xxx_messageInfo_Response.Merge(m, src) } func (m *Response) XXX_Size() int { return xxx_messageInfo_Response.Size(m) } func (m *Response) XXX_DiscardUnknown() { xxx_messageInfo_Response.DiscardUnknown(m) } var xxx_messageInfo_Response proto.InternalMessageInfo func (m *Response) GetVessel() *Vessel { if m != nil { return m.Vessel } return nil } func (m *Response) GetVessels() []*Vessel { if m != nil { return m.Vessels } return nil } func (m *Response) GetCreated() bool { if m != nil { return m.Created } return false } func init() { proto.RegisterType((*Vessel)(nil), "vessel.Vessel") proto.RegisterType((*Specification)(nil), "vessel.Specification") proto.RegisterType((*Response)(nil), "vessel.Response") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ client.Option var _ server.Option // Client API for VesselService service type VesselServiceClient interface { FindAvailable(ctx context.Context, in *Specification, opts ...client.CallOption) (*Response, error) Create(ctx context.Context, in *Vessel, opts ...client.CallOption) (*Response, error) } type vesselServiceClient struct { c client.Client serviceName string } func NewVesselServiceClient(serviceName string, c client.Client) VesselServiceClient { if c == nil { c = client.NewClient() } if len(serviceName) == 0 { serviceName = "vessel" } return &vesselServiceClient{ c: c, serviceName: serviceName, } } func (c *vesselServiceClient) FindAvailable(ctx context.Context, in *Specification, opts ...client.CallOption) (*Response, error) { req := c.c.NewRequest(c.serviceName, "VesselService.FindAvailable", in) out := new(Response) err := c.c.Call(ctx, req, out, opts...) if err != nil { return nil, err } return out, nil } func (c *vesselServiceClient) Create(ctx context.Context, in *Vessel, opts ...client.CallOption) (*Response, error) { req := c.c.NewRequest(c.serviceName, "VesselService.Create", in) out := new(Response) err := c.c.Call(ctx, req, out, opts...) 
if err != nil { return nil, err } return out, nil } // Server API for VesselService service type VesselServiceHandler interface { FindAvailable(context.Context, *Specification, *Response) error Create(context.Context, *Vessel, *Response) error } func RegisterVesselServiceHandler(s server.Server, hdlr VesselServiceHandler, opts ...server.HandlerOption) { s.Handle(s.NewHandler(&VesselService{hdlr}, opts...)) } type VesselService struct { VesselServiceHandler } func (h *VesselService) FindAvailable(ctx context.Context, in *Specification, out *Response) error { return h.VesselServiceHandler.FindAvailable(ctx, in, out) } func (h *VesselService) Create(ctx context.Context, in *Vessel, out *Response) error { return h.VesselServiceHandler.Create(ctx, in, out) } func init() { proto.RegisterFile("proto/vessel/vessel.proto", fileDescriptor_04ef66862bb50716) } var fileDescriptor_04ef66862bb50716 = []byte{ // 300 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x4d, 0x4b, 0xc3, 0x40, 0x10, 0x75, 0xd3, 0x36, 0x4d, 0x47, 0x5a, 0x64, 0x40, 0xd8, 0x16, 0x85, 0x90, 0x83, 0xe4, 0x20, 0x15, 0xea, 0xc5, 0xab, 0x08, 0x82, 0x1e, 0xb7, 0xa0, 0xc7, 0xb2, 0xdd, 0x1d, 0x75, 0xa1, 0x4d, 0x42, 0x12, 0xd2, 0xfa, 0x6f, 0xfc, 0xa9, 0xc2, 0xe4, 0x43, 0x5a, 0xc5, 0xd3, 0xce, 0x7b, 0xf3, 0xf6, 0xf1, 0xf6, 0x2d, 0x4c, 0xb3, 0x3c, 0x2d, 0xd3, 0x9b, 0x8a, 0x8a, 0x82, 0x36, 0xcd, 0x31, 0x67, 0x0e, 0xfd, 0x1a, 0x45, 0x5f, 0x02, 0xfc, 0x17, 0x1e, 0x71, 0x02, 0x9e, 0xb3, 0x52, 0x84, 0x22, 0x1e, 0x29, 0xcf, 0x59, 0x9c, 0x41, 0x60, 0x74, 0xa6, 0x8d, 0x2b, 0x3f, 0xa5, 0x17, 0x8a, 0x78, 0xa0, 0x3a, 0x8c, 0x97, 0x00, 0x5b, 0xbd, 0x5f, 0xed, 0xc8, 0xbd, 0x7f, 0x94, 0xb2, 0xc7, 0xdb, 0xd1, 0x56, 0xef, 0x5f, 0x99, 0x40, 0x84, 0x7e, 0xa2, 0xb7, 0x24, 0xfb, 0x6c, 0xc6, 0x33, 0x5e, 0xc0, 0x48, 0x57, 0xda, 0x6d, 0xf4, 0x7a, 0x43, 0x72, 0x10, 0x8a, 0x38, 0x50, 0x3f, 0x04, 0x4e, 0x21, 0x48, 0x77, 0x09, 0xe5, 0x2b, 0x67, 0xa5, 0xcf, 0xb7, 0x86, 0x8c, 0x9f, 0x6c, 0xf4, 0x0c, 0xe3, 0x65, 0x46, 0xc6, 0xbd, 0x39, 0xa3, 0x4b, 0x97, 0x26, 0x07, 0xc1, 0xc4, 0xbf, 0xc1, 0xbc, 0xa3, 0x60, 0x51, 0x05, 0x81, 0xa2, 0x22, 0x4b, 0x93, 0x82, 0xf0, 0x0a, 0x9a, 0x12, 0xd8, 0xe4, 0x74, 0x31, 0x99, 0x37, 0x0d, 0xd5, 0x7d, 0xa8, 0x66, 0x8b, 0x31, 0x0c, 0xeb, 0xa9, 0x90, 0x5e, 0xd8, 0xfb, 0x43, 0xd8, 0xae, 0x51, 0xc2, 0xd0, 0xe4, 0xa4, 0x4b, 0xb2, 0x5c, 0x49, 0xa0, 0x5a, 0xb8, 0xd8, 0xc1, 0xb8, 0x16, 0x2f, 0x29, 0xaf, 0x9c, 0x21, 0xbc, 0x83, 0xf1, 0xa3, 0x4b, 0xec, 0x7d, 0x57, 0xc0, 0x79, 0x6b, 0x7a, 0xf0, 0xd6, 0xd9, 0x59, 0x4b, 0x77, 0xb1, 0xaf, 0xc1, 0x7f, 0x60, 0x57, 0x3c, 0xca, 0xf1, 0x5b, 0x1b, 0x9d, 0xac, 0x7d, 0xfe, 0xee, 0xdb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x23, 0x2f, 0x31, 0xf4, 0x0b, 0x02, 0x00, 0x00, }
{ xxx_messageInfo_Specification.Merge(m, src) }
identifier_body
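The generated client above wraps go-micro's request/response plumbing, so callers never build requests by hand. A minimal sketch of consuming it from another service follows; the import path for the generated package and the "vessel.cli" caller name are assumptions for illustration, not part of the generated file.

package main

import (
	"fmt"
	"log"

	micro "github.com/micro/go-micro"
	"golang.org/x/net/context"

	vesselProto "path/to/proto/vessel" // assumed import path for the generated package
)

func main() {
	srv := micro.NewService(micro.Name("vessel.cli")) // hypothetical caller service
	srv.Init()

	// "vessel" matches the default service name used by NewVesselServiceClient.
	vesselClient := vesselProto.NewVesselServiceClient("vessel", srv.Client())

	resp, err := vesselClient.FindAvailable(context.Background(), &vesselProto.Specification{
		Capacity:  1,
		MaxWeight: 500,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("found vessel:", resp.GetVessel().GetName())
}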
vessel.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: proto/vessel/vessel.proto /* Package vessel is a generated protocol buffer package. It is generated from these files: proto/vessel/vessel.proto It has these top-level messages: Vessel Specification Response */ package vessel import ( fmt "fmt" proto "github.com/golang/protobuf/proto" math "math" ) import ( client "github.com/micro/go-micro/client" server "github.com/micro/go-micro/server" context "golang.org/x/net/context" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Vessel struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` Capacity int32 `protobuf:"varint,2,opt,name=capacity,proto3" json:"capacity,omitempty"` MaxWeight int32 `protobuf:"varint,3,opt,name=max_weight,json=maxWeight,proto3" json:"max_weight,omitempty"` Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` Available bool `protobuf:"varint,5,opt,name=available,proto3" json:"available,omitempty"` OwnerId string `protobuf:"bytes,6,opt,name=owner_id,json=ownerId,proto3" json:"owner_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Vessel) Reset() { *m = Vessel{} } func (m *Vessel) String() string { return proto.CompactTextString(m) } func (*Vessel) ProtoMessage() {} func (*Vessel) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{0} } func (m *Vessel) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Vessel.Unmarshal(m, b) } func (m *Vessel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Vessel.Marshal(b, m, deterministic) } func (m *Vessel) XXX_Merge(src proto.Message) { xxx_messageInfo_Vessel.Merge(m, src) } func (m *Vessel) XXX_Size() int { return xxx_messageInfo_Vessel.Size(m) } func (m *Vessel) XXX_DiscardUnknown() { xxx_messageInfo_Vessel.DiscardUnknown(m) } var xxx_messageInfo_Vessel proto.InternalMessageInfo func (m *Vessel) GetId() string { if m != nil { return m.Id } return "" } func (m *Vessel) GetCapacity() int32 { if m != nil { return m.Capacity } return 0 } func (m *Vessel) GetMaxWeight() int32 { if m != nil { return m.MaxWeight } return 0 } func (m *Vessel) GetName() string { if m != nil { return m.Name } return "" } func (m *Vessel) GetAvailable() bool { if m != nil { return m.Available } return false } func (m *Vessel) GetOwnerId() string { if m != nil { return m.OwnerId } return "" } type Specification struct { Capacity int32 `protobuf:"varint,1,opt,name=capacity,proto3" json:"capacity,omitempty"` MaxWeight int32 `protobuf:"varint,2,opt,name=max_weight,json=maxWeight,proto3" json:"max_weight,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Specification) Reset() { *m = Specification{} } func (m *Specification) String() string { return proto.CompactTextString(m) } func (*Specification) ProtoMessage() {} func (*Specification) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{1} } func (m *Specification) XXX_Unmarshal(b []byte) 
error { return xxx_messageInfo_Specification.Unmarshal(m, b) } func (m *Specification) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Specification.Marshal(b, m, deterministic) } func (m *Specification) XXX_Merge(src proto.Message) { xxx_messageInfo_Specification.Merge(m, src) } func (m *Specification) XXX_Size() int { return xxx_messageInfo_Specification.Size(m) } func (m *Specification) XXX_DiscardUnknown() { xxx_messageInfo_Specification.DiscardUnknown(m) } var xxx_messageInfo_Specification proto.InternalMessageInfo func (m *Specification) GetCapacity() int32 { if m != nil { return m.Capacity } return 0 } func (m *Specification) GetMaxWeight() int32 { if m != nil { return m.MaxWeight } return 0 } type Response struct { Vessel *Vessel `protobuf:"bytes,1,opt,name=vessel,proto3" json:"vessel,omitempty"` Vessels []*Vessel `protobuf:"bytes,2,rep,name=vessels,proto3" json:"vessels,omitempty"` Created bool `protobuf:"varint,3,opt,name=created,proto3" json:"created,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Response) Reset() { *m = Response{} } func (m *Response) String() string { return proto.CompactTextString(m) } func (*Response) ProtoMessage() {} func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{2} } func (m *Response) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Response.Unmarshal(m, b) } func (m *Response) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Response.Marshal(b, m, deterministic) } func (m *Response) XXX_Merge(src proto.Message) { xxx_messageInfo_Response.Merge(m, src) } func (m *Response) XXX_Size() int { return xxx_messageInfo_Response.Size(m) } func (m *Response) XXX_DiscardUnknown() { xxx_messageInfo_Response.DiscardUnknown(m) } var xxx_messageInfo_Response proto.InternalMessageInfo func (m *Response) GetVessel() *Vessel { if m != nil { return m.Vessel } return nil } func (m *Response) GetVessels() []*Vessel { if m != nil { return m.Vessels } return nil } func (m *Response) GetCreated() bool { if m != nil { return m.Created } return false } func init() { proto.RegisterType((*Vessel)(nil), "vessel.Vessel") proto.RegisterType((*Specification)(nil), "vessel.Specification") proto.RegisterType((*Response)(nil), "vessel.Response") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ client.Option var _ server.Option // Client API for VesselService service type VesselServiceClient interface { FindAvailable(ctx context.Context, in *Specification, opts ...client.CallOption) (*Response, error) Create(ctx context.Context, in *Vessel, opts ...client.CallOption) (*Response, error) } type vesselServiceClient struct { c client.Client serviceName string } func NewVesselServiceClient(serviceName string, c client.Client) VesselServiceClient { if c == nil { c = client.NewClient() } if len(serviceName) == 0 { serviceName = "vessel" } return &vesselServiceClient{ c: c, serviceName: serviceName, } } func (c *vesselServiceClient) FindAvailable(ctx context.Context, in *Specification, opts ...client.CallOption) (*Response, error) { req := c.c.NewRequest(c.serviceName, "VesselService.FindAvailable", in) out := new(Response) err := c.c.Call(ctx, req, out, opts...) if err != nil
return out, nil } func (c *vesselServiceClient) Create(ctx context.Context, in *Vessel, opts ...client.CallOption) (*Response, error) { req := c.c.NewRequest(c.serviceName, "VesselService.Create", in) out := new(Response) err := c.c.Call(ctx, req, out, opts...) if err != nil { return nil, err } return out, nil } // Server API for VesselService service type VesselServiceHandler interface { FindAvailable(context.Context, *Specification, *Response) error Create(context.Context, *Vessel, *Response) error } func RegisterVesselServiceHandler(s server.Server, hdlr VesselServiceHandler, opts ...server.HandlerOption) { s.Handle(s.NewHandler(&VesselService{hdlr}, opts...)) } type VesselService struct { VesselServiceHandler } func (h *VesselService) FindAvailable(ctx context.Context, in *Specification, out *Response) error { return h.VesselServiceHandler.FindAvailable(ctx, in, out) } func (h *VesselService) Create(ctx context.Context, in *Vessel, out *Response) error { return h.VesselServiceHandler.Create(ctx, in, out) } func init() { proto.RegisterFile("proto/vessel/vessel.proto", fileDescriptor_04ef66862bb50716) } var fileDescriptor_04ef66862bb50716 = []byte{ // 300 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x4d, 0x4b, 0xc3, 0x40, 0x10, 0x75, 0xd3, 0x36, 0x4d, 0x47, 0x5a, 0x64, 0x40, 0xd8, 0x16, 0x85, 0x90, 0x83, 0xe4, 0x20, 0x15, 0xea, 0xc5, 0xab, 0x08, 0x82, 0x1e, 0xb7, 0xa0, 0xc7, 0xb2, 0xdd, 0x1d, 0x75, 0xa1, 0x4d, 0x42, 0x12, 0xd2, 0xfa, 0x6f, 0xfc, 0xa9, 0xc2, 0xe4, 0x43, 0x5a, 0xc5, 0xd3, 0xce, 0x7b, 0xf3, 0xf6, 0xf1, 0xf6, 0x2d, 0x4c, 0xb3, 0x3c, 0x2d, 0xd3, 0x9b, 0x8a, 0x8a, 0x82, 0x36, 0xcd, 0x31, 0x67, 0x0e, 0xfd, 0x1a, 0x45, 0x5f, 0x02, 0xfc, 0x17, 0x1e, 0x71, 0x02, 0x9e, 0xb3, 0x52, 0x84, 0x22, 0x1e, 0x29, 0xcf, 0x59, 0x9c, 0x41, 0x60, 0x74, 0xa6, 0x8d, 0x2b, 0x3f, 0xa5, 0x17, 0x8a, 0x78, 0xa0, 0x3a, 0x8c, 0x97, 0x00, 0x5b, 0xbd, 0x5f, 0xed, 0xc8, 0xbd, 0x7f, 0x94, 0xb2, 0xc7, 0xdb, 0xd1, 0x56, 0xef, 0x5f, 0x99, 0x40, 0x84, 0x7e, 0xa2, 0xb7, 0x24, 0xfb, 0x6c, 0xc6, 0x33, 0x5e, 0xc0, 0x48, 0x57, 0xda, 0x6d, 0xf4, 0x7a, 0x43, 0x72, 0x10, 0x8a, 0x38, 0x50, 0x3f, 0x04, 0x4e, 0x21, 0x48, 0x77, 0x09, 0xe5, 0x2b, 0x67, 0xa5, 0xcf, 0xb7, 0x86, 0x8c, 0x9f, 0x6c, 0xf4, 0x0c, 0xe3, 0x65, 0x46, 0xc6, 0xbd, 0x39, 0xa3, 0x4b, 0x97, 0x26, 0x07, 0xc1, 0xc4, 0xbf, 0xc1, 0xbc, 0xa3, 0x60, 0x51, 0x05, 0x81, 0xa2, 0x22, 0x4b, 0x93, 0x82, 0xf0, 0x0a, 0x9a, 0x12, 0xd8, 0xe4, 0x74, 0x31, 0x99, 0x37, 0x0d, 0xd5, 0x7d, 0xa8, 0x66, 0x8b, 0x31, 0x0c, 0xeb, 0xa9, 0x90, 0x5e, 0xd8, 0xfb, 0x43, 0xd8, 0xae, 0x51, 0xc2, 0xd0, 0xe4, 0xa4, 0x4b, 0xb2, 0x5c, 0x49, 0xa0, 0x5a, 0xb8, 0xd8, 0xc1, 0xb8, 0x16, 0x2f, 0x29, 0xaf, 0x9c, 0x21, 0xbc, 0x83, 0xf1, 0xa3, 0x4b, 0xec, 0x7d, 0x57, 0xc0, 0x79, 0x6b, 0x7a, 0xf0, 0xd6, 0xd9, 0x59, 0x4b, 0x77, 0xb1, 0xaf, 0xc1, 0x7f, 0x60, 0x57, 0x3c, 0xca, 0xf1, 0x5b, 0x1b, 0x9d, 0xac, 0x7d, 0xfe, 0xee, 0xdb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x23, 0x2f, 0x31, 0xf4, 0x0b, 0x02, 0x00, 0x00, }
{ return nil, err }
conditional_block
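Because Vessel implements proto.Message, it round-trips through proto.Marshal and proto.Unmarshal like any other protobuf type; a small self-contained sketch (the generated package's import path is again an assumption):

package main

import (
	"fmt"
	"log"

	proto "github.com/golang/protobuf/proto"

	vesselProto "path/to/proto/vessel" // assumed import path
)

func main() {
	v := &vesselProto.Vessel{Id: "vessel001", Capacity: 500, MaxWeight: 200000}

	// Encode to the protobuf wire format.
	b, err := proto.Marshal(v)
	if err != nil {
		log.Fatal(err)
	}

	// Decode into a fresh message and read it back through the getters.
	out := &vesselProto.Vessel{}
	if err := proto.Unmarshal(b, out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.GetId(), out.GetCapacity(), out.GetMaxWeight())
}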
vessel.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: proto/vessel/vessel.proto /* Package vessel is a generated protocol buffer package. It is generated from these files: proto/vessel/vessel.proto It has these top-level messages: Vessel Specification Response */ package vessel import ( fmt "fmt" proto "github.com/golang/protobuf/proto" math "math" ) import ( client "github.com/micro/go-micro/client" server "github.com/micro/go-micro/server" context "golang.org/x/net/context" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Vessel struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` Capacity int32 `protobuf:"varint,2,opt,name=capacity,proto3" json:"capacity,omitempty"` MaxWeight int32 `protobuf:"varint,3,opt,name=max_weight,json=maxWeight,proto3" json:"max_weight,omitempty"` Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` Available bool `protobuf:"varint,5,opt,name=available,proto3" json:"available,omitempty"` OwnerId string `protobuf:"bytes,6,opt,name=owner_id,json=ownerId,proto3" json:"owner_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Vessel) Reset() { *m = Vessel{} } func (m *Vessel) String() string { return proto.CompactTextString(m) } func (*Vessel) ProtoMessage() {} func (*Vessel) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{0} } func (m *Vessel) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Vessel.Unmarshal(m, b) } func (m *Vessel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Vessel.Marshal(b, m, deterministic) } func (m *Vessel) XXX_Merge(src proto.Message) { xxx_messageInfo_Vessel.Merge(m, src) } func (m *Vessel) XXX_Size() int { return xxx_messageInfo_Vessel.Size(m) } func (m *Vessel) XXX_DiscardUnknown() { xxx_messageInfo_Vessel.DiscardUnknown(m) } var xxx_messageInfo_Vessel proto.InternalMessageInfo func (m *Vessel) GetId() string { if m != nil { return m.Id } return "" } func (m *Vessel) GetCapacity() int32 { if m != nil { return m.Capacity } return 0 } func (m *Vessel) GetMaxWeight() int32 { if m != nil { return m.MaxWeight } return 0 } func (m *Vessel) GetName() string { if m != nil { return m.Name } return "" } func (m *Vessel) GetAvailable() bool { if m != nil { return m.Available } return false } func (m *Vessel) GetOwnerId() string { if m != nil { return m.OwnerId } return "" } type Specification struct { Capacity int32 `protobuf:"varint,1,opt,name=capacity,proto3" json:"capacity,omitempty"` MaxWeight int32 `protobuf:"varint,2,opt,name=max_weight,json=maxWeight,proto3" json:"max_weight,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Specification) Reset() { *m = Specification{} } func (m *Specification) String() string { return proto.CompactTextString(m) } func (*Specification) ProtoMessage() {} func (*Specification) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{1} } func (m *Specification) XXX_Unmarshal(b []byte) 
error { return xxx_messageInfo_Specification.Unmarshal(m, b) } func (m *Specification) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Specification.Marshal(b, m, deterministic) } func (m *Specification) XXX_Merge(src proto.Message) { xxx_messageInfo_Specification.Merge(m, src) } func (m *Specification) XXX_Size() int { return xxx_messageInfo_Specification.Size(m) } func (m *Specification) XXX_DiscardUnknown() { xxx_messageInfo_Specification.DiscardUnknown(m) } var xxx_messageInfo_Specification proto.InternalMessageInfo func (m *Specification) GetCapacity() int32 { if m != nil { return m.Capacity } return 0 } func (m *Specification) GetMaxWeight() int32 { if m != nil { return m.MaxWeight } return 0 } type Response struct { Vessel *Vessel `protobuf:"bytes,1,opt,name=vessel,proto3" json:"vessel,omitempty"` Vessels []*Vessel `protobuf:"bytes,2,rep,name=vessels,proto3" json:"vessels,omitempty"` Created bool `protobuf:"varint,3,opt,name=created,proto3" json:"created,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Response) Reset() { *m = Response{} } func (m *Response) String() string { return proto.CompactTextString(m) } func (*Response) ProtoMessage() {} func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor_04ef66862bb50716, []int{2} } func (m *Response) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Response.Unmarshal(m, b) } func (m *Response) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Response.Marshal(b, m, deterministic) } func (m *Response)
(src proto.Message) { xxx_messageInfo_Response.Merge(m, src) } func (m *Response) XXX_Size() int { return xxx_messageInfo_Response.Size(m) } func (m *Response) XXX_DiscardUnknown() { xxx_messageInfo_Response.DiscardUnknown(m) } var xxx_messageInfo_Response proto.InternalMessageInfo func (m *Response) GetVessel() *Vessel { if m != nil { return m.Vessel } return nil } func (m *Response) GetVessels() []*Vessel { if m != nil { return m.Vessels } return nil } func (m *Response) GetCreated() bool { if m != nil { return m.Created } return false } func init() { proto.RegisterType((*Vessel)(nil), "vessel.Vessel") proto.RegisterType((*Specification)(nil), "vessel.Specification") proto.RegisterType((*Response)(nil), "vessel.Response") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ client.Option var _ server.Option // Client API for VesselService service type VesselServiceClient interface { FindAvailable(ctx context.Context, in *Specification, opts ...client.CallOption) (*Response, error) Create(ctx context.Context, in *Vessel, opts ...client.CallOption) (*Response, error) } type vesselServiceClient struct { c client.Client serviceName string } func NewVesselServiceClient(serviceName string, c client.Client) VesselServiceClient { if c == nil { c = client.NewClient() } if len(serviceName) == 0 { serviceName = "vessel" } return &vesselServiceClient{ c: c, serviceName: serviceName, } } func (c *vesselServiceClient) FindAvailable(ctx context.Context, in *Specification, opts ...client.CallOption) (*Response, error) { req := c.c.NewRequest(c.serviceName, "VesselService.FindAvailable", in) out := new(Response) err := c.c.Call(ctx, req, out, opts...) if err != nil { return nil, err } return out, nil } func (c *vesselServiceClient) Create(ctx context.Context, in *Vessel, opts ...client.CallOption) (*Response, error) { req := c.c.NewRequest(c.serviceName, "VesselService.Create", in) out := new(Response) err := c.c.Call(ctx, req, out, opts...) 
if err != nil { return nil, err } return out, nil } // Server API for VesselService service type VesselServiceHandler interface { FindAvailable(context.Context, *Specification, *Response) error Create(context.Context, *Vessel, *Response) error } func RegisterVesselServiceHandler(s server.Server, hdlr VesselServiceHandler, opts ...server.HandlerOption) { s.Handle(s.NewHandler(&VesselService{hdlr}, opts...)) } type VesselService struct { VesselServiceHandler } func (h *VesselService) FindAvailable(ctx context.Context, in *Specification, out *Response) error { return h.VesselServiceHandler.FindAvailable(ctx, in, out) } func (h *VesselService) Create(ctx context.Context, in *Vessel, out *Response) error { return h.VesselServiceHandler.Create(ctx, in, out) } func init() { proto.RegisterFile("proto/vessel/vessel.proto", fileDescriptor_04ef66862bb50716) } var fileDescriptor_04ef66862bb50716 = []byte{ // 300 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x4d, 0x4b, 0xc3, 0x40, 0x10, 0x75, 0xd3, 0x36, 0x4d, 0x47, 0x5a, 0x64, 0x40, 0xd8, 0x16, 0x85, 0x90, 0x83, 0xe4, 0x20, 0x15, 0xea, 0xc5, 0xab, 0x08, 0x82, 0x1e, 0xb7, 0xa0, 0xc7, 0xb2, 0xdd, 0x1d, 0x75, 0xa1, 0x4d, 0x42, 0x12, 0xd2, 0xfa, 0x6f, 0xfc, 0xa9, 0xc2, 0xe4, 0x43, 0x5a, 0xc5, 0xd3, 0xce, 0x7b, 0xf3, 0xf6, 0xf1, 0xf6, 0x2d, 0x4c, 0xb3, 0x3c, 0x2d, 0xd3, 0x9b, 0x8a, 0x8a, 0x82, 0x36, 0xcd, 0x31, 0x67, 0x0e, 0xfd, 0x1a, 0x45, 0x5f, 0x02, 0xfc, 0x17, 0x1e, 0x71, 0x02, 0x9e, 0xb3, 0x52, 0x84, 0x22, 0x1e, 0x29, 0xcf, 0x59, 0x9c, 0x41, 0x60, 0x74, 0xa6, 0x8d, 0x2b, 0x3f, 0xa5, 0x17, 0x8a, 0x78, 0xa0, 0x3a, 0x8c, 0x97, 0x00, 0x5b, 0xbd, 0x5f, 0xed, 0xc8, 0xbd, 0x7f, 0x94, 0xb2, 0xc7, 0xdb, 0xd1, 0x56, 0xef, 0x5f, 0x99, 0x40, 0x84, 0x7e, 0xa2, 0xb7, 0x24, 0xfb, 0x6c, 0xc6, 0x33, 0x5e, 0xc0, 0x48, 0x57, 0xda, 0x6d, 0xf4, 0x7a, 0x43, 0x72, 0x10, 0x8a, 0x38, 0x50, 0x3f, 0x04, 0x4e, 0x21, 0x48, 0x77, 0x09, 0xe5, 0x2b, 0x67, 0xa5, 0xcf, 0xb7, 0x86, 0x8c, 0x9f, 0x6c, 0xf4, 0x0c, 0xe3, 0x65, 0x46, 0xc6, 0xbd, 0x39, 0xa3, 0x4b, 0x97, 0x26, 0x07, 0xc1, 0xc4, 0xbf, 0xc1, 0xbc, 0xa3, 0x60, 0x51, 0x05, 0x81, 0xa2, 0x22, 0x4b, 0x93, 0x82, 0xf0, 0x0a, 0x9a, 0x12, 0xd8, 0xe4, 0x74, 0x31, 0x99, 0x37, 0x0d, 0xd5, 0x7d, 0xa8, 0x66, 0x8b, 0x31, 0x0c, 0xeb, 0xa9, 0x90, 0x5e, 0xd8, 0xfb, 0x43, 0xd8, 0xae, 0x51, 0xc2, 0xd0, 0xe4, 0xa4, 0x4b, 0xb2, 0x5c, 0x49, 0xa0, 0x5a, 0xb8, 0xd8, 0xc1, 0xb8, 0x16, 0x2f, 0x29, 0xaf, 0x9c, 0x21, 0xbc, 0x83, 0xf1, 0xa3, 0x4b, 0xec, 0x7d, 0x57, 0xc0, 0x79, 0x6b, 0x7a, 0xf0, 0xd6, 0xd9, 0x59, 0x4b, 0x77, 0xb1, 0xaf, 0xc1, 0x7f, 0x60, 0x57, 0x3c, 0xca, 0xf1, 0x5b, 0x1b, 0x9d, 0xac, 0x7d, 0xfe, 0xee, 0xdb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x23, 0x2f, 0x31, 0xf4, 0x0b, 0x02, 0x00, 0x00, }
XXX_Merge
identifier_name
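On the server side, the generated RegisterVesselServiceHandler accepts any type satisfying the VesselServiceHandler interface. A hedged sketch of such a handler and its registration follows; the in-memory slice and all names beyond the generated identifiers are illustrative assumptions.

package main

import (
	micro "github.com/micro/go-micro"
	"golang.org/x/net/context"

	vesselProto "path/to/proto/vessel" // assumed import path
)

// service satisfies the generated VesselServiceHandler interface.
type service struct {
	vessels []*vesselProto.Vessel
}

func (s *service) FindAvailable(ctx context.Context, spec *vesselProto.Specification, res *vesselProto.Response) error {
	// Return the first vessel that can hold the requested capacity and weight.
	for _, v := range s.vessels {
		if spec.GetCapacity() <= v.GetCapacity() && spec.GetMaxWeight() <= v.GetMaxWeight() {
			res.Vessel = v
			return nil
		}
	}
	return nil // no match; res.Vessel stays nil
}

func (s *service) Create(ctx context.Context, req *vesselProto.Vessel, res *vesselProto.Response) error {
	s.vessels = append(s.vessels, req)
	res.Vessel = req
	res.Created = true
	return nil
}

func main() {
	srv := micro.NewService(micro.Name("vessel")) // matches the client-side default name
	srv.Init()
	vesselProto.RegisterVesselServiceHandler(srv.Server(), &service{})
	if err := srv.Run(); err != nil {
		panic(err)
	}
}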
download.go
// Copyright 2020 The Moov Authors // Use of this source code is governed by an Apache License // license that can be found in the LICENSE file. package main import ( "database/sql" "encoding/json" "errors" "fmt" "io/ioutil" "net/http" "time" moovhttp "github.com/moov-io/base/http" "github.com/moov-io/watchman/pkg/csl" "github.com/moov-io/watchman/pkg/dpl" "github.com/moov-io/watchman/pkg/ofac" "github.com/go-kit/kit/log" "github.com/gorilla/mux" "github.com/prometheus/client_golang/prometheus" ) var ( lastDataRefreshSuccess = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_success", Help: "Unix timestamp of when data was last refreshed successfully", }, nil) lastDataRefreshFailure = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_failure", Help: "Unix timestamp of the most recent failure to refresh data", }, []string{"source"}) lastDataRefreshCount = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_count", Help: "Count of records for a given sanction or entity list", }, []string{"source"}) ) func init() { prometheus.MustRegister(lastDataRefreshSuccess) prometheus.MustRegister(lastDataRefreshCount) prometheus.MustRegister(lastDataRefreshFailure) } // Download holds counts for each type of list data parsed from files and a // timestamp of when the download happened. type Download struct { Timestamp time.Time `json:"timestamp"` // US Office of Foreign Assets Control (OFAC) SDNs int `json:"SDNs"` Alts int `json:"altNames"` Addresses int `json:"addresses"` SectoralSanctions int `json:"sectoralSanctions"` // US Bureau of Industry and Security (BIS) DeniedPersons int `json:"deniedPersons"` BISEntities int `json:"bisEntities"` } type downloadStats struct { // US Office of Foreign Assets Control (OFAC) SDNs int `json:"SDNs"` Alts int `json:"altNames"` Addresses int `json:"addresses"` SectoralSanctions int `json:"sectoralSanctions"` // US Bureau of Industry and Security (BIS) DeniedPersons int `json:"deniedPersons"` BISEntities int `json:"bisEntities"` RefreshedAt time.Time `json:"timestamp"` } // periodicDataRefresh will forever block for interval's duration and then download and reparse the data. // Download stats are recorded as part of a successful re-download and parse. 
func (s *searcher) periodicDataRefresh(interval time.Duration, downloadRepo downloadRepository, updates chan *downloadStats) { if interval == 0*time.Second { s.logger.Log("download", fmt.Sprintf("not scheduling periodic refreshing duration=%v", interval)) return } for { time.Sleep(interval) stats, err := s.refreshData("") if err != nil { if s.logger != nil { s.logger.Log("main", fmt.Sprintf("ERROR: refreshing data: %v", err)) } } else { downloadRepo.recordStats(stats) if s.logger != nil { s.logger.Log( "main", fmt.Sprintf("data refreshed %v ago", time.Since(stats.RefreshedAt)), "SDNs", stats.SDNs, "AltNames", stats.Alts, "Addresses", stats.Addresses, "SSI", stats.SectoralSanctions, "DPL", stats.DeniedPersons, "BISEntities", stats.BISEntities, ) } updates <- stats // send stats for re-search and watch notifications } } } func ofacRecords(logger log.Logger, initialDir string) (*ofac.Results, error) { files, err := ofac.Download(logger, initialDir) if err != nil { return nil, fmt.Errorf("download: %v", err) } if len(files) == 0 { return nil, errors.New("no OFAC Results") } var res *ofac.Results for i := range files { if i == 0 { rr, err := ofac.Read(files[i]) if err != nil { return nil, fmt.Errorf("read: %v", err) } if rr != nil { res = rr } } else { rr, err := ofac.Read(files[i]) if err != nil { return nil, fmt.Errorf("read and replace: %v", err) } if rr != nil { res.Addresses = append(res.Addresses, rr.Addresses...) res.AlternateIdentities = append(res.AlternateIdentities, rr.AlternateIdentities...) res.SDNs = append(res.SDNs, rr.SDNs...) res.SDNComments = append(res.SDNComments, rr.SDNComments...) } } } return res, err } func dplRecords(logger log.Logger, initialDir string) ([]*dpl.DPL, error) { file, err := dpl.Download(logger, initialDir) if err != nil { return nil, err } return dpl.Read(file) } func cslRecords(logger log.Logger, initialDir string) (*csl.CSL, error) { file, err := csl.Download(logger, initialDir) if err != nil { logger.Log("download", "WARN: skipping CSL download", "description", err) return &csl.CSL{}, nil } cslRecords, err := csl.Read(file) if err != nil { return nil, err } return cslRecords, err } // refreshData reaches out to the various websites to download the latest // files, runs each list's parser, and index data for searches. func (s *searcher) refreshData(initialDir string) (*downloadStats, error) { if s.logger != nil { s.logger.Log("download", "Starting refresh of data") if initialDir != "" { s.logger.Log("download", fmt.Sprintf("reading files from %s", initialDir)) } } lastDataRefreshFailure.WithLabelValues("SDNs").Set(float64(time.Now().Unix())) results, err := ofacRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("SDNs").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("OFAC records: %v", err) }
alts := precomputeAlts(results.AlternateIdentities) deniedPersons, err := dplRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("DPs").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("DPL records: %v", err) } dps := precomputeDPs(deniedPersons, s.pipe) consolidatedLists, err := cslRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("CSL").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("CSL records: %v", err) } ssis := precomputeSSIs(consolidatedLists.SSIs, s.pipe) els := precomputeBISEntities(consolidatedLists.ELs, s.pipe) stats := &downloadStats{ // OFAC SDNs: len(sdns), Alts: len(alts), Addresses: len(adds), SectoralSanctions: len(ssis), // BIS BISEntities: len(els), DeniedPersons: len(dps), } stats.RefreshedAt = lastRefresh(initialDir) // record prometheus metrics lastDataRefreshCount.WithLabelValues("SDNs").Set(float64(len(sdns))) lastDataRefreshCount.WithLabelValues("SSIs").Set(float64(len(ssis))) lastDataRefreshCount.WithLabelValues("BISEntities").Set(float64(len(els))) lastDataRefreshCount.WithLabelValues("DPs").Set(float64(len(dps))) // Set new records after precomputation (to minimize lock contention) s.Lock() // OFAC s.SDNs = sdns s.Addresses = adds s.Alts = alts s.SSIs = ssis // BIS s.DPs = dps s.BISEntities = els // metadata s.lastRefreshedAt = stats.RefreshedAt s.Unlock() if s.logger != nil { s.logger.Log("download", "Finished refresh of data") } // record successful data refresh lastDataRefreshSuccess.WithLabelValues().Set(float64(time.Now().Unix())) return stats, nil } // lastRefresh returns a time.Time for the oldest file in dir or the current time if empty. func lastRefresh(dir string) time.Time { if dir == "" { return time.Now() } infos, err := ioutil.ReadDir(dir) if len(infos) == 0 || err != nil { return time.Time{} // zero time because there's no initial data } oldest := infos[0].ModTime() for i := range infos[1:] { if t := infos[i].ModTime(); t.Before(oldest) { oldest = t } } return oldest } func addDownloadRoutes(logger log.Logger, r *mux.Router, repo downloadRepository) { r.Methods("GET").Path("/downloads").HandlerFunc(getLatestDownloads(logger, repo)) } func getLatestDownloads(logger log.Logger, repo downloadRepository) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { w = wrapResponseWriter(logger, w, r) limit := extractSearchLimit(r) downloads, err := repo.latestDownloads(limit) if err != nil { moovhttp.Problem(w, err) return } logger.Log("download", "get latest downloads", "requestID", moovhttp.GetRequestID(r)) w.Header().Set("Content-Type", "application/json; charset=utf-8") w.WriteHeader(http.StatusOK) if err := json.NewEncoder(w).Encode(downloads); err != nil { moovhttp.Problem(w, err) return } } } type downloadRepository interface { latestDownloads(limit int) ([]Download, error) recordStats(stats *downloadStats) error } type sqliteDownloadRepository struct { db *sql.DB logger log.Logger } func (r *sqliteDownloadRepository) close() error { return r.db.Close() } func (r *sqliteDownloadRepository) recordStats(stats *downloadStats) error { if stats == nil { return errors.New("recordStats: nil downloadStats") } query := `insert into download_stats (downloaded_at, sdns, alt_names, addresses, sectoral_sanctions, denied_persons, bis_entities) values (?, ?, ?, ?, ?, ?, ?);` stmt, err := r.db.Prepare(query) if err != nil { return err } defer stmt.Close() _, err = stmt.Exec(stats.RefreshedAt, stats.SDNs, stats.Alts, stats.Addresses, stats.SectoralSanctions, 
stats.DeniedPersons, stats.BISEntities) return err } func (r *sqliteDownloadRepository) latestDownloads(limit int) ([]Download, error) { query := `select downloaded_at, sdns, alt_names, addresses, sectoral_sanctions, denied_persons, bis_entities from download_stats order by downloaded_at desc limit ?;` stmt, err := r.db.Prepare(query) if err != nil { return nil, err } defer stmt.Close() rows, err := stmt.Query(limit) if err != nil { return nil, err } defer rows.Close() var downloads []Download for rows.Next() { var dl Download if err := rows.Scan(&dl.Timestamp, &dl.SDNs, &dl.Alts, &dl.Addresses, &dl.SectoralSanctions, &dl.DeniedPersons, &dl.BISEntities); err == nil { downloads = append(downloads, dl) } } return downloads, rows.Err() }
sdns := precomputeSDNs(results.SDNs, results.Addresses, s.pipe) adds := precomputeAddresses(results.Addresses)
random_line_split
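periodicDataRefresh above is designed to run on its own goroutine and report each successful refresh over the updates channel. A fragment of how main might wire it up; the searcher value (s), repository (repo), logger, and the 12-hour interval are assumptions from context, not shown in this file.

// Inside main, once the *searcher (s), downloadRepository (repo), and logger exist:
updates := make(chan *downloadStats)

go s.periodicDataRefresh(12*time.Hour, repo, updates)

go func() {
	for stats := range updates {
		// React to fresh data, e.g. re-run saved searches or fire watch notifications.
		logger.Log("main", fmt.Sprintf("async refresh indexed %d SDNs", stats.SDNs))
	}
}()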
download.go
// Copyright 2020 The Moov Authors // Use of this source code is governed by an Apache License // license that can be found in the LICENSE file. package main import ( "database/sql" "encoding/json" "errors" "fmt" "io/ioutil" "net/http" "time" moovhttp "github.com/moov-io/base/http" "github.com/moov-io/watchman/pkg/csl" "github.com/moov-io/watchman/pkg/dpl" "github.com/moov-io/watchman/pkg/ofac" "github.com/go-kit/kit/log" "github.com/gorilla/mux" "github.com/prometheus/client_golang/prometheus" ) var ( lastDataRefreshSuccess = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_success", Help: "Unix timestamp of when data was last refreshed successfully", }, nil) lastDataRefreshFailure = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_failure", Help: "Unix timestamp of the most recent failure to refresh data", }, []string{"source"}) lastDataRefreshCount = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_count", Help: "Count of records for a given sanction or entity list", }, []string{"source"}) ) func init() { prometheus.MustRegister(lastDataRefreshSuccess) prometheus.MustRegister(lastDataRefreshCount) prometheus.MustRegister(lastDataRefreshFailure) } // Download holds counts for each type of list data parsed from files and a // timestamp of when the download happened. type Download struct { Timestamp time.Time `json:"timestamp"` // US Office of Foreign Assets Control (OFAC) SDNs int `json:"SDNs"` Alts int `json:"altNames"` Addresses int `json:"addresses"` SectoralSanctions int `json:"sectoralSanctions"` // US Bureau of Industry and Security (BIS) DeniedPersons int `json:"deniedPersons"` BISEntities int `json:"bisEntities"` } type downloadStats struct { // US Office of Foreign Assets Control (OFAC) SDNs int `json:"SDNs"` Alts int `json:"altNames"` Addresses int `json:"addresses"` SectoralSanctions int `json:"sectoralSanctions"` // US Bureau of Industry and Security (BIS) DeniedPersons int `json:"deniedPersons"` BISEntities int `json:"bisEntities"` RefreshedAt time.Time `json:"timestamp"` } // periodicDataRefresh will forever block for interval's duration and then download and reparse the data. // Download stats are recorded as part of a successful re-download and parse. func (s *searcher) periodicDataRefresh(interval time.Duration, downloadRepo downloadRepository, updates chan *downloadStats) { if interval == 0*time.Second { s.logger.Log("download", fmt.Sprintf("not scheduling periodic refreshing duration=%v", interval)) return } for { time.Sleep(interval) stats, err := s.refreshData("") if err != nil { if s.logger != nil { s.logger.Log("main", fmt.Sprintf("ERROR: refreshing data: %v", err)) } } else { downloadRepo.recordStats(stats) if s.logger != nil
updates <- stats // send stats for re-search and watch notifications } } } func ofacRecords(logger log.Logger, initialDir string) (*ofac.Results, error) { files, err := ofac.Download(logger, initialDir) if err != nil { return nil, fmt.Errorf("download: %v", err) } if len(files) == 0 { return nil, errors.New("no OFAC Results") } var res *ofac.Results for i := range files { if i == 0 { rr, err := ofac.Read(files[i]) if err != nil { return nil, fmt.Errorf("read: %v", err) } if rr != nil { res = rr } } else { rr, err := ofac.Read(files[i]) if err != nil { return nil, fmt.Errorf("read and replace: %v", err) } if rr != nil { res.Addresses = append(res.Addresses, rr.Addresses...) res.AlternateIdentities = append(res.AlternateIdentities, rr.AlternateIdentities...) res.SDNs = append(res.SDNs, rr.SDNs...) res.SDNComments = append(res.SDNComments, rr.SDNComments...) } } } return res, err } func dplRecords(logger log.Logger, initialDir string) ([]*dpl.DPL, error) { file, err := dpl.Download(logger, initialDir) if err != nil { return nil, err } return dpl.Read(file) } func cslRecords(logger log.Logger, initialDir string) (*csl.CSL, error) { file, err := csl.Download(logger, initialDir) if err != nil { logger.Log("download", "WARN: skipping CSL download", "description", err) return &csl.CSL{}, nil } cslRecords, err := csl.Read(file) if err != nil { return nil, err } return cslRecords, err } // refreshData reaches out to the various websites to download the latest // files, runs each list's parser, and index data for searches. func (s *searcher) refreshData(initialDir string) (*downloadStats, error) { if s.logger != nil { s.logger.Log("download", "Starting refresh of data") if initialDir != "" { s.logger.Log("download", fmt.Sprintf("reading files from %s", initialDir)) } } lastDataRefreshFailure.WithLabelValues("SDNs").Set(float64(time.Now().Unix())) results, err := ofacRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("SDNs").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("OFAC records: %v", err) } sdns := precomputeSDNs(results.SDNs, results.Addresses, s.pipe) adds := precomputeAddresses(results.Addresses) alts := precomputeAlts(results.AlternateIdentities) deniedPersons, err := dplRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("DPs").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("DPL records: %v", err) } dps := precomputeDPs(deniedPersons, s.pipe) consolidatedLists, err := cslRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("CSL").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("CSL records: %v", err) } ssis := precomputeSSIs(consolidatedLists.SSIs, s.pipe) els := precomputeBISEntities(consolidatedLists.ELs, s.pipe) stats := &downloadStats{ // OFAC SDNs: len(sdns), Alts: len(alts), Addresses: len(adds), SectoralSanctions: len(ssis), // BIS BISEntities: len(els), DeniedPersons: len(dps), } stats.RefreshedAt = lastRefresh(initialDir) // record prometheus metrics lastDataRefreshCount.WithLabelValues("SDNs").Set(float64(len(sdns))) lastDataRefreshCount.WithLabelValues("SSIs").Set(float64(len(ssis))) lastDataRefreshCount.WithLabelValues("BISEntities").Set(float64(len(els))) lastDataRefreshCount.WithLabelValues("DPs").Set(float64(len(dps))) // Set new records after precomputation (to minimize lock contention) s.Lock() // OFAC s.SDNs = sdns s.Addresses = adds s.Alts = alts s.SSIs = ssis // BIS s.DPs = dps s.BISEntities = els // metadata s.lastRefreshedAt = stats.RefreshedAt 
s.Unlock() if s.logger != nil { s.logger.Log("download", "Finished refresh of data") } // record successful data refresh lastDataRefreshSuccess.WithLabelValues().Set(float64(time.Now().Unix())) return stats, nil } // lastRefresh returns a time.Time for the oldest file in dir or the current time if empty. func lastRefresh(dir string) time.Time { if dir == "" { return time.Now() } infos, err := ioutil.ReadDir(dir) if len(infos) == 0 || err != nil { return time.Time{} // zero time because there's no initial data } oldest := infos[0].ModTime() for i := range infos[1:] { if t := infos[i].ModTime(); t.Before(oldest) { oldest = t } } return oldest } func addDownloadRoutes(logger log.Logger, r *mux.Router, repo downloadRepository) { r.Methods("GET").Path("/downloads").HandlerFunc(getLatestDownloads(logger, repo)) } func getLatestDownloads(logger log.Logger, repo downloadRepository) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { w = wrapResponseWriter(logger, w, r) limit := extractSearchLimit(r) downloads, err := repo.latestDownloads(limit) if err != nil { moovhttp.Problem(w, err) return } logger.Log("download", "get latest downloads", "requestID", moovhttp.GetRequestID(r)) w.Header().Set("Content-Type", "application/json; charset=utf-8") w.WriteHeader(http.StatusOK) if err := json.NewEncoder(w).Encode(downloads); err != nil { moovhttp.Problem(w, err) return } } } type downloadRepository interface { latestDownloads(limit int) ([]Download, error) recordStats(stats *downloadStats) error } type sqliteDownloadRepository struct { db *sql.DB logger log.Logger } func (r *sqliteDownloadRepository) close() error { return r.db.Close() } func (r *sqliteDownloadRepository) recordStats(stats *downloadStats) error { if stats == nil { return errors.New("recordStats: nil downloadStats") } query := `insert into download_stats (downloaded_at, sdns, alt_names, addresses, sectoral_sanctions, denied_persons, bis_entities) values (?, ?, ?, ?, ?, ?, ?);` stmt, err := r.db.Prepare(query) if err != nil { return err } defer stmt.Close() _, err = stmt.Exec(stats.RefreshedAt, stats.SDNs, stats.Alts, stats.Addresses, stats.SectoralSanctions, stats.DeniedPersons, stats.BISEntities) return err } func (r *sqliteDownloadRepository) latestDownloads(limit int) ([]Download, error) { query := `select downloaded_at, sdns, alt_names, addresses, sectoral_sanctions, denied_persons, bis_entities from download_stats order by downloaded_at desc limit ?;` stmt, err := r.db.Prepare(query) if err != nil { return nil, err } defer stmt.Close() rows, err := stmt.Query(limit) if err != nil { return nil, err } defer rows.Close() var downloads []Download for rows.Next() { var dl Download if err := rows.Scan(&dl.Timestamp, &dl.SDNs, &dl.Alts, &dl.Addresses, &dl.SectoralSanctions, &dl.DeniedPersons, &dl.BISEntities); err == nil { downloads = append(downloads, dl) } } return downloads, rows.Err() }
{ s.logger.Log( "main", fmt.Sprintf("data refreshed %v ago", time.Since(stats.RefreshedAt)), "SDNs", stats.SDNs, "AltNames", stats.Alts, "Addresses", stats.Addresses, "SSI", stats.SectoralSanctions, "DPL", stats.DeniedPersons, "BISEntities", stats.BISEntities, ) }
conditional_block
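The insert and select statements in sqliteDownloadRepository pin down the shape of the download_stats table; a migration sketch reconstructed from those queries follows. The column types are assumptions inferred from the Go struct fields, not taken from the project's actual migrations.

// A table definition matching the queries above; column types are assumed.
const createDownloadStats = `
create table if not exists download_stats (
	downloaded_at datetime,
	sdns integer,
	alt_names integer,
	addresses integer,
	sectoral_sanctions integer,
	denied_persons integer,
	bis_entities integer
);`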
download.go
// Copyright 2020 The Moov Authors // Use of this source code is governed by an Apache License // license that can be found in the LICENSE file. package main import ( "database/sql" "encoding/json" "errors" "fmt" "io/ioutil" "net/http" "time" moovhttp "github.com/moov-io/base/http" "github.com/moov-io/watchman/pkg/csl" "github.com/moov-io/watchman/pkg/dpl" "github.com/moov-io/watchman/pkg/ofac" "github.com/go-kit/kit/log" "github.com/gorilla/mux" "github.com/prometheus/client_golang/prometheus" ) var ( lastDataRefreshSuccess = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_success", Help: "Unix timestamp of when data was last refreshed successfully", }, nil) lastDataRefreshFailure = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_failure", Help: "Unix timestamp of the most recent failure to refresh data", }, []string{"source"}) lastDataRefreshCount = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_count", Help: "Count of records for a given sanction or entity list", }, []string{"source"}) ) func init() { prometheus.MustRegister(lastDataRefreshSuccess) prometheus.MustRegister(lastDataRefreshCount) prometheus.MustRegister(lastDataRefreshFailure) } // Download holds counts for each type of list data parsed from files and a // timestamp of when the download happened. type Download struct { Timestamp time.Time `json:"timestamp"` // US Office of Foreign Assets Control (OFAC) SDNs int `json:"SDNs"` Alts int `json:"altNames"` Addresses int `json:"addresses"` SectoralSanctions int `json:"sectoralSanctions"` // US Bureau of Industry and Security (BIS) DeniedPersons int `json:"deniedPersons"` BISEntities int `json:"bisEntities"` } type downloadStats struct { // US Office of Foreign Assets Control (OFAC) SDNs int `json:"SDNs"` Alts int `json:"altNames"` Addresses int `json:"addresses"` SectoralSanctions int `json:"sectoralSanctions"` // US Bureau of Industry and Security (BIS) DeniedPersons int `json:"deniedPersons"` BISEntities int `json:"bisEntities"` RefreshedAt time.Time `json:"timestamp"` } // periodicDataRefresh will forever block for interval's duration and then download and reparse the data. // Download stats are recorded as part of a successful re-download and parse. 
func (s *searcher) periodicDataRefresh(interval time.Duration, downloadRepo downloadRepository, updates chan *downloadStats) { if interval == 0*time.Second { s.logger.Log("download", fmt.Sprintf("not scheduling periodic refreshing duration=%v", interval)) return } for { time.Sleep(interval) stats, err := s.refreshData("") if err != nil { if s.logger != nil { s.logger.Log("main", fmt.Sprintf("ERROR: refreshing data: %v", err)) } } else { downloadRepo.recordStats(stats) if s.logger != nil { s.logger.Log( "main", fmt.Sprintf("data refreshed %v ago", time.Since(stats.RefreshedAt)), "SDNs", stats.SDNs, "AltNames", stats.Alts, "Addresses", stats.Addresses, "SSI", stats.SectoralSanctions, "DPL", stats.DeniedPersons, "BISEntities", stats.BISEntities, ) } updates <- stats // send stats for re-search and watch notifications } } } func ofacRecords(logger log.Logger, initialDir string) (*ofac.Results, error) { files, err := ofac.Download(logger, initialDir) if err != nil { return nil, fmt.Errorf("download: %v", err) } if len(files) == 0 { return nil, errors.New("no OFAC Results") } var res *ofac.Results for i := range files { if i == 0 { rr, err := ofac.Read(files[i]) if err != nil { return nil, fmt.Errorf("read: %v", err) } if rr != nil { res = rr } } else { rr, err := ofac.Read(files[i]) if err != nil { return nil, fmt.Errorf("read and replace: %v", err) } if rr != nil { res.Addresses = append(res.Addresses, rr.Addresses...) res.AlternateIdentities = append(res.AlternateIdentities, rr.AlternateIdentities...) res.SDNs = append(res.SDNs, rr.SDNs...) res.SDNComments = append(res.SDNComments, rr.SDNComments...) } } } return res, err } func dplRecords(logger log.Logger, initialDir string) ([]*dpl.DPL, error) { file, err := dpl.Download(logger, initialDir) if err != nil { return nil, err } return dpl.Read(file) } func cslRecords(logger log.Logger, initialDir string) (*csl.CSL, error) { file, err := csl.Download(logger, initialDir) if err != nil { logger.Log("download", "WARN: skipping CSL download", "description", err) return &csl.CSL{}, nil } cslRecords, err := csl.Read(file) if err != nil { return nil, err } return cslRecords, err } // refreshData reaches out to the various websites to download the latest // files, runs each list's parser, and index data for searches. 
func (s *searcher) refreshData(initialDir string) (*downloadStats, error) { if s.logger != nil { s.logger.Log("download", "Starting refresh of data") if initialDir != "" { s.logger.Log("download", fmt.Sprintf("reading files from %s", initialDir)) } } lastDataRefreshFailure.WithLabelValues("SDNs").Set(float64(time.Now().Unix())) results, err := ofacRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("SDNs").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("OFAC records: %v", err) } sdns := precomputeSDNs(results.SDNs, results.Addresses, s.pipe) adds := precomputeAddresses(results.Addresses) alts := precomputeAlts(results.AlternateIdentities) deniedPersons, err := dplRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("DPs").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("DPL records: %v", err) } dps := precomputeDPs(deniedPersons, s.pipe) consolidatedLists, err := cslRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("CSL").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("CSL records: %v", err) } ssis := precomputeSSIs(consolidatedLists.SSIs, s.pipe) els := precomputeBISEntities(consolidatedLists.ELs, s.pipe) stats := &downloadStats{ // OFAC SDNs: len(sdns), Alts: len(alts), Addresses: len(adds), SectoralSanctions: len(ssis), // BIS BISEntities: len(els), DeniedPersons: len(dps), } stats.RefreshedAt = lastRefresh(initialDir) // record prometheus metrics lastDataRefreshCount.WithLabelValues("SDNs").Set(float64(len(sdns))) lastDataRefreshCount.WithLabelValues("SSIs").Set(float64(len(ssis))) lastDataRefreshCount.WithLabelValues("BISEntities").Set(float64(len(els))) lastDataRefreshCount.WithLabelValues("DPs").Set(float64(len(dps))) // Set new records after precomputation (to minimize lock contention) s.Lock() // OFAC s.SDNs = sdns s.Addresses = adds s.Alts = alts s.SSIs = ssis // BIS s.DPs = dps s.BISEntities = els // metadata s.lastRefreshedAt = stats.RefreshedAt s.Unlock() if s.logger != nil { s.logger.Log("download", "Finished refresh of data") } // record successful data refresh lastDataRefreshSuccess.WithLabelValues().Set(float64(time.Now().Unix())) return stats, nil } // lastRefresh returns a time.Time for the oldest file in dir or the current time if empty. func lastRefresh(dir string) time.Time { if dir == "" { return time.Now() } infos, err := ioutil.ReadDir(dir) if len(infos) == 0 || err != nil { return time.Time{} // zero time because there's no initial data } oldest := infos[0].ModTime() for i := range infos[1:] { if t := infos[i].ModTime(); t.Before(oldest) { oldest = t } } return oldest } func addDownloadRoutes(logger log.Logger, r *mux.Router, repo downloadRepository)
func getLatestDownloads(logger log.Logger, repo downloadRepository) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { w = wrapResponseWriter(logger, w, r) limit := extractSearchLimit(r) downloads, err := repo.latestDownloads(limit) if err != nil { moovhttp.Problem(w, err) return } logger.Log("download", "get latest downloads", "requestID", moovhttp.GetRequestID(r)) w.Header().Set("Content-Type", "application/json; charset=utf-8") w.WriteHeader(http.StatusOK) if err := json.NewEncoder(w).Encode(downloads); err != nil { moovhttp.Problem(w, err) return } } } type downloadRepository interface { latestDownloads(limit int) ([]Download, error) recordStats(stats *downloadStats) error } type sqliteDownloadRepository struct { db *sql.DB logger log.Logger } func (r *sqliteDownloadRepository) close() error { return r.db.Close() } func (r *sqliteDownloadRepository) recordStats(stats *downloadStats) error { if stats == nil { return errors.New("recordStats: nil downloadStats") } query := `insert into download_stats (downloaded_at, sdns, alt_names, addresses, sectoral_sanctions, denied_persons, bis_entities) values (?, ?, ?, ?, ?, ?, ?);` stmt, err := r.db.Prepare(query) if err != nil { return err } defer stmt.Close() _, err = stmt.Exec(stats.RefreshedAt, stats.SDNs, stats.Alts, stats.Addresses, stats.SectoralSanctions, stats.DeniedPersons, stats.BISEntities) return err } func (r *sqliteDownloadRepository) latestDownloads(limit int) ([]Download, error) { query := `select downloaded_at, sdns, alt_names, addresses, sectoral_sanctions, denied_persons, bis_entities from download_stats order by downloaded_at desc limit ?;` stmt, err := r.db.Prepare(query) if err != nil { return nil, err } defer stmt.Close() rows, err := stmt.Query(limit) if err != nil { return nil, err } defer rows.Close() var downloads []Download for rows.Next() { var dl Download if err := rows.Scan(&dl.Timestamp, &dl.SDNs, &dl.Alts, &dl.Addresses, &dl.SectoralSanctions, &dl.DeniedPersons, &dl.BISEntities); err == nil { downloads = append(downloads, dl) } } return downloads, rows.Err() }
{ r.Methods("GET").Path("/downloads").HandlerFunc(getLatestDownloads(logger, repo)) }
identifier_body
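One subtlety in lastRefresh above: the loop `for i := range infos[1:]` yields indices 0 through len(infos)-2 but then indexes the original slice with infos[i], so infos[0] is compared against itself and the final file is never examined. A corrected sketch of that scan, ranging over the values instead:

// Drop-in replacement for the oldest-file scan in lastRefresh: ranging over
// the sub-slice's values includes the final FileInfo and skips the self-compare.
oldest := infos[0].ModTime()
for _, info := range infos[1:] {
	if t := info.ModTime(); t.Before(oldest) {
		oldest = t
	}
}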
download.go
// Copyright 2020 The Moov Authors // Use of this source code is governed by an Apache License // license that can be found in the LICENSE file. package main import ( "database/sql" "encoding/json" "errors" "fmt" "io/ioutil" "net/http" "time" moovhttp "github.com/moov-io/base/http" "github.com/moov-io/watchman/pkg/csl" "github.com/moov-io/watchman/pkg/dpl" "github.com/moov-io/watchman/pkg/ofac" "github.com/go-kit/kit/log" "github.com/gorilla/mux" "github.com/prometheus/client_golang/prometheus" ) var ( lastDataRefreshSuccess = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_success", Help: "Unix timestamp of when data was last refreshed successfully", }, nil) lastDataRefreshFailure = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_failure", Help: "Unix timestamp of the most recent failure to refresh data", }, []string{"source"}) lastDataRefreshCount = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Name: "last_data_refresh_count", Help: "Count of records for a given sanction or entity list", }, []string{"source"}) ) func init() { prometheus.MustRegister(lastDataRefreshSuccess) prometheus.MustRegister(lastDataRefreshCount) prometheus.MustRegister(lastDataRefreshFailure) } // Download holds counts for each type of list data parsed from files and a // timestamp of when the download happened. type Download struct { Timestamp time.Time `json:"timestamp"` // US Office of Foreign Assets Control (OFAC) SDNs int `json:"SDNs"` Alts int `json:"altNames"` Addresses int `json:"addresses"` SectoralSanctions int `json:"sectoralSanctions"` // US Bureau of Industry and Security (BIS) DeniedPersons int `json:"deniedPersons"` BISEntities int `json:"bisEntities"` } type downloadStats struct { // US Office of Foreign Assets Control (OFAC) SDNs int `json:"SDNs"` Alts int `json:"altNames"` Addresses int `json:"addresses"` SectoralSanctions int `json:"sectoralSanctions"` // US Bureau of Industry and Security (BIS) DeniedPersons int `json:"deniedPersons"` BISEntities int `json:"bisEntities"` RefreshedAt time.Time `json:"timestamp"` } // periodicDataRefresh will forever block for interval's duration and then download and reparse the data. // Download stats are recorded as part of a successful re-download and parse. 
func (s *searcher) periodicDataRefresh(interval time.Duration, downloadRepo downloadRepository, updates chan *downloadStats) { if interval == 0*time.Second { s.logger.Log("download", fmt.Sprintf("not scheduling periodic refreshing duration=%v", interval)) return } for { time.Sleep(interval) stats, err := s.refreshData("") if err != nil { if s.logger != nil { s.logger.Log("main", fmt.Sprintf("ERROR: refreshing data: %v", err)) } } else { downloadRepo.recordStats(stats) if s.logger != nil { s.logger.Log( "main", fmt.Sprintf("data refreshed %v ago", time.Since(stats.RefreshedAt)), "SDNs", stats.SDNs, "AltNames", stats.Alts, "Addresses", stats.Addresses, "SSI", stats.SectoralSanctions, "DPL", stats.DeniedPersons, "BISEntities", stats.BISEntities, ) } updates <- stats // send stats for re-search and watch notifications } } } func ofacRecords(logger log.Logger, initialDir string) (*ofac.Results, error) { files, err := ofac.Download(logger, initialDir) if err != nil { return nil, fmt.Errorf("download: %v", err) } if len(files) == 0 { return nil, errors.New("no OFAC Results") } var res *ofac.Results for i := range files { if i == 0 { rr, err := ofac.Read(files[i]) if err != nil { return nil, fmt.Errorf("read: %v", err) } if rr != nil { res = rr } } else { rr, err := ofac.Read(files[i]) if err != nil { return nil, fmt.Errorf("read and replace: %v", err) } if rr != nil { res.Addresses = append(res.Addresses, rr.Addresses...) res.AlternateIdentities = append(res.AlternateIdentities, rr.AlternateIdentities...) res.SDNs = append(res.SDNs, rr.SDNs...) res.SDNComments = append(res.SDNComments, rr.SDNComments...) } } } return res, err } func dplRecords(logger log.Logger, initialDir string) ([]*dpl.DPL, error) { file, err := dpl.Download(logger, initialDir) if err != nil { return nil, err } return dpl.Read(file) } func cslRecords(logger log.Logger, initialDir string) (*csl.CSL, error) { file, err := csl.Download(logger, initialDir) if err != nil { logger.Log("download", "WARN: skipping CSL download", "description", err) return &csl.CSL{}, nil } cslRecords, err := csl.Read(file) if err != nil { return nil, err } return cslRecords, err } // refreshData reaches out to the various websites to download the latest // files, runs each list's parser, and indexes data for searches. 
func (s *searcher) refreshData(initialDir string) (*downloadStats, error) { if s.logger != nil { s.logger.Log("download", "Starting refresh of data") if initialDir != "" { s.logger.Log("download", fmt.Sprintf("reading files from %s", initialDir)) } } results, err := ofacRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("SDNs").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("OFAC records: %v", err) } sdns := precomputeSDNs(results.SDNs, results.Addresses, s.pipe) adds := precomputeAddresses(results.Addresses) alts := precomputeAlts(results.AlternateIdentities) deniedPersons, err := dplRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("DPs").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("DPL records: %v", err) } dps := precomputeDPs(deniedPersons, s.pipe) consolidatedLists, err := cslRecords(s.logger, initialDir) if err != nil { lastDataRefreshFailure.WithLabelValues("CSL").Set(float64(time.Now().Unix())) return nil, fmt.Errorf("CSL records: %v", err) } ssis := precomputeSSIs(consolidatedLists.SSIs, s.pipe) els := precomputeBISEntities(consolidatedLists.ELs, s.pipe) stats := &downloadStats{ // OFAC SDNs: len(sdns), Alts: len(alts), Addresses: len(adds), SectoralSanctions: len(ssis), // BIS BISEntities: len(els), DeniedPersons: len(dps), } stats.RefreshedAt = lastRefresh(initialDir) // record prometheus metrics lastDataRefreshCount.WithLabelValues("SDNs").Set(float64(len(sdns))) lastDataRefreshCount.WithLabelValues("SSIs").Set(float64(len(ssis))) lastDataRefreshCount.WithLabelValues("BISEntities").Set(float64(len(els))) lastDataRefreshCount.WithLabelValues("DPs").Set(float64(len(dps))) // Set new records after precomputation (to minimize lock contention) s.Lock() // OFAC s.SDNs = sdns s.Addresses = adds s.Alts = alts s.SSIs = ssis // BIS s.DPs = dps s.BISEntities = els // metadata s.lastRefreshedAt = stats.RefreshedAt s.Unlock() if s.logger != nil { s.logger.Log("download", "Finished refresh of data") } // record successful data refresh lastDataRefreshSuccess.WithLabelValues().Set(float64(time.Now().Unix())) return stats, nil } // lastRefresh returns a time.Time for the oldest file in dir or the current time if empty. 
func lastRefresh(dir string) time.Time { if dir == "" { return time.Now() } infos, err := ioutil.ReadDir(dir) if len(infos) == 0 || err != nil { return time.Time{} // zero time because there's no initial data } oldest := infos[0].ModTime() for i := range infos[1:] { if t := infos[i+1].ModTime(); t.Before(oldest) { oldest = t } } return oldest } func addDownloadRoutes(logger log.Logger, r *mux.Router, repo downloadRepository) { r.Methods("GET").Path("/downloads").HandlerFunc(getLatestDownloads(logger, repo)) } func getLatestDownloads(logger log.Logger, repo downloadRepository) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { w = wrapResponseWriter(logger, w, r) limit := extractSearchLimit(r) downloads, err := repo.latestDownloads(limit) if err != nil { moovhttp.Problem(w, err) return } logger.Log("download", "get latest downloads", "requestID", moovhttp.GetRequestID(r)) w.Header().Set("Content-Type", "application/json; charset=utf-8") w.WriteHeader(http.StatusOK) if err := json.NewEncoder(w).Encode(downloads); err != nil { moovhttp.Problem(w, err) return } } } type downloadRepository interface { latestDownloads(limit int) ([]Download, error) recordStats(stats *downloadStats) error } type sqliteDownloadRepository struct { db *sql.DB logger log.Logger } func (r *sqliteDownloadRepository) close() error { return r.db.Close() } func (r *sqliteDownloadRepository)
(stats *downloadStats) error { if stats == nil { return errors.New("recordStats: nil downloadStats") } query := `insert into download_stats (downloaded_at, sdns, alt_names, addresses, sectoral_sanctions, denied_persons, bis_entities) values (?, ?, ?, ?, ?, ?, ?);` stmt, err := r.db.Prepare(query) if err != nil { return err } defer stmt.Close() _, err = stmt.Exec(stats.RefreshedAt, stats.SDNs, stats.Alts, stats.Addresses, stats.SectoralSanctions, stats.DeniedPersons, stats.BISEntities) return err } func (r *sqliteDownloadRepository) latestDownloads(limit int) ([]Download, error) { query := `select downloaded_at, sdns, alt_names, addresses, sectoral_sanctions, denied_persons, bis_entities from download_stats order by downloaded_at desc limit ?;` stmt, err := r.db.Prepare(query) if err != nil { return nil, err } defer stmt.Close() rows, err := stmt.Query(limit) if err != nil { return nil, err } defer rows.Close() var downloads []Download for rows.Next() { var dl Download if err := rows.Scan(&dl.Timestamp, &dl.SDNs, &dl.Alts, &dl.Addresses, &dl.SectoralSanctions, &dl.DeniedPersons, &dl.BISEntities); err == nil { downloads = append(downloads, dl) } } return downloads, rows.Err() }
recordStats
identifier_name
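The periodicDataRefresh loop above sleeps for an interval, refreshes the data, records stats, and pushes them on an updates channel for re-search and watch notifications. Here is a compact, runnable sketch of that producer/consumer shape under simplified assumptions: the downloadStats fields are cut down, periodicRefresh fakes the refresh instead of calling refreshData, and the 50ms interval is arbitrary.

// A minimal sketch of the refresh-loop pattern, not the watchman code itself.
package main

import (
	"fmt"
	"time"
)

// downloadStats is a cut-down stand-in for the struct above.
type downloadStats struct {
	SDNs        int
	RefreshedAt time.Time
}

// periodicRefresh mirrors the shape of periodicDataRefresh: sleep for the
// interval, produce fresh stats, and push them to a consumer channel.
func periodicRefresh(interval time.Duration, updates chan<- *downloadStats) {
	if interval == 0 {
		return // a zero interval disables scheduling, as in the source
	}
	for {
		time.Sleep(interval)
		// The real loop calls s.refreshData(""); this fakes the result.
		updates <- &downloadStats{SDNs: 7, RefreshedAt: time.Now()}
	}
}

func main() {
	updates := make(chan *downloadStats)
	go periodicRefresh(50*time.Millisecond, updates)
	// In watchman the consumer re-runs searches and sends watch
	// notifications; here we just print the first two batches.
	for i := 0; i < 2; i++ {
		stats := <-updates
		fmt.Printf("refreshed %v ago, %d SDNs\n", time.Since(stats.RefreshedAt), stats.SDNs)
	}
}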
basics.py
# Input Examples
# print('Hello World') # myName = input("Whats your name?\n") # print("It is good to meet you, " + myName) # print('The length of your name is: ') # print(len(myName)) # myAge = input("Whats age?\n") # print('you will be ' + str(int(myAge) + 1) + ' in a year.') # Flow Controls # example 1 # name = 'Msry' # password = 'swordfish' # if name == 'Mary': # print('Hello Mary') # if password == 'swordfish': # print('Access Granted') # else: # print('Wrong Pass') # else: # print('Program Terminated!') # example 2 # name = 'Alisce' # age = 11 # if name == 'Alice': # print('Hi Alice') # elif age < 12: # print('Not Allice') # elif age > 2000: # print('you are immortal') # else: # print('hello alice') # example 3 # with if import pprint from random import randint, sample import re import sys import random from typing import Type spam = 0 # if spam < 5: # print ('Hello') # spam = spam + 1 # with while # while spam < 5: # print('hello') # spam = spam + 1 # name = '' # while name != 'your name': # print('Please type your name.') # name = input() # print('thanks') # example 4 # while True: # print('Who are you?') # name = input() # if name != 'Parth': # continue # print('Hello ' + name + ' What\'s your password') # password = input() # if password == 'swordfish': # break # print('Access Granted') # example 5 # for loop # print('My name is') # for i in range(5): # print('Parth five times (' + str(i) + ')') # total = 0 # for num in range(101): # total = total + num # print (total) # normal for loop Start - Stop # for i in range(12, 16): # for loop with Start - Stop - Step # for i in range(0, 10, 2): # for loop to count down # for i in range(5, -1, -2): # print(i) # importing modules # import random # for i in range(5): # print(random.randint(1, 10)) # import sys # while True: # print('Type exit to exit.') # response = input() # if response == 'exit': # sys.exit() # print('You typed ' + response + '.') # functions # def hello(): # print('Howdy!') # print('Hwdy!') # print('Hello There') # hello() # hello() # hello() # with params # def hello(name): # print('Hello ' + name) # hello('Alice') # hello('Bob') # def getAnswer(answerNum): # if answerNum == 1: # return 'It is certain' # elif answerNum == 2: # return 'It is decidedly so' # elif answerNum == 3: # return 'Yes' # elif answerNum == 4: # return 'Reply hazy try again' # elif answerNum == 5: # return 'Ask again later' # elif answerNum == 6: # return 'Concentrate and ask again' # elif answerNum == 7: # return 'My reply is no' # elif answerNum == 8: # return 'Outlook not so good' # elif answerNum == 9: # return 'very doubtful' # r = random.randint(1, 9) # print(r) # fortune = getAnswer(r) # print(fortune) # test = None # print(test) # Exception handling # def spam(divideBy): # try: # return 42 / divideBy # except ZeroDivisionError: # print('Error: Invalid Argument') # print(spam(2)) # print(spam(12)) # print(spam(0)) # print(spam(1)) # Example: Guess my number game # secretNum = random.randint(1, 20) # print('I am thinking of a number between 1 and 20') # # Ask player to guess 6 times # for guessTaken in range(1, 7): # guess = int(input('Take a guess: ')) # if guess < secretNum: # print('Your guess is too low') # elif guess > secretNum: # print('Your guess is too high') # else: # break # this condition is correct guess # if guess == secretNum: # print('Good job! You guessed my number in ' + # str(guessTaken) + ' guesses!') # else: # print('Nope. 
the number i was thinking of was ' + str(secretNum)) # Practice 1 # def collatz(number): # if number % 2 == 0: # print(number // 2) # return number // 2 # elif number % 2 == 1: # result = 3 * number + 1 # print(result) # return result # try: # userInput = int(input("Enter Num: ")) # while (userInput != 1): # userInput = collatz(int(userInput)) # except ValueError: # print("Please enter integer") # list # catNames = [] # while True: # print("Enter name of cat " + str(len(catNames) + 1) + '(or nothing to stop.)') # name = input() # if name == '': # break # catNames = catNames + [name] # print('The cat names are: ') # for name in catNames: # print(' ' + name) someList = ['cat', 'dog', 'mat'] # for someVal in range(len(someList)): # print('Index ' + str(someVal) + ' in supplies is: ' + someList[someVal]) # 'in' and 'not in' # 'howdy' in ['hello', 'hi', 'howdy', 'heyas'] # 'hey' not in ['hello', 'hi', 'howdy', 'heyas'] # myPets = ['jacob', 'john', 'lucifer'] # print("Enter a pet name: ") # name = input() # if name not in myPets: # print('No pet named ' + name) # else: # print(name + ' is my pet') # dog = ['fat', 'black', 'loud'] # size, color, feature = dog # print(dog) # Magic 8 Ball with a list # messages = [['It is certain', # 'It is decidedly so', # 'Yes definitely', # 'Reply hazy try again', # 'Ask again later', # 'Concentrate and ask again', # 'My reply is no', # 'Outlook not so good', # 'Very doubtful']] # print(messages[random.randint(0, len(messages) - 1)]) # passing references # def eggs(params): # params.append('Hello') # spam = [1, 2, 3] # eggs(spam) # print(spam) # spam = [['A', 'B', 'C', 'D'], ['a', 'b', 'c', 'd']] # cheese = copy.deepcopy(spam) # # cheese[1] = 42 # print(spam) # print(cheese) # spam = ['a', 'b', 'c', 'd'] # print(spam[int(int('3' * 2) / 11)]) # print(spam[:2]) # bacon = [3.14, 'cat', 11, 'cat', True] # print(bacon.remove('cat')) # print(bacon) # list1 = [1, 2, 3] # list2 = [4, 5, 6] # list3 = list1 + list1 # list3 = list1 * list1 # print(list3) # practice of list # spam = ['apples', 'bananas', 'tofu', 'cat'] # newSpam = [1, 3, 'hello'] # def makeStringFromList(aList): # for i in aList: # print(i, end=", ") # makeStringFromList(newSpam) # grid = [['.', '.', '.', '.', '.', '.'], # ['.', 'O', 'O', '.', '.', '.'], # ['O', 'O', 'O', 'O', '.', '.'], # ['O', 'O', 'O', 'O', 'O', '.'], # ['.', 'O', 'O', 'O', 'O', 'O'], # ['O', 'O', 'O', 'O', 'O', '.'], # ['O', 'O', 'O', 'O', '.', '.'], # ['.', 'O', 'O', '.', '.', '.'], # ['.', '.', '.', '.', '.', '.']] # heart = '' # for j in range(len(grid[-1])): # for i in reversed(range(len(grid))): # heart += grid[i][j] # if i == 0: # heart += '\n' # print(heart) # n = int(input("Enter Num: ")) # if n % 2 != 0: # print('Weird') # elif n % 2 == 0 and (n in range(2, 6)): # print('Not Weird 2 - 5') # elif n % 2 == 0 and (n in range(6, 21)): # print('Weird 6 - 20') # elif n % 2 == 0 and (n in range(20, 101)): # print("Not Weird 20 up") # n = int(input("Enter Num: ")) # for i in range(1, n+1): # print(i, end="") # Dictionary example # birthdays = {'Alice': 'Apr 1', 'Bob': 'Dec 12', 'Carol': 'Mar 4'} # while True: # print("Enter a name / (blank to quit): ") # name = input() # if name == "": # break # if name in birthdays: # print(birthdays[name] + ' is the birthday of ' + name) # else: # print('I don\'t have birthday info for ' + name) # bday = input() # birthdays[name] = bday # print('Birthday database updated') # spam = {'color': 'red', 'age': 42} # # for i in spam.values(): # # for i in spam.keys(): # for i in spam.items(): # print(i) # 
message = 'It was a bright cold day in April, and the clocks were striking thirteen.' # count = {} # for char in message: # count.setdefault(char, 0) # count[char] = count[char] + 1 # pprint.pprint(count) # theBoard = {'top-L': ' ', 'top-M': ' ', 'top-R': ' ', # 'mid-L': ' ', 'mid-M': ' ', 'mid-R': ' ', # 'low-L': ' ', 'low-M': ' ', 'low-R': ' '} # def printBoard(board): # print(board['top-L'] + '|' + board['top-M'] + '|' + board['top-R']) # print("-+-+-") # print(board['mid-L'] + '|' + board['mid-M'] + '|' + board['mid-R']) # print("-+-+-") # print(board['low-L'] + '|' + board['low-M'] + '|' + board['low-R']) # # print("-+-+-") # turn = 'X' # for i in range(9): # printBoard(theBoard) # print("Turn for " + turn + ". Move on which space?") # move = input() # theBoard[move] = turn # if turn == 'X': # turn = 'O' # else: # turn = 'X' # printBoard(theBoard) # allGuests = {'Alice': {'apples': 5, 'pretzels': 12}, # 'Bob': {'ham sandwiches': 3, 'apples': 2}, # 'Carol': {'cups': 3, 'apple pies': 1}} # def totalBrought(guests, item): # numBrought = 0 # for k, v in guests.items(): # numBrought = numBrought + v.get(item, 0) # return numBrought # print('Number of things being brought: ') # print(" - Apples: " + str(totalBrought(allGuests, 'apples'))) # print(' - Cups: ' + str(totalBrought(allGuests, 'cups'))) # print(' - Cakes: ' + str(totalBrought(allGuests, 'cakes'))) # print(' - Ham Sandwiches: ' + str(totalBrought(allGuests, 'ham sandwiches'))) # print(' - Apple Pies: ' + str(totalBrought(allGuests, 'apple pies'))) # gameInventory = {'rope': 1, 'torch': 6, # 'gold coin': 42, 'dagger': 1, 'arrow': 12} # def displayInventory(inventory): # print("Inventory: ") # inventoryItems = 0 # for k, v in inventory.items(): # inventoryItems = inventoryItems + v # print(str(v) + " - " + str.capitalize(k)) # print("Total number of items: " + str(inventoryItems)) # displayInventory(gameInventory) # def addToInventory(inventory, addedItems): # for item in addedItems: # inventory.setdefault(item, 0) # inventory[item] += 1 # return inventory # def displayInventory(inventory): # print("Inventory: ") # invTotItems = 0 # for k, v in inventory.items(): # invTotItems = invTotItems + v # print(str(v) + " - " + str(k)) # print("Total number of items: " + str(invTotItems)) # inv = {'gold coin': 42, 'rope': 1} # dragonLoot = ['gold coin', 'dagger', 'gold coin', 'gold coin', 'ruby'] # inv = addToInventory(inv, dragonLoot) # displayInventory(inv) # print('''Dear Alice, # Eve's cat has been arrested for catnapping, cat burglary, and extortion. 
# Sincerely, # Bob''') # Strings # def printPicnic(itemsDict, leftWidth, rightWdth): # print("PICNIC ITEMS".center(leftWidth + rightWdth, '-')) # for k, v in itemsDict.items(): # print(k.ljust(leftWidth, '.') + str(v).rjust(rightWdth)) # picnicItems = {'sandwiches': 4, # 'apples': 12, 'cups': 4, 'cookies': 8000} # printPicnic(picnicItems, 12, 5) # printPicnic(picnicItems, 20, 6) # Validate input # while True: # print('Enter your age: ') # age = input() # if age.isdecimal(): # break # print('Please enter number for your age.') # while True: # print('Select a new password (letters and numbers only): ') # password = input() # if password.isalnum(): # break # print('Passwords can only have letters and numbers') # tableData = [['apples', 'oranges', 'cherries', 'banana'], # ['Alice', 'Bob', 'Carol', 'David'], # ['dogs', 'cats', 'moose', 'goose']] # def printTable(itemDict, leftWidth, rightWdth): # print("PICNIC ITEMS".center(leftWidth + rightWdth, '-')) # for k, v in itemDict.items(): # print(k.ljust(leftWidth, '.') + str(v).rjust(rightWdth)) # printTable(tableData, 12, 5) # def isPhoneNumber(text): # if len(text) != 12: # return False # for i in range(0, 3): # if not text[i].isdecimal(): # return False # if text[3] != '-': # return False # for i in range(4, 7): # if not text[i].isdecimal(): # return False # if text[7] != '-': # return False # for i in range(8, 12): # if not text[i].isdecimal(): # return False # return True # print('415-555-4242 is a phone no') # print(isPhoneNumber('415-555-4242')) # print('hello is a phone number') # print(isPhoneNumber('hello is a phone number')) # message = 'Call me at 415-555-1011 tomorrow. 415-555-9999 is my office. ididn iss 415-555-9998' # for i in range(len(message)): # chunk = message[i:i+12] # if isPhoneNumber(chunk): # print('Phone number found. ' + chunk) # print('Done') # heroRegex = re.compile(r'Batman|Tina Fey') # mo1 = heroRegex.search("Batman and Tina Fey.") # print(mo1.group())
random_line_split
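Practice 1 in the scratch file above drafts the Collatz sequence in commented-out Python. For comparison, here is the same exercise rendered in Go; the prompt strings are paraphrased and the input validation is a guess at the intent.

// The Collatz exercise from "Practice 1" above, sketched in Go.
package main

import "fmt"

// collatz mirrors the Python draft: print and return n/2 for even n,
// and 3n+1 for odd n.
func collatz(n int) int {
	if n%2 == 0 {
		n /= 2
	} else {
		n = 3*n + 1
	}
	fmt.Println(n)
	return n
}

func main() {
	var n int
	fmt.Print("Enter Num: ")
	if _, err := fmt.Scan(&n); err != nil || n < 1 {
		fmt.Println("Please enter a positive integer")
		return
	}
	for n != 1 {
		n = collatz(n)
	}
}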
profile.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: github.com/micro/examples/booking/srv/profile/proto/profile.proto /* Package profile is a generated protocol buffer package. It is generated from these files: github.com/micro/examples/booking/srv/profile/proto/profile.proto It has these top-level messages: Request Result Hotel Address Image */ package profile import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import ( context "golang.org/x/net/context" grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Request struct { HotelIds []string `protobuf:"bytes,1,rep,name=hotelIds" json:"hotelIds,omitempty"` Locale string `protobuf:"bytes,2,opt,name=locale" json:"locale,omitempty"` } func (m *Request) Reset() { *m = Request{} } func (m *Request) String() string { return proto.CompactTextString(m) } func (*Request) ProtoMessage() {} func (*Request) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } func (m *Request) GetHotelIds() []string { if m != nil { return m.HotelIds } return nil } func (m *Request) GetLocale() string { if m != nil { return m.Locale } return "" } type Result struct { Hotels []*Hotel `protobuf:"bytes,1,rep,name=hotels" json:"hotels,omitempty"` } func (m *Result) Reset() { *m = Result{} } func (m *Result) String() string { return proto.CompactTextString(m) } func (*Result) ProtoMessage() {} func (*Result) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } func (m *Result) GetHotels() []*Hotel { if m != nil { return m.Hotels } return nil } type Hotel struct { Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` PhoneNumber string `protobuf:"bytes,3,opt,name=phoneNumber" json:"phoneNumber,omitempty"` Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` Address *Address `protobuf:"bytes,5,opt,name=address" json:"address,omitempty"` Images []*Image `protobuf:"bytes,6,rep,name=images" json:"images,omitempty"` } func (m *Hotel) Reset() { *m = Hotel{} } func (m *Hotel) String() string { return proto.CompactTextString(m) } func (*Hotel) ProtoMessage() {} func (*Hotel) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } func (m *Hotel) GetId() string { if m != nil { return m.Id } return "" } func (m *Hotel) GetName() string { if m != nil { return m.Name } return "" } func (m *Hotel) GetPhoneNumber() string { if m != nil { return m.PhoneNumber } return "" } func (m *Hotel) GetDescription() string { if m != nil { return m.Description } return "" } func (m *Hotel) GetAddress() *Address { if m != nil { return m.Address } return nil } func (m *Hotel) GetImages() []*Image { if m != nil { return m.Images } return nil } type Address struct { StreetNumber string `protobuf:"bytes,1,opt,name=streetNumber" json:"streetNumber,omitempty"` StreetName string `protobuf:"bytes,2,opt,name=streetName" json:"streetName,omitempty"` City string `protobuf:"bytes,3,opt,name=city" json:"city,omitempty"` State string `protobuf:"bytes,4,opt,name=state" json:"state,omitempty"` Country 
string `protobuf:"bytes,5,opt,name=country" json:"country,omitempty"` PostalCode string `protobuf:"bytes,6,opt,name=postalCode" json:"postalCode,omitempty"` } func (m *Address) Reset() { *m = Address{} } func (m *Address) String() string { return proto.CompactTextString(m) } func (*Address) ProtoMessage() {} func (*Address) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } func (m *Address) GetStreetNumber() string { if m != nil { return m.StreetNumber } return "" } func (m *Address) GetStreetName() string { if m != nil { return m.StreetName } return "" } func (m *Address) GetCity() string { if m != nil { return m.City } return "" } func (m *Address) GetState() string { if m != nil { return m.State } return "" } func (m *Address) GetCountry() string { if m != nil { return m.Country } return "" } func (m *Address) GetPostalCode() string { if m != nil { return m.PostalCode } return "" } type Image struct { Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` Default bool `protobuf:"varint,2,opt,name=default" json:"default,omitempty"` } func (m *Image) Reset() { *m = Image{} } func (m *Image) String() string { return proto.CompactTextString(m) } func (*Image) ProtoMessage() {} func (*Image) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } func (m *Image) GetUrl() string { if m != nil { return m.Url } return "" } func (m *Image) GetDefault() bool { if m != nil { return m.Default } return false } func init() { proto.RegisterType((*Request)(nil), "profile.Request") proto.RegisterType((*Result)(nil), "profile.Result") proto.RegisterType((*Hotel)(nil), "profile.Hotel") proto.RegisterType((*Address)(nil), "profile.Address") proto.RegisterType((*Image)(nil), "profile.Image") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // Client API for Profile service type ProfileClient interface { GetProfiles(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Result, error) } type profileClient struct { cc *grpc.ClientConn } func
(cc *grpc.ClientConn) ProfileClient { return &profileClient{cc} } func (c *profileClient) GetProfiles(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Result, error) { out := new(Result) err := grpc.Invoke(ctx, "/profile.Profile/GetProfiles", in, out, c.cc, opts...) if err != nil { return nil, err } return out, nil } // Server API for Profile service type ProfileServer interface { GetProfiles(context.Context, *Request) (*Result, error) } func RegisterProfileServer(s *grpc.Server, srv ProfileServer) { s.RegisterService(&_Profile_serviceDesc, srv) } func _Profile_GetProfiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(Request) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(ProfileServer).GetProfiles(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/profile.Profile/GetProfiles", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(ProfileServer).GetProfiles(ctx, req.(*Request)) } return interceptor(ctx, in, info, handler) } var _Profile_serviceDesc = grpc.ServiceDesc{ ServiceName: "profile.Profile", HandlerType: (*ProfileServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "GetProfiles", Handler: _Profile_GetProfiles_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "github.com/micro/examples/booking/srv/profile/proto/profile.proto", } func init() { proto.RegisterFile("github.com/micro/examples/booking/srv/profile/proto/profile.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ // 397 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xc1, 0x6e, 0xd4, 0x30, 0x10, 0x86, 0x95, 0xdd, 0x6e, 0xd2, 0x9d, 0x45, 0xa5, 0x1a, 0x21, 0x64, 0xf5, 0x80, 0x56, 0x39, 0xa0, 0x15, 0x87, 0x4d, 0xb5, 0x3d, 0x22, 0x0e, 0x15, 0x07, 0xe8, 0x05, 0x21, 0xbf, 0x81, 0x13, 0x4f, 0x77, 0x2d, 0x9c, 0x38, 0xd8, 0x0e, 0xa2, 0x8f, 0xc5, 0x33, 0xf0, 0x62, 0xc8, 0x8e, 0xd3, 0x0d, 0x3d, 0x79, 0xfe, 0x6f, 0xc6, 0x9e, 0xf9, 0xad, 0x81, 0xfb, 0xa3, 0xf2, 0xa7, 0xa1, 0xde, 0x37, 0xa6, 0xad, 0x5a, 0xd5, 0x58, 0x53, 0xd1, 0x6f, 0xd1, 0xf6, 0x9a, 0x5c, 0x55, 0x1b, 0xf3, 0x43, 0x75, 0xc7, 0xca, 0xd9, 0x5f, 0x55, 0x6f, 0xcd, 0xa3, 0xd2, 0x14, 0x4e, 0x6f, 0x26, 0xb5, 0x8f, 0x0a, 0x8b, 0x24, 0xcb, 0x4f, 0x50, 0x70, 0xfa, 0x39, 0x90, 0xf3, 0x78, 0x03, 0x97, 0x27, 0xe3, 0x49, 0x3f, 0x48, 0xc7, 0xb2, 0xed, 0x72, 0xb7, 0xe6, 0xcf, 0x1a, 0xdf, 0x42, 0xae, 0x4d, 0x23, 0x34, 0xb1, 0xc5, 0x36, 0xdb, 0xad, 0x79, 0x52, 0xe5, 0x2d, 0xe4, 0x9c, 0xdc, 0xa0, 0x3d, 0xbe, 0x87, 0x3c, 0x56, 0x8f, 0x77, 0x37, 0x87, 0xab, 0xfd, 0xd4, 0xf1, 0x6b, 0xc0, 0x3c, 0x65, 0xcb, 0xbf, 0x19, 0xac, 0x22, 0xc1, 0x2b, 0x58, 0x28, 0xc9, 0xb2, 0xf8, 0xde, 0x42, 0x49, 0x44, 0xb8, 0xe8, 0x44, 0x3b, 0x75, 0x88, 0x31, 0x6e, 0x61, 0xd3, 0x9f, 0x4c, 0x47, 0xdf, 0x86, 0xb6, 0x26, 0xcb, 0x96, 0x31, 0x35, 0x47, 0xa1, 0x42, 0x92, 0x6b, 0xac, 0xea, 0xbd, 0x32, 0x1d, 0xbb, 0x18, 0x2b, 0x66, 0x08, 0x3f, 0x40, 0x21, 0xa4, 0xb4, 0xe4, 0x1c, 0x5b, 0x6d, 0xb3, 0xdd, 0xe6, 0x70, 0xfd, 0x3c, 0xda, 0xfd, 0xc8, 0xf9, 0x54, 0x10, 0x5c, 0xa8, 0x56, 0x1c, 0xc9, 0xb1, 0xfc, 0x85, 0x8b, 0x87, 0x80, 0x79, 0xca, 0x96, 0x7f, 0x32, 0x28, 0xd2, 0x65, 0x2c, 0xe1, 0x95, 0xf3, 0x96, 0xc8, 0xa7, 0x21, 0x47, 0x47, 0xff, 0x31, 0x7c, 0x07, 0x90, 0xf4, 0xd9, 0xe1, 0x8c, 0x04, 0xef, 0x8d, 0xf2, 0x4f, 0xc9, 0x60, 0x8c, 0xf1, 0x0d, 0xac, 0x9c, 0x17, 0x9e, 0x92, 0xa7, 0x51, 0x20, 0x83, 0xa2, 0x31, 0x43, 0xe7, 0xed, 0x53, 0x74, 0xb3, 
0xe6, 0x93, 0x0c, 0x3d, 0x7a, 0xe3, 0xbc, 0xd0, 0x9f, 0x8d, 0x24, 0x96, 0x8f, 0x3d, 0xce, 0xa4, 0xbc, 0x83, 0x55, 0x34, 0x81, 0xd7, 0xb0, 0x1c, 0xac, 0x4e, 0x73, 0x86, 0x30, 0x3c, 0x2a, 0xe9, 0x51, 0x0c, 0xda, 0xc7, 0xd9, 0x2e, 0xf9, 0x24, 0x0f, 0x1f, 0xa1, 0xf8, 0x3e, 0xfe, 0x00, 0xde, 0xc2, 0xe6, 0x0b, 0xf9, 0xa4, 0x1c, 0x9e, 0x7f, 0x31, 0x2d, 0xd0, 0xcd, 0xeb, 0x19, 0x09, 0x3b, 0x51, 0xe7, 0x71, 0xd9, 0xee, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xf1, 0x00, 0x45, 0x96, 0xb1, 0x02, 0x00, 0x00, }
NewProfileClient
identifier_name
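A hedged sketch of calling the generated client above: dial the service, build a Request with the HotelIds and Locale fields defined in this file, and print the hotels from the Result. The address, hotel IDs, and locale are placeholders, and the import path is assumed from the proto source path; grpc.Dial with grpc.WithInsecure matches the grpc-go era this generated file targets.

// A minimal client sketch, assuming the generated package is importable
// at the path below and a server is listening on localhost:50051.
package main

import (
	"fmt"
	"log"

	profile "github.com/micro/examples/booking/srv/profile/proto/profile"
	context "golang.org/x/net/context"
	grpc "google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := profile.NewProfileClient(conn)
	res, err := client.GetProfiles(context.Background(), &profile.Request{
		HotelIds: []string{"1", "2"}, // placeholder IDs
		Locale:   "en",
	})
	if err != nil {
		log.Fatalf("GetProfiles: %v", err)
	}
	for _, h := range res.GetHotels() {
		fmt.Println(h.GetId(), h.GetName())
	}
}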
profile.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: github.com/micro/examples/booking/srv/profile/proto/profile.proto /* Package profile is a generated protocol buffer package. It is generated from these files: github.com/micro/examples/booking/srv/profile/proto/profile.proto It has these top-level messages: Request Result Hotel Address Image */ package profile import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import ( context "golang.org/x/net/context" grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Request struct { HotelIds []string `protobuf:"bytes,1,rep,name=hotelIds" json:"hotelIds,omitempty"` Locale string `protobuf:"bytes,2,opt,name=locale" json:"locale,omitempty"` } func (m *Request) Reset() { *m = Request{} } func (m *Request) String() string { return proto.CompactTextString(m) } func (*Request) ProtoMessage() {} func (*Request) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } func (m *Request) GetHotelIds() []string { if m != nil { return m.HotelIds } return nil } func (m *Request) GetLocale() string { if m != nil { return m.Locale } return "" } type Result struct { Hotels []*Hotel `protobuf:"bytes,1,rep,name=hotels" json:"hotels,omitempty"` } func (m *Result) Reset() { *m = Result{} } func (m *Result) String() string { return proto.CompactTextString(m) } func (*Result) ProtoMessage() {} func (*Result) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } func (m *Result) GetHotels() []*Hotel { if m != nil { return m.Hotels } return nil } type Hotel struct { Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` PhoneNumber string `protobuf:"bytes,3,opt,name=phoneNumber" json:"phoneNumber,omitempty"` Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` Address *Address `protobuf:"bytes,5,opt,name=address" json:"address,omitempty"` Images []*Image `protobuf:"bytes,6,rep,name=images" json:"images,omitempty"` } func (m *Hotel) Reset() { *m = Hotel{} } func (m *Hotel) String() string { return proto.CompactTextString(m) } func (*Hotel) ProtoMessage() {} func (*Hotel) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } func (m *Hotel) GetId() string { if m != nil { return m.Id } return "" } func (m *Hotel) GetName() string { if m != nil { return m.Name } return "" } func (m *Hotel) GetPhoneNumber() string { if m != nil { return m.PhoneNumber } return "" } func (m *Hotel) GetDescription() string { if m != nil { return m.Description } return "" } func (m *Hotel) GetAddress() *Address { if m != nil { return m.Address } return nil } func (m *Hotel) GetImages() []*Image { if m != nil { return m.Images } return nil } type Address struct { StreetNumber string `protobuf:"bytes,1,opt,name=streetNumber" json:"streetNumber,omitempty"` StreetName string `protobuf:"bytes,2,opt,name=streetName" json:"streetName,omitempty"` City string `protobuf:"bytes,3,opt,name=city" json:"city,omitempty"` State string `protobuf:"bytes,4,opt,name=state" json:"state,omitempty"` Country 
string `protobuf:"bytes,5,opt,name=country" json:"country,omitempty"` PostalCode string `protobuf:"bytes,6,opt,name=postalCode" json:"postalCode,omitempty"` } func (m *Address) Reset() { *m = Address{} } func (m *Address) String() string { return proto.CompactTextString(m) } func (*Address) ProtoMessage() {} func (*Address) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } func (m *Address) GetStreetNumber() string { if m != nil { return m.StreetNumber } return "" } func (m *Address) GetStreetName() string { if m != nil { return m.StreetName } return "" } func (m *Address) GetCity() string { if m != nil { return m.City } return "" } func (m *Address) GetState() string { if m != nil { return m.State } return "" } func (m *Address) GetCountry() string { if m != nil { return m.Country } return "" } func (m *Address) GetPostalCode() string { if m != nil { return m.PostalCode } return "" } type Image struct { Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` Default bool `protobuf:"varint,2,opt,name=default" json:"default,omitempty"` } func (m *Image) Reset() { *m = Image{} } func (m *Image) String() string { return proto.CompactTextString(m) } func (*Image) ProtoMessage() {} func (*Image) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } func (m *Image) GetUrl() string { if m != nil { return m.Url } return "" } func (m *Image) GetDefault() bool { if m != nil { return m.Default } return false } func init() { proto.RegisterType((*Request)(nil), "profile.Request") proto.RegisterType((*Result)(nil), "profile.Result") proto.RegisterType((*Hotel)(nil), "profile.Hotel") proto.RegisterType((*Address)(nil), "profile.Address") proto.RegisterType((*Image)(nil), "profile.Image") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // Client API for Profile service type ProfileClient interface { GetProfiles(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Result, error) }
return &profileClient{cc} } func (c *profileClient) GetProfiles(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Result, error) { out := new(Result) err := grpc.Invoke(ctx, "/profile.Profile/GetProfiles", in, out, c.cc, opts...) if err != nil { return nil, err } return out, nil } // Server API for Profile service type ProfileServer interface { GetProfiles(context.Context, *Request) (*Result, error) } func RegisterProfileServer(s *grpc.Server, srv ProfileServer) { s.RegisterService(&_Profile_serviceDesc, srv) } func _Profile_GetProfiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(Request) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(ProfileServer).GetProfiles(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/profile.Profile/GetProfiles", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(ProfileServer).GetProfiles(ctx, req.(*Request)) } return interceptor(ctx, in, info, handler) } var _Profile_serviceDesc = grpc.ServiceDesc{ ServiceName: "profile.Profile", HandlerType: (*ProfileServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "GetProfiles", Handler: _Profile_GetProfiles_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "github.com/micro/examples/booking/srv/profile/proto/profile.proto", } func init() { proto.RegisterFile("github.com/micro/examples/booking/srv/profile/proto/profile.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ // 397 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xc1, 0x6e, 0xd4, 0x30, 0x10, 0x86, 0x95, 0xdd, 0x6e, 0xd2, 0x9d, 0x45, 0xa5, 0x1a, 0x21, 0x64, 0xf5, 0x80, 0x56, 0x39, 0xa0, 0x15, 0x87, 0x4d, 0xb5, 0x3d, 0x22, 0x0e, 0x15, 0x07, 0xe8, 0x05, 0x21, 0xbf, 0x81, 0x13, 0x4f, 0x77, 0x2d, 0x9c, 0x38, 0xd8, 0x0e, 0xa2, 0x8f, 0xc5, 0x33, 0xf0, 0x62, 0xc8, 0x8e, 0xd3, 0x0d, 0x3d, 0x79, 0xfe, 0x6f, 0xc6, 0x9e, 0xf9, 0xad, 0x81, 0xfb, 0xa3, 0xf2, 0xa7, 0xa1, 0xde, 0x37, 0xa6, 0xad, 0x5a, 0xd5, 0x58, 0x53, 0xd1, 0x6f, 0xd1, 0xf6, 0x9a, 0x5c, 0x55, 0x1b, 0xf3, 0x43, 0x75, 0xc7, 0xca, 0xd9, 0x5f, 0x55, 0x6f, 0xcd, 0xa3, 0xd2, 0x14, 0x4e, 0x6f, 0x26, 0xb5, 0x8f, 0x0a, 0x8b, 0x24, 0xcb, 0x4f, 0x50, 0x70, 0xfa, 0x39, 0x90, 0xf3, 0x78, 0x03, 0x97, 0x27, 0xe3, 0x49, 0x3f, 0x48, 0xc7, 0xb2, 0xed, 0x72, 0xb7, 0xe6, 0xcf, 0x1a, 0xdf, 0x42, 0xae, 0x4d, 0x23, 0x34, 0xb1, 0xc5, 0x36, 0xdb, 0xad, 0x79, 0x52, 0xe5, 0x2d, 0xe4, 0x9c, 0xdc, 0xa0, 0x3d, 0xbe, 0x87, 0x3c, 0x56, 0x8f, 0x77, 0x37, 0x87, 0xab, 0xfd, 0xd4, 0xf1, 0x6b, 0xc0, 0x3c, 0x65, 0xcb, 0xbf, 0x19, 0xac, 0x22, 0xc1, 0x2b, 0x58, 0x28, 0xc9, 0xb2, 0xf8, 0xde, 0x42, 0x49, 0x44, 0xb8, 0xe8, 0x44, 0x3b, 0x75, 0x88, 0x31, 0x6e, 0x61, 0xd3, 0x9f, 0x4c, 0x47, 0xdf, 0x86, 0xb6, 0x26, 0xcb, 0x96, 0x31, 0x35, 0x47, 0xa1, 0x42, 0x92, 0x6b, 0xac, 0xea, 0xbd, 0x32, 0x1d, 0xbb, 0x18, 0x2b, 0x66, 0x08, 0x3f, 0x40, 0x21, 0xa4, 0xb4, 0xe4, 0x1c, 0x5b, 0x6d, 0xb3, 0xdd, 0xe6, 0x70, 0xfd, 0x3c, 0xda, 0xfd, 0xc8, 0xf9, 0x54, 0x10, 0x5c, 0xa8, 0x56, 0x1c, 0xc9, 0xb1, 0xfc, 0x85, 0x8b, 0x87, 0x80, 0x79, 0xca, 0x96, 0x7f, 0x32, 0x28, 0xd2, 0x65, 0x2c, 0xe1, 0x95, 0xf3, 0x96, 0xc8, 0xa7, 0x21, 0x47, 0x47, 0xff, 0x31, 0x7c, 0x07, 0x90, 0xf4, 0xd9, 0xe1, 0x8c, 0x04, 0xef, 0x8d, 0xf2, 0x4f, 0xc9, 0x60, 0x8c, 0xf1, 0x0d, 0xac, 0x9c, 0x17, 0x9e, 0x92, 0xa7, 0x51, 0x20, 0x83, 0xa2, 0x31, 0x43, 0xe7, 0xed, 0x53, 0x74, 0xb3, 0xe6, 0x93, 0x0c, 0x3d, 0x7a, 0xe3, 
0xbc, 0xd0, 0x9f, 0x8d, 0x24, 0x96, 0x8f, 0x3d, 0xce, 0xa4, 0xbc, 0x83, 0x55, 0x34, 0x81, 0xd7, 0xb0, 0x1c, 0xac, 0x4e, 0x73, 0x86, 0x30, 0x3c, 0x2a, 0xe9, 0x51, 0x0c, 0xda, 0xc7, 0xd9, 0x2e, 0xf9, 0x24, 0x0f, 0x1f, 0xa1, 0xf8, 0x3e, 0xfe, 0x00, 0xde, 0xc2, 0xe6, 0x0b, 0xf9, 0xa4, 0x1c, 0x9e, 0x7f, 0x31, 0x2d, 0xd0, 0xcd, 0xeb, 0x19, 0x09, 0x3b, 0x51, 0xe7, 0x71, 0xd9, 0xee, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xf1, 0x00, 0x45, 0x96, 0xb1, 0x02, 0x00, 0x00, }
type profileClient struct { cc *grpc.ClientConn } func NewProfileClient(cc *grpc.ClientConn) ProfileClient {
random_line_split
profile.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: github.com/micro/examples/booking/srv/profile/proto/profile.proto /* Package profile is a generated protocol buffer package. It is generated from these files: github.com/micro/examples/booking/srv/profile/proto/profile.proto It has these top-level messages: Request Result Hotel Address Image */ package profile import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import ( context "golang.org/x/net/context" grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Request struct { HotelIds []string `protobuf:"bytes,1,rep,name=hotelIds" json:"hotelIds,omitempty"` Locale string `protobuf:"bytes,2,opt,name=locale" json:"locale,omitempty"` } func (m *Request) Reset() { *m = Request{} } func (m *Request) String() string { return proto.CompactTextString(m) } func (*Request) ProtoMessage() {} func (*Request) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } func (m *Request) GetHotelIds() []string { if m != nil { return m.HotelIds } return nil } func (m *Request) GetLocale() string { if m != nil { return m.Locale } return "" } type Result struct { Hotels []*Hotel `protobuf:"bytes,1,rep,name=hotels" json:"hotels,omitempty"` } func (m *Result) Reset() { *m = Result{} } func (m *Result) String() string { return proto.CompactTextString(m) } func (*Result) ProtoMessage() {} func (*Result) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } func (m *Result) GetHotels() []*Hotel { if m != nil { return m.Hotels } return nil } type Hotel struct { Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` PhoneNumber string `protobuf:"bytes,3,opt,name=phoneNumber" json:"phoneNumber,omitempty"` Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` Address *Address `protobuf:"bytes,5,opt,name=address" json:"address,omitempty"` Images []*Image `protobuf:"bytes,6,rep,name=images" json:"images,omitempty"` } func (m *Hotel) Reset() { *m = Hotel{} } func (m *Hotel) String() string { return proto.CompactTextString(m) } func (*Hotel) ProtoMessage() {} func (*Hotel) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } func (m *Hotel) GetId() string { if m != nil
return "" } func (m *Hotel) GetName() string { if m != nil { return m.Name } return "" } func (m *Hotel) GetPhoneNumber() string { if m != nil { return m.PhoneNumber } return "" } func (m *Hotel) GetDescription() string { if m != nil { return m.Description } return "" } func (m *Hotel) GetAddress() *Address { if m != nil { return m.Address } return nil } func (m *Hotel) GetImages() []*Image { if m != nil { return m.Images } return nil } type Address struct { StreetNumber string `protobuf:"bytes,1,opt,name=streetNumber" json:"streetNumber,omitempty"` StreetName string `protobuf:"bytes,2,opt,name=streetName" json:"streetName,omitempty"` City string `protobuf:"bytes,3,opt,name=city" json:"city,omitempty"` State string `protobuf:"bytes,4,opt,name=state" json:"state,omitempty"` Country string `protobuf:"bytes,5,opt,name=country" json:"country,omitempty"` PostalCode string `protobuf:"bytes,6,opt,name=postalCode" json:"postalCode,omitempty"` } func (m *Address) Reset() { *m = Address{} } func (m *Address) String() string { return proto.CompactTextString(m) } func (*Address) ProtoMessage() {} func (*Address) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } func (m *Address) GetStreetNumber() string { if m != nil { return m.StreetNumber } return "" } func (m *Address) GetStreetName() string { if m != nil { return m.StreetName } return "" } func (m *Address) GetCity() string { if m != nil { return m.City } return "" } func (m *Address) GetState() string { if m != nil { return m.State } return "" } func (m *Address) GetCountry() string { if m != nil { return m.Country } return "" } func (m *Address) GetPostalCode() string { if m != nil { return m.PostalCode } return "" } type Image struct { Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` Default bool `protobuf:"varint,2,opt,name=default" json:"default,omitempty"` } func (m *Image) Reset() { *m = Image{} } func (m *Image) String() string { return proto.CompactTextString(m) } func (*Image) ProtoMessage() {} func (*Image) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } func (m *Image) GetUrl() string { if m != nil { return m.Url } return "" } func (m *Image) GetDefault() bool { if m != nil { return m.Default } return false } func init() { proto.RegisterType((*Request)(nil), "profile.Request") proto.RegisterType((*Result)(nil), "profile.Result") proto.RegisterType((*Hotel)(nil), "profile.Hotel") proto.RegisterType((*Address)(nil), "profile.Address") proto.RegisterType((*Image)(nil), "profile.Image") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // Client API for Profile service type ProfileClient interface { GetProfiles(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Result, error) } type profileClient struct { cc *grpc.ClientConn } func NewProfileClient(cc *grpc.ClientConn) ProfileClient { return &profileClient{cc} } func (c *profileClient) GetProfiles(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Result, error) { out := new(Result) err := grpc.Invoke(ctx, "/profile.Profile/GetProfiles", in, out, c.cc, opts...) 
if err != nil { return nil, err } return out, nil } // Server API for Profile service type ProfileServer interface { GetProfiles(context.Context, *Request) (*Result, error) } func RegisterProfileServer(s *grpc.Server, srv ProfileServer) { s.RegisterService(&_Profile_serviceDesc, srv) } func _Profile_GetProfiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(Request) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(ProfileServer).GetProfiles(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/profile.Profile/GetProfiles", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(ProfileServer).GetProfiles(ctx, req.(*Request)) } return interceptor(ctx, in, info, handler) } var _Profile_serviceDesc = grpc.ServiceDesc{ ServiceName: "profile.Profile", HandlerType: (*ProfileServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "GetProfiles", Handler: _Profile_GetProfiles_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "github.com/micro/examples/booking/srv/profile/proto/profile.proto", } func init() { proto.RegisterFile("github.com/micro/examples/booking/srv/profile/proto/profile.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ // 397 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xc1, 0x6e, 0xd4, 0x30, 0x10, 0x86, 0x95, 0xdd, 0x6e, 0xd2, 0x9d, 0x45, 0xa5, 0x1a, 0x21, 0x64, 0xf5, 0x80, 0x56, 0x39, 0xa0, 0x15, 0x87, 0x4d, 0xb5, 0x3d, 0x22, 0x0e, 0x15, 0x07, 0xe8, 0x05, 0x21, 0xbf, 0x81, 0x13, 0x4f, 0x77, 0x2d, 0x9c, 0x38, 0xd8, 0x0e, 0xa2, 0x8f, 0xc5, 0x33, 0xf0, 0x62, 0xc8, 0x8e, 0xd3, 0x0d, 0x3d, 0x79, 0xfe, 0x6f, 0xc6, 0x9e, 0xf9, 0xad, 0x81, 0xfb, 0xa3, 0xf2, 0xa7, 0xa1, 0xde, 0x37, 0xa6, 0xad, 0x5a, 0xd5, 0x58, 0x53, 0xd1, 0x6f, 0xd1, 0xf6, 0x9a, 0x5c, 0x55, 0x1b, 0xf3, 0x43, 0x75, 0xc7, 0xca, 0xd9, 0x5f, 0x55, 0x6f, 0xcd, 0xa3, 0xd2, 0x14, 0x4e, 0x6f, 0x26, 0xb5, 0x8f, 0x0a, 0x8b, 0x24, 0xcb, 0x4f, 0x50, 0x70, 0xfa, 0x39, 0x90, 0xf3, 0x78, 0x03, 0x97, 0x27, 0xe3, 0x49, 0x3f, 0x48, 0xc7, 0xb2, 0xed, 0x72, 0xb7, 0xe6, 0xcf, 0x1a, 0xdf, 0x42, 0xae, 0x4d, 0x23, 0x34, 0xb1, 0xc5, 0x36, 0xdb, 0xad, 0x79, 0x52, 0xe5, 0x2d, 0xe4, 0x9c, 0xdc, 0xa0, 0x3d, 0xbe, 0x87, 0x3c, 0x56, 0x8f, 0x77, 0x37, 0x87, 0xab, 0xfd, 0xd4, 0xf1, 0x6b, 0xc0, 0x3c, 0x65, 0xcb, 0xbf, 0x19, 0xac, 0x22, 0xc1, 0x2b, 0x58, 0x28, 0xc9, 0xb2, 0xf8, 0xde, 0x42, 0x49, 0x44, 0xb8, 0xe8, 0x44, 0x3b, 0x75, 0x88, 0x31, 0x6e, 0x61, 0xd3, 0x9f, 0x4c, 0x47, 0xdf, 0x86, 0xb6, 0x26, 0xcb, 0x96, 0x31, 0x35, 0x47, 0xa1, 0x42, 0x92, 0x6b, 0xac, 0xea, 0xbd, 0x32, 0x1d, 0xbb, 0x18, 0x2b, 0x66, 0x08, 0x3f, 0x40, 0x21, 0xa4, 0xb4, 0xe4, 0x1c, 0x5b, 0x6d, 0xb3, 0xdd, 0xe6, 0x70, 0xfd, 0x3c, 0xda, 0xfd, 0xc8, 0xf9, 0x54, 0x10, 0x5c, 0xa8, 0x56, 0x1c, 0xc9, 0xb1, 0xfc, 0x85, 0x8b, 0x87, 0x80, 0x79, 0xca, 0x96, 0x7f, 0x32, 0x28, 0xd2, 0x65, 0x2c, 0xe1, 0x95, 0xf3, 0x96, 0xc8, 0xa7, 0x21, 0x47, 0x47, 0xff, 0x31, 0x7c, 0x07, 0x90, 0xf4, 0xd9, 0xe1, 0x8c, 0x04, 0xef, 0x8d, 0xf2, 0x4f, 0xc9, 0x60, 0x8c, 0xf1, 0x0d, 0xac, 0x9c, 0x17, 0x9e, 0x92, 0xa7, 0x51, 0x20, 0x83, 0xa2, 0x31, 0x43, 0xe7, 0xed, 0x53, 0x74, 0xb3, 0xe6, 0x93, 0x0c, 0x3d, 0x7a, 0xe3, 0xbc, 0xd0, 0x9f, 0x8d, 0x24, 0x96, 0x8f, 0x3d, 0xce, 0xa4, 0xbc, 0x83, 0x55, 0x34, 0x81, 0xd7, 0xb0, 0x1c, 0xac, 0x4e, 0x73, 0x86, 0x30, 0x3c, 0x2a, 0xe9, 0x51, 0x0c, 0xda, 0xc7, 0xd9, 0x2e, 0xf9, 0x24, 0x0f, 0x1f, 0xa1, 0xf8, 0x3e, 0xfe, 
0x00, 0xde, 0xc2, 0xe6, 0x0b, 0xf9, 0xa4, 0x1c, 0x9e, 0x7f, 0x31, 0x2d, 0xd0, 0xcd, 0xeb, 0x19, 0x09, 0x3b, 0x51, 0xe7, 0x71, 0xd9, 0xee, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xf1, 0x00, 0x45, 0x96, 0xb1, 0x02, 0x00, 0x00, }
{ return m.Id }
conditional_block
profile.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: github.com/micro/examples/booking/srv/profile/proto/profile.proto /* Package profile is a generated protocol buffer package. It is generated from these files: github.com/micro/examples/booking/srv/profile/proto/profile.proto It has these top-level messages: Request Result Hotel Address Image */ package profile import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import ( context "golang.org/x/net/context" grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Request struct { HotelIds []string `protobuf:"bytes,1,rep,name=hotelIds" json:"hotelIds,omitempty"` Locale string `protobuf:"bytes,2,opt,name=locale" json:"locale,omitempty"` } func (m *Request) Reset() { *m = Request{} } func (m *Request) String() string { return proto.CompactTextString(m) } func (*Request) ProtoMessage() {} func (*Request) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } func (m *Request) GetHotelIds() []string { if m != nil { return m.HotelIds } return nil } func (m *Request) GetLocale() string { if m != nil { return m.Locale } return "" } type Result struct { Hotels []*Hotel `protobuf:"bytes,1,rep,name=hotels" json:"hotels,omitempty"` } func (m *Result) Reset() { *m = Result{} } func (m *Result) String() string { return proto.CompactTextString(m) } func (*Result) ProtoMessage() {} func (*Result) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } func (m *Result) GetHotels() []*Hotel { if m != nil { return m.Hotels } return nil } type Hotel struct { Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` PhoneNumber string `protobuf:"bytes,3,opt,name=phoneNumber" json:"phoneNumber,omitempty"` Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` Address *Address `protobuf:"bytes,5,opt,name=address" json:"address,omitempty"` Images []*Image `protobuf:"bytes,6,rep,name=images" json:"images,omitempty"` } func (m *Hotel) Reset() { *m = Hotel{} } func (m *Hotel) String() string { return proto.CompactTextString(m) } func (*Hotel) ProtoMessage() {} func (*Hotel) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } func (m *Hotel) GetId() string { if m != nil { return m.Id } return "" } func (m *Hotel) GetName() string { if m != nil { return m.Name } return "" } func (m *Hotel) GetPhoneNumber() string { if m != nil { return m.PhoneNumber } return "" } func (m *Hotel) GetDescription() string { if m != nil { return m.Description } return "" } func (m *Hotel) GetAddress() *Address { if m != nil { return m.Address } return nil } func (m *Hotel) GetImages() []*Image { if m != nil { return m.Images } return nil } type Address struct { StreetNumber string `protobuf:"bytes,1,opt,name=streetNumber" json:"streetNumber,omitempty"` StreetName string `protobuf:"bytes,2,opt,name=streetName" json:"streetName,omitempty"` City string `protobuf:"bytes,3,opt,name=city" json:"city,omitempty"` State string `protobuf:"bytes,4,opt,name=state" json:"state,omitempty"` Country 
string `protobuf:"bytes,5,opt,name=country" json:"country,omitempty"` PostalCode string `protobuf:"bytes,6,opt,name=postalCode" json:"postalCode,omitempty"` } func (m *Address) Reset() { *m = Address{} } func (m *Address) String() string { return proto.CompactTextString(m) } func (*Address) ProtoMessage() {} func (*Address) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } func (m *Address) GetStreetNumber() string { if m != nil { return m.StreetNumber } return "" } func (m *Address) GetStreetName() string { if m != nil { return m.StreetName } return "" } func (m *Address) GetCity() string
func (m *Address) GetState() string { if m != nil { return m.State } return "" } func (m *Address) GetCountry() string { if m != nil { return m.Country } return "" } func (m *Address) GetPostalCode() string { if m != nil { return m.PostalCode } return "" } type Image struct { Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` Default bool `protobuf:"varint,2,opt,name=default" json:"default,omitempty"` } func (m *Image) Reset() { *m = Image{} } func (m *Image) String() string { return proto.CompactTextString(m) } func (*Image) ProtoMessage() {} func (*Image) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } func (m *Image) GetUrl() string { if m != nil { return m.Url } return "" } func (m *Image) GetDefault() bool { if m != nil { return m.Default } return false } func init() { proto.RegisterType((*Request)(nil), "profile.Request") proto.RegisterType((*Result)(nil), "profile.Result") proto.RegisterType((*Hotel)(nil), "profile.Hotel") proto.RegisterType((*Address)(nil), "profile.Address") proto.RegisterType((*Image)(nil), "profile.Image") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // Client API for Profile service type ProfileClient interface { GetProfiles(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Result, error) } type profileClient struct { cc *grpc.ClientConn } func NewProfileClient(cc *grpc.ClientConn) ProfileClient { return &profileClient{cc} } func (c *profileClient) GetProfiles(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Result, error) { out := new(Result) err := grpc.Invoke(ctx, "/profile.Profile/GetProfiles", in, out, c.cc, opts...) 
if err != nil { return nil, err } return out, nil } // Server API for Profile service type ProfileServer interface { GetProfiles(context.Context, *Request) (*Result, error) } func RegisterProfileServer(s *grpc.Server, srv ProfileServer) { s.RegisterService(&_Profile_serviceDesc, srv) } func _Profile_GetProfiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(Request) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(ProfileServer).GetProfiles(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/profile.Profile/GetProfiles", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(ProfileServer).GetProfiles(ctx, req.(*Request)) } return interceptor(ctx, in, info, handler) } var _Profile_serviceDesc = grpc.ServiceDesc{ ServiceName: "profile.Profile", HandlerType: (*ProfileServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "GetProfiles", Handler: _Profile_GetProfiles_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "github.com/micro/examples/booking/srv/profile/proto/profile.proto", } func init() { proto.RegisterFile("github.com/micro/examples/booking/srv/profile/proto/profile.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ // 397 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xc1, 0x6e, 0xd4, 0x30, 0x10, 0x86, 0x95, 0xdd, 0x6e, 0xd2, 0x9d, 0x45, 0xa5, 0x1a, 0x21, 0x64, 0xf5, 0x80, 0x56, 0x39, 0xa0, 0x15, 0x87, 0x4d, 0xb5, 0x3d, 0x22, 0x0e, 0x15, 0x07, 0xe8, 0x05, 0x21, 0xbf, 0x81, 0x13, 0x4f, 0x77, 0x2d, 0x9c, 0x38, 0xd8, 0x0e, 0xa2, 0x8f, 0xc5, 0x33, 0xf0, 0x62, 0xc8, 0x8e, 0xd3, 0x0d, 0x3d, 0x79, 0xfe, 0x6f, 0xc6, 0x9e, 0xf9, 0xad, 0x81, 0xfb, 0xa3, 0xf2, 0xa7, 0xa1, 0xde, 0x37, 0xa6, 0xad, 0x5a, 0xd5, 0x58, 0x53, 0xd1, 0x6f, 0xd1, 0xf6, 0x9a, 0x5c, 0x55, 0x1b, 0xf3, 0x43, 0x75, 0xc7, 0xca, 0xd9, 0x5f, 0x55, 0x6f, 0xcd, 0xa3, 0xd2, 0x14, 0x4e, 0x6f, 0x26, 0xb5, 0x8f, 0x0a, 0x8b, 0x24, 0xcb, 0x4f, 0x50, 0x70, 0xfa, 0x39, 0x90, 0xf3, 0x78, 0x03, 0x97, 0x27, 0xe3, 0x49, 0x3f, 0x48, 0xc7, 0xb2, 0xed, 0x72, 0xb7, 0xe6, 0xcf, 0x1a, 0xdf, 0x42, 0xae, 0x4d, 0x23, 0x34, 0xb1, 0xc5, 0x36, 0xdb, 0xad, 0x79, 0x52, 0xe5, 0x2d, 0xe4, 0x9c, 0xdc, 0xa0, 0x3d, 0xbe, 0x87, 0x3c, 0x56, 0x8f, 0x77, 0x37, 0x87, 0xab, 0xfd, 0xd4, 0xf1, 0x6b, 0xc0, 0x3c, 0x65, 0xcb, 0xbf, 0x19, 0xac, 0x22, 0xc1, 0x2b, 0x58, 0x28, 0xc9, 0xb2, 0xf8, 0xde, 0x42, 0x49, 0x44, 0xb8, 0xe8, 0x44, 0x3b, 0x75, 0x88, 0x31, 0x6e, 0x61, 0xd3, 0x9f, 0x4c, 0x47, 0xdf, 0x86, 0xb6, 0x26, 0xcb, 0x96, 0x31, 0x35, 0x47, 0xa1, 0x42, 0x92, 0x6b, 0xac, 0xea, 0xbd, 0x32, 0x1d, 0xbb, 0x18, 0x2b, 0x66, 0x08, 0x3f, 0x40, 0x21, 0xa4, 0xb4, 0xe4, 0x1c, 0x5b, 0x6d, 0xb3, 0xdd, 0xe6, 0x70, 0xfd, 0x3c, 0xda, 0xfd, 0xc8, 0xf9, 0x54, 0x10, 0x5c, 0xa8, 0x56, 0x1c, 0xc9, 0xb1, 0xfc, 0x85, 0x8b, 0x87, 0x80, 0x79, 0xca, 0x96, 0x7f, 0x32, 0x28, 0xd2, 0x65, 0x2c, 0xe1, 0x95, 0xf3, 0x96, 0xc8, 0xa7, 0x21, 0x47, 0x47, 0xff, 0x31, 0x7c, 0x07, 0x90, 0xf4, 0xd9, 0xe1, 0x8c, 0x04, 0xef, 0x8d, 0xf2, 0x4f, 0xc9, 0x60, 0x8c, 0xf1, 0x0d, 0xac, 0x9c, 0x17, 0x9e, 0x92, 0xa7, 0x51, 0x20, 0x83, 0xa2, 0x31, 0x43, 0xe7, 0xed, 0x53, 0x74, 0xb3, 0xe6, 0x93, 0x0c, 0x3d, 0x7a, 0xe3, 0xbc, 0xd0, 0x9f, 0x8d, 0x24, 0x96, 0x8f, 0x3d, 0xce, 0xa4, 0xbc, 0x83, 0x55, 0x34, 0x81, 0xd7, 0xb0, 0x1c, 0xac, 0x4e, 0x73, 0x86, 0x30, 0x3c, 0x2a, 0xe9, 0x51, 0x0c, 0xda, 0xc7, 0xd9, 0x2e, 0xf9, 0x24, 0x0f, 0x1f, 0xa1, 0xf8, 0x3e, 0xfe, 
0x00, 0xde, 0xc2, 0xe6, 0x0b, 0xf9, 0xa4, 0x1c, 0x9e, 0x7f, 0x31, 0x2d, 0xd0, 0xcd, 0xeb, 0x19, 0x09, 0x3b, 0x51, 0xe7, 0x71, 0xd9, 0xee, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xf1, 0x00, 0x45, 0x96, 0xb1, 0x02, 0x00, 0x00, }
{ if m != nil { return m.City } return "" }
identifier_body
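The record above closes with its fim_type label, identifier_body: the held-out middle is the body of one of the generated City accessors, and concatenating prefix + middle + suffix restores the original Go source exactly. A minimal sketch of that reassembly in Python, assuming each row has been loaded as a dict carrying the dataset's prefix, middle, suffix, and fim_type fields (the loader itself is hypothetical):

def reassemble(record):
    # FIM rows split one source file into three contiguous pieces;
    # concatenating them in order restores the file character-for-character.
    return record["prefix"] + record["middle"] + record["suffix"]

# For the row above, record["middle"] is the accessor body
# '{ if m != nil { return m.City } return "" }'.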
default.py
# -*- coding: utf-8 -*- import locale locale.setlocale(locale.LC_ALL, 'es_CL.UTF8') #@cache(request.env.path_info, time_expire=150, cache_model=cache.ram) def index(): del response.headers['Cache-Control'] del response.headers['Pragma'] del response.headers['Expires'] response.headers['Cache-Control'] = 'max-age=300' # redirecciona a feedburner (los rss se generan en c=feed) if request.extension == 'rss': if request.args(0) == None: cat = '' else: cat = request.args(0) return redirect('http://feeds.feedburner.com/blogchile%s' % cat) # verificamos si pasó por c=default f=mobile y activó el bit de sesión if session.mobile: response.view = 'default/index.mobi' response.files.append(URL('static','css/blogchilemobile.css')) else: ''' si no hay bit de sesión mobile, establece el caché del browser; esto es porque si no, el caché impediría cambiar al modo mobile (bug de flojo)''' response.files.append(URL('static','js/jquery.cycle.all.min.js')) if request.args(0): catslug = request.args(0) response.title = 'Blog Chile: %s' % catslug.capitalize().replace('-',' ') response.meta.keywords = catslug.replace('-',' ') response.meta.description = "Blog de %s en Chile, Blogósfera Chilena, Blogs Chilenos," % catslug.capitalize().replace('-',' ') if catslug in ['medio-ambiente','animales']: return redirect(URL(r=request,f='index',args='naturaleza'),301) else: response.title = 'Blog Chile: Portada' response.meta.description = 'Blogs de Chile: Últimas publicaciones de noticias, tecnología, opinión, deporte, diseño, ocio, música, política, arte y más en la blogósfera chilena' response.meta.keywords = 'blogs chile, turismo chile, blogs chilenos' #if request.extension == 'rss': # return redirect('http://feeds.feedburner.com/blogosfera/dDKt') try: # muestra un response.flash con la descripción de cada categoría, si es que la hay (en db.feed) if request.args: descrip = db(db.categoria.slug == request.args(0)).select(db.categoria.description)[0].description if descrip != None: response.flash = descrip except: pass # aviso temporal de WIP. chk según sesión de conexión en el sitio """ if session.avisado == False: response.flash = XML('El Sitio está temporalmente bajo algunos ajustes extraordinarios; disculpa si te ocasionan alguna molestia: %s ' % session.avisado) session.avisado = True """ publicaciones = LOAD(r=request,c='default',f='publicaciones.load',args=request.args,ajax=True,content='Cargando bloques...') return dict(publicaciones=publicaciones) #return dict() #@cache(request.env.path_info, time_expire=150, cache_model=cache.disk) def publicaciones(): i
.ajax: return '' from gluon.tools import prettydate from datetime import datetime if request.args: catslug_data = db(db.categoria.slug == request.args(0)).select(db.categoria.slug) for cat in catslug_data: catslug = cat.slug else: catslug = 'noticias' publicaciones = DIV() # obteniendo los feeds categorizados bajo el slug solicitado desde la url ### 1 categoría por feed """ for feedincat in db((db.categoria.slug == catslug) & (db.feed.categoria == db.categoria.id) #& (db.feed_categoria.feed == db.feed.id) #& (db.feed_categoria.is_active == True) & (db.feed.is_active == True) & (db.categoria.is_active == True) ).select(db.feed.ALL): """ feedincat_data = db((db.categoria.slug == catslug) & (db.feed.categoria == db.categoria.id) & (db.feed.is_active == True) & (db.categoria.is_active == True) ).select(db.feed.id,db.feed.title,db.feed.source) for feedincat in feedincat_data: # armando feed_bloque y la noticia de cada feed feedbox = DIV(DIV(A(feedincat.title,_href=feedincat.source,_target='_blank',_class='ui-widget-header-a'), _class = 'feed_titulo ui-widget-header ui-corner-all'), _class = 'feedbox feed_bloque izq ui-widget ui-corner-all') for n in db(db.noticia.feed == feedincat.id).select(db.noticia.ALL, orderby =~ db.noticia.id, limitby=(0,4)): try: actualizado = datetime.strptime(str(n.updated),'%Y-%m-%d %H:%M:%S') except: actualizado = n.created_on # armando la url que va en el rss #localurl = 'http://' + request.env.http_host + URL(c = 'default', f = 'blog.html', args = [n.slug,n.id], extension='html') # armando el título y enlace a la publicación; armando los bloques de publicación feedbox.append(DIV(DIV(A(n.title.lower()+'...', _name = n.slug, _href = URL(r = request, f = 'blog', args = [catslug,n.slug,n.id], extension=False), _class = 'noticia_link ui-widget-content-a', _target='_blank',extension='html'), DIV(prettydate(actualizado,T), _class='noticia_meta'), _class = 'noticia_contenido ui-widget-content ui-corner-all'), _class = 'noticia ui-widget ui-corner-all') ) #entradas.append(dict(title =unicode(n.title,'utf8'), link = localurl, description = unicode('%s (%s)' % (n.description, n.feed.title),'utf8'), created_on = request.now)) publicaciones.append(feedbox) response.js = XML('''function filtro(){ jQuery("#filtrando").keyup(function () { var filter = jQuery(this).val(), count = 0; jQuery(".feedbox .noticia, .feed_titulo").each(function () { if (jQuery(this).text().search(new RegExp(filter, "i")) < 0) { jQuery(this).addClass("hidden"); } else { jQuery(this).removeClass("hidden"); count++; } }); jQuery("#filtrado").text(count); }); } jQuery(document).ready(filtro); ''') d = dict(publicaciones=publicaciones) return response.render(d) #return dict(publicaciones=publicaciones) def elimina_tildes(s): """ Esta función sirve para eliminar las tildes del string que se le pase como parámetro. 
""" import unicodedata normalizado = ''.join((c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn')) return str(normalizado) #@cache(request.env.path_info, time_expire=1200, cache_model=cache.disk) def blog(): if request.extension!='html': request.extension = 'html' if not request.args: redirect(URL('default','index.html')) response.files.append(URL('static','css/blog.css')) #response.files.append(URL('static','js/jquery.iframe.js')) catslug = request.args(0) slugnoticia = request.args(1) #para mostrar la noticia en la url; SEO nid = request.args(2) #nid = int(request.args[len(request.args)-1]) #titulo = db.noticia[nid].title #print(type(nid)) titulo = slugnoticia.replace('-',' ') categoria = catslug response.title='%s: %s' % (categoria.capitalize(),titulo.capitalize()) #response.meta.description = '%s %s' % (response.title,db.noticia[nid].feed.title) if db.noticia(nid): shorturl = db.noticia(nid).shorturl else: shorturl = 'http://lmddgtfy.net/?q=%s, %s' % (request.args(0).title().replace('-',' '),request.args(1).title().replace('-',' ')) if 'http://lmddgtfy' in shorturl: response.flash = 'El enlace se ha perdido. Te dirigiré a una búsqueda PRIVADA usando DuckDuckGo.com. Disculpa las molestias.' if request.env.http_referer!=None: goback = A(SPAN(_class = 'icon leftarrow'), 'Regresar', _title='Volver a la página anterior', _class = 'pill button izq', _href = request.env.http_referer) else: goback = A(SPAN(_class = 'icon home'), 'Blogchile.cl', _class = 'positive primary button izq', _href = 'http://blogchile.cl/') cerrarmarco = A(SPAN(_class = 'icon rightarrow'), 'Ir al Blog', _class = 'pill negative button der', _href = shorturl, _title='Cerrar este marco y visitar el artículo en el blog de su fuente original') referer = goback #referer = DIV(goback, class='izq') #go = DIV(IFRAME(_src = shorturl, _style = 'height:90%;width:inherit;border:0;'), _id = 'godiv', _style = 'display:block;height:100%;width:100%;') blog = IFRAME(_src = shorturl, _id='blogiframe', _style='width:inherit;border:0;') d = dict(blog=blog,shorturl=shorturl,referer=referer,cerrarmarco=cerrarmarco) return response.render(d) def user(): """ exposes: http://..../[app]/default/user/login http://..../[app]/default/user/logout http://..../[app]/default/user/register http://..../[app]/default/user/profile http://..../[app]/default/user/retrieve_password http://..../[app]/default/user/change_password use @auth.requires_login() @auth.requires_membership('group name') @auth.requires_permission('read','table name',record_id) to decorate functions that need access control """ return dict(form = auth()) def sitemap(): del response.headers['Cache-Control'] del response.headers['Pragma'] del response.headers['Expires'] response.headers['Cache-Control'] = 'max-age=300' if request.extension == 'xml': sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host for cat in db((db.categoria.id>0) & (db.categoria.is_active == True)).select(db.categoria.id,db.categoria.title,db.categoria.slug): sm.append(str(TAG.url( TAG.loc(prefix,URL(r=request,c='default',f='index.html',args=[cat.slug])), TAG.changefreq('always') ))) sm.append(str(TAG.url( TAG.loc(prefix,URL(r=request,c='default',f='feed.rss',args=[cat.slug])), TAG.changefreq('always') ))) sm.append('</urlset>') return sm elif request.extension == 'html': #response.view = 'plantilla.html' sm = DIV(_id='sitemap') for cat in db((db.categoria.id>0) & 
(db.categoria.is_active==True)).select(db.categoria.id,db.categoria.title,db.categoria.slug): categorias = DIV(H2(A(cat.title.capitalize(),_href=URL(r=request,c='default',f='index.html',args=[cat.slug])))) noticias = UL() data = db((db.feed.categoria == cat.id)& (db.noticia.feed == db.feed.id)).select(db.noticia.id, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(0,4)) for noti in data: noticias.append(LI(A(noti.title, _href=URL(c='default',f='blog',args=[noti.slug,noti.id])))) categorias.append(noticias) sm.append(categorias) return dict(sm=sm) def sitemapindex(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host for i in xrange(1,5): sm.append(str(TAG.sitemap( TAG.loc(prefix,URL(c='default',f='sitemap%s.xml' % i)) ))) sm.append('</sitemapindex>') return sm def sitemap1(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, orderby=~db.noticia.id, limitby=(0,200)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap2(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(200,400)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap3(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(400,600)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap4(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(600,800)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap5(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, 
distinct=True, orderby=~db.noticia.id, limitby=(800,1000)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm #################################################################################### # URLs ANTIGUAS. Las funciones a continuación están sólo para compatibilidad retroactiva #################################################################################### def respira(): if request.extension == 'rss': return redirect(URL(c='default',f='feed.rss', args=request.args),301) else: return redirect(URL(c='default',f='index',args=request.args),301) def buscar(): if request.env.http_referer == request.url: response.flash = 'Puedes buscar directamente usando: buscar?q=termino+de+busqueda' if request.args: return redirect(URL(c='default',f='buscar',vars={'q':request.args}),301) else: form = FORM(INPUT(_name='q'),INPUT(_type='submit', _value='Buscar')) if form.accepts(request.vars,session): redirect(URL(c='default',f='buscar',vars={'q':request.post_vars.q}),301) return dict(form=form) def go(): return redirect(URL(r=request,c='default',f='blog',args=request.args),301) def feed(): if request.extension == 'rss': return redirect(URL(r=request,c='default',f='index.rss',args=request.args(0)),301) else: return redirect(URL(r=request,c='default',f='index',args=request.args(0)),301)
f not request
identifier_name
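This row's fim_type is identifier_name, and the cut lands mid-token: the prefix ends on a bare "i", and the middle ("f not request") plus the head of the suffix (".ajax: return ''") complete it. A small illustration (the dump above collapses the file's original newlines and indentation, so the literals here are abbreviated):

prefix_tail = "def publicaciones(): i"
middle = "f not request"
suffix_head = ".ajax: return ''"
# Splicing restores the statement `if not request.ajax: return ''`.
assert (prefix_tail + middle + suffix_head).endswith("if not request.ajax: return ''")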
default.py
# -*- coding: utf-8 -*- import locale locale.setlocale(locale.LC_ALL, 'es_CL.UTF8') #@cache(request.env.path_info, time_expire=150, cache_model=cache.ram) def index(): del response.headers['Cache-Control'] del response.headers['Pragma'] del response.headers['Expires'] response.headers['Cache-Control'] = 'max-age=300' # redirecciona a feedburner (los rss se generan en c=feed) if request.extension == 'rss': if request.args(0) == None: cat = '' else: cat = request.args(0) return redirect('http://feeds.feedburner.com/blogchile%s' % cat) # verificamos si pasó por c=default f=mobile y activó el bit de sesión if session.mobile: response.view = 'default/index.mobi' response.files.append(URL('static','css/blogchilemobile.css')) else: ''' si no hay bit de sesión mobile, establece el caché del browser; esto es porque si no, el caché impediría cambiar al modo mobile (bug de flojo)''' response.files.append(URL('static','js/jquery.cycle.all.min.js')) if request.args(0): catslug = request.args(0) response.title = 'Blog Chile: %s' % catslug.capitalize().replace('-',' ') response.meta.keywords = catslug.replace('-',' ') response.meta.description = "Blog de %s en Chile, Blogósfera Chilena, Blogs Chilenos," % catslug.capitalize().replace('-',' ') if catslug in ['medio-ambiente','animales']: return redirect(URL(r=request,f='index',args='naturaleza'),301) else: response.title = 'Blog Chile: Portada' response.meta.description = 'Blogs de Chile: Últimas publicaciones de noticias, tecnología, opinión, deporte, diseño, ocio, música, política, arte y más en la blogósfera chilena' response.meta.keywords = 'blogs chile, turismo chile, blogs chilenos' #if request.extension == 'rss': # return redirect('http://feeds.feedburner.com/blogosfera/dDKt') try: # muestra un response.flash con la descripción de cada categoría, si es que la hay (en db.feed) if request.args: descrip = db(db.categoria.slug == request.args(0)).select(db.categoria.description)[0].description if descrip != None: response.flash = descrip except: pass # aviso temporal de WIP.
chk según sessión de conexión en el sitio """ if session.avisado == False: response.flash = XML('El Sitio está temporalmente bajo algunos ajustes extraordinarios; disculpa si te ocasionan alguna molestia: %s ' % session.avisado) session.avisado = True """ publicaciones = LOAD(r=request,c='default',f='publicaciones.load',args=request.args,ajax=True,content='Cargando bloques...') return dict(publicaciones=publicaciones) #return dict() #@cache(request.env.path_info, time_expire=150, cache_model=cache.disk) def publicaciones(): if not request.ajax: return '' from gluon.tools import prettydate from datetime import datetime if request.args: catslug_data = db(db.categoria.slug == request.args(0)).select(db.categoria.slug) for cat in catslug_data: catslug = cat.slug else: catslug = 'noticias' publicaciones = DIV() # obteniendo los feeds categorizados bajo el slug solicitado desde la url ### 1 categoría por feed """ for feedincat in db((db.categoria.slug == catslug) & (db.feed.categoria == db.categoria.id) #& (db.feed_categoria.feed == db.feed.id) #& (db.feed_categoria.is_active == True) & (db.feed.is_active == True) & (db.categoria.is_active == True) ).select(db.feed.ALL): """ feedincat_data = db((db.categoria.slug == catslug) & (db.feed.categoria == db.categoria.id) & (db.feed.is_active == True) & (db.categoria.is_active == True) ).select(db.feed.id,db.feed.title,db.feed.source) for feedincat in feedincat_data: # armando feed_bloque y la noticia de cada feed feedbox = DIV(DIV(A(feedincat.title,_href=feedincat.source,_target='_blank',_class='ui-widget-header-a'), _class = 'feed_titulo ui-widget-header ui-corner-all'), _class = 'feedbox feed_bloque izq ui-widget ui-corner-all') for n in db(db.noticia.feed == feedincat.id).select(db.noticia.ALL, orderby =~ db.noticia.id, limitby=(0,4)): try: actualizado = datetime.strptime(str(n.updated),'%Y-%m-%d %H:%M:%S') except: actualizado = n.created_on # armando la url que va en el rss #localurl = 'http://' + request.env.http_host + URL(c = 'default', f = 'blog.html', args = [n.slug,n.id], extension='html') # armando el título y enlace a la publicación; armando los bloques de publicación feedbox.append(DIV(DIV(A(n.title.lower()+'...', _name = n.slug, _href = URL(r = request, f = 'blog', args = [catslug,n.slug,n.id], extension=False), _class = 'noticia_link ui-widget-content-a', _target='_blank',extension='html'), DIV(prettydate(actualizado,T), _class='noticia_meta'), _class = 'noticia_contenido ui-widget-content ui-corner-all'), _class = 'noticia ui-widget ui-corner-all') ) #entradas.append(dict(title =unicode(n.title,'utf8'), link = localurl, description = unicode('%s (%s)' % (n.description, n.feed.title),'utf8'), created_on = request.now)) publicaciones.append(feedbox) response.js = XML('''function filtro(){ jQuery("#filtrando").keyup(function () { var filter = jQuery(this).val(), count = 0; jQuery(".feedbox .noticia, .feed_titulo").each(function () { if (jQuery(this).text().search(new RegExp(filter, "i")) < 0) { jQuery(this).addClass("hidden"); } else { jQuery(this).removeClass("hidden"); count++; } }); jQuery("#filtrado").text(count); }); } jQuery(document).ready(filtro); ''') d = dict(publicaciones=publicaciones) return response.render(d) #return dict(publicaciones=publicaciones) def elimina_tildes(s): """ Esta función sirve para eliminar las tildes del string que se le pase como parámetro. 
""" import unicodedata normalizado = ''.join((c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn')) return str(normalizado) #@cache(request.env.path_info, time_expire=1200, cache_model=cache.disk) def blog(): if request.extension!='html': request.extension = 'html' if not request.args: redirect(URL('default','index.html')) response.files.append(URL('static','css/blog.css')) #response.files.append(URL('static','js/jquery.iframe.js')) catslug = request.args(0) slugnoticia = request.args(1) #para mostrar la noticia en la url; SEO nid = request.args(2) #nid = int(request.args[len(request.args)-1]) #titulo = db.noticia[nid].title #print(type(nid)) titulo = slugnoticia.replace('-',' ') categoria = catslug response.title='%s: %s' % (categoria.capitalize(),titulo.capitalize()) #response.meta.description = '%s %s' % (response.title,db.noticia[nid].feed.title) if db.noticia(nid): shorturl = db.noticia(nid).shorturl else: shorturl = 'http://lmddgtfy.net/?q=%s, %s' % (request.args(0).title().replace('-',' '),request.args(1).title().replace('-',' ')) if 'http://lmddgtfy' in shorturl: response.flash = 'El enlace se ha perdido. Te dirigiré a una búsqueda PRIVADA usando DuckDuckGo.com. Disculpa las molestias.' if request.env.http_referer!=None: goback = A(SPAN(_class = 'icon leftarrow'), 'Regresar', _title='Volver a la página anterior', _class = 'pill button izq', _href = request.env.http_referer) else: goback = A(SPAN(_class = 'icon home'), 'Blogchile.cl', _class = 'positive primary button izq', _href = 'http://blogchile.cl/') cerrarmarco = A(SPAN(_class = 'icon rightarrow'), 'Ir al Blog', _class = 'pill negative button der', _href = shorturl, _title='Cerrar este marco y visitar el artículo en el blog de su fuente original') referer = goback #referer = DIV(goback, class='izq') #go = DIV(IFRAME(_src = shorturl, _style = 'height:90%;width:inherit;border:0;'), _id = 'godiv', _style = 'display:block;height:100%;width:100%;') blog = IFRAME(_src = shorturl, _id='blogiframe', _style='width:inherit;border:0;') d = dict(blog=blog,shorturl=shorturl,referer=referer,cerrarmarco=cerrarmarco) return response.render(d) def user(): """ exposes: http://..../[app]/default/user/login http://..../[app]/default/user/logout http://..../[app]/default/user/register http://..../[app]/default/user/profile http://..../[app]/default/user/retrieve_password http://..../[app]/default/user/change_password use @auth.requires_login() @auth.requires_membership('group name') @auth.requires_permission('read','table name',record_id) to decorate functions that need access control """ return dict(form = auth()) def sitemap(): del response.headers['Cache-Control'] del response.headers['Pragma'] del response.headers['Expires'] response.headers['Cache-Control'] = 'max-age=300' if request.extension == 'xml': sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host for cat in db((db.categoria.id>0) & (db.categoria.is_active == True)).select(db.categoria.id,db.categoria.title,db.categoria.slug): sm.append(str(TAG.url( TAG.loc(prefix,URL(r=request,c='default',f='index.html',args=[cat.slug])), TAG.changefreq('always') ))) sm.append(str(TAG.url( TAG.loc(prefix,URL(r=request,c='default',f='feed.rss',args=[cat.slug])), TAG.changefreq('always') ))) sm.append('</urlset>') return sm elif request.extension == 'html': #response.view = 'plantilla.html' sm = DIV(_id='sitemap') for cat in db((db.categoria.id>0) & 
(db.categoria.is_active==True)).select(db.categoria.id,db.categoria.title,db.categoria.slug): categorias = DIV(H2(A(cat.title.capitalize(),_href=URL(r=request,c='default',f='index.html',args=[cat.slug])))) noticias = UL() data = db((db.feed.categoria == cat.id)& (db.noticia.feed == db.feed.id)).select(db.noticia.id, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(0,4)) for noti in data: noticias.append(LI(A(noti.title, _href=URL(c='default',f='blog',args=[noti.slug,noti.id])))) categorias.append(noticias) sm.append(categorias) return dict(sm=sm) def sitemapindex(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host for i in xrange(1,5): sm.append(str(TAG.sitemap( TAG.loc(prefix,URL(c='default',f='sitemap%s.xml' % i)) ))) sm.append('</sitemapindex>') return sm def sitemap1(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, orderby=~db.noticia.id, limitby=(0,200)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap2(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(200,400)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap3(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(400,600)) for noti in data: sm.append(str(TAG.url(
return sm def sitemap4(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(600,800)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap5(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(800,1000)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm #################################################################################### # URLs ANTIGUAS. Las funciones a continuación están sólo para compatibilidad retroactiva #################################################################################### def respira(): if request.extension == 'rss': return redirect(URL(c='default',f='feed.rss', args=request.args),301) else: return redirect(URL(c='default',f='index',args=request.args),301) def buscar(): if request.env.http_referer == request.url: response.flash = 'Puedes buscar directamente usando: buscar?q=termino+de+busqueda' if request.args: return redirect(URL(c='default',f='buscar',vars={'q':request.args}),301) else: form = FORM(INPUT(_name='q'),INPUT(_type='submit', _value='Buscar')) if form.accepts(request.vars,session): redirect(URL(c='default',f='buscar',vars={'q':request.post_vars.q}),301) return dict(form=form) def go(): return redirect(URL(r=request,c='default',f='blog',args=request.args),301) def feed(): if request.extension == 'rss': return redirect(URL(r=request,c='default',f='index.rss',args=request.args(0)),301) else: return redirect(URL(r=request,c='default',f='index',args=request.args(0)),301)
TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>')
conditional_block
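In conditional_block rows such as this one, the held-out middle is the tail of a control-flow body (here, the TAG.url(...) arguments and closing appends inside sitemap3's loop). For Python samples, a cheap sanity check on a splice is that the result still parses; a sketch, assuming the stored fields keep their original newlines (the rendering above collapses them) and that record is a loaded row as before:

import ast

def check_python_row(record):
    source = record["prefix"] + record["middle"] + record["suffix"]
    ast.parse(source)  # raises SyntaxError if the split or splice is broken
    return source

Note that ast.parse only checks syntax, so the web2py globals these controllers rely on (db, request, response, ...) do not need to be importable.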
default.py
# -*- coding: utf-8 -*- import locale locale.setlocale(locale.LC_ALL, 'es_CL.UTF8') #@cache(request.env.path_info, time_expire=150, cache_model=cache.ram) def index(): del response.headers['Cache-Control'] del response.headers['Pragma'] del response.headers['Expires'] response.headers['Cache-Control'] = 'max-age=300' # redirecciona a feedburner (los rss se generan en c=feed) if request.extension == 'rss': if request.args(0) == None:
else: cat = request.args(0) return redirect('http://feeds.feedburner.com/blogchile%s' % cat) # verificamos si pasó por c=default f=mobile y activó el bit de sesión if session.mobile: response.view = 'default/index.mobi' response.files.append(URL('static','css/blogchilemobile.css')) else: ''' si no hay bit de sesión mobile, establece el caché del browser; esto es porque si no, el caché impediría cambiar al modo mobile (bug de flojo)''' response.files.append(URL('static','js/jquery.cycle.all.min.js')) if request.args(0): catslug = request.args(0) response.title = 'Blog Chile: %s' % catslug.capitalize().replace('-',' ') response.meta.keywords = catslug.replace('-',' ') response.meta.description = "Blog de %s en Chile, Blogósfera Chilena, Blogs Chilenos," % catslug.capitalize().replace('-',' ') if catslug in ['medio-ambiente','animales']: return redirect(URL(r=request,f='index',args='naturaleza'),301) else: response.title = 'Blog Chile: Portada' response.meta.description = 'Blogs de Chile: Últimas publicaciones de noticias, tecnología, opinión, deporte, diseño, ocio, música, política, arte y más en la blogósfera chilena' response.meta.keywords = 'blogs chile, turismo chile, blogs chilenos' #if request.extension == 'rss': # return redirect('http://feeds.feedburner.com/blogosfera/dDKt') try: # muestra un response.flash con la descripción de cada categoría, si es que la hay (en db.feed) if request.args: descrip = db(db.categoria.slug == request.args(0)).select(db.categoria.description)[0].description if descrip != None: response.flash = descrip except: pass # aviso temporal de WIP. chk según sesión de conexión en el sitio """ if session.avisado == False: response.flash = XML('El Sitio está temporalmente bajo algunos ajustes extraordinarios; disculpa si te ocasionan alguna molestia: %s ' % session.avisado) session.avisado = True """ publicaciones = LOAD(r=request,c='default',f='publicaciones.load',args=request.args,ajax=True,content='Cargando bloques...') return dict(publicaciones=publicaciones) #return dict() #@cache(request.env.path_info, time_expire=150, cache_model=cache.disk) def publicaciones(): if not request.ajax: return '' from gluon.tools import prettydate from datetime import datetime if request.args: catslug_data = db(db.categoria.slug == request.args(0)).select(db.categoria.slug) for cat in catslug_data: catslug = cat.slug else: catslug = 'noticias' publicaciones = DIV() # obteniendo los feeds categorizados bajo el slug solicitado desde la url ### 1 categoría por feed """ for feedincat in db((db.categoria.slug == catslug) & (db.feed.categoria == db.categoria.id) #& (db.feed_categoria.feed == db.feed.id) #& (db.feed_categoria.is_active == True) & (db.feed.is_active == True) & (db.categoria.is_active == True) ).select(db.feed.ALL): """ feedincat_data = db((db.categoria.slug == catslug) & (db.feed.categoria == db.categoria.id) & (db.feed.is_active == True) & (db.categoria.is_active == True) ).select(db.feed.id,db.feed.title,db.feed.source) for feedincat in feedincat_data: # armando feed_bloque y la noticia de cada feed feedbox = DIV(DIV(A(feedincat.title,_href=feedincat.source,_target='_blank',_class='ui-widget-header-a'), _class = 'feed_titulo ui-widget-header ui-corner-all'), _class = 'feedbox feed_bloque izq ui-widget ui-corner-all') for n in db(db.noticia.feed == feedincat.id).select(db.noticia.ALL, orderby =~ db.noticia.id, limitby=(0,4)): try: actualizado = datetime.strptime(str(n.updated),'%Y-%m-%d %H:%M:%S') except: actualizado = n.created_on # armando la url que va en el rss
#localurl = 'http://' + request.env.http_host + URL(c = 'default', f = 'blog.html', args = [n.slug,n.id], extension='html') # armando el título y enlace a la publicación; armando los bloques de publicación feedbox.append(DIV(DIV(A(n.title.lower()+'...', _name = n.slug, _href = URL(r = request, f = 'blog', args = [catslug,n.slug,n.id], extension=False), _class = 'noticia_link ui-widget-content-a', _target='_blank',extension='html'), DIV(prettydate(actualizado,T), _class='noticia_meta'), _class = 'noticia_contenido ui-widget-content ui-corner-all'), _class = 'noticia ui-widget ui-corner-all') ) #entradas.append(dict(title =unicode(n.title,'utf8'), link = localurl, description = unicode('%s (%s)' % (n.description, n.feed.title),'utf8'), created_on = request.now)) publicaciones.append(feedbox) response.js = XML('''function filtro(){ jQuery("#filtrando").keyup(function () { var filter = jQuery(this).val(), count = 0; jQuery(".feedbox .noticia, .feed_titulo").each(function () { if (jQuery(this).text().search(new RegExp(filter, "i")) < 0) { jQuery(this).addClass("hidden"); } else { jQuery(this).removeClass("hidden"); count++; } }); jQuery("#filtrado").text(count); }); } jQuery(document).ready(filtro); ''') d = dict(publicaciones=publicaciones) return response.render(d) #return dict(publicaciones=publicaciones) def elimina_tildes(s): """ Esta función sirve para eliminar las tildes del string que se le pase como parámetro. """ import unicodedata normalizado = ''.join((c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn')) return str(normalizado) #@cache(request.env.path_info, time_expire=1200, cache_model=cache.disk) def blog(): if request.extension!='html': request.extension = 'html' if not request.args: redirect(URL('default','index.html')) response.files.append(URL('static','css/blog.css')) #response.files.append(URL('static','js/jquery.iframe.js')) catslug = request.args(0) slugnoticia = request.args(1) #para mostrar la noticia en la url; SEO nid = request.args(2) #nid = int(request.args[len(request.args)-1]) #titulo = db.noticia[nid].title #print(type(nid)) titulo = slugnoticia.replace('-',' ') categoria = catslug response.title='%s: %s' % (categoria.capitalize(),titulo.capitalize()) #response.meta.description = '%s %s' % (response.title,db.noticia[nid].feed.title) if db.noticia(nid): shorturl = db.noticia(nid).shorturl else: shorturl = 'http://lmddgtfy.net/?q=%s, %s' % (request.args(0).title().replace('-',' '),request.args(1).title().replace('-',' ')) if 'http://lmddgtfy' in shorturl: response.flash = 'El enlace se ha perdido. Te dirigiré a una búsqueda PRIVADA usando DuckDuckGo.com. Disculpa las molestias.' 
if request.env.http_referer!=None: goback = A(SPAN(_class = 'icon leftarrow'), 'Regresar', _title='Volver a la página anterior', _class = 'pill button izq', _href = request.env.http_referer) else: goback = A(SPAN(_class = 'icon home'), 'Blogchile.cl', _class = 'positive primary button izq', _href = 'http://blogchile.cl/') cerrarmarco = A(SPAN(_class = 'icon rightarrow'), 'Ir al Blog', _class = 'pill negative button der', _href = shorturl, _title='Cerrar este marco y visitar el artículo en el blog de su fuente original') referer = goback #referer = DIV(goback, class='izq') #go = DIV(IFRAME(_src = shorturl, _style = 'height:90%;width:inherit;border:0;'), _id = 'godiv', _style = 'display:block;height:100%;width:100%;') blog = IFRAME(_src = shorturl, _id='blogiframe', _style='width:inherit;border:0;') d = dict(blog=blog,shorturl=shorturl,referer=referer,cerrarmarco=cerrarmarco) return response.render(d) def user(): """ exposes: http://..../[app]/default/user/login http://..../[app]/default/user/logout http://..../[app]/default/user/register http://..../[app]/default/user/profile http://..../[app]/default/user/retrieve_password http://..../[app]/default/user/change_password use @auth.requires_login() @auth.requires_membership('group name') @auth.requires_permission('read','table name',record_id) to decorate functions that need access control """ return dict(form = auth()) def sitemap(): del response.headers['Cache-Control'] del response.headers['Pragma'] del response.headers['Expires'] response.headers['Cache-Control'] = 'max-age=300' if request.extension == 'xml': sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host for cat in db((db.categoria.id>0) & (db.categoria.is_active == True)).select(db.categoria.id,db.categoria.title,db.categoria.slug): sm.append(str(TAG.url( TAG.loc(prefix,URL(r=request,c='default',f='index.html',args=[cat.slug])), TAG.changefreq('always') ))) sm.append(str(TAG.url( TAG.loc(prefix,URL(r=request,c='default',f='feed.rss',args=[cat.slug])), TAG.changefreq('always') ))) sm.append('</urlset>') return sm elif request.extension == 'html': #response.view = 'plantilla.html' sm = DIV(_id='sitemap') for cat in db((db.categoria.id>0) & (db.categoria.is_active==True)).select(db.categoria.id,db.categoria.title,db.categoria.slug): categorias = DIV(H2(A(cat.title.capitalize(),_href=URL(r=request,c='default',f='index.html',args=[cat.slug])))) noticias = UL() data = db((db.feed.categoria == cat.id)& (db.noticia.feed == db.feed.id)).select(db.noticia.id, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(0,4)) for noti in data: noticias.append(LI(A(noti.title, _href=URL(c='default',f='blog',args=[noti.slug,noti.id])))) categorias.append(noticias) sm.append(categorias) return dict(sm=sm) def sitemapindex(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host for i in xrange(1,5): sm.append(str(TAG.sitemap( TAG.loc(prefix,URL(c='default',f='sitemap%s.xml' % i)) ))) sm.append('</sitemapindex>') return sm def sitemap1(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, 
orderby=~db.noticia.id, limitby=(0,200)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap2(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(200,400)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap3(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(400,600)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap4(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(600,800)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap5(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(800,1000)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm #################################################################################### # URLs ANTIGUAS. 
Las funciones a continuación están sólo para compatibilidad retroactiva #################################################################################### def respira(): if request.extension == 'rss': return redirect(URL(c='default',f='feed.rss', args=request.args),301) else: return redirect(URL(c='default',f='index',args=request.args),301) def buscar(): if request.env.http_referer == request.url: response.flash = 'Puedes buscar directamente usando: buscar?q=termino+de+busqueda' if request.args: return redirect(URL(c='default',f='buscar',vars={'q':request.args}),301) else: form = FORM(INPUT(_name='q'),INPUT(_type='submit', _value='Buscar')) if form.accepts(request.vars,session): redirect(URL(c='default',f='buscar',vars={'q':request.post_vars.q}),301) return dict(form=form) def go(): return redirect(URL(r=request,c='default',f='blog',args=request.args),301) def feed(): if request.extension == 'rss': return redirect(URL(r=request,c='default',f='index.rss',args=request.args(0)),301) else: return redirect(URL(r=request,c='default',f='index',args=request.args(0)),301)
cat = ''
random_line_split
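random_line_split rows are cut at line boundaries rather than at syntax nodes, so the middle can be as small as the single assignment cat = '' seen above. A quick way to compare span sizes across the four fim_type values present in this file, with records again a hypothetical iterable of loaded rows:

from collections import defaultdict

def middle_sizes(records):
    sizes = defaultdict(list)
    for row in records:
        sizes[row["fim_type"]].append(len(row["middle"]))
    # e.g. sizes["random_line_split"] collects much shorter spans than
    # sizes["identifier_body"] for rows like the ones in this file.
    return sizes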
default.py
# -*- coding: utf-8 -*- import locale locale.setlocale(locale.LC_ALL, 'es_CL.UTF8') #@cache(request.env.path_info, time_expire=150, cache_model=cache.ram) def index(): del response.headers['Cache-Control'] del response.headers['Pragma'] del response.headers['Expires'] response.headers['Cache-Control'] = 'max-age=300' # redirecciona a feedburner (los rss se generan en c=feed) if request.extension == 'rss': if request.args(0) == None: cat = '' else: cat = request.args(0) return redirect('http://feeds.feedburner.com/blogchile%s' % cat) # verificamos si pasó por c=default f=mobile y activó el bit de sesión if session.mobile: response.view = 'default/index.mobi' response.files.append(URL('static','css/blogchilemobile.css')) else: ''' si no hay bit de sesión mobile, establece el caché del browser; esto es porque si no, el caché impediría cambiar al modo mobile (bug de flojo)''' response.files.append(URL('static','js/jquery.cycle.all.min.js')) if request.args(0): catslug = request.args(0) response.title = 'Blog Chile: %s' % catslug.capitalize().replace('-',' ') response.meta.keywords = catslug.replace('-',' ') response.meta.description = "Blog de %s en Chile, Blogósfera Chilena, Blogs Chilenos," % catslug.capitalize().replace('-',' ') if catslug in ['medio-ambiente','animales']: return redirect(URL(r=request,f='index',args='naturaleza'),301) else: response.title = 'Blog Chile: Portada' response.meta.description = 'Blogs de Chile: Últimas publicaciones de noticias, tecnología, opinión, deporte, diseño, ocio, música, política, arte y más en la blogósfera chilena' response.meta.keywords = 'blogs chile, turismo chile, blogs chilenos' #if request.extension == 'rss': # return redirect('http://feeds.feedburner.com/blogosfera/dDKt') try: # muestra un response.flash con la descripción de cada categoría, si es que la hay (en db.feed) if request.args: descrip = db(db.categoria.slug == request.args(0)).select(db.categoria.description)[0].description if descrip != None: response.flash = descrip except: pass # aviso temporal de WIP.
chk según sessión de conexión en el sitio """ if session.avisado == False: response.flash = XML('El Sitio está temporalmente bajo algunos ajustes extraordinarios; disculpa si te ocasionan alguna molestia: %s ' % session.avisado) session.avisado = True """ publicaciones = LOAD(r=request,c='default',f='publicaciones.load',args=request.args,ajax=True,content='Cargando bloques...') return dict(publicaciones=publicaciones) #return dict() #@cache(request.env.path_info, time_expire=150, cache_model=cache.disk) def publicaciones(): if not request.ajax: return '' from gluon.tools import prettydate from datetime import datetime if request.args: catslug_data = db(db.categoria.slug == request.args(0)).select(db.categoria.slug) for cat in catslug_data: catslug = cat.slug else: catslug = 'noticias' publicaciones = DIV() # obteniendo los feeds categorizados bajo el slug solicitado desde la url ### 1 categoría por feed """ for feedincat in db((db.categoria.slug == catslug) & (db.feed.categoria == db.categoria.id) #& (db.feed_categoria.feed == db.feed.id) #& (db.feed_categoria.is_active == True) & (db.feed.is_active == True) & (db.categoria.is_active == True) ).select(db.feed.ALL): """ feedincat_data = db((db.categoria.slug == catslug) & (db.feed.categoria == db.categoria.id) & (db.feed.is_active == True) & (db.categoria.is_active == True) ).select(db.feed.id,db.feed.title,db.feed.source) for feedincat in feedincat_data: # armando feed_bloque y la noticia de cada feed feedbox = DIV(DIV(A(feedincat.title,_href=feedincat.source,_target='_blank',_class='ui-widget-header-a'), _class = 'feed_titulo ui-widget-header ui-corner-all'), _class = 'feedbox feed_bloque izq ui-widget ui-corner-all') for n in db(db.noticia.feed == feedincat.id).select(db.noticia.ALL, orderby =~ db.noticia.id, limitby=(0,4)): try: actualizado = datetime.strptime(str(n.updated),'%Y-%m-%d %H:%M:%S') except: actualizado = n.created_on # armando la url que va en el rss #localurl = 'http://' + request.env.http_host + URL(c = 'default', f = 'blog.html', args = [n.slug,n.id], extension='html') # armando el título y enlace a la publicación; armando los bloques de publicación feedbox.append(DIV(DIV(A(n.title.lower()+'...', _name = n.slug, _href = URL(r = request, f = 'blog', args = [catslug,n.slug,n.id], extension=False), _class = 'noticia_link ui-widget-content-a', _target='_blank',extension='html'), DIV(prettydate(actualizado,T), _class='noticia_meta'), _class = 'noticia_contenido ui-widget-content ui-corner-all'), _class = 'noticia ui-widget ui-corner-all') ) #entradas.append(dict(title =unicode(n.title,'utf8'), link = localurl, description = unicode('%s (%s)' % (n.description, n.feed.title),'utf8'), created_on = request.now)) publicaciones.append(feedbox) response.js = XML('''function filtro(){ jQuery("#filtrando").keyup(function () { var filter = jQuery(this).val(), count = 0; jQuery(".feedbox .noticia, .feed_titulo").each(function () { if (jQuery(this).text().search(new RegExp(filter, "i")) < 0) { jQuery(this).addClass("hidden"); } else { jQuery(this).removeClass("hidden"); count++; } }); jQuery("#filtrado").text(count); }); } jQuery(document).ready(filtro); ''') d = dict(publicaciones=publicaciones) return response.render(d) #return dict(publicaciones=publicaciones) def elimina_tildes(s): """ Esta función sirve para eliminar las tildes del string que se le pase como parámetro. 
""" import unicodedata normalizado = ''.join((c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn')) return str(normalizado) #@cache(request.env.path_info, time_expire=1200, cache_model=cache.disk) def blog(): if request.extension!='html': request.extension = 'html' if not request.args: redirect(URL('default','index.html')) response.files.append(URL('static','css/blog.css')) #response.files.append(URL('static','js/jquery.iframe.js')) catslug = request.args(0) slugnoticia = request.args(1) #para mostrar la noticia en la url; SEO nid = request.args(2) #nid = int(request.args[len(request.args)-1]) #titulo = db.noticia[nid].title #print(type(nid)) titulo = slugnoticia.replace('-',' ') categoria = catslug response.title='%s: %s' % (categoria.capitalize(),titulo.capitalize()) #response.meta.description = '%s %s' % (response.title,db.noticia[nid].feed.title) if db.noticia(nid): shorturl = db.noticia(nid).shorturl else: shorturl = 'http://lmddgtfy.net/?q=%s, %s' % (request.args(0).title().replace('-',' '),request.args(1).title().replace('-',' ')) if 'http://lmddgtfy' in shorturl: response.flash = 'El enlace se ha perdido. Te dirigiré a una búsqueda PRIVADA usando DuckDuckGo.com. Disculpa las molestias.' if request.env.http_referer!=None: goback = A(SPAN(_class = 'icon leftarrow'), 'Regresar', _title='Volver a la página anterior', _class = 'pill button izq', _href = request.env.http_referer) else: goback = A(SPAN(_class = 'icon home'), 'Blogchile.cl', _class = 'positive primary button izq', _href = 'http://blogchile.cl/') cerrarmarco = A(SPAN(_class = 'icon rightarrow'), 'Ir al Blog', _class = 'pill negative button der', _href = shorturl, _title='Cerrar este marco y visitar el artículo en el blog de su fuente original') referer = goback #referer = DIV(goback, class='izq') #go = DIV(IFRAME(_src = shorturl, _style = 'height:90%;width:inherit;border:0;'), _id = 'godiv', _style = 'display:block;height:100%;width:100%;') blog = IFRAME(_src = shorturl, _id='blogiframe', _style='width:inherit;border:0;') d = dict(blog=blog,shorturl=shorturl,referer=referer,cerrarmarco=cerrarmarco) return response.render(d) def user(): """ exposes: http://....
e.headers['Cache-Control'] del response.headers['Pragma'] del response.headers['Expires'] response.headers['Cache-Control'] = 'max-age=300' if request.extension == 'xml': sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host for cat in db((db.categoria.id>0) & (db.categoria.is_active == True)).select(db.categoria.id,db.categoria.title,db.categoria.slug): sm.append(str(TAG.url( TAG.loc(prefix,URL(r=request,c='default',f='index.html',args=[cat.slug])), TAG.changefreq('always') ))) sm.append(str(TAG.url( TAG.loc(prefix,URL(r=request,c='default',f='feed.rss',args=[cat.slug])), TAG.changefreq('always') ))) sm.append('</urlset>') return sm elif request.extension == 'html': #response.view = 'plantilla.html' sm = DIV(_id='sitemap') for cat in db((db.categoria.id>0) & (db.categoria.is_active==True)).select(db.categoria.id,db.categoria.title,db.categoria.slug): categorias = DIV(H2(A(cat.title.capitalize(),_href=URL(r=request,c='default',f='index.html',args=[cat.slug])))) noticias = UL() data = db((db.feed.categoria == cat.id)& (db.noticia.feed == db.feed.id)).select(db.noticia.id, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(0,4)) for noti in data: noticias.append(LI(A(noti.title, _href=URL(c='default',f='blog',args=[noti.slug,noti.id])))) categorias.append(noticias) sm.append(categorias) return dict(sm=sm) def sitemapindex(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host for i in xrange(1,5): sm.append(str(TAG.sitemap( TAG.loc(prefix,URL(c='default',f='sitemap%s.xml' % i)) ))) sm.append('</sitemapindex>') return sm def sitemap1(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, orderby=~db.noticia.id, limitby=(0,200)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap2(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(200,400)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap3(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(400,600)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') 
return sm def sitemap4(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(600,800)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm def sitemap5(): sm = [str('<?xml version="1.0" encoding="UTF-8" ?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">')] prefix = request.env.wsgi_url_scheme+'://'+request.env.http_host data = db(db.noticia.id>0).select(db.noticia.id, db.noticia.created_on, db.noticia.title, db.noticia.slug, distinct=True, orderby=~db.noticia.id, limitby=(800,1000)) for noti in data: sm.append(str(TAG.url( TAG.loc(prefix,URL(c='default',f='blog',args=[noti.slug,noti.id],extension='')), TAG.lastmod(noti.created_on.date()), TAG.changefreq('always') ))) sm.append('</urlset>') return sm #################################################################################### # URLs ANTIGUAS. Las funciones a continuación están sólo para compatibilidad retroactiva #################################################################################### def respira(): if request.extension == 'rss': return redirect(URL(c='default',f='feed.rss', args=request.args),301) else: return redirect(URL(c='default',f='index',args=request.args),301) def buscar(): if request.env.http_referer == request.url: response.flash = 'Puedes buscar directamente usando: buscar?q=termino+de+busqueda' if request.args: return redirect(URL(c='default',f='buscar',vars={'q':request.args}),301) else: form = FORM(INPUT(_name='q'),INPUT(_type='submit', _value='Buscar')) if form.accepts(request.vars,session): redirect(URL(c='default',f='buscar',vars={'q':request.post_vars.q}),301) return dict(form=form) def go(): return redirect(URL(r=request,c='default',f='blog',args=request.args),301) def feed(): if request.extension == 'rss': return redirect(URL(r=request,c='default',f='index.rss',args=request.args(0)),301) else: return redirect(URL(r=request,c='default',f='index',args=request.args(0)),301)
/[app]/default/user/login http://..../[app]/default/user/logout http://..../[app]/default/user/register http://..../[app]/default/user/profile http://..../[app]/default/user/retrieve_password http://..../[app]/default/user/change_password use @auth.requires_login() @auth.requires_membership('group name') @auth.requires_permission('read','table name',record_id) to decorate functions that need access control """ return dict(form = auth()) def sitemap(): del respons
identifier_body
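Rows like these are typically consumed by formatting them into a fill-in-the-middle training example in which the model sees the prefix and suffix and must generate the middle. The sentinel strings below follow one common convention (StarCoder-style <fim_prefix>/<fim_suffix>/<fim_middle> markers); the exact tokens are model-specific assumptions, not something this dataset prescribes:

def to_psm_example(record, pre="<fim_prefix>", suf="<fim_suffix>", mid="<fim_middle>"):
    # PSM ("prefix-suffix-middle") ordering: prefix and suffix are shown,
    # and the held-out middle becomes the supervision target.
    prompt = pre + record["prefix"] + suf + record["suffix"] + mid
    target = record["middle"]
    return prompt, target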
fixed.rs
//! Groups all the static pages together. use maud::Markup; use rocket::Route; mod contacts; mod links; mod projects; mod resume; /// Returns the "index" page, a.k.a. the home page of the website. /// /// This simply calls [`htmlgen::index()`]. #[get("/")] fn get_index() -> Markup { htmlgen::index() } /// Provides a [`Vec`] of [`Route`]s to be attached with [`rocket::Rocket::mount()`]. pub fn
() -> Vec<Route> { routes![ get_index, resume::get, links::get, contacts::get, projects::get, projects::project::get, ] } /// Functions generating my home page. pub mod htmlgen { use maud::{html, Markup, Render}; use page_client::{data, partials}; /// Create a basic menu. pub fn menu() -> Option<data::Menu<'static>> { Some(data::Menu(&[data::MenuItem { text: "Blog", link: Some("/blog"), children: None, }])) } /// Returns a list of links as [`Markup`]. fn link_group() -> Markup { let links = vec![ data::LogoLink { url: "https://github.com/AlterionX/", logo: "public/img/icon/github.png", alt_text: "Github", text: "AlterionX", }, data::LogoLink { url: "mailto:[email protected]", logo: "public/img/icon/email.svg", alt_text: "Email", text: "[email protected]", }, data::LogoLink { url: "public/resume/resume.pdf", logo: "public/img/icon/resume.svg", alt_text: "Resume", text: "Resume", }, ]; html! { .link-group { @for link in links.iter() { (link) } } } } /// Returns the slide show as [`Markup`]. fn slides() -> Markup { html! { .slides { (my_intro()) (my_story()) (my_work()) (my_interests()) (my_passion()) (my_reading_time()) (my_gaming_time()) } .slide-attachments { img#slide-prev.slide-attachment src="public/img/left-simple-arrow.svg"; .slide-markers.slide-attachment { (slide_markers(7)) } img#slide-next.slide-attachment src="public/img/right-simple-arrow.svg"; } } } /// Returns a slide as [`Markup`]. fn slide<T: Render, U: Render>(title: T, text: U, cls: Option<&str>) -> Markup { html! { div class={ "slide" @if let Some(cls) = cls { " " (cls) } } { h2.slide-heading { (title) } .slide-text { (text) } } } } /// Returns the slide_markers as [`Markup`]. fn slide_markers(slide_cnt: u8) -> Markup { html! { @for i in 0..slide_cnt { (slide_marker(i)) } } } /// Returns the slide_marker as [`Markup`]. fn slide_marker(idx: u8) -> Markup { html! { div id = { "slide-marker-"(idx) } class={"slide-marker" @if idx == 0 { (" active-slide-marker") }} {} } } /// Returns the first slide as [`Markup`]. fn my_intro() -> Markup { slide( "Nice to meet you", html! { p { "My name is Ben. I am a developer, but I am also:" } ul { li { "a reader; I love to read. But that can get long, so let's save the details for later." } li { "a writer; " a href="https://www.nanowrimo.org/participants/alterionx/novels" { "NaNoWriMo" } " \ (a.k.a. November) is simultaneously my favorite and most hated month of the year." } li { "a gamer; still waiting for " a href="https://robertsspaceindustries.com/" { "Star Citizen." } } li { "a linguist; I technically know Chinese, and am studying Japanese." } } p {"\ But mostly, I just enjoy watching pretty colors scroll really " span.italic.bold { "really" } " fast down \ my terminal screen while I run my programs and blabber endlessly about my interests.\ "} }, Some("intro active-slide"), ) } /// Returns the second slide as [`Markup`]. fn my_interests() -> Markup { slide( "Everything is fascinating", html! { p {"\ C, C++, and Rust are my favorite languages. I have worked in both OpenGL and Vulkan. \ I've dabbled with Unity, Godot, and Unreal; Amethyst sounds interesting as well. \ However, I also enjoy gaming and reading in my spare time, as well as learning even more about \ tech and interesting projects such as WASM, xi, TypeScript, Fuchsia, and AR glasses.\ "} p {"\ As a note, just for fun, this entire website is built with Rust + WASM \ (Such a fun word. Anyways...). I don't know how many browsers it runs on, \ but it was definitely fun. 
\ "} }, None, ) } /// Returns the third slide as [`Markup`]. fn my_story() -> Markup { slide( "Improve a little, day by day", html! { p {"\ There was a day in 10th grade when one of my friends introduced me to Java. I was \ enamored the moment I touched the keyboard. The actual program was a cute little \ thing, reading and adding two numbers.\ "} p {"\ It blew my mind. "} p {"\ Now that I think about it, it fits; I had been enchanted by the power of words, so I wanted to be a novelist,\ but then I found something even more powerful.\ "} p {"\ Either way, I had decided then and there that I wanted to program for \ a living. And now I'm here, seeking to live a life programming and architecting solutions.\ "} }, None, ) } /// Returns the fourth slide as [`Markup`]. fn my_work() -> Markup { slide( "Learning to code", html! { p {"\ I've picked up a lot of different skills since that day. I developed a custom WordPress theme and wrote \ a chatlog for my English class. In my last year of high school, I learned about automata theory.\ "} p {"\ When I came to college, I wrote part of an OS in no-std C++ and a Python frontend for connecting to a server and testing. \ I fell in love with writing tools and performance-critical programming.\ "} p {"\ I've written (with a friend) a ray tracer, a fluid simulation, and a shattering simulation. I am in the \ middle of writing a simulation in Rust that combines a majority of these concepts. I ended up devoting \ enough time to it that I will make it my thesis project.\ "} }, None, ) } /// Returns the fifth slide as [`Markup`]. fn my_passion() -> Markup { slide( "Programming and Writing", html! { p {"\ I focus on systems development, rendering, and physical simulation. I think I've already said \ enough about that. But I also have a strong interest in game development and story writing.\ "} p {"\ In fact, by virtue of NaNoWriMo, I have the first version of my novel finished!\ "} }, None, ) } /// Returns the sixth slide as [`Markup`]. fn my_reading_time() -> Markup { slide( "Breaktime: Reading!", html! { p {"\ Speaking of writing, I love to read as well. " a href="https://brandonsanderson.com/" { "Brandon Sanderson" } "'s my favorite author, \ but " a href="https://www.patrickrothfuss.com/content/index.asp" { "Patrick Rothfuss" } " is the most \ inspirational one—still waiting for " span.underline { "The Doors of Stone" } ". (It's alright. We've only waited for a decade-ish.)\ "} p {"\ Rothfuss is the one who inspired me to write, so I aim to take just as long as him to finish my stories. \ But, actually, the subtlety and detailed foreshadowing in his work is mind-boggling. As I attempt to do \ the same, I realize this all the more.\ "} }, None, ) } /// Returns the seventh slide as [`Markup`]. fn my_gaming_time() -> Markup { slide( "Breaktime: Gaming!", html! { p {"\ Games are the other half of my free time. Shooters are good as stress relief, but my favorites are RPGs. \ My favorite, however, is The Last of Us. It is a work of art. Nier: Automata comes in at a close second; it's only lower \ due to the PC port -- as a developer, its poor performance was obvious.\ "} p {"\ The favorites I'd listed are RPGs, but I find myself more engrossed in Terraria and Stellaris than RPGs since they leave a lot of room to \ establish a character and role play despite not being RPGs. 
Dungeons and Dragons (DnD) is pretty fun as well.\ "} p {"\ I also enjoy various space sims, but Star Citizen has captured my heart and I don't think I could ever play a different \ space sim without thinking about Star Citizen.\ "} }, None, ) } /// Returns a list of [`Css`](crate::data::Css) scripts that go in my home page. fn css_scripts<'a>() -> [data::Css<'a>; 4] { [ data::Css::Critical { src: "reset" }, data::Css::Critical { src: "typography" }, data::Css::Critical { src: "main" }, data::Css::Critical { src: "index" }, ] } /// Returns the [`Markup`] version of my home page. pub fn index() -> Markup { let (glue, load) = data::Script::wasm_bindgen_loader("wasm_slideshow"); let js_scripts = [ data::Script::External(glue.as_str()), data::Script::Embedded(load.as_str()), ]; let css_scripts = css_scripts(); let menu = menu(); let logo = crate::shared_html::logo_markup(); let meta = data::MetaData::builder() .scripts(&js_scripts[..]) .css(&css_scripts[..]) .menu(menu.as_ref()) .logo(logo.as_ref()) .build(); partials::basic_page( html! { div.profile { h1.tagline { "Ben Xu | Developer" } img.propic src="public/img/propic.jpg" alt="Profile Picture"; (link_group()) (slides()) } }, Some(&meta), ) } }
routes
identifier_name
attention_refine.py
import tensorflow as tf import numpy as np EMBEDDING_SIZE = 64 DIC_SIZE = 7148 LABEL_SIZE = 3440 # 0 is padding, 24 is start SEQ_LEN = 128 BATCH_SIZE = 512 GRU_UNITS = 256 model_path = "./" feature_description = { "word_id": tf.io.VarLenFeature(dtype=tf.int64), "word_label": tf.io.VarLenFeature(dtype=tf.int64) } def parse_fn(example_proto): example = tf.io.parse_single_example(example_proto, feature_description) f = tf.sparse.to_dense(example['word_id']) l = tf.sparse.to_dense(example['word_label']) return tf.pad(f, [[0, SEQ_LEN - tf.shape(f)[0]]]), tf.pad(l, [[0, SEQ_LEN - tf.shape(l)[0]]]), tf.shape(f)[0], \ tf.shape(l)[0] def input_fn(file_path): ds = tf.data.TFRecordDataset(file_path, num_parallel_reads=4) \ .map(parse_fn, num_parallel_calls=4) \ .shuffle(buffer_size=1024) \ .batch(batch_size=BATCH_SIZE, drop_remainder=True).prefetch(51200).repeat() return ds class GruCell: def __init__(self, units, step_dimension): """ :param units: output dimension at each time step :param step_dimension: input dimension at each time step """ self.units = units self.en_w_r_z = tf.Variable(tf.truncated_normal(shape=[step_dimension + self.units, self.units * 2]) / 10000) self.en_b_r_z = tf.Variable(tf.truncated_normal(shape=[units * 2, ]) / 10000) self.en_w_h = tf.Variable(tf.truncated_normal(shape=[step_dimension + self.units, self.units]) / 10000) self.en_b_h = tf.Variable(tf.truncated_normal(shape=[units, ]) / 10000) def en_cond(self, i, en_embeded, en_gru_output): return i < tf.shape(en_embeded)[1] def en_gru(self, i, en_embeded, en_gru_output): step_in = en_embeded[:, i] last_state = en_gru_output[:, i] in_concat = tf.concat((step_in, last_state), axis=-1) gate_inputs = tf.sigmoid(tf.matmul(in_concat, self.en_w_r_z) + self.en_b_r_z) r, z = tf.split(value=gate_inputs, num_or_size_splits=2, axis=1) h_ = tf.tanh(tf.matmul(tf.concat((step_in, r * last_state), axis=-1), self.en_w_h) + self.en_b_h) h = z * last_state + (1 - z) * h_ en_gru_output = tf.concat((en_gru_output, tf.expand_dims(h, axis=1)), axis=1) i = i + 1 return i, en_embeded, en_gru_output def __call__(self, seqs, en_gru_output, *args, **kwargs): """ Creating en_gru_output inside this call function causes problems: during decoding an "uninitialized variable" error is raised, so the variable is created outside and passed into the call; presumably this is because the last state of this variable is later used to initialize other variables. :param seqs: :param en_gru_output: :param args: :param kwargs: :return: """ i0 = tf.constant(0) _, _, encoder_output = tf.while_loop(self.en_cond, self.en_gru, loop_vars=[i0, seqs, en_gru_output], shape_invariants=[i0.get_shape(), seqs.get_shape(), tf.TensorShape([None, None, self.units])]) return encoder_output class GruCellAttentionDecoder: def __init__(self, units, step_dimension): """ :param units: output dimension at each time step :param step_dimension: input dimension at each time step """ self.units = units self.de_w_r_z = tf.Variable( tf.truncated_normal(shape=[step_dimension + self.units * 2, self.units * 2]) / 10000) self.de_b_r_z = tf.Variable(tf.truncated_normal(shape=[self.units * 2, ]) / 10000) self.de_w_h = tf.Variable(tf.truncated_normal(shape=[step_dimension + self.units * 2, self.units]) / 10000) self.de_b_h = tf.Variable(tf.truncated_normal(shape=[self.units, ]) / 10000) def de_cond(self, i, de_embeded, de_gru_output, encoder_output): return i < tf.shape(de_embeded)[1] def de_gru(self, i, de_embeded, de_gru_output, encoder_output): step_in = de_embeded[:, i] last_state = de_gru_output[:, i] attention_weight = tf.nn.softmax(tf.squeeze(tf.matmul(encoder_output, tf.expand_dims(last_state, axis=2)))) context_c = tf.reduce_sum(tf.multiply(tf.expand_dims(attention_weight, axis=2), encoder_output), axis=1) step_in = tf.concat((step_in, context_c), axis=-1) in_concat = tf.concat((step_in,
last_state), axis=-1) gate_inputs = tf.sigmoid(tf.matmul(in_concat, self.de_w_r_z) + self.de_b_r_z) r, z = tf.split(value=gate_inputs, num_or_size_splits=2, axis=1) h_ = tf.tanh(tf.matmul(tf.concat((step_in, r * last_state), axis=-1), self.de_w_h) + self.de_b_h) h = z * last_state + (1 - z) * h_ de_gru_output = tf.concat((de_gru_output, tf.expand_dims(h, axis=1)), axis=1) i = i + 1 return i, de_embeded, de_gru_output, encoder_output def __call__(self, de_embeded, de_gru_output, encoder_output, *args, **kwargs): """ de_gru_output could be created inside here; it does not raise the uninitialized-variable exception. :param de_embeded: :param args: :param kwargs: :return: """ i0 = tf.constant(0) _, _, decoder_output, _ = tf.while_loop(self.de_cond, self.de_gru, loop_vars=[i0, de_embeded, de_gru_output, encoder_output], shape_invariants=[i0.get_shape(), de_embeded.get_shape(), tf.TensorShape([None, None, self.units]), encoder_output.get_shape()]) return decoder_output sess = tf.Session() # Encoding en_input = tf.placeholder(tf.int32, shape=[None, None]) en_embedding_variable = tf.Variable(tf.truncated_normal(shape=[DIC_SIZE, EMBEDDING_SIZE])) en_embeded = tf.nn.embedding_lookup(en_embedding_variable, en_input) en_gru_cell_1 = GruCell(units=GRU_UNITS, step_dimension=EMBEDDING_SIZE) gru_init_state_1 = tf.zeros(shape=[BATCH_SIZE, 1, en_gru_cell_1.units]) # this should not be defined as a Variable encoder_output_1 = en_gru_cell_1(en_embeded, gru_init_state_1) en_gru_cell_2 = GruCell(units=GRU_UNITS, step_dimension=en_gru_cell_1.units) gru_init_state_2 = tf.zeros(shape=[BATCH_SIZE, 1, en_gru_cell_2.units]) # this should not be defined as a Variable encoder_output_2 = en_gru_cell_2(encoder_output_1[:, 1:], gru_init_state_2) # Decoding de_in_label = tf.placeholder(tf.int32, shape=[None, None]) # batch_size,seq_len de_embedding_variable = tf.Variable(tf.truncated_normal(shape=[LABEL_SIZE, EMBEDDING_SIZE])) de_embeded = tf.nn.embedding_lookup(de_embedding_variable, de_in_label) de_gru_cell_1 = GruCellAttentionDecoder(GRU_UNITS, step_dimension=EMBEDDING_SIZE) de_init_state_1 = tf.expand_dims(encoder_output_1[:, -1], axis=1) # init state decoder_output_1 = de_gru_cell_1(de_embeded, de_init_state_1, encoder_output_2[:, 1:]) de_gru_cell_2 = GruCell(GRU_UNITS, step_dimension=de_gru_cell_1.units) de_init_state_2 = tf.expand_dims(encoder_output_2[:, -1], axis=1) # init state decoder_output_2 = de_gru_cell_2(decoder_output_1[:, 1:], de_init_state_2) # Fully connected layers dense_w_1 = tf.Variable(tf.truncated_normal(shape=[GRU_UNITS, GRU_UNITS // 2])) dense_b_1 = tf.Variable(tf.truncated_normal(shape=[GRU_UNITS // 2, ])) dense_w_2 = tf.Variable(tf.truncated_normal(shape=[GRU_UNITS // 2, LABEL_SIZE])) dense_b_2 = tf.Variable(tf.truncated_normal(shape=[LABEL_SIZE, ])) dense_1 = tf.nn.leaky_relu(tf.tensordot(decoder_output_2, dense_w_1, [[2], [0]]) + dense_b_1) output = tf.nn.leaky_relu(tf.tensordot(dense_1, dense_w_2, [[2], [0]]) + dense_b_2) loss = tf.losses.sparse_softmax_cross_entropy(labels=de_in_label[:, 1:], logits=output[:, 1:-1]) optimizer = tf.train.AdamOptimizer(learning_rate=0.002).minimize(loss) decoder_start = np.zeros(shape=[BATCH_SIZE, 1]) + LABEL_SIZE - 1 saver = tf.train.Saver() ds = input_fn("../../../data/translate/train_data.tfrecord").make_one_shot_iterator().get_next() # sess.run(tf.global_variables_initializer()) saver.restore(sess, save_path=model_path + "ner.model-1") def save_model(): np.savetxt("./params_v3/en_embeding", sess.run(en_embedding_variable)) np.savetxt("./params_v3/de_embedding", sess.run(de_embedding_variable)) np.savetxt("./params_v3/en_grucell_1_w_r_z", sess.run(en_gru_cell_1.en_w_r_z))
np.savetxt("./params_v3/en_grucell_1_b_r_z", sess.run(en_gru_cell_1.en_b_r_z)) np.savetxt("./params_v3/en_grucell_1_w_h", sess.run(en_gru_cell_1.en_w_h)) np.savetxt("./params_v3/en_grucell_1_b_h", sess.run(en_gru_cell_1.en_b_h)) # -- np.savetxt("./params_v3/en_grucell_2_w_r_z", sess.run(en_gru_cell_2.en_w_r_z)) np.savetxt("./params_v3/en_grucell_2_b_r_z", sess.run(en_gru_cell_2.en_b_r_z)) np.savetxt("./params_v3/en_grucell_2_w_h", sess.run(en_gru_cell_2.en_w_h)) np.savetxt("./params_v3/en_grucell_2_b_h", sess.run(en_gru_cell_2.en_b_h)) # -- np.savetxt("./params_v3/de_grucell_1_w_r_z", sess.run(de_gru_cell_1.de_w_r_z)) np.savetxt("./params_v3/de_grucell_1_b_r_z", sess.run(de_gru_cell_1.de_b_r_z)) np.savetxt("./params_v3/de_grucell_1_w_h", sess.run(de_gru_cell_1.de_w_h)) np.savetxt("./params_v3/de_grucell_1_b_h", sess.run(de_gru_cell_1.de_b_h)) np.savetxt("./params_v3/de_grucell_2_w_r_z", sess.run(de_gru_cell_2.en_w_r_z)) np.savetxt("./params_v3/de_grucell_2_b_r_z", sess.run(de_gru_cell_2.en_b_r_z)) np.savetxt("./params_v3/de_grucell_2_w_h", sess.run(de_gru_cell_2.en_w_h)) np.savetxt("./params_v3/de_grucell_2_b_h", sess.run(de_gru_cell_2.en_b_h)) np.savetxt("./params_v3/dense_w_1", sess.run(dense_w_1)) np.savetxt("./params_v3/dense_b_1", sess.run(dense_b_1)) np.savetxt("./params_v3/dense_w_2", sess.run(dense_w_2)) np.savetxt("./params_v3/dense_b_2", sess.run(dense_b_2)) min_v = 0.1 for i in range(100000): features, labels, f_lengths, l_lengths = sess.run(ds) """ 这里在训练的时候不同批次的最大长度是不一样的,目的是为了加快训练速度,但对精度会有影响, 通过翻译场景来看,在预测时,适当的增加padding,对最终效果有一定的正面影响; 后续需要优化的地方是重新组织训练数据,比如长度小于10的分到一个批次,padding长度为10; 10 ~ 20的分到一个批次,padding长度为20 如果使用固定长度的padding训练,那么对算力的需求可能会增长几十倍 """ f_max_len = f_lengths[np.argmax(f_lengths, axis=-1)] l_max_len = l_lengths[np.argmax(l_lengths, axis=-1)] loss_value, _ = sess.run((loss, optimizer), feed_dict={en_input: features[:, :f_max_len + 1], de_in_label: np.concatenate( (decoder_start, labels[:, :l_max_len + 1]), axis=-1)[:, 0:-1]}) print(i, loss_value) if i % 10 == 0 and (min_v > loss_value): saver.save(sess, model_path + "ner.model", global_step=int(loss_value * 1000)) min_v = loss_value """ if i % 1 == 0: # save_model()
pred = sess.run(output, feed_dict={en_input: features[:, :f_max_len + 1], de_in_label: np.concatenate((decoder_start, labels[:, :l_max_len + 1]), axis=-1)[:, 0:-1]}) print(i, np.sum(np.sum(np.argmax(pred, axis=2)[:, 1:-1] == labels[:, :l_max_len], axis=1) == l_max_len)) """ sess.close()
# saver.save(sess, model_path + "ner.model", global_step=int(loss_value * 1000))
random_line_split
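The gate algebra implemented by GruCell.en_gru above (and mirrored in the decoder) is the standard GRU update, written here in the same concatenated-weight form the script uses; this is a reference sketch, with \odot denoting element-wise multiplication:

[r_t, z_t] = \sigma\big([x_t,\, h_{t-1}]\, W_{rz} + b_{rz}\big)
\tilde{h}_t = \tanh\big([x_t,\, r_t \odot h_{t-1}]\, W_h + b_h\big)
h_t = z_t \odot h_{t-1} + (1 - z_t) \odot \tilde{h}_t

Note that in this variant z_t gates the previous state directly; some references swap the roles of z_t and 1 - z_t, but the two conventions are equivalent up to relabeling.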
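The __call__ docstrings above touch on a tf.while_loop subtlety: a loop variable that grows along the time axis needs a shape invariant with that dimension left as None. A minimal, self-contained TF 1.x sketch of the same pattern (toy shapes; the names here are illustrative, not taken from the script):

import tensorflow as tf

i0 = tf.constant(0)
acc0 = tf.zeros([2, 1, 4])  # (batch, time, units) seed state, like gru_init_state_1

def cond(i, acc):
    return i < 3

def body(i, acc):
    step = tf.ones([2, 1, 4])                     # stand-in for one GRU step's output
    return i + 1, tf.concat((acc, step), axis=1)  # grow along the time axis

_, acc = tf.while_loop(
    cond, body, loop_vars=[i0, acc0],
    shape_invariants=[i0.get_shape(), tf.TensorShape([2, None, 4])])  # time dim unconstrained

with tf.Session() as sess:
    print(sess.run(tf.shape(acc)))  # [2 4 4]: the seed step plus three appended steps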
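The attention step in GruCellAttentionDecoder.de_gru is plain dot-product attention: score each encoder step against the current decoder state, softmax the scores, and take the weighted sum of encoder outputs as the context vector. The same arithmetic in NumPy (shapes chosen for illustration only):

import numpy as np

def softmax(x, axis=-1):
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

batch, src_len, units = 2, 5, 4
encoder_output = np.random.randn(batch, src_len, units)
last_state = np.random.randn(batch, units)                    # current decoder state

scores = np.einsum('bsu,bu->bs', encoder_output, last_state)  # dot product per source step
weights = softmax(scores)                                     # (batch, src_len)
context = (weights[:, :, None] * encoder_output).sum(axis=1)  # (batch, units)
print(context.shape)                                          # (2, 4)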
attention_refine.py
import tensorflow as tf import numpy as np EMBEDDING_SIZE = 64 DIC_SIZE = 7148 LABEL_SIZE = 3440 # 0为padding,24为start SEQ_LEN = 128 BATCH_SIZE = 512 GRU_UNITS = 256 model_path = "./" feature_description = { "word_id": tf.io.VarLenFeature(dtype=tf.int64), "word_label": tf.io.VarLenFeature(dtype=tf.int64) } def parse_fn(example_proto): example = tf.io.parse_single_example(example_proto, feature_description) f = tf.sparse.to_dense(example['word_id']) l = tf.sparse.to_dense(example['word_label']) return tf.pad(f, [[0, SEQ_LEN - tf.shape(f)[0]]]), tf.pad(l, [[0, SEQ_LEN - tf.shape(l)[0]]]), tf.shape(f)[0], \ tf.shape(l)[0] def input_fn(file_path): ds = tf.data.TFRecordDataset(file_path, num_parallel_reads=4) \ .map(parse_fn, num_parallel_calls=4) \ .shuffle(buffer_size=1024) \ .batch(batch_size=BATCH_SIZE, drop_remainder=True).prefetch(51200).repeat() return ds class GruCell: def __init__(self, units, step_dimension): """ :param units: 每一个时间步的输出维度 :param step_dimension: 每一时间步的输入维度 """ self.units = units self.en_w_r_z = tf.Variable(tf.truncated_normal(shape=[step_dimension + self.units, self.units * 2]) / 10000) self.en_b_r_z = tf.Variable(tf.truncated_normal(shape=[units * 2, ]) / 10000) self.en_w_h = tf.Variable(tf.truncated_normal(shape=[step_dimension + self.units, self.units]) / 10000) self.en_b_h = tf.Variable(tf.truncated_normal(shape=[units, ]) / 10000) def en_cond(self, i, en_embeded, en_gru_output): return i < tf.shape(en_embeded)[1] def en_gru(self, i, en_embeded, en_gru_output): step_in = en_embeded[:, i] last_state = en_gru_output[:, i] in_concat = tf.concat((step_in, last_state), axis=-1) gate_inputs = tf.sigmoid(tf.matmul(in_concat, self.en_w_r_z) + self.en_b_r_z) r, z = tf.split(value=gate_inputs, num_or_size_splits=2, axis=1) h_ = tf.tanh(tf.matmul(tf.concat((step_in, r * last_state), axis=-1), self.en_w_h) + self.en_b_h) h = z * last_state + (1 - z) * h_ en_gru_output = tf.concat((en_gru_output, tf.expand_dims(h, axis=1)), axis=1) i = i + 1 return i, en_embeded, en_gru_output def __call__(self, seqs, en_gru_output, *args, **kwargs): """ 在call函数内部创建en_gru_output会有问题,解码过程提示变量未初始化,所以在外部创建好变量传入call;估计可能是后面用这个变量的最后一个状态去初始化其他变量导致的 :param seqs: :param en_gru_output: :param args: :param kwargs: :return: """ i0 = tf.constant(0) _, _, encoder_output = tf.while_loop(self.en_cond, self.en_gru, loop_vars=[i0, seqs, en_gru_output], shape_invariants=[i0.get_shape(), seqs.get_shape(), tf.TensorShape([None, None, self.units])]) return encoder_output class GruCellAttentionDecoder: def __init__(self, units, step_dimension): """ :param units:每一个时间步的输出维度 :param step_dimension: 每一时间步的输入维度 """ self.units = units self.de_w_r_z = tf.Variable( tf.truncated_norm
last_state = de_gru_output[:, i] attention_weight = tf.nn.softmax(tf.squeeze(tf.matmul(encoder_output, tf.expand_dims(last_state, axis=2)))) context_c = tf.reduce_sum(tf.multiply(tf.expand_dims(attention_weight, axis=2), encoder_output), axis=1) step_in = tf.concat((step_in, context_c), axis=-1) in_concat = tf.concat((step_in, last_state), axis=-1) gate_inputs = tf.sigmoid(tf.matmul(in_concat, self.de_w_r_z) + self.de_b_r_z) r, z = tf.split(value=gate_inputs, num_or_size_splits=2, axis=1) h_ = tf.tanh(tf.matmul(tf.concat((step_in, r * last_state), axis=-1), self.de_w_h) + self.de_b_h) h = z * last_state + (1 - z) * h_ de_gru_output = tf.concat((de_gru_output, tf.expand_dims(h, axis=1)), axis=1) i = i + 1 return i, de_embeded, de_gru_output, encoder_output def __call__(self, de_embeded, de_gru_output, encoder_output, *args, **kwargs): """ 可以在内部创建de_gru_output,并不会抛出未初始化异常 :param de_embeded: :param args: :param kwargs: :return: """ i0 = tf.constant(0) _, _, decoder_output, _ = tf.while_loop(self.de_cond, self.de_gru, loop_vars=[i0, de_embeded, de_gru_output, encoder_output], shape_invariants=[i0.get_shape(), de_embeded.get_shape(), tf.TensorShape([None, None, self.units]), encoder_output.get_shape()]) return decoder_output sess = tf.Session() # 编码 en_input = tf.placeholder(tf.int32, shape=[None, None]) en_embedding_variable = tf.Variable(tf.truncated_normal(shape=[DIC_SIZE, EMBEDDING_SIZE])) en_embeded = tf.nn.embedding_lookup(en_embedding_variable, en_input) en_gru_cell_1 = GruCell(units=GRU_UNITS, step_dimension=EMBEDDING_SIZE) gru_init_state_1 = tf.zeros(shape=[BATCH_SIZE, 1, en_gru_cell_1.units]) # 这里不应该定义称为变量 encoder_output_1 = en_gru_cell_1(en_embeded, gru_init_state_1) en_gru_cell_2 = GruCell(units=GRU_UNITS, step_dimension=en_gru_cell_1.units) gru_init_state_2 = tf.zeros(shape=[BATCH_SIZE, 1, en_gru_cell_2.units]) # 这里不应该定义称为变量 encoder_output_2 = en_gru_cell_2(encoder_output_1[:, 1:], gru_init_state_2) # 解码 de_in_label = tf.placeholder(tf.int32, shape=[None, None]) # batch_size,seq_len de_embedding_variable = tf.Variable(tf.truncated_normal(shape=[LABEL_SIZE, EMBEDDING_SIZE])) de_embeded = tf.nn.embedding_lookup(de_embedding_variable, de_in_label) de_gru_cell_1 = GruCellAttentionDecoder(GRU_UNITS, step_dimension=EMBEDDING_SIZE) de_init_state_1 = tf.expand_dims(encoder_output_1[:, -1], axis=1) # init state decoder_output_1 = de_gru_cell_1(de_embeded, de_init_state_1, encoder_output_2[:, 1:]) de_gru_cell_2 = GruCell(GRU_UNITS, step_dimension=de_gru_cell_1.units) de_init_state_2 = tf.expand_dims(encoder_output_2[:, -1], axis=1) # init state decoder_output_2 = de_gru_cell_2(decoder_output_1[:, 1:], de_init_state_2) # 全连接 dense_w_1 = tf.Variable(tf.truncated_normal(shape=[GRU_UNITS, GRU_UNITS // 2])) dense_b_1 = tf.Variable(tf.truncated_normal(shape=[GRU_UNITS // 2, ])) dense_w_2 = tf.Variable(tf.truncated_normal(shape=[GRU_UNITS // 2, LABEL_SIZE])) dense_b_2 = tf.Variable(tf.truncated_normal(shape=[LABEL_SIZE, ])) dense_1 = tf.nn.leaky_relu(tf.tensordot(decoder_output_2, dense_w_1, [[2], [0]]) + dense_b_1) output = tf.nn.leaky_relu(tf.tensordot(dense_1, dense_w_2, [[2], [0]]) + dense_b_2) loss = tf.losses.sparse_softmax_cross_entropy(labels=de_in_label[:, 1:], logits=output[:, 1:-1]) optimizer = tf.train.AdamOptimizer(learning_rate=0.002).minimize(loss) decoder_start = np.zeros(shape=[BATCH_SIZE, 1]) + LABEL_SIZE - 1 saver = tf.train.Saver() ds = input_fn("../../../data/translate/train_data.tfrecord").make_one_shot_iterator().get_next() # sess.run(tf.global_variables_initializer()) 
saver.restore(sess, save_path=model_path + "ner.model-1")


def save_model():
    np.savetxt("./params_v3/en_embeding", sess.run(en_embedding_variable))
    np.savetxt("./params_v3/de_embedding", sess.run(de_embedding_variable))
    np.savetxt("./params_v3/en_grucell_1_w_r_z", sess.run(en_gru_cell_1.en_w_r_z))
    np.savetxt("./params_v3/en_grucell_1_b_r_z", sess.run(en_gru_cell_1.en_b_r_z))
    np.savetxt("./params_v3/en_grucell_1_w_h", sess.run(en_gru_cell_1.en_w_h))
    np.savetxt("./params_v3/en_grucell_1_b_h", sess.run(en_gru_cell_1.en_b_h))
    # --
    np.savetxt("./params_v3/en_grucell_2_w_r_z", sess.run(en_gru_cell_2.en_w_r_z))
    np.savetxt("./params_v3/en_grucell_2_b_r_z", sess.run(en_gru_cell_2.en_b_r_z))
    np.savetxt("./params_v3/en_grucell_2_w_h", sess.run(en_gru_cell_2.en_w_h))
    np.savetxt("./params_v3/en_grucell_2_b_h", sess.run(en_gru_cell_2.en_b_h))
    # --
    np.savetxt("./params_v3/de_grucell_1_w_r_z", sess.run(de_gru_cell_1.de_w_r_z))
    np.savetxt("./params_v3/de_grucell_1_b_r_z", sess.run(de_gru_cell_1.de_b_r_z))
    np.savetxt("./params_v3/de_grucell_1_w_h", sess.run(de_gru_cell_1.de_w_h))
    np.savetxt("./params_v3/de_grucell_1_b_h", sess.run(de_gru_cell_1.de_b_h))
    # de_gru_cell_2 is a plain GruCell, so its weight attributes use the en_ prefix
    np.savetxt("./params_v3/de_grucell_2_w_r_z", sess.run(de_gru_cell_2.en_w_r_z))
    np.savetxt("./params_v3/de_grucell_2_b_r_z", sess.run(de_gru_cell_2.en_b_r_z))
    np.savetxt("./params_v3/de_grucell_2_w_h", sess.run(de_gru_cell_2.en_w_h))
    np.savetxt("./params_v3/de_grucell_2_b_h", sess.run(de_gru_cell_2.en_b_h))
    np.savetxt("./params_v3/dense_w_1", sess.run(dense_w_1))
    np.savetxt("./params_v3/dense_b_1", sess.run(dense_b_1))
    np.savetxt("./params_v3/dense_w_2", sess.run(dense_w_2))
    np.savetxt("./params_v3/dense_b_2", sess.run(dense_b_2))


min_v = 0.1
for i in range(100000):
    features, labels, f_lengths, l_lengths = sess.run(ds)
    """
    The maximum length differs between training batches, which speeds up training but
    costs some accuracy. In this translation setting, adding a bit of extra padding at
    prediction time has a mildly positive effect on the final result.
    A follow-up optimization is to reorganize the training data: e.g. put sequences
    shorter than 10 into one batch padded to length 10, and sequences of 10-20 into a
    batch padded to length 20. Training with a single fixed-length padding instead
    could increase the compute requirement by tens of times.
    """
    f_max_len = f_lengths[np.argmax(f_lengths, axis=-1)]
    l_max_len = l_lengths[np.argmax(l_lengths, axis=-1)]
    loss_value, _ = sess.run((loss, optimizer),
                             feed_dict={en_input: features[:, :f_max_len + 1],
                                        de_in_label: np.concatenate(
                                            (decoder_start, labels[:, :l_max_len + 1]), axis=-1)[:, 0:-1]})
    print(i, loss_value)
    if i % 10 == 0 and (min_v > loss_value):
        saver.save(sess, model_path + "ner.model", global_step=int(loss_value * 1000))
        min_v = loss_value
    """
    if i % 1 == 0:
        # save_model()
        # saver.save(sess, model_path + "ner.model", global_step=int(loss_value * 1000))
        pred = sess.run(output, feed_dict={en_input: features[:, :f_max_len + 1],
                                           de_in_label: np.concatenate((decoder_start,
                                                                        labels[:, :l_max_len + 1]),
                                                                       axis=-1)[:, 0:-1]})
        print(i, np.sum(np.sum(np.argmax(pred, axis=2)[:, 1:-1] == labels[:, :l_max_len], axis=1) == l_max_len))
    """
sess.close()
al(shape=[step_dimension + self.units * 2, self.units * 2]) / 10000) self.de_b_r_z = tf.Variable(tf.truncated_normal(shape=[self.units * 2, ]) / 10000) self.de_w_h = tf.Variable(tf.truncated_normal(shape=[step_dimension + self.units * 2, self.units]) / 10000) self.de_b_h = tf.Variable(tf.truncated_normal(shape=[self.units, ]) / 10000) def de_cond(self, i, de_embeded, de_gru_output, encoder_output): return i < tf.shape(de_embeded)[1] def de_gru(self, i, de_embeded, de_gru_output, encoder_output): step_in = de_embeded[:, i]
identifier_body
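The GRU cell in the sample above fuses the reset and update gates into a single matmul over the concatenated [input; state]. As a minimal NumPy sketch of that one time step (names are illustrative, not from the sample):

import numpy as np

def gru_step(step_in, last_state, w_r_z, b_r_z, w_h, b_h):
    # One time step, mirroring GruCell.en_gru: the fused weight w_r_z maps
    # [input; state] -> [reset gate; update gate] in a single matmul.
    sigmoid = lambda x: 1.0 / (1.0 + np.exp(-x))
    in_concat = np.concatenate((step_in, last_state), axis=-1)
    r, z = np.split(sigmoid(in_concat @ w_r_z + b_r_z), 2, axis=-1)
    h_cand = np.tanh(np.concatenate((step_in, r * last_state), axis=-1) @ w_h + b_h)
    # The sample computes h = z * old_state + (1 - z) * candidate, a valid GRU
    # variant (the Keras convention swaps the roles of z and 1 - z).
    return z * last_state + (1 - z) * h_cand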
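The training-loop comment in the sample describes bucketing sequences by length so each batch pads only to its own bucket. A hedged sketch of that idea with tf.data, assuming parse_fn is changed to return the unpadded sequences (bucket boundaries and batch sizes below are illustrative, not tuned values from the sample):

import tensorflow as tf

def bucketed_input_fn(file_path):
    # Group examples of similar length; each bucket pads to its own boundary
    # instead of the global SEQ_LEN, saving compute on short sequences.
    ds = tf.data.TFRecordDataset(file_path).map(parse_fn)
    bucketing = tf.data.experimental.bucket_by_sequence_length(
        element_length_func=lambda f, l, f_len, l_len: f_len,
        bucket_boundaries=[10, 20, 40, 80],          # pad to 10/20/40/80/128
        bucket_batch_sizes=[512, 512, 256, 128, 64],  # one size per bucket
        padded_shapes=([None], [None], [], []))
    return ds.apply(bucketing).repeat()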
client.go
// +build linux /* http://www.apache.org/licenses/LICENSE-2.0.txt Copyright 2015 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package client import ( "errors" "fmt" "os" "path/filepath" "strconv" "strings" "sync" "github.com/fsouza/go-dockerclient" "github.com/intelsdi-x/snap-plugin-collector-docker/config" "github.com/intelsdi-x/snap-plugin-collector-docker/fs" "github.com/intelsdi-x/snap-plugin-collector-docker/network" "github.com/intelsdi-x/snap-plugin-collector-docker/wrapper" "github.com/opencontainers/runc/libcontainer/cgroups" ) const ( endpoint string = "unix:///var/run/docker.sock" dockerVersionKey string = "Version" ) // DockerClientInterface provides methods i.a. for interaction with the docker API. type DockerClientInterface interface { ListContainersAsMap() (map[string]docker.APIContainers, error) GetStatsFromContainer(string, bool) (*wrapper.Statistics, error) InspectContainer(string) (*docker.Container, error) FindCgroupMountpoint(string) (string, error) } // DockerClient holds fsouza go-dockerclient instance ready for communication with the server endpoint `unix:///var/run/docker.sock`, // cache instance which is used to store output from docker container inspect (to avoid execute inspect request multiply times, it is called only once per container) // and diskUsageCollector which is responsible for collecting container disk usage (based on `du -u` command) in the background type DockerClient struct { cl *docker.Client inspectCache map[string]*docker.Container inspectMutex sync.Mutex diskUsageCollector fs.DiskUsageCollector } type deviceInfo struct { device string major string minor string } // NewDockerClient returns dockerClient instance ready for communication with the server endpoint `unix:///var/run/docker.sock` func NewDockerClient() (*DockerClient, error) { client, err := docker.NewClient(endpoint) if err != nil { return nil, fmt.Errorf("Cannot initialize docker client instance with the given server endpoint `%s`, err=%v", endpoint, err) } dc := &DockerClient{ cl: client, inspectCache: map[string]*docker.Container{}, diskUsageCollector: fs.DiskUsageCollector{}, } dc.diskUsageCollector.Init() // get version of docker engine version, err := dc.version() if err != nil { return nil, err } config.DockerVersion = version return dc, nil } // FindCgroupMountpoint returns cgroup mountpoint of a given subsystem func (dc *DockerClient) FindCgroupMountpoint(subsystem string) (string, error)
// GetShortID returns short container ID (12 chars) func GetShortID(dockerID string) (string, error) { if len(dockerID) < 12 { return "", fmt.Errorf("Docker id %v is too short (the length of id should equal at least 12)", dockerID) } return dockerID[:12], nil } // GetStatsFromContainer returns docker containers stats: cgroups stats (cpu usage, memory usage, etc.) and network stats (tx_bytes, rx_bytes etc.); // notes that incoming container id has to be full-length to be able to inspect container func (dc *DockerClient) GetStatsFromContainer(id string, collectFs bool) (*wrapper.Statistics, error) { var ( err error pid int workingSet uint64 container = &docker.Container{} groupWrap = wrapper.Cgroups2Stats // wrapper for cgroup name and interface for stats extraction stats = wrapper.NewStatistics() ) if !isHost(id) { if !isFullLengthID(id) { return nil, fmt.Errorf("Container id %+v is not fully-length - cannot inspect container", id) } // inspect container based only on fully-length container id. container, err = dc.InspectContainer(id) if err != nil { return nil, err } // take docker container PID pid = container.State.Pid } for cg, stat := range groupWrap { groupPath, err := getSubsystemPath(cg, id) if err != nil { fmt.Fprintln(os.Stderr, "Cannot found subsystem path for cgroup=", cg, " for container id=", container) continue } // get cgroup stats for given docker err = stat.GetStats(groupPath, stats.CgroupStats) if err != nil { // just log about it if isHost(id) { fmt.Fprintln(os.Stderr, "Cannot obtain cgroups statistics for host, err=", err) } else { fmt.Fprintln(os.Stderr, "Cannot obtain cgroups statistics for container: id=", id, ", image=", container.Image, ", name=", container.Name, ", err=", err) } continue } } // calculate additional stats memory:working_set based on memory_stats if totalInactiveAnon, ok := stats.CgroupStats.MemoryStats.Stats["total_inactive_anon"]; ok { workingSet = stats.CgroupStats.MemoryStats.Usage.Usage if workingSet < totalInactiveAnon { workingSet = 0 } else { workingSet -= totalInactiveAnon } if totalInactiveFile, ok := stats.CgroupStats.MemoryStats.Stats["total_inactive_file"]; ok { if workingSet < totalInactiveFile { workingSet = 0 } else { workingSet -= totalInactiveFile } } } stats.CgroupStats.MemoryStats.Stats["working_set"] = workingSet if !isHost(id) { rootFs := "/" stats.Network, err = network.NetworkStatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get network stats, containerID=%+v, pid %d: %v", container.ID, pid, err) } stats.Connection.Tcp, err = network.TcpStatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get tcp stats from pid %d: %v", pid, err) } stats.Connection.Tcp6, err = network.Tcp6StatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get tcp6 stats from pid %d: %v", pid, err) } } else { stats.Network, err = network.NetworkStatsFromRoot() if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get network stats, containerID=%v, %v", id, err) } } if collectFs { stats.Filesystem, err = fs.GetFsStats(container) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get filesystem stats for docker: %v, err=%v", id, err) } } return stats, nil } // InspectContainer returns information about the container with given ID func (dc *DockerClient) InspectContainer(id string) (*docker.Container, error) { dc.inspectMutex.Lock() defer dc.inspectMutex.Unlock() 
// check if the inspect info is already stored in inspectCache if info, haveInfo := dc.inspectCache[id]; haveInfo { return info, nil } info, err := dc.cl.InspectContainer(id) if err != nil { return nil, err } dc.inspectCache[id] = info return info, nil } // ListContainersAsMap returns list of all available docker containers and base information about them (status, uptime, etc.) func (dc *DockerClient) ListContainersAsMap() (map[string]docker.APIContainers, error) { containers := make(map[string]docker.APIContainers) containerList, err := dc.cl.ListContainers(docker.ListContainersOptions{}) if err != nil { return nil, err } for _, cont := range containerList { shortID, err := GetShortID(cont.ID) if err != nil { return nil, err } containers[shortID] = cont } containers["root"] = docker.APIContainers{ID: "/"} if len(containers) == 0 { return nil, errors.New("No docker container found") } return containers, nil } func getSubsystemPath(subsystem string, id string) (string, error) { var subsystemPath string systemSlice := "system.slice" groupPath, err := cgroups.FindCgroupMountpoint(subsystem) if err != nil { fmt.Fprintf(os.Stderr, "[WARNING] Could not find mount point for %v\n", subsystem) return "", err } if isHost(id) { if isRunningSystemd() { subsystemPath = filepath.Join(groupPath, systemSlice) } else { subsystemPath = groupPath } return subsystemPath, nil } if isFsCgroupParent(groupPath) { // default cgroupfs parent is used for container subsystemPath = filepath.Join(groupPath, "docker", id) } else { // cgroup is created under systemd.slice subsystemPath = filepath.Join(groupPath, systemSlice, "docker-"+id+".scope") } return subsystemPath, nil } // isFullLengthID returns true if docker ID is a full-length (64 chars) func isFullLengthID(dockerID string) bool { if len(dockerID) == 64 { return true } return false } // isFsCgroupParent returns true if the docker was run with default cgroup parent func isFsCgroupParent(groupPath string) bool { fi, err := os.Lstat(filepath.Join(groupPath, "docker")) if err != nil { return false } return fi.IsDir() } // isRunningSystemd returns true if the host was booted with systemd func isRunningSystemd() bool { fi, err := os.Lstat("/run/systemd/system") if err != nil { return false } return fi.IsDir() } // isHost returns true if a given id pointing to host func isHost(id string) bool { if id == "/" { // it's a host return true } return false } // version returns version of docker engine func (dc *DockerClient) version() (version []int, _ error) { version = []int{0, 0} env, err := dc.cl.Version() if err != nil { return version, err } parseInt := func(str string, defVal int) int { val, err := strconv.ParseInt(str, 10, 64) if err != nil { return defVal } return int(val) } for _, kv := range *env { kvs := strings.Split(kv, "=") if len(kvs) < 2 { return nil, fmt.Errorf("Cannot retrive the version of docker engine, is `%v`, expected e.g.`Version = 1.10`", kv) } if kvs[0] != dockerVersionKey { continue } versionSplit := strings.Split(kvs[1], ".") if len(versionSplit) < 2 { return nil, fmt.Errorf("Invalid format of docker engine version, is `%v`, expected e.g. `1.10", kvs[1]) } version := []int{parseInt(versionSplit[0], 0), parseInt(versionSplit[1], 0)} return version, nil } return version, nil }
{ return cgroups.FindCgroupMountpoint(subsystem) }
identifier_body
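The working_set calculation in GetStatsFromContainer above is a chain of saturating subtractions over the cgroup memory stats. A small Python sketch of just that logic (dict keys mirror the cgroup stat names; not part of the plugin):

def working_set(usage, mem_stats):
    # Mirrors GetStatsFromContainer: start from total usage and subtract the
    # inactive anon/file pages, clamping at zero after each subtraction.
    ws = 0
    if "total_inactive_anon" in mem_stats:
        ws = usage
        ws = 0 if ws < mem_stats["total_inactive_anon"] else ws - mem_stats["total_inactive_anon"]
        if "total_inactive_file" in mem_stats:
            ws = 0 if ws < mem_stats["total_inactive_file"] else ws - mem_stats["total_inactive_file"]
    return ws

# e.g. working_set(100, {"total_inactive_anon": 30, "total_inactive_file": 80}) == 0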
client.go
// +build linux /* http://www.apache.org/licenses/LICENSE-2.0.txt Copyright 2015 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package client import ( "errors" "fmt" "os" "path/filepath" "strconv" "strings" "sync" "github.com/fsouza/go-dockerclient" "github.com/intelsdi-x/snap-plugin-collector-docker/config" "github.com/intelsdi-x/snap-plugin-collector-docker/fs" "github.com/intelsdi-x/snap-plugin-collector-docker/network" "github.com/intelsdi-x/snap-plugin-collector-docker/wrapper" "github.com/opencontainers/runc/libcontainer/cgroups" ) const ( endpoint string = "unix:///var/run/docker.sock" dockerVersionKey string = "Version" ) // DockerClientInterface provides methods i.a. for interaction with the docker API. type DockerClientInterface interface { ListContainersAsMap() (map[string]docker.APIContainers, error) GetStatsFromContainer(string, bool) (*wrapper.Statistics, error) InspectContainer(string) (*docker.Container, error) FindCgroupMountpoint(string) (string, error) } // DockerClient holds fsouza go-dockerclient instance ready for communication with the server endpoint `unix:///var/run/docker.sock`, // cache instance which is used to store output from docker container inspect (to avoid execute inspect request multiply times, it is called only once per container) // and diskUsageCollector which is responsible for collecting container disk usage (based on `du -u` command) in the background type DockerClient struct { cl *docker.Client inspectCache map[string]*docker.Container inspectMutex sync.Mutex diskUsageCollector fs.DiskUsageCollector } type deviceInfo struct { device string major string minor string } // NewDockerClient returns dockerClient instance ready for communication with the server endpoint `unix:///var/run/docker.sock` func NewDockerClient() (*DockerClient, error) { client, err := docker.NewClient(endpoint) if err != nil { return nil, fmt.Errorf("Cannot initialize docker client instance with the given server endpoint `%s`, err=%v", endpoint, err) } dc := &DockerClient{ cl: client, inspectCache: map[string]*docker.Container{}, diskUsageCollector: fs.DiskUsageCollector{}, } dc.diskUsageCollector.Init() // get version of docker engine version, err := dc.version() if err != nil { return nil, err } config.DockerVersion = version return dc, nil } // FindCgroupMountpoint returns cgroup mountpoint of a given subsystem func (dc *DockerClient) FindCgroupMountpoint(subsystem string) (string, error) { return cgroups.FindCgroupMountpoint(subsystem) } // GetShortID returns short container ID (12 chars) func GetShortID(dockerID string) (string, error) { if len(dockerID) < 12 { return "", fmt.Errorf("Docker id %v is too short (the length of id should equal at least 12)", dockerID) } return dockerID[:12], nil } // GetStatsFromContainer returns docker containers stats: cgroups stats (cpu usage, memory usage, etc.) and network stats (tx_bytes, rx_bytes etc.);
func (dc *DockerClient) GetStatsFromContainer(id string, collectFs bool) (*wrapper.Statistics, error) { var ( err error pid int workingSet uint64 container = &docker.Container{} groupWrap = wrapper.Cgroups2Stats // wrapper for cgroup name and interface for stats extraction stats = wrapper.NewStatistics() ) if !isHost(id) { if !isFullLengthID(id) { return nil, fmt.Errorf("Container id %+v is not fully-length - cannot inspect container", id) } // inspect container based only on fully-length container id. container, err = dc.InspectContainer(id) if err != nil { return nil, err } // take docker container PID pid = container.State.Pid } for cg, stat := range groupWrap { groupPath, err := getSubsystemPath(cg, id) if err != nil { fmt.Fprintln(os.Stderr, "Cannot found subsystem path for cgroup=", cg, " for container id=", container) continue } // get cgroup stats for given docker err = stat.GetStats(groupPath, stats.CgroupStats) if err != nil { // just log about it if isHost(id) { fmt.Fprintln(os.Stderr, "Cannot obtain cgroups statistics for host, err=", err) } else { fmt.Fprintln(os.Stderr, "Cannot obtain cgroups statistics for container: id=", id, ", image=", container.Image, ", name=", container.Name, ", err=", err) } continue } } // calculate additional stats memory:working_set based on memory_stats if totalInactiveAnon, ok := stats.CgroupStats.MemoryStats.Stats["total_inactive_anon"]; ok { workingSet = stats.CgroupStats.MemoryStats.Usage.Usage if workingSet < totalInactiveAnon { workingSet = 0 } else { workingSet -= totalInactiveAnon } if totalInactiveFile, ok := stats.CgroupStats.MemoryStats.Stats["total_inactive_file"]; ok { if workingSet < totalInactiveFile { workingSet = 0 } else { workingSet -= totalInactiveFile } } } stats.CgroupStats.MemoryStats.Stats["working_set"] = workingSet if !isHost(id) { rootFs := "/" stats.Network, err = network.NetworkStatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get network stats, containerID=%+v, pid %d: %v", container.ID, pid, err) } stats.Connection.Tcp, err = network.TcpStatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get tcp stats from pid %d: %v", pid, err) } stats.Connection.Tcp6, err = network.Tcp6StatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get tcp6 stats from pid %d: %v", pid, err) } } else { stats.Network, err = network.NetworkStatsFromRoot() if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get network stats, containerID=%v, %v", id, err) } } if collectFs { stats.Filesystem, err = fs.GetFsStats(container) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get filesystem stats for docker: %v, err=%v", id, err) } } return stats, nil } // InspectContainer returns information about the container with given ID func (dc *DockerClient) InspectContainer(id string) (*docker.Container, error) { dc.inspectMutex.Lock() defer dc.inspectMutex.Unlock() // check if the inspect info is already stored in inspectCache if info, haveInfo := dc.inspectCache[id]; haveInfo { return info, nil } info, err := dc.cl.InspectContainer(id) if err != nil { return nil, err } dc.inspectCache[id] = info return info, nil } // ListContainersAsMap returns list of all available docker containers and base information about them (status, uptime, etc.) 
func (dc *DockerClient) ListContainersAsMap() (map[string]docker.APIContainers, error) { containers := make(map[string]docker.APIContainers) containerList, err := dc.cl.ListContainers(docker.ListContainersOptions{}) if err != nil { return nil, err } for _, cont := range containerList { shortID, err := GetShortID(cont.ID) if err != nil { return nil, err } containers[shortID] = cont } containers["root"] = docker.APIContainers{ID: "/"} if len(containers) == 0 { return nil, errors.New("No docker container found") } return containers, nil } func getSubsystemPath(subsystem string, id string) (string, error) { var subsystemPath string systemSlice := "system.slice" groupPath, err := cgroups.FindCgroupMountpoint(subsystem) if err != nil { fmt.Fprintf(os.Stderr, "[WARNING] Could not find mount point for %v\n", subsystem) return "", err } if isHost(id) { if isRunningSystemd() { subsystemPath = filepath.Join(groupPath, systemSlice) } else { subsystemPath = groupPath } return subsystemPath, nil } if isFsCgroupParent(groupPath) { // default cgroupfs parent is used for container subsystemPath = filepath.Join(groupPath, "docker", id) } else { // cgroup is created under systemd.slice subsystemPath = filepath.Join(groupPath, systemSlice, "docker-"+id+".scope") } return subsystemPath, nil } // isFullLengthID returns true if docker ID is a full-length (64 chars) func isFullLengthID(dockerID string) bool { if len(dockerID) == 64 { return true } return false } // isFsCgroupParent returns true if the docker was run with default cgroup parent func isFsCgroupParent(groupPath string) bool { fi, err := os.Lstat(filepath.Join(groupPath, "docker")) if err != nil { return false } return fi.IsDir() } // isRunningSystemd returns true if the host was booted with systemd func isRunningSystemd() bool { fi, err := os.Lstat("/run/systemd/system") if err != nil { return false } return fi.IsDir() } // isHost returns true if a given id pointing to host func isHost(id string) bool { if id == "/" { // it's a host return true } return false } // version returns version of docker engine func (dc *DockerClient) version() (version []int, _ error) { version = []int{0, 0} env, err := dc.cl.Version() if err != nil { return version, err } parseInt := func(str string, defVal int) int { val, err := strconv.ParseInt(str, 10, 64) if err != nil { return defVal } return int(val) } for _, kv := range *env { kvs := strings.Split(kv, "=") if len(kvs) < 2 { return nil, fmt.Errorf("Cannot retrive the version of docker engine, is `%v`, expected e.g.`Version = 1.10`", kv) } if kvs[0] != dockerVersionKey { continue } versionSplit := strings.Split(kvs[1], ".") if len(versionSplit) < 2 { return nil, fmt.Errorf("Invalid format of docker engine version, is `%v`, expected e.g. `1.10", kvs[1]) } version := []int{parseInt(versionSplit[0], 0), parseInt(versionSplit[1], 0)} return version, nil } return version, nil }
// note that the incoming container id has to be full-length to be able to inspect the container
random_line_split
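The version() method in the sample scans the `KEY=VALUE` strings returned by the API for a `Version=major.minor` entry. The same parsing, restated as a Python sketch (function name hypothetical):

def parse_engine_version(env, key="Version"):
    # Mirrors DockerClient.version(): find a "Version=1.10"-style entry and
    # return (major, minor); malformed numbers fall back to 0, like parseInt.
    def to_int(s):
        try:
            return int(s)
        except ValueError:
            return 0
    for kv in env:
        parts = kv.split("=")
        if len(parts) < 2:
            raise ValueError("cannot retrieve engine version from %r" % kv)
        if parts[0] != key:
            continue
        nums = parts[1].split(".")
        if len(nums) < 2:
            raise ValueError("invalid version format %r, expected e.g. 1.10" % parts[1])
        return to_int(nums[0]), to_int(nums[1])
    return 0, 0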
client.go
// +build linux /* http://www.apache.org/licenses/LICENSE-2.0.txt Copyright 2015 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package client import ( "errors" "fmt" "os" "path/filepath" "strconv" "strings" "sync" "github.com/fsouza/go-dockerclient" "github.com/intelsdi-x/snap-plugin-collector-docker/config" "github.com/intelsdi-x/snap-plugin-collector-docker/fs" "github.com/intelsdi-x/snap-plugin-collector-docker/network" "github.com/intelsdi-x/snap-plugin-collector-docker/wrapper" "github.com/opencontainers/runc/libcontainer/cgroups" ) const ( endpoint string = "unix:///var/run/docker.sock" dockerVersionKey string = "Version" ) // DockerClientInterface provides methods i.a. for interaction with the docker API. type DockerClientInterface interface { ListContainersAsMap() (map[string]docker.APIContainers, error) GetStatsFromContainer(string, bool) (*wrapper.Statistics, error) InspectContainer(string) (*docker.Container, error) FindCgroupMountpoint(string) (string, error) } // DockerClient holds fsouza go-dockerclient instance ready for communication with the server endpoint `unix:///var/run/docker.sock`, // cache instance which is used to store output from docker container inspect (to avoid execute inspect request multiply times, it is called only once per container) // and diskUsageCollector which is responsible for collecting container disk usage (based on `du -u` command) in the background type DockerClient struct { cl *docker.Client inspectCache map[string]*docker.Container inspectMutex sync.Mutex diskUsageCollector fs.DiskUsageCollector } type deviceInfo struct { device string major string minor string } // NewDockerClient returns dockerClient instance ready for communication with the server endpoint `unix:///var/run/docker.sock` func NewDockerClient() (*DockerClient, error) { client, err := docker.NewClient(endpoint) if err != nil { return nil, fmt.Errorf("Cannot initialize docker client instance with the given server endpoint `%s`, err=%v", endpoint, err) } dc := &DockerClient{ cl: client, inspectCache: map[string]*docker.Container{}, diskUsageCollector: fs.DiskUsageCollector{}, } dc.diskUsageCollector.Init() // get version of docker engine version, err := dc.version() if err != nil { return nil, err } config.DockerVersion = version return dc, nil } // FindCgroupMountpoint returns cgroup mountpoint of a given subsystem func (dc *DockerClient) FindCgroupMountpoint(subsystem string) (string, error) { return cgroups.FindCgroupMountpoint(subsystem) } // GetShortID returns short container ID (12 chars) func GetShortID(dockerID string) (string, error) { if len(dockerID) < 12 { return "", fmt.Errorf("Docker id %v is too short (the length of id should equal at least 12)", dockerID) } return dockerID[:12], nil } // GetStatsFromContainer returns docker containers stats: cgroups stats (cpu usage, memory usage, etc.) 
and network stats (tx_bytes, rx_bytes etc.); // notes that incoming container id has to be full-length to be able to inspect container func (dc *DockerClient) GetStatsFromContainer(id string, collectFs bool) (*wrapper.Statistics, error) { var ( err error pid int workingSet uint64 container = &docker.Container{} groupWrap = wrapper.Cgroups2Stats // wrapper for cgroup name and interface for stats extraction stats = wrapper.NewStatistics() ) if !isHost(id) { if !isFullLengthID(id) { return nil, fmt.Errorf("Container id %+v is not fully-length - cannot inspect container", id) } // inspect container based only on fully-length container id. container, err = dc.InspectContainer(id) if err != nil { return nil, err } // take docker container PID pid = container.State.Pid } for cg, stat := range groupWrap { groupPath, err := getSubsystemPath(cg, id) if err != nil { fmt.Fprintln(os.Stderr, "Cannot found subsystem path for cgroup=", cg, " for container id=", container) continue } // get cgroup stats for given docker err = stat.GetStats(groupPath, stats.CgroupStats) if err != nil { // just log about it if isHost(id) { fmt.Fprintln(os.Stderr, "Cannot obtain cgroups statistics for host, err=", err) } else { fmt.Fprintln(os.Stderr, "Cannot obtain cgroups statistics for container: id=", id, ", image=", container.Image, ", name=", container.Name, ", err=", err) } continue } } // calculate additional stats memory:working_set based on memory_stats if totalInactiveAnon, ok := stats.CgroupStats.MemoryStats.Stats["total_inactive_anon"]; ok { workingSet = stats.CgroupStats.MemoryStats.Usage.Usage if workingSet < totalInactiveAnon { workingSet = 0 } else { workingSet -= totalInactiveAnon } if totalInactiveFile, ok := stats.CgroupStats.MemoryStats.Stats["total_inactive_file"]; ok { if workingSet < totalInactiveFile { workingSet = 0 } else { workingSet -= totalInactiveFile } } } stats.CgroupStats.MemoryStats.Stats["working_set"] = workingSet if !isHost(id) { rootFs := "/" stats.Network, err = network.NetworkStatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get network stats, containerID=%+v, pid %d: %v", container.ID, pid, err) } stats.Connection.Tcp, err = network.TcpStatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get tcp stats from pid %d: %v", pid, err) } stats.Connection.Tcp6, err = network.Tcp6StatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get tcp6 stats from pid %d: %v", pid, err) } } else { stats.Network, err = network.NetworkStatsFromRoot() if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get network stats, containerID=%v, %v", id, err) } } if collectFs { stats.Filesystem, err = fs.GetFsStats(container) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get filesystem stats for docker: %v, err=%v", id, err) } } return stats, nil } // InspectContainer returns information about the container with given ID func (dc *DockerClient) InspectContainer(id string) (*docker.Container, error) { dc.inspectMutex.Lock() defer dc.inspectMutex.Unlock() // check if the inspect info is already stored in inspectCache if info, haveInfo := dc.inspectCache[id]; haveInfo { return info, nil } info, err := dc.cl.InspectContainer(id) if err != nil
dc.inspectCache[id] = info return info, nil } // ListContainersAsMap returns list of all available docker containers and base information about them (status, uptime, etc.) func (dc *DockerClient) ListContainersAsMap() (map[string]docker.APIContainers, error) { containers := make(map[string]docker.APIContainers) containerList, err := dc.cl.ListContainers(docker.ListContainersOptions{}) if err != nil { return nil, err } for _, cont := range containerList { shortID, err := GetShortID(cont.ID) if err != nil { return nil, err } containers[shortID] = cont } containers["root"] = docker.APIContainers{ID: "/"} if len(containers) == 0 { return nil, errors.New("No docker container found") } return containers, nil } func getSubsystemPath(subsystem string, id string) (string, error) { var subsystemPath string systemSlice := "system.slice" groupPath, err := cgroups.FindCgroupMountpoint(subsystem) if err != nil { fmt.Fprintf(os.Stderr, "[WARNING] Could not find mount point for %v\n", subsystem) return "", err } if isHost(id) { if isRunningSystemd() { subsystemPath = filepath.Join(groupPath, systemSlice) } else { subsystemPath = groupPath } return subsystemPath, nil } if isFsCgroupParent(groupPath) { // default cgroupfs parent is used for container subsystemPath = filepath.Join(groupPath, "docker", id) } else { // cgroup is created under systemd.slice subsystemPath = filepath.Join(groupPath, systemSlice, "docker-"+id+".scope") } return subsystemPath, nil } // isFullLengthID returns true if docker ID is a full-length (64 chars) func isFullLengthID(dockerID string) bool { if len(dockerID) == 64 { return true } return false } // isFsCgroupParent returns true if the docker was run with default cgroup parent func isFsCgroupParent(groupPath string) bool { fi, err := os.Lstat(filepath.Join(groupPath, "docker")) if err != nil { return false } return fi.IsDir() } // isRunningSystemd returns true if the host was booted with systemd func isRunningSystemd() bool { fi, err := os.Lstat("/run/systemd/system") if err != nil { return false } return fi.IsDir() } // isHost returns true if a given id pointing to host func isHost(id string) bool { if id == "/" { // it's a host return true } return false } // version returns version of docker engine func (dc *DockerClient) version() (version []int, _ error) { version = []int{0, 0} env, err := dc.cl.Version() if err != nil { return version, err } parseInt := func(str string, defVal int) int { val, err := strconv.ParseInt(str, 10, 64) if err != nil { return defVal } return int(val) } for _, kv := range *env { kvs := strings.Split(kv, "=") if len(kvs) < 2 { return nil, fmt.Errorf("Cannot retrive the version of docker engine, is `%v`, expected e.g.`Version = 1.10`", kv) } if kvs[0] != dockerVersionKey { continue } versionSplit := strings.Split(kvs[1], ".") if len(versionSplit) < 2 { return nil, fmt.Errorf("Invalid format of docker engine version, is `%v`, expected e.g. `1.10", kvs[1]) } version := []int{parseInt(versionSplit[0], 0), parseInt(versionSplit[1], 0)} return version, nil } return version, nil }
{ return nil, err }
conditional_block
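getSubsystemPath in the sample picks between three layouts: host stats at the mountpoint (or under system.slice when booted with systemd), and container stats under docker/<id> for the default cgroupfs parent or in a systemd scope otherwise. The branching, restated as a short Python sketch (paths illustrative):

import os

def subsystem_path(group_path, container_id, is_host, running_systemd):
    # Mirrors getSubsystemPath's three-way decision.
    system_slice = "system.slice"
    if is_host:
        return os.path.join(group_path, system_slice) if running_systemd else group_path
    if os.path.isdir(os.path.join(group_path, "docker")):  # cgroupfs parent in use
        return os.path.join(group_path, "docker", container_id)
    return os.path.join(group_path, system_slice, "docker-" + container_id + ".scope")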
client.go
// +build linux /* http://www.apache.org/licenses/LICENSE-2.0.txt Copyright 2015 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package client import ( "errors" "fmt" "os" "path/filepath" "strconv" "strings" "sync" "github.com/fsouza/go-dockerclient" "github.com/intelsdi-x/snap-plugin-collector-docker/config" "github.com/intelsdi-x/snap-plugin-collector-docker/fs" "github.com/intelsdi-x/snap-plugin-collector-docker/network" "github.com/intelsdi-x/snap-plugin-collector-docker/wrapper" "github.com/opencontainers/runc/libcontainer/cgroups" ) const ( endpoint string = "unix:///var/run/docker.sock" dockerVersionKey string = "Version" ) // DockerClientInterface provides methods i.a. for interaction with the docker API. type DockerClientInterface interface { ListContainersAsMap() (map[string]docker.APIContainers, error) GetStatsFromContainer(string, bool) (*wrapper.Statistics, error) InspectContainer(string) (*docker.Container, error) FindCgroupMountpoint(string) (string, error) } // DockerClient holds fsouza go-dockerclient instance ready for communication with the server endpoint `unix:///var/run/docker.sock`, // cache instance which is used to store output from docker container inspect (to avoid execute inspect request multiply times, it is called only once per container) // and diskUsageCollector which is responsible for collecting container disk usage (based on `du -u` command) in the background type DockerClient struct { cl *docker.Client inspectCache map[string]*docker.Container inspectMutex sync.Mutex diskUsageCollector fs.DiskUsageCollector } type deviceInfo struct { device string major string minor string } // NewDockerClient returns dockerClient instance ready for communication with the server endpoint `unix:///var/run/docker.sock` func NewDockerClient() (*DockerClient, error) { client, err := docker.NewClient(endpoint) if err != nil { return nil, fmt.Errorf("Cannot initialize docker client instance with the given server endpoint `%s`, err=%v", endpoint, err) } dc := &DockerClient{ cl: client, inspectCache: map[string]*docker.Container{}, diskUsageCollector: fs.DiskUsageCollector{}, } dc.diskUsageCollector.Init() // get version of docker engine version, err := dc.version() if err != nil { return nil, err } config.DockerVersion = version return dc, nil } // FindCgroupMountpoint returns cgroup mountpoint of a given subsystem func (dc *DockerClient) FindCgroupMountpoint(subsystem string) (string, error) { return cgroups.FindCgroupMountpoint(subsystem) } // GetShortID returns short container ID (12 chars) func GetShortID(dockerID string) (string, error) { if len(dockerID) < 12 { return "", fmt.Errorf("Docker id %v is too short (the length of id should equal at least 12)", dockerID) } return dockerID[:12], nil } // GetStatsFromContainer returns docker containers stats: cgroups stats (cpu usage, memory usage, etc.) 
and network stats (tx_bytes, rx_bytes etc.); // notes that incoming container id has to be full-length to be able to inspect container func (dc *DockerClient) GetStatsFromContainer(id string, collectFs bool) (*wrapper.Statistics, error) { var ( err error pid int workingSet uint64 container = &docker.Container{} groupWrap = wrapper.Cgroups2Stats // wrapper for cgroup name and interface for stats extraction stats = wrapper.NewStatistics() ) if !isHost(id) { if !isFullLengthID(id) { return nil, fmt.Errorf("Container id %+v is not fully-length - cannot inspect container", id) } // inspect container based only on fully-length container id. container, err = dc.InspectContainer(id) if err != nil { return nil, err } // take docker container PID pid = container.State.Pid } for cg, stat := range groupWrap { groupPath, err := getSubsystemPath(cg, id) if err != nil { fmt.Fprintln(os.Stderr, "Cannot found subsystem path for cgroup=", cg, " for container id=", container) continue } // get cgroup stats for given docker err = stat.GetStats(groupPath, stats.CgroupStats) if err != nil { // just log about it if isHost(id) { fmt.Fprintln(os.Stderr, "Cannot obtain cgroups statistics for host, err=", err) } else { fmt.Fprintln(os.Stderr, "Cannot obtain cgroups statistics for container: id=", id, ", image=", container.Image, ", name=", container.Name, ", err=", err) } continue } } // calculate additional stats memory:working_set based on memory_stats if totalInactiveAnon, ok := stats.CgroupStats.MemoryStats.Stats["total_inactive_anon"]; ok { workingSet = stats.CgroupStats.MemoryStats.Usage.Usage if workingSet < totalInactiveAnon { workingSet = 0 } else { workingSet -= totalInactiveAnon } if totalInactiveFile, ok := stats.CgroupStats.MemoryStats.Stats["total_inactive_file"]; ok { if workingSet < totalInactiveFile { workingSet = 0 } else { workingSet -= totalInactiveFile } } } stats.CgroupStats.MemoryStats.Stats["working_set"] = workingSet if !isHost(id) { rootFs := "/" stats.Network, err = network.NetworkStatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get network stats, containerID=%+v, pid %d: %v", container.ID, pid, err) } stats.Connection.Tcp, err = network.TcpStatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get tcp stats from pid %d: %v", pid, err) } stats.Connection.Tcp6, err = network.Tcp6StatsFromProc(rootFs, pid) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get tcp6 stats from pid %d: %v", pid, err) } } else { stats.Network, err = network.NetworkStatsFromRoot() if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get network stats, containerID=%v, %v", id, err) } } if collectFs { stats.Filesystem, err = fs.GetFsStats(container) if err != nil { // only log error message fmt.Fprintf(os.Stderr, "Unable to get filesystem stats for docker: %v, err=%v", id, err) } } return stats, nil } // InspectContainer returns information about the container with given ID func (dc *DockerClient) InspectContainer(id string) (*docker.Container, error) { dc.inspectMutex.Lock() defer dc.inspectMutex.Unlock() // check if the inspect info is already stored in inspectCache if info, haveInfo := dc.inspectCache[id]; haveInfo { return info, nil } info, err := dc.cl.InspectContainer(id) if err != nil { return nil, err } dc.inspectCache[id] = info return info, nil } // ListContainersAsMap returns list of all available docker containers and base information about them (status, 
uptime, etc.) func (dc *DockerClient) ListContainersAsMap() (map[string]docker.APIContainers, error) { containers := make(map[string]docker.APIContainers) containerList, err := dc.cl.ListContainers(docker.ListContainersOptions{}) if err != nil { return nil, err } for _, cont := range containerList { shortID, err := GetShortID(cont.ID) if err != nil { return nil, err } containers[shortID] = cont } containers["root"] = docker.APIContainers{ID: "/"} if len(containers) == 0 { return nil, errors.New("No docker container found") } return containers, nil } func getSubsystemPath(subsystem string, id string) (string, error) { var subsystemPath string systemSlice := "system.slice" groupPath, err := cgroups.FindCgroupMountpoint(subsystem) if err != nil { fmt.Fprintf(os.Stderr, "[WARNING] Could not find mount point for %v\n", subsystem) return "", err } if isHost(id) { if isRunningSystemd() { subsystemPath = filepath.Join(groupPath, systemSlice) } else { subsystemPath = groupPath } return subsystemPath, nil } if isFsCgroupParent(groupPath) { // default cgroupfs parent is used for container subsystemPath = filepath.Join(groupPath, "docker", id) } else { // cgroup is created under systemd.slice subsystemPath = filepath.Join(groupPath, systemSlice, "docker-"+id+".scope") } return subsystemPath, nil } // isFullLengthID returns true if docker ID is a full-length (64 chars) func isFullLengthID(dockerID string) bool { if len(dockerID) == 64 { return true } return false } // isFsCgroupParent returns true if the docker was run with default cgroup parent func isFsCgroupParent(groupPath string) bool { fi, err := os.Lstat(filepath.Join(groupPath, "docker")) if err != nil { return false } return fi.IsDir() } // isRunningSystemd returns true if the host was booted with systemd func
() bool { fi, err := os.Lstat("/run/systemd/system") if err != nil { return false } return fi.IsDir() } // isHost returns true if a given id pointing to host func isHost(id string) bool { if id == "/" { // it's a host return true } return false } // version returns version of docker engine func (dc *DockerClient) version() (version []int, _ error) { version = []int{0, 0} env, err := dc.cl.Version() if err != nil { return version, err } parseInt := func(str string, defVal int) int { val, err := strconv.ParseInt(str, 10, 64) if err != nil { return defVal } return int(val) } for _, kv := range *env { kvs := strings.Split(kv, "=") if len(kvs) < 2 { return nil, fmt.Errorf("Cannot retrive the version of docker engine, is `%v`, expected e.g.`Version = 1.10`", kv) } if kvs[0] != dockerVersionKey { continue } versionSplit := strings.Split(kvs[1], ".") if len(versionSplit) < 2 { return nil, fmt.Errorf("Invalid format of docker engine version, is `%v`, expected e.g. `1.10", kvs[1]) } version := []int{parseInt(versionSplit[0], 0), parseInt(versionSplit[1], 0)} return version, nil } return version, nil }
isRunningSystemd
identifier_name
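GetShortID and isFullLengthID encode the two docker ID forms the plugin cares about: the 12-character short form and the 64-character full form needed for inspection. Equivalently in Python:

def get_short_id(docker_id):
    # Mirrors GetShortID: the short form is the first 12 characters.
    if len(docker_id) < 12:
        raise ValueError("docker id %r is too short (need at least 12 chars)" % docker_id)
    return docker_id[:12]

def is_full_length_id(docker_id):
    # Mirrors isFullLengthID: only full 64-char ids can be inspected.
    return len(docker_id) == 64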
M130104.py
# -*- coding: utf-8 -*-
"""
Crawler for the China Museum for Fujian-Taiwan Kinship (中国闽台缘博物馆)
@author: lxx
http://www.mtybwg.org.cn/index.aspx
130104
Changelog:
    2020.05.10 code completed
    2020.05.10 code created
"""
import requests
import os
from bs4 import BeautifulSoup
import re
import json
from selenium import webdriver
import time

ID = "130104"


def debugPrint(message):
    if __name__ == "__main__":
        print(message)


def getText(item, newline=False):
    item = re.sub(r"<br/>", "", item, flags=re.I)
    item = re.sub(r"<br />", "", item, flags=re.I)
    item = re.sub(r"<br>", "", item, flags=re.I)
    item = re.sub(r"\r", "", item)
    item = re.sub(r"\t", "", item)
    item = re.sub(r"\xa0", "", item)
    item = re.sub(r" ", "", item)
    item = re.sub(r"\u3000", "", item)
    item = re.sub(r"&emsp;", "", item)
    item = re.sub(r"&nbsp;", "", item)
    il = re.findall(r'(?<=).*?(?=)', item)
    s = ""
    flag = 0
    flagp = 0
    for i in il:
        if i == '/' and flag != 0:
            flagp = 1
        else:
            flagp = 0
        if i == '<':
            flag = flag + 1
        elif i == '>':
            flag = flag - 1
        else:
            if flag == 0:
                s = s + i
        if flagp == 1:
            if newline:
                s = s + '\n'
            pass
    # print(s)
    return s


def getMuseumData():
    datadict = {}  # stores the scraped page data
    datadict["M_ID"] = "130104"
    datadict["M_CName"] = "中国闽台缘博物馆"
    datadict["M_EName"] = "China Museum for Fujian Taiwan kinship"
    datadict["M_Batch"] = 1
    datadict["M_Address"] = "福建泉州北清东路212号"

    # content from the official homepage
    baseurl = "http://www.mtybwg.org.cn/"  # URL to crawl
    datadict["M_Web"] = baseurl
    html = askURL(baseurl)  # save the fetched page source
    soup = BeautifulSoup(html, "html.parser")
    datadict["M_Logo"] = "http://www.mtybwg.org.cn/templates/mty/images/logo.jpg"

    # museum opening hours and tickets
    i = 0
    time = []
    item = soup.find("div", class_="top").find("ul", class_="notice").find("p")
    item = item.find("span").text
    # print(item)
    # time = item.split()
    # print(time)
    time0 = re.findall(r'开放时间:(.*))', item)
    # print(time0)
    # exit()
    datadict["M_Openingtime"] = time0[0]
    datadict["M_Ticket"] = "免费开放"

    # ticket information
    url = "http://www.mtybwg.org.cn/about/detail/249.aspx"
    html = askURL(url)
    soup = BeautifulSoup(html, "html.parser")
    # print(soup)
    item = soup.find("ul", class_="detailcon")
    # print(item)
    # item = str(item)
    time = []
    # time = re.findall(r'<(.*。)', string)
    for pi in item.find_all(style="white-space:normal;line-height:32px;margin:0cm 0cm 0pt;"):
        pi = getText(pi.text)
        time.append(pi)
    # print(time)
    datadict["M_OpeningInformation"] = time[0:2]
    datadict["M_Booking"] = time[17:20]
    datadict["M_TicketInformation"] = time[16]
    datadict["M_Triffic"] = time[10:14]

    # museum pictures (list)
    url = "http://www.mtybwg.org.cn/about/924.aspx"
    html = askURL(url)
    soup = BeautifulSoup(html, "html.parser")
    src = []
    for item in soup.find("ul", class_="detailcon").find_all("img"):
        src.append(item["src"])
    p = []
    for pi in src:
        pi = baseurl[0:-1] + pi
        p.append(pi)
    # print(p)
    datadict["M_Pictures"] = p
    # print(p)

    # museum introduction
    src.clear()
    for item in soup.find("ul", class_="detailcon").find_all("p", class_="MsoNormal"):
        # print("===========")
        item = getText(item.text)
        src.append(item)
    # print(src)
    p = []
    for pi in src:
        if len(pi) >= 10:
            p.append(pi)
    # srcs = re.findall('<img src="(.*?)"/>', str(src))
    datadict["M_Introduction"] = p

    jsondata = json.dumps(datadict, ensure_ascii=False, indent=4)
    with open("./museums/M130104.json", 'w', encoding='utf-8') as f:
        f.write(jsondata)
    return datadict
    exit()


def getCollectionsData():
    baseurl = "http://www.mtybwg.org.cn/"
    index = "http://www.mtybwg.org.cn/cangpin.aspx"
    html = askURL(index)
    soup = BeautifulSoup(html, "html.parser")
    # print(soup)
    # exit()
    href = []
    for item in soup
ref = []
    for item in soup.find("div", class_="rightcon").find("ul", class_="falllist animated").find_all("li"):
        href0 = item.find("a", class_="pic")["href"]
        href.append(href0)
    # print(href)
    # exit()
    n = len(href) - 1
    for href1 in href:
        if href1 == "http://vr1.mtybwg.org.cn/20160316/":
            break
        url = href1
        html = askURL(url)
        soup = BeautifulSoup(html, "html.parser")
        hrefa = []
        for item in soup.find("div", class_="rightcon").find("ul", class_="falllist animated").find_all("li"):
            href0 = item.find("a")["href"]
            hrefa.append(baseurl[0:-1] + href0)
        # print(hrefa)
        for href2 in hrefa:
            url = href2
            html = askURL(url)
            soup = BeautifulSoup(html, "html.parser")
            # print(type(href))
            activityDict = {}
            activityDict["ARM_In"] = ID
            Id = re.findall(r'http.*/(.*?).aspx', url)
            activityDict["A_ID"] = ID + '-' + str(Id[0])
            item = soup.find("ul", class_="infolist")
            # print(item)
            title = re.findall(r'<h1>(.*)</h1>', str(item))
            title = str(title)
            src = str(item.find_all("img"))
            src = re.findall(r'<img.*src="(.*?)"', src)
            txt0 = item.find("ul", class_="detailcon").find("p", class_="MsoNormal").text
            txt0 = getText(txt0)
            # txt0 = txt0.split()
            # # print(txt0)
            txt = []
            txt.append("活动描述:")
            txt.append(txt0)
            activityDict["A_Name"] = title
            activityDict["A_Type"] = "1"
            activityDict["A_Pictures"] = baseurl[0:-1] + src[0]
            activityDict["A_Information"] = txt
            jsondata = json.dumps(activityDict, ensure_ascii=False, indent=4)
            with open("./activities/A" + activityDict["A_ID"] + ".json", 'w', encoding='utf-8') as f:
                f.write(jsondata)
            # exit()
            pass
        pass

    # education and academic activities (WeChat-post format, so only the links are stored)
    baseurl = "http://www.mtybwg.org.cn/"
    index = "http://www.mtybwg.org.cn/{}/0-1.aspx"
    # html = askURL(index)
    for pi in {"xueshu", "xuanjiao"}:
        index0 = index.format(pi)
        html = askURL(index0)
        # print(index0)
        soup = BeautifulSoup(html, "html.parser")
        # print("hhh")
        item = soup.find("ul", class_="infolist").find("ul", class_="iflist")
        href = []
        title = []
        if pi == "xuanjiao":
            type = "3"
        else:
            type = "2"
        for li in item.find_all("li"):
            # print("=3=3=3=3=3=3=")
            # print(li)
            if li.text == "":
                pass
            else:
                href.append(li.find("a")["href"])
                title.append(li.text)
        # print(title)
        # print(href)
        n = len(title)
        for i in range(n):
            activityDict = {}
            activityDict["ARM_In"] = ID
            activityDict["A_ID"] = ID + "-" + str(i + 1)
            activityDict["A_Name"] = title[i]
            activityDict["A_Type"] = type
            activityDict["A_Information"] = baseurl[0:-1] + href[i]
            jsondata = json.dumps(activityDict, ensure_ascii=False, indent=4)
            with open("./activities/A" + activityDict["A_ID"] + ".json", 'w', encoding='utf-8') as f:
                f.write(jsondata)
    exit()


def askURL(url):
    # head = headers[0]
    head = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36(KHTML, like Gecko) Chrome / 80.0.3987.122 Safari / 537.36'
    }
    html = ""
    try:
        res = requests.get(url, headers=head)
        res.raise_for_status()
        res.encoding = res.apparent_encoding
        html = res.text
    except requests.RequestException as e:
        print(e)
    return html


def askPic(url):
    head = {
        'User-Agent': 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)'
    }
    try:
        res = requests.get(url, headers=head)
        res.raise_for_status()
        res.encoding = res.apparent_encoding
    except requests.RequestException as e:
        print(e)
    return res


if __name__ == "__main__":
    getMuseumData()
    getCollectionsData()
    getActivitiesData()
.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a",class_="pic")["href"] href.append(href0) # print(href) # exit() n = len(href) for href1 in href: url = href1 html = askURL(url) soup = BeautifulSoup(html,"html.parser") hrefa = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist falllist2 animated").find_all("li"): href0 = item.find("a")["href"] hrefa.append(baseurl[0:-1] + href0) # print(hrefa) for href2 in hrefa: url = href2 html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(type(href)) collectiondict = {} collectiondict["CRM_In"] = ID Id = re.findall(r'http.*/(.*?).aspx', url) # print(Id) collectiondict["C_ID"] = ID + '-' + str(Id[0]) item = soup.find("ul", class_="infolist") # print(item) title = re.findall(r'<h1>(.*)</h1>', str(item)) title = str(title) src = str(item.find_all("img")) src = re.findall(r'<img.*src="(.*?)"', src) txt0 = item.find("div",class_="pluscon").find("ul", class_="con").text txt0 = getText(txt0) # txt0 = txt0.split() # # print(txt0) txt = [] txt.append("藏品描述:") txt.append(txt0) collectiondict["C_Name"] = title collectiondict["C_Pictures"] = baseurl[0:-1] + src[0] collectiondict["C_Introduction"] = txt jsondata = json.dumps(collectiondict, ensure_ascii=False,indent = 4 ) with open("./collections/C"+collectiondict["C_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) # exit() pass pass def getActivitiesData(): baseurl = "http://www.mtybwg.org.cn/" # 展览 index = "http://www.mtybwg.org.cn/zhanlan.aspx" html = askURL(index) soup = BeautifulSoup(html,"html.parser") # print(soup) # exit() h
identifier_body
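The getText helper in the sample strips markup by counting '<' and '>' characters one at a time. Since the scraper already depends on BeautifulSoup, an equivalent (hedged) sketch using get_text plus the same whitespace-entity cleanup, with an illustrative name:

import re
from bs4 import BeautifulSoup

def get_text_bs(fragment, newline=False):
    # Equivalent intent to getText above: drop markup, then remove the
    # whitespace characters and entities the site pads its pages with.
    text = BeautifulSoup(fragment, "html.parser").get_text(separator="\n" if newline else "")
    return re.sub(r"[ \r\t\xa0\u3000]|&emsp;|&nbsp;", "", text)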
M130104.py
# -*- coding: utf-8 -*- """ 中国闽台缘博物馆博物馆爬虫文件 @author: lxx http://www.mtybwg.org.cn/index.aspx 130104 代码更新: 2020.05.10 代码完成 2020.05.10 代码创建 """ import requests import os from bs4 import BeautifulSoup import re import json from selenium import webdriver import time ID = "130104" def debugPrint(message): if __name__ == "__main__": print(message) def getText(item, newline=False): item = re.sub(r"<br/>","",item, flags=re.I) item = re.sub(r"<br />","",item,flags=re.I) item = re.sub(r"<br>","",item,flags=re.I) item = re.sub(r"\r","",item) item = re.sub(r"\t","",item) item = re.sub(r"\xa0","",item) item = re.sub(r" ","",item) item = re.sub(r"\u3000","",item) item = re.sub(r"&emsp;","",item) item = re.sub(r"&nbsp;","",item) il = re.findall(r'(?<=).*?(?=)', item) s = "" flag = 0 flagp = 0 for i in il: if i == '/' and flag != 0: flagp = 1 else: flagp = 0 if i == '<': flag = flag + 1 elif i == '>': flag = flag - 1 else: if flag == 0: s = s+i if flagp == 1: if newline: s = s + '\n' pass # print(s) return s def getMuseumData(): datadict = {} #用来存储爬取的网页信息 d
"] = "130104" datadict["M_CName"] = "中国闽台缘博物馆" datadict["M_EName"] = "China Museum for Fujian Taiwan kinship" datadict["M_Batch"] = 1 datadict["M_Address"] = "福建泉州北清东路212号" #官网主页相关内容 baseurl = "http://www.mtybwg.org.cn/" #要爬取的网页链接 datadict["M_Web"] = baseurl html = askURL(baseurl) # 保存获取到的网页源码 soup = BeautifulSoup(html, "html.parser") datadict["M_Logo"] = "http://www.mtybwg.org.cn/templates/mty/images/logo.jpg" # 博物馆开放时间及门票 i = 0 time = [] item = soup.find("div", class_="top").find("ul",class_="notice").find("p") item = item.find("span").text # print(item) # time = item.split() # print(time) time0 = re.findall(r'开放时间:(.*))', item) # print(time0) # exit() datadict["M_Openingtime"] = time0[0] datadict["M_Ticket"] = "免费开放" # 门票信息 url = "http://www.mtybwg.org.cn/about/detail/249.aspx" html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(soup) item = soup.find("ul",class_="detailcon") # print(item) # item = str(item) time = [] # time = re.findall(r'<(.*。)', string) for pi in item.find_all(style="white-space:normal;line-height:32px;margin:0cm 0cm 0pt;"): pi = getText(pi.text) time.append(pi) # print(time) datadict["M_OpeningInformation"] = time[0:2] datadict["M_Booking"] = time[17:20] datadict["M_TicketInformation"] = time[16] datadict["M_Triffic"] = time[10:14] # 博物馆图片(list) url = "http://www.mtybwg.org.cn/about/924.aspx" html = askURL(url) soup = BeautifulSoup(html,"html.parser") src = [] for item in soup.find("ul", class_="detailcon").find_all("img"): src.append(item["src"]) p = [] for pi in src: pi = baseurl[0:-1] + pi p.append(pi) # print(p) datadict["M_Pictures"] = p # print(p) # 博物馆介绍 src.clear() for item in soup.find("ul", class_="detailcon").find_all("p",class_="MsoNormal"): # print("===========") item = getText(item.text) src.append(item) # print(src) p = [] for pi in src: if len(pi) >= 10: p.append(pi) # srcs = re.findall('<img src="(.*?)"/>', str(src)) datadict["M_Introduction"] = p jsondata = json.dumps(datadict, ensure_ascii=False,indent = 4) with open("./museums/M130104.json", 'w', encoding='utf-8') as f: f.write(jsondata) return datadict exit() def getCollectionsData(): baseurl = "http://www.mtybwg.org.cn/" index = "http://www.mtybwg.org.cn/cangpin.aspx" html = askURL(index) soup = BeautifulSoup(html,"html.parser") # print(soup) # exit() href = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a",class_="pic")["href"] href.append(href0) # print(href) # exit() n = len(href) for href1 in href: url = href1 html = askURL(url) soup = BeautifulSoup(html,"html.parser") hrefa = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist falllist2 animated").find_all("li"): href0 = item.find("a")["href"] hrefa.append(baseurl[0:-1] + href0) # print(hrefa) for href2 in hrefa: url = href2 html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(type(href)) collectiondict = {} collectiondict["CRM_In"] = ID Id = re.findall(r'http.*/(.*?).aspx', url) # print(Id) collectiondict["C_ID"] = ID + '-' + str(Id[0]) item = soup.find("ul", class_="infolist") # print(item) title = re.findall(r'<h1>(.*)</h1>', str(item)) title = str(title) src = str(item.find_all("img")) src = re.findall(r'<img.*src="(.*?)"', src) txt0 = item.find("div",class_="pluscon").find("ul", class_="con").text txt0 = getText(txt0) # txt0 = txt0.split() # # print(txt0) txt = [] txt.append("藏品描述:") txt.append(txt0) collectiondict["C_Name"] = title collectiondict["C_Pictures"] = baseurl[0:-1] + src[0] 
collectiondict["C_Introduction"] = txt jsondata = json.dumps(collectiondict, ensure_ascii=False,indent = 4 ) with open("./collections/C"+collectiondict["C_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) # exit() pass pass def getActivitiesData(): baseurl = "http://www.mtybwg.org.cn/" # 展览 index = "http://www.mtybwg.org.cn/zhanlan.aspx" html = askURL(index) soup = BeautifulSoup(html,"html.parser") # print(soup) # exit() href = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a",class_="pic")["href"] href.append(href0) # print(href) # exit() n = len(href) - 1 for href1 in href: if href1 == "http://vr1.mtybwg.org.cn/20160316/": break url = href1 html = askURL(url) soup = BeautifulSoup(html,"html.parser") hrefa = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a")["href"] hrefa.append(baseurl[0:-1] + href0) # print(hrefa) for href2 in hrefa: url = href2 html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(type(href)) activityDict = {} activityDict["ARM_In"] = ID Id = re.findall(r'http.*/(.*?).aspx', url) activityDict["A_ID"] = ID + '-' + str(Id[0]) item = soup.find("ul", class_="infolist") # print(item) title = re.findall(r'<h1>(.*)</h1>', str(item)) title = str(title) src = str(item.find_all("img")) src = re.findall(r'<img.*src="(.*?)"', src) txt0 = item.find("ul",class_="detailcon").find("p", class_="MsoNormal").text txt0 = getText(txt0) # txt0 = txt0.split() # # print(txt0) txt = [] txt.append("活动描述:") txt.append(txt0) activityDict["A_Name"] = title activityDict["A_Type"] = "1" activityDict["A_Pictures"] = baseurl[0:-1] + src[0] activityDict["A_Information"] = txt jsondata = json.dumps(activityDict, ensure_ascii=False,indent = 4) with open("./activities/A"+activityDict["A_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) # exit() pass pass # 教育及学术活动(——微信推送格式,故只存链接) baseurl = "http://www.mtybwg.org.cn/" index = "http://www.mtybwg.org.cn/{}/0-1.aspx" # html = askURL(index) for pi in {"xueshu","xuanjiao"}: index0 = index.format(i) html = askURL(index0) # print(index0) soup = BeautifulSoup(html, "html.parser") # print("hhh") item = soup.find("ul", class_="infolist").find("ul",class_="iflist") href = [] title = [] if pi == "xuanjiao": type = "3" else: type = "2" for li in item.find_all("li"): # print("=3=3=3=3=3=3=") # print(li) if li.text == "": pass else: href.append(li.find("a")["href"]) title.append(li.text) # print(title) # print(href) n = len(title) for i in range(n): activityDict = {} activityDict["ARM_In"] = ID activityDict["A_ID"] = ID + "-" + str(i+1) activityDict["A_Name"] = title[i] activityDict["A_Type"] = type activityDict["A_Information"] = baseurl[0:-1]+href[i] jsondata = json.dumps(activityDict, ensure_ascii=False,indent = 4) with open("./activities/A"+activityDict["A_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) exit() def askURL(url): # head = headers[0] head = { 'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36(KHTML, like Gecko) Chrome / 80.0.3987.122 Safari / 537.36' } html = "" try: res = requests.get(url, headers=head) res.raise_for_status() res.encoding = res.apparent_encoding html = res.text except requests.RequestException as e: print(e) return html def askPic(url): head = { 'User-Agent':'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)' } try: res = requests.get(url, headers=head) 
res.raise_for_status() res.encoding = res.apparent_encoding except requests.RequestException as e: print(e) return res if __name__ == "__main__": getMuseumData() getCollectionsData() getActivitiesData()
atadict["M_ID
identifier_name
M130104.py
# -*- coding: utf-8 -*- """ 中国闽台缘博物馆博物馆爬虫文件 @author: lxx http://www.mtybwg.org.cn/index.aspx 130104 代码更新: 2020.05.10 代码完成 2020.05.10 代码创建 """ import requests import os from bs4 import BeautifulSoup import re import json from selenium import webdriver import time ID = "130104" def debugPrint(message): if __name__ == "__main__": print(message) def getText(item, newline=False): item = re.sub(r"<br/>","",item, flags=re.I) item = re.sub(r"<br />","",item,flags=re.I) item = re.sub(r"<br>","",item,flags=re.I) item = re.sub(r"\r","",item) item = re.sub(r"\t","",item) item = re.sub(r"\xa0","",item) item = re.sub(r" ","",item) item = re.sub(r"\u3000","",item) item = re.sub(r"&emsp;","",item) item = re.sub(r"&nbsp;","",item) il = re.findall(r'(?<=).*?(?=)', item) s = "" flag = 0 flagp = 0 for i in il: if i == '/' and flag != 0: flagp = 1 else: flagp = 0 if i == '<': flag = flag + 1 elif i == '>': flag = flag - 1 else: if flag == 0: s = s+i if flagp == 1: if newline: s = s + '\n' pass # print(s) return s def getMuseumData(): datadict = {} #用来存储爬取的网页信息 datadict["M_ID"] = "130104" datadict["M_CName"] = "中国闽台缘博物馆" datadict["M_EName"] = "China Museum for Fujian Taiwan kinship" datadict["M_Batch"] = 1 datadict["M_Address"] = "福建泉州北清东路212号" #官网主页相关内容 baseurl = "http://www.mtybwg.org.cn/" #要爬取的网页链接 datadict["M_Web"] = baseurl html = askURL(baseurl) # 保存获取到的网页源码 soup = BeautifulSoup(html, "html.parser") datadict["M_Logo"] = "http://www.mtybwg.org.cn/templates/mty/images/logo.jpg" # 博物馆开放时间及门票 i = 0 time = [] item = soup.find("div", class_="top").find("ul",class_="notice").find("p") item = item.find("span").text # print(item) # time = item.split() # print(time) time0 = re.findall(r'开放时间:(.*))', item) # print(time0) # exit() datadict["M_Openingtime"] = time0[0] datadict["M_Ticket"] = "免费开放" # 门票信息 url = "http://www.mtybwg.org.cn/about/detail/249.aspx" html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(soup) item = soup.find("ul",class_="detailcon") # print(item) # item = str(item) time = [] # time = re.findall(r'<(.*。)', string) for pi in item.find_all(style="white-space:normal;line-height:32px;margin:0cm 0cm 0pt;"): pi = getText(pi.text) time.append(pi) # print(time) datadict["M_OpeningInformation"] = time[0:2] datadict["M_Booking"] = time[17:20] datadict["M_TicketInformation"] = time[16] datadict["M_Triffic"] = time[10:14] # 博物馆图片(list) url = "http://www.mtybwg.org.cn/about/924.aspx" html = askURL(url) soup = BeautifulSoup(html,"html.parser") src = [] for item in soup.find("ul", class_="detailcon").find_all("img"): src.append(item["src"]) p = [] for pi in src: pi = baseurl[0:-1] + pi p.append(pi) # print(p) datadict["M_Pictures"] = p # print(p) # 博物馆介绍 src.clear() for item in soup.find("ul", class_="detailcon").find_all("p",class_="MsoNormal"): # print("===========") item = getText(item.text) src.append(item) # print(src) p = [] for pi in src: if len(pi) >= 10: p.append(pi) # srcs = re.findall('<img src="(.*?)"/>', str(src)) datadict["M_Introduction"] = p jsondata = json.dumps(datadict, ensure_ascii=False,indent = 4) with open("./museums/M130104.json", 'w', encoding='utf-8') as f: f.write(jsondata) return datadict exit() def getCollectionsData(): baseurl = "http://www.mtybwg.org.cn/" index = "http://www.mtybwg.org.cn/cangpin.aspx" html = askURL(index) soup = BeautifulSoup(html,"html.parser") # print(soup) # exit() href = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a",class_="pic")["href"] href.append(href0) # 
print(href) # exit() n = len(href) for href1 in href: url = href1 html = askURL(url) soup = BeautifulSoup(html,"html.parser") hrefa = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist falllist2 animated").find_all("li"): href0 = item.find("a")["href"] hrefa.append(baseurl[0:-1] + href0) # print(hrefa) for href2 in hrefa: url = href2 html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(type(href)) collectiondict = {} collectiondict["CRM_In"] = ID Id = re.findall(r'http.*/(.*?).aspx', url) # print(Id) collectiondict["C_ID"] = ID + '-' + str(Id[0]) item = soup.find("ul", class_="infolist") # print(item) title = re.findall(r'<h1>(.*)</h1>', str(item)) title = str(title) src = str(item.find_all("img")) src = re.findall(r'<img.*src="(.*?)"', src) txt0 = item.find("div",class_="pluscon").find("ul", class_="con").text txt0 = getText(txt0) # txt0 = txt0.split() # # print(txt0) txt = [] txt.append("藏品描述:") txt.append(txt0) collectiondict["C_Name"] = title collectiondict["C_Pictures"] = baseurl[0:-1] + src[0] collectiondict["C_Introduction"] = txt jsondata = json.dumps(collectiondict, ensure_ascii=False,indent = 4 ) with open("./collections/C"+collectiondict["C_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) # exit() pass pass def getActivitiesData(): baseurl = "http://www.mtybwg.org.cn/"
html = askURL(index) soup = BeautifulSoup(html,"html.parser") # print(soup) # exit() href = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a",class_="pic")["href"] href.append(href0) # print(href) # exit() n = len(href) - 1 for href1 in href: if href1 == "http://vr1.mtybwg.org.cn/20160316/": break url = href1 html = askURL(url) soup = BeautifulSoup(html,"html.parser") hrefa = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a")["href"] hrefa.append(baseurl[0:-1] + href0) # print(hrefa) for href2 in hrefa: url = href2 html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(type(href)) activityDict = {} activityDict["ARM_In"] = ID Id = re.findall(r'http.*/(.*?).aspx', url) activityDict["A_ID"] = ID + '-' + str(Id[0]) item = soup.find("ul", class_="infolist") # print(item) title = re.findall(r'<h1>(.*)</h1>', str(item)) title = str(title) src = str(item.find_all("img")) src = re.findall(r'<img.*src="(.*?)"', src) txt0 = item.find("ul",class_="detailcon").find("p", class_="MsoNormal").text txt0 = getText(txt0) # txt0 = txt0.split() # # print(txt0) txt = [] txt.append("活动描述:") txt.append(txt0) activityDict["A_Name"] = title activityDict["A_Type"] = "1" activityDict["A_Pictures"] = baseurl[0:-1] + src[0] activityDict["A_Information"] = txt jsondata = json.dumps(activityDict, ensure_ascii=False,indent = 4) with open("./activities/A"+activityDict["A_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) # exit() pass pass # 教育及学术活动(——微信推送格式,故只存链接) baseurl = "http://www.mtybwg.org.cn/" index = "http://www.mtybwg.org.cn/{}/0-1.aspx" # html = askURL(index) for pi in {"xueshu","xuanjiao"}: index0 = index.format(i) html = askURL(index0) # print(index0) soup = BeautifulSoup(html, "html.parser") # print("hhh") item = soup.find("ul", class_="infolist").find("ul",class_="iflist") href = [] title = [] if pi == "xuanjiao": type = "3" else: type = "2" for li in item.find_all("li"): # print("=3=3=3=3=3=3=") # print(li) if li.text == "": pass else: href.append(li.find("a")["href"]) title.append(li.text) # print(title) # print(href) n = len(title) for i in range(n): activityDict = {} activityDict["ARM_In"] = ID activityDict["A_ID"] = ID + "-" + str(i+1) activityDict["A_Name"] = title[i] activityDict["A_Type"] = type activityDict["A_Information"] = baseurl[0:-1]+href[i] jsondata = json.dumps(activityDict, ensure_ascii=False,indent = 4) with open("./activities/A"+activityDict["A_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) exit() def askURL(url): # head = headers[0] head = { 'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36(KHTML, like Gecko) Chrome / 80.0.3987.122 Safari / 537.36' } html = "" try: res = requests.get(url, headers=head) res.raise_for_status() res.encoding = res.apparent_encoding html = res.text except requests.RequestException as e: print(e) return html def askPic(url): head = { 'User-Agent':'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)' } try: res = requests.get(url, headers=head) res.raise_for_status() res.encoding = res.apparent_encoding except requests.RequestException as e: print(e) return res if __name__ == "__main__": getMuseumData() getCollectionsData() getActivitiesData()
# 展览 index = "http://www.mtybwg.org.cn/zhanlan.aspx"
random_line_split
M130104.py
# -*- coding: utf-8 -*- """ 中国闽台缘博物馆博物馆爬虫文件 @author: lxx http://www.mtybwg.org.cn/index.aspx 130104 代码更新: 2020.05.10 代码完成 2020.05.10 代码创建 """ import requests import os from bs4 import BeautifulSoup import re import json from selenium import webdriver import time ID = "130104" def debugPrint(message): if __name__ == "__main__": print(message) def getText(item, newline=False): item = re.sub(r"<br/>","",item, flags=re.I) item = re.sub(r"<br />","",item,flags=re.I) item = re.sub(r"<br>","",item,flags=re.I) item = re.sub(r"\r","",item) item = re.sub(r"\t","",item) item = re.sub(r"\xa0","",item) item = re.sub(r" ","",item) item = re.sub(r"\u3000","",item) item = re.sub(r"&emsp;","",item) item = re.sub(r"&nbsp;","",item) il = re.findall(r'(?<=).*?(?=)', item) s = "" flag = 0 flagp = 0 for i in il: if i == '/' and flag != 0: flagp = 1 else: flagp = 0 if i == '<': flag = flag + 1 elif i == '>': flag = flag - 1 else: if flag == 0: s = s+i if flagp == 1: if newline: s = s + '\n' pass # print(s) return s def getMuseumData(): datadict = {} #用来存储爬取的网页信息 datadict["M_ID"] = "130104" datadict["M_CName"] = "中国闽台缘博物馆" datadict["M_EName"] = "China Museum for Fujian Taiwan kinship" datadict["M_Batch"] = 1 datadict["M_Address"] = "福建泉州北清东路212号" #官网主页相关内容 baseurl = "http://www.mtybwg.org.cn/" #要爬取的网页链接 datadict["M_Web"] = baseurl html = askURL(baseurl) # 保存获取到的网页源码 soup = BeautifulSoup(html, "html.parser") datadict["M_Logo"] = "http://www.mtybwg.org.cn/templates/mty/images/logo.jpg" # 博物馆开放时间及门票 i = 0 time = [] item = soup.find("div", class_="top").find("ul",class_="notice").find("p") item = item.find("span").text # print(item) # time = item.split() # print(time) time0 = re.findall(r'开放时间:(.*))', item) # print(time0) # exit() datadict["M_Openingtime"] = time0[0] datadict["M_Ticket"] = "免费开放" # 门票信息 url = "http://www.mtybwg.org.cn/about/detail/249.aspx" html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(soup) item = soup.find("ul",class_="detailcon") # print(item) # item = str(item) time = [] # time = re.findall(r'<(.*。)', string) for pi in item.find_all(style="white-space:normal;line-height:32px;margin:0cm 0cm 0pt;"): pi = getText(pi.text) time.append(pi) # print(time) datadict["M_OpeningInformation"] = time[0:2] datadict["M_Booking"] = time[17:20] datadict["M_TicketInformation"] = time[16] datadict["M_Triffic"] = time[10:14] # 博物馆图片(list) url = "http://www.mtybwg.org.cn/about/924.aspx" html = askURL(url) soup = BeautifulSoup(html,"html.parser") src = [] for item in soup.find("ul", class_="detailcon").find_all("img"): src.append(item["src"]) p = [] for pi in src: pi = baseurl[0:-1] + pi p.append(pi) # print(p) datadict["M_Pictures"] = p # print(p) # 博物馆介绍 src.clear() for item in soup.find("ul", class_="detailcon").find_all("p",class_="MsoNormal"): # print("===========") item = getText(item.text) src.append(item) # print(src) p = [] for pi in src: if len(pi) >= 10: p.append(pi) # srcs = re.findall('<img src="(.*?)"/>', str(src)) datadict["M_Introduction"] = p jsondata = json.dumps(datadict, ensure_ascii=False,indent = 4) with open("./museums/M130104.json", 'w', encoding='utf-8') as f: f.write(jsondata) return datadict exit() def getCollectionsData(): baseurl = "http://www.mtybwg.org.cn/" index = "http://www.mtybwg.org.cn/cangpin.aspx" html = askURL(index) soup = BeautifulSoup(html,"html.parser") # print(soup) # exit() href = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a",class_="pic")["href"] href.append(href0) # 
print(href) # exit() n = len(href) for href1 in href: url = href1 html = askURL(url) soup = BeautifulSoup(html,"html.parser") hrefa = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist falllist2 animated").find_all("li"): href0 = item.find("a")["href"] hrefa.append(baseurl[0:-1] + href0) # print(hrefa) for href2 in hrefa: url = href2 html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(type(href)) collectiondict = {} collectiondict["CRM_In"] = ID Id = re.findall(r'http.*/(.*?).aspx', url) # print(Id) collectiondict["C_ID"] = ID + '-' + str(Id[0]) item = soup.find("ul", class_="infolist") # print(item) title = re.findall(r'<h1>(.*)</h1>', str(item)) title = str(title) src = str(item.find_all("img")) src = re.findall(r'<img.*src="(.*?)"', src) txt0 = item.find("div",class_="pluscon").find("ul", class_="con").text txt0 = getText(txt0) # txt0 = txt0.split() # # print(txt0) txt = [] txt.append("藏品描述:") txt.append(txt0) collectiondict["C_Name"] = title collectiondict["C_Pictures"] = baseurl[0:-1] + src[0] collectiondict["C_Introduction"] = txt jsondata = json.dumps(collectiondict, ensure_ascii=False,indent = 4 ) with open("./collections/C"+collectiondict["C_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) # exit() pass pass def getActivitiesData(): baseurl = "http://www.mtybwg.org.cn/" # 展览 index = "http://www.mtybwg.org.cn/zhanlan.aspx" html = askURL(index) soup = BeautifulSoup(html,"html.parser") # print(soup) # exit() href = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a",class_="pic")["href"] href.append(href0) # print(href) # exit() n = len(href) - 1 for href1 in href: if href1 == "http://vr1.mtybwg.org.cn/20160316/": break url = href1 html = askURL(url) soup = BeautifulSoup(html,"html.parser") hrefa = [] for item in soup.find("div", class_="rightcon").find("ul",class_="falllist animated").find_all("li"): href0 = item.find("a")["href"] hrefa.append(baseurl[0:-1] + href0) # print(hrefa) for href2 in hrefa: url = href2 html = askURL(url) soup = BeautifulSoup(html,"html.parser") # print(type(href)) activityDict = {} activityDict["ARM_In"] = ID Id = re.findall(r'http.*/(.*?).aspx', url) activityDict["A_ID"] = ID + '-' + str(Id[0]) item = soup.find("ul", class_="infolist") # print(item) title = re.findall(r'<h1>(.*)</h1>', str(item)) title = str(title) src = str(item.find_all("img")) src = re.findall(r'<img.*src="(.*?)"', src) txt0 = item.find("ul",class_="detailcon").find("p", class_="MsoNormal").text txt0 = getText(txt0) # txt0 = txt0.split() # # print(txt0) txt = [] txt.append("活动描述:") txt.append(txt0) activityDict["A_Name"] = title activityDict["A_Type"] = "1" activityDict["A_Pictures"] = baseurl[0:-1] + src[0] activityDict["A_Information"] = txt jsondata = json.dumps(activityDict, ensure_ascii=False,indent = 4) with open("./activities/A"+activityDict["A_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) # exit() pass pass # 教育及学术活动(——微信推送格式,故只存链接) baseurl = "http://www.mtybwg.org.cn/" index = "http://www.mtybwg.org.cn/{}/0-1.aspx" # html = askURL(index) for pi in {"xueshu","xuanjiao"}: index0 = index.format(i) html = askURL(index0) # print(index0) soup = BeautifulSoup(html, "html.parser") # print("hhh") item = soup.find("ul", class_="infolist").find("ul",class_="iflist") href = [] title = [] if pi == "xuanjiao": type = "3" else: type = "2" for li in item.find_all("li"): # print("=3=3=3=3=3=3=") # print(li) if li.text == "": pass else: 
href.append(li.find("a")["href"]) title.append(li.text) # print(title) # print(href) n = len(title) for i in range(n): activityDict = {} activityDict["ARM_In"] = ID activityDict["A_ID"] = ID + "-" + str(i+1) activityDict["A_Name"] = title[i] activityDict["A_Type"] = type activityDict["A_Information"] = baseurl[0:-1]+href[i] jsondata = json.dumps(activityDict, ensure_ascii=False,indent = 4) with open("./activities/A"+activityDict["A_ID"]+".json", 'w', encoding='utf-8') as f: f.write(jsondata) exit() def askURL(url): # head = headers[0] head = { 'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36(KHTML, like Gecko) Chrome / 80.0.3987.122 Safari / 537.36' } html = "" try: res = requests.get(url, headers=head) res.raise_for_status() res.encoding = res.apparent_encoding html = res.text except requests.RequestException as e: print(e) return html def askPic(url): head = { 'User-Agent':'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)' } try: res = requests.get(url, headers=head) res.raise_for_status() res.encoding = res.apparent_encoding except requests.RequestException as e: print(e) return res if __name__ == "__main__": getMuseumData() getCollectionsData() getActivitiesData()
conditional_block
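The three rows above all split the same M130104.py scraper, whose network access funnels through the small askURL helper near the end of each cell: send a GET with a browser User-Agent, fail on non-2xx statuses, decode, and return the body. For readers following along in Go (the dominant language elsewhere in this dump), a minimal equivalent of that helper might look like the sketch below; the 10-second timeout is an addition for robustness, not something the Python original has, and the trimmed User-Agent string is illustrative.

package example

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// askURL mirrors the Python helper in the rows above: GET the page with a
// browser-like User-Agent, reject non-2xx statuses, return the body as text.
// The explicit client timeout is an addition; the original requests.get call
// has none and can hang indefinitely on a stalled connection.
func askURL(url string) (string, error) {
	client := &http.Client{Timeout: 10 * time.Second}

	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return "", err
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64)")

	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	// equivalent of res.raise_for_status() in the Python version
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return "", fmt.Errorf("GET %s: unexpected status %s", url, resp.Status)
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	return string(body), nil
}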
routes.rs
use rocket::State; use rocket::response::{Flash, Redirect}; use rocket::request::{Form, FormItems, FromForm}; use option_filter::OptionFilterExt; use super::{html, StudentPreferences, TimeSlotRating}; use config; use db::Db; use dict::{self, Locale}; use errors::*; use state::PreparationState; use template::{NavItem, Page}; use user::{AuthUser, Role, User}; use timeslot::Rating; fn
(locale: Locale) -> Vec<NavItem> { // TODO: pass `Dict` once possible let dict = dict::new(locale).prep; vec![ NavItem::new(dict.nav_overview_title(), "/prep"), NavItem::new(dict.nav_timeslots_title(), "/prep/timeslots"), ] } #[get("/prep")] pub fn overview( auth_user: AuthUser, locale: Locale, db: State<Db>, _state: PreparationState, ) -> Result<Page> { let dict = dict::new(locale).prep; match auth_user.role() { // ===== Student ====================================================== Role::Student => { let student = auth_user.into_user().into_student().unwrap(); let pref = StudentPreferences::load_for(&student, &db)?; let partner = pref.partner.as_ref() .map_or(Ok(None), |name| User::load_by_username(name, &db))? .and_then(|u| u.into_student().ok()) .filter(|s| s.id() != student.id()); Page::empty() .with_title(dict.overview_title()) .add_nav_items(nav_items(locale)) .with_active_nav_route("/prep") .with_content(html::student_overview( locale, &pref, &partner, )) } // ===== Tutor or admin =============================================== Role::Tutor | Role::Admin => { use diesel::prelude::*; use diesel::expression::sql; use db::schema::{timeslot_ratings, users}; let conn = &*db.conn()?; let stats = { let num_students = users::table .filter(sql("role = 'student'")) .count() .get_result::<i64>(conn)?; let num_students_with_slots = users::table .inner_join(timeslot_ratings::table) .filter(sql("rating <> 'bad' AND role = 'student'")) .select(sql("count(distinct user_id) as count")) .get_result::<i64>(conn)?; let avg_good_rating_per_student = sql(" select cast(avg(count) as float) from ( select count(*) as count, user_id from timeslot_ratings inner join users on users.id = user_id where rating = 'good' and role = 'student' group by user_id ) as counts ").get_result::<f64>(conn)?; let avg_ok_rating_per_student = sql(" select cast(avg(count) as float) from ( select count(*) as count, user_id from timeslot_ratings inner join users on users.id = user_id where rating <> 'bad' and role = 'student' group by user_id ) as counts ").get_result::<f64>(conn)?; html::TutorAdminStats { num_students: num_students as u64, num_students_with_slots: num_students_with_slots as u64, avg_good_rating_per_student, avg_ok_rating_per_student, } }; let tutors = users::table .inner_join(timeslot_ratings::table) .filter(sql("role = 'tutor'")) .group_by(users::columns::id) .select(sql(" username, name, sum(case when rating='good' then 1 else 0 end) as num_good, sum(case when rating<>'bad' then 1 else 0 end) as num_ok ")) .load::<(String, Option<String>, i64, i64)>(conn)?; let content = html::tutor_admin_overview( locale, auth_user.is_tutor(), stats, &tutors, ); Page::empty() .with_title(dict.overview_title()) .add_nav_items(nav_items(locale)) .with_active_nav_route("/prep") .with_content(content) } }.make_ok() } #[post("/prep_student_settings", data = "<form>")] pub fn set_general_settings( auth_user: AuthUser, form: Form<GeneralStudentSettings>, db: State<Db>, _state: PreparationState, locale: Locale, ) -> Result<Flash<Redirect>> { fn err<S: AsRef<str>>(msg: S) -> Result<Flash<Redirect>> { Ok(Flash::error(Redirect::to("/prep"), msg)) } let dict = dict::new(locale).prep; // The auth_user needs to be a student. Tutors and admins should not be // forwarded to this route. 
let student = match auth_user.into_user().into_student() { Ok(s) => s, Err(_) => { return err(bad_request(locale)); } }; let mut pref = StudentPreferences::load_for(&student, &db)?; let form = form.into_inner(); // Set partner match form.partner.as_ref() { "random" => { pref.partner = None; } "chosen" => { if let Some(id) = form.partner_id { match User::load_by_username(&id, &db)? { Some(ref u) if u.is_student() => { pref.partner = Some(id); } Some(ref u) => { return Ok(Flash::error( Redirect::to("/prep"), dict.flash_err_partner_not_a_student(u.username()), )); } None => { return Ok(Flash::error( Redirect::to("/prep"), dict.flash_err_user_not_found(), )); } } } else { return err(bad_request(locale)); } } _ => return err(bad_request(locale)), } // Set preferred language match form.language.as_ref() { "de" => pref.prefers_english = false, "en" => pref.prefers_english = true, _ => return err(bad_request(locale)), } // Finally, store the changes in the database. pref.update(&db)?; Ok(Flash::success(Redirect::to("/prep"), dict.flash_success_storing_preferences())) } #[derive(Debug, Clone, FromForm)] pub struct GeneralStudentSettings { partner: String, partner_id: Option<String>, language: String, } #[get("/prep/timeslots")] pub fn timeslots( auth_user: AuthUser, locale: Locale, db: State<Db>, _state: PreparationState, ) -> Result<Page> { let dict = dict::new(locale).prep; // Load all ratings of the user. let ratings = TimeSlotRating::load_all_of_user(&auth_user, &db)?; match auth_user.role() { Role::Student | Role::Tutor => { let (explanation, min_good, min_ok) = match auth_user.role() { Role::Student => ( dict.timeslots_student_explanation(), config::MIN_GOOD_SLOTS_STUDENT, config::MIN_OK_SLOTS_STUDENT, ), Role::Tutor => ( dict.timeslots_tutor_explanation(), config::MIN_GOOD_SLOTS_TUTOR, config::MIN_OK_SLOTS_TUTOR, ), _ => unreachable!(), }; let content = html::timeslots( &explanation, min_good, min_ok, &ratings, locale, ); Page::empty() .with_title(dict.timeslots_title()) .add_nav_items(nav_items(locale)) .with_active_nav_route("/prep/timeslots") .with_content(content) .make_ok() } Role::Admin => { Page::unimplemented().make_ok() } } } /// Stores a list of (timeslot_id, rating). #[derive(Debug)] pub struct TimeSlotForm { slots: Vec<(i16, Rating)>, } impl<'f> FromForm<'f> for TimeSlotForm { type Error = TimeSlotFormError; fn from_form(items: &mut FormItems<'f>, _: bool) -> StdResult<Self, Self::Error> { let slots = items.into_iter().map(|(key, value)| { // The keys come in the form `slot-34` and we want this number. if !key.starts_with("slot-") { return Err(TimeSlotFormError::InvalidId); } let id = match key[5..].parse() { Err(_) => return Err(TimeSlotFormError::InvalidId), Ok(id) => id, }; // The value should only be one of those three values. let rating = match value.as_str() { "good" => Rating::Good, "tolerable" => Rating::Tolerable, "bad" => Rating::Bad, _ => return Err(TimeSlotFormError::InvalidRating), }; Ok((id, rating)) }).collect::<StdResult<Vec<_>, _>>()?; Ok(Self { slots }) } } #[derive(Debug)] pub enum TimeSlotFormError { InvalidRating, InvalidId, } #[post("/prep/update_timeslots", data = "<form>")] fn update_timeslots( auth_user: AuthUser, form: Form<TimeSlotForm>, locale: Locale, db: State<Db>, _state: PreparationState, ) -> Result<Flash<Redirect>> { let form = form.into_inner(); TimeSlotRating::update_all(&auth_user, &form.slots, &db)?; Ok(Flash::success( Redirect::to("/prep/timeslots"), dict::new(locale).prep.flash_success_storing_timeslot_ratings(), )) }
nav_items
identifier_name
routes.rs
use rocket::State; use rocket::response::{Flash, Redirect}; use rocket::request::{Form, FormItems, FromForm}; use option_filter::OptionFilterExt; use super::{html, StudentPreferences, TimeSlotRating}; use config; use db::Db; use dict::{self, Locale}; use errors::*; use state::PreparationState; use template::{NavItem, Page}; use user::{AuthUser, Role, User}; use timeslot::Rating; fn nav_items(locale: Locale) -> Vec<NavItem> { // TODO: pass `Dict` once possible let dict = dict::new(locale).prep; vec![ NavItem::new(dict.nav_overview_title(), "/prep"), NavItem::new(dict.nav_timeslots_title(), "/prep/timeslots"), ] } #[get("/prep")] pub fn overview( auth_user: AuthUser, locale: Locale, db: State<Db>, _state: PreparationState, ) -> Result<Page> { let dict = dict::new(locale).prep; match auth_user.role() { // ===== Student ====================================================== Role::Student => { let student = auth_user.into_user().into_student().unwrap(); let pref = StudentPreferences::load_for(&student, &db)?; let partner = pref.partner.as_ref() .map_or(Ok(None), |name| User::load_by_username(name, &db))? .and_then(|u| u.into_student().ok()) .filter(|s| s.id() != student.id()); Page::empty() .with_title(dict.overview_title()) .add_nav_items(nav_items(locale)) .with_active_nav_route("/prep") .with_content(html::student_overview( locale, &pref, &partner, )) } // ===== Tutor or admin =============================================== Role::Tutor | Role::Admin => { use diesel::prelude::*; use diesel::expression::sql; use db::schema::{timeslot_ratings, users}; let conn = &*db.conn()?; let stats = { let num_students = users::table .filter(sql("role = 'student'")) .count() .get_result::<i64>(conn)?; let num_students_with_slots = users::table .inner_join(timeslot_ratings::table) .filter(sql("rating <> 'bad' AND role = 'student'")) .select(sql("count(distinct user_id) as count")) .get_result::<i64>(conn)?; let avg_good_rating_per_student = sql(" select cast(avg(count) as float) from ( select count(*) as count, user_id from timeslot_ratings inner join users on users.id = user_id
").get_result::<f64>(conn)?; let avg_ok_rating_per_student = sql(" select cast(avg(count) as float) from ( select count(*) as count, user_id from timeslot_ratings inner join users on users.id = user_id where rating <> 'bad' and role = 'student' group by user_id ) as counts ").get_result::<f64>(conn)?; html::TutorAdminStats { num_students: num_students as u64, num_students_with_slots: num_students_with_slots as u64, avg_good_rating_per_student, avg_ok_rating_per_student, } }; let tutors = users::table .inner_join(timeslot_ratings::table) .filter(sql("role = 'tutor'")) .group_by(users::columns::id) .select(sql(" username, name, sum(case when rating='good' then 1 else 0 end) as num_good, sum(case when rating<>'bad' then 1 else 0 end) as num_ok ")) .load::<(String, Option<String>, i64, i64)>(conn)?; let content = html::tutor_admin_overview( locale, auth_user.is_tutor(), stats, &tutors, ); Page::empty() .with_title(dict.overview_title()) .add_nav_items(nav_items(locale)) .with_active_nav_route("/prep") .with_content(content) } }.make_ok() } #[post("/prep_student_settings", data = "<form>")] pub fn set_general_settings( auth_user: AuthUser, form: Form<GeneralStudentSettings>, db: State<Db>, _state: PreparationState, locale: Locale, ) -> Result<Flash<Redirect>> { fn err<S: AsRef<str>>(msg: S) -> Result<Flash<Redirect>> { Ok(Flash::error(Redirect::to("/prep"), msg)) } let dict = dict::new(locale).prep; // The auth_user needs to be a student. Tutors and admins should not be // forwarded to this route. let student = match auth_user.into_user().into_student() { Ok(s) => s, Err(_) => { return err(bad_request(locale)); } }; let mut pref = StudentPreferences::load_for(&student, &db)?; let form = form.into_inner(); // Set partner match form.partner.as_ref() { "random" => { pref.partner = None; } "chosen" => { if let Some(id) = form.partner_id { match User::load_by_username(&id, &db)? { Some(ref u) if u.is_student() => { pref.partner = Some(id); } Some(ref u) => { return Ok(Flash::error( Redirect::to("/prep"), dict.flash_err_partner_not_a_student(u.username()), )); } None => { return Ok(Flash::error( Redirect::to("/prep"), dict.flash_err_user_not_found(), )); } } } else { return err(bad_request(locale)); } } _ => return err(bad_request(locale)), } // Set preferred language match form.language.as_ref() { "de" => pref.prefers_english = false, "en" => pref.prefers_english = true, _ => return err(bad_request(locale)), } // Finally, store the changes in the database. pref.update(&db)?; Ok(Flash::success(Redirect::to("/prep"), dict.flash_success_storing_preferences())) } #[derive(Debug, Clone, FromForm)] pub struct GeneralStudentSettings { partner: String, partner_id: Option<String>, language: String, } #[get("/prep/timeslots")] pub fn timeslots( auth_user: AuthUser, locale: Locale, db: State<Db>, _state: PreparationState, ) -> Result<Page> { let dict = dict::new(locale).prep; // Load all ratings of the user. 
let ratings = TimeSlotRating::load_all_of_user(&auth_user, &db)?; match auth_user.role() { Role::Student | Role::Tutor => { let (explanation, min_good, min_ok) = match auth_user.role() { Role::Student => ( dict.timeslots_student_explanation(), config::MIN_GOOD_SLOTS_STUDENT, config::MIN_OK_SLOTS_STUDENT, ), Role::Tutor => ( dict.timeslots_tutor_explanation(), config::MIN_GOOD_SLOTS_TUTOR, config::MIN_OK_SLOTS_TUTOR, ), _ => unreachable!(), }; let content = html::timeslots( &explanation, min_good, min_ok, &ratings, locale, ); Page::empty() .with_title(dict.timeslots_title()) .add_nav_items(nav_items(locale)) .with_active_nav_route("/prep/timeslots") .with_content(content) .make_ok() } Role::Admin => { Page::unimplemented().make_ok() } } } /// Stores a list of (timeslot_id, rating). #[derive(Debug)] pub struct TimeSlotForm { slots: Vec<(i16, Rating)>, } impl<'f> FromForm<'f> for TimeSlotForm { type Error = TimeSlotFormError; fn from_form(items: &mut FormItems<'f>, _: bool) -> StdResult<Self, Self::Error> { let slots = items.into_iter().map(|(key, value)| { // The keys come in the form `slot-34` and we want this number. if !key.starts_with("slot-") { return Err(TimeSlotFormError::InvalidId); } let id = match key[5..].parse() { Err(_) => return Err(TimeSlotFormError::InvalidId), Ok(id) => id, }; // The value should only be one of those three values. let rating = match value.as_str() { "good" => Rating::Good, "tolerable" => Rating::Tolerable, "bad" => Rating::Bad, _ => return Err(TimeSlotFormError::InvalidRating), }; Ok((id, rating)) }).collect::<StdResult<Vec<_>, _>>()?; Ok(Self { slots }) } } #[derive(Debug)] pub enum TimeSlotFormError { InvalidRating, InvalidId, } #[post("/prep/update_timeslots", data = "<form>")] fn update_timeslots( auth_user: AuthUser, form: Form<TimeSlotForm>, locale: Locale, db: State<Db>, _state: PreparationState, ) -> Result<Flash<Redirect>> { let form = form.into_inner(); TimeSlotRating::update_all(&auth_user, &form.slots, &db)?; Ok(Flash::success( Redirect::to("/prep/timeslots"), dict::new(locale).prep.flash_success_storing_timeslot_ratings(), )) }
where rating = 'good' and role = 'student' group by user_id ) as counts
random_line_split
handlers.go
package device import ( "bytes" "context" "net/http" "strconv" "sync" "sync/atomic" "time" "github.com/Comcast/webpa-common/httperror" "github.com/Comcast/webpa-common/logging" "github.com/Comcast/webpa-common/wrp" "github.com/gorilla/mux" ) const ( DefaultMessageTimeout time.Duration = 2 * time.Minute DefaultRefreshInterval time.Duration = 10 * time.Second DefaultListBacklog uint32 = 150 ) // Timeout returns an Alice-style constructor which enforces a timeout for all device request contexts. func Timeout(o *Options) func(http.Handler) http.Handler { timeout := o.requestTimeout() return func(delegate http.Handler) http.Handler { return http.HandlerFunc(func(response http.ResponseWriter, request *http.Request) { ctx, cancel := context.WithTimeout(request.Context(), timeout) defer cancel() delegate.ServeHTTP(response, request.WithContext(ctx)) }) } } // IDFromRequest is a strategy type for extracting the device identifier from an HTTP request type IDFromRequest func(*http.Request) (ID, error) // UseID is a collection of Alice-style constructors that all insert the device ID // into the delegate's request Context using various strategies. var UseID = struct { // F is a configurable constructor that allows an arbitrary IDFromRequest strategy F func(IDFromRequest) func(http.Handler) http.Handler // FromHeader uses the device name header to extract the device identifier. // This constructor isn't configurable, and is used as-is: device.UseID.FromHeader. FromHeader func(http.Handler) http.Handler // FromPath is a configurable constructor that extracts the device identifier // from the URI path using the supplied variable name. This constructor is // configurable: device.UseID.FromPath("deviceId"). FromPath func(string) func(http.Handler) http.Handler }{ F: useID, FromHeader: useID( func(request *http.Request) (ID, error) { deviceName := request.Header.Get(DeviceNameHeader) if len(deviceName) == 0 { return invalidID, ErrorMissingDeviceNameHeader } return ParseID(deviceName) }, ), FromPath: func(variableName string) func(http.Handler) http.Handler { return useID( func(request *http.Request) (ID, error) { vars := mux.Vars(request) if vars == nil { return invalidID, ErrorMissingPathVars } deviceName := vars[variableName] if len(deviceName) == 0 { return invalidID, ErrorMissingDeviceNameVar } return ParseID(deviceName) }, ) }, } // useID is the general purpose creator for an Alice-style constructor that passes the ID // to the delegate via the request Context. This internal function is exported via UseID.F. func useID(f IDFromRequest) func(http.Handler) http.Handler { return func(delegate http.Handler) http.Handler { return http.HandlerFunc(func(response http.ResponseWriter, request *http.Request) { id, err := f(request) if err != nil { httperror.Formatf( response, http.StatusBadRequest, "Could extract device id: %s", err, ) return } ctx := WithID(id, request.Context()) delegate.ServeHTTP(response, request.WithContext(ctx)) }) } } // MessageHandler is a configurable http.Handler which handles inbound WRP traffic // to be sent to devices. type MessageHandler struct { // Logger is the sink for logging output. If not set, logging will be sent to logging.DefaultLogger(). Logger logging.Logger // Decoders is the pool of wrp.Decoder objects used to decode http.Request bodies // sent to this handler. This field is required. Decoders *wrp.DecoderPool // Encoders is the optional pool of wrp.Encoder objects used to encode wrp messages sent // as HTTP responses. 
If not supplied, this handler assumes the format returned by the Router // is the format to be sent back in the HTTP response. Encoders *wrp.EncoderPool // Router is the device message Router to use. This field is required. Router Router } func (mh *MessageHandler) logger() logging.Logger { if mh.Logger != nil { return mh.Logger } return logging.DefaultLogger() } // decodeRequest transforms an HTTP request into a device request. func (mh *MessageHandler) decodeRequest(httpRequest *http.Request) (deviceRequest *Request, err error) { deviceRequest, err = DecodeRequest(httpRequest.Body, mh.Decoders) if err == nil { deviceRequest = deviceRequest.WithContext(httpRequest.Context()) } return } func (mh *MessageHandler) ServeHTTP(httpResponse http.ResponseWriter, httpRequest *http.Request)
type ConnectHandler struct { Logger logging.Logger Connector Connector ResponseHeader http.Header } func (ch *ConnectHandler) logger() logging.Logger { if ch.Logger != nil { return ch.Logger } return logging.DefaultLogger() } func (ch *ConnectHandler) ServeHTTP(response http.ResponseWriter, request *http.Request) { if device, err := ch.Connector.Connect(response, request, ch.ResponseHeader); err != nil { ch.logger().Error("Failed to connect device: %s", err) } else { ch.logger().Debug("Connected device: %s", device.ID()) } } // ConnectedDeviceListener listens for connection and disconnection events and produces // a JSON document containing information about connected devices. It produces this document // on a certain interval. type ConnectedDeviceListener struct { // RefreshInterval is the time interval at which the cached JSON device list is updated. // If this field is nonpositive, DefaultRefreshInterval is used. RefreshInterval time.Duration // Tick is a factory function that produces a ticker channel and a stop function. // If not set, time.Ticker is used and the stop function is ticker.Stop. Tick func(time.Duration) (<-chan time.Time, func()) lock sync.Mutex initializeOnce sync.Once devices map[Key][]byte changeCount uint32 updates chan []byte shutdown chan struct{} } func (l *ConnectedDeviceListener) refreshInterval() time.Duration { if l.RefreshInterval > 0 { return l.RefreshInterval } return DefaultRefreshInterval } // newTick returns a ticker channel and a stop function for cleanup. If tick is set, // that function is used. Otherwise, a time.Ticker is created and (ticker.C, ticker.Stop) is returned. func (l *ConnectedDeviceListener) newTick() (<-chan time.Time, func()) { refreshInterval := l.refreshInterval() if l.Tick != nil { return l.Tick(refreshInterval) } ticker := time.NewTicker(refreshInterval) return ticker.C, ticker.Stop } func (l *ConnectedDeviceListener) onDeviceEvent(e *Event) { switch e.Type { case Connect: l.lock.Lock() defer l.lock.Unlock() l.changeCount++ l.devices[e.Device.Key()] = []byte(e.Device.String()) case Disconnect: l.lock.Lock() defer l.lock.Unlock() l.changeCount++ delete(l.devices, e.Device.Key()) } } func (l *ConnectedDeviceListener) refresh() { l.lock.Lock() defer l.lock.Unlock() if l.changeCount > 0 { l.changeCount = 0 var ( output = bytes.NewBufferString(`{"devices":[`) needsComma bool comma = []byte(`,`) ) for _, deviceJSON := range l.devices { if needsComma { output.Write(comma) } output.Write(deviceJSON) needsComma = true } output.WriteString(`]}`) l.updates <- output.Bytes() } } // Stop stops updates coming from this listener. func (l *ConnectedDeviceListener) Stop() { l.lock.Lock() defer l.lock.Unlock() if l.shutdown != nil { close(l.shutdown) close(l.updates) l.shutdown = nil l.updates = nil } } // Listen starts listening for changes to the set of connected devices. The returned Listener may // be placed into an Options. This method is idempotent, and may be called to restart this handler // after Stop is called. If this method is called multiple times without calling Stop, it simply // returns the same Listener and output channel. // // The returned channel will received updated JSON device list documents. This channel can be // used with ListHandler.Consume. 
func (l *ConnectedDeviceListener) Listen() (Listener, <-chan []byte) { l.lock.Lock() defer l.lock.Unlock() l.initializeOnce.Do(func() { l.devices = make(map[Key][]byte, 1000) }) if l.shutdown == nil { l.shutdown = make(chan struct{}) l.updates = make(chan []byte, 1) // spawn the monitor goroutine go func(shutdown <-chan struct{}) { refreshC, refreshStop := l.newTick() defer refreshStop() for { select { case <-shutdown: return case <-refreshC: l.refresh() } } }(l.shutdown) } return l.onDeviceEvent, l.updates } // ListHandler is an HTTP handler which can take updated JSON device lists. type ListHandler struct { initializeOnce sync.Once cachedJSON atomic.Value } // Consume spawns a goroutine that processes updated JSON from the given channel. // This method can be called multiple times with different update sources. Typically, // this method is called once to consume updates from a ConnectedDeviceListener. func (lh *ListHandler) Consume(updates <-chan []byte) { lh.initializeOnce.Do(func() { lh.cachedJSON.Store([]byte(`{"devices":[]}`)) }) go func() { for updatedJson := range updates { lh.cachedJSON.Store(updatedJson) } }() } // ServeHTTP emits the cached JSON into the response. If Listen has not been called yet, // or if for any reason there is no cached JSON, this handler returns http.StatusServiceUnavailable. func (lh *ListHandler) ServeHTTP(response http.ResponseWriter, request *http.Request) { if jsonResponse, _ := lh.cachedJSON.Load().([]byte); len(jsonResponse) > 0 { response.Header().Set("Content-Type", "application/json") response.Header().Set("Content-Length", strconv.Itoa(len(jsonResponse))) response.Write(jsonResponse) } else { response.WriteHeader(http.StatusServiceUnavailable) } }
{ deviceRequest, err := mh.decodeRequest(httpRequest) if err != nil { httperror.Formatf( httpResponse, http.StatusBadRequest, "Could not decode WRP message: %s", err, ) return } // deviceRequest carries the context through the routing infrastructure if deviceResponse, err := mh.Router.Route(deviceRequest); err != nil { code := http.StatusInternalServerError switch err { case ErrorInvalidDeviceName: code = http.StatusBadRequest case ErrorDeviceNotFound: code = http.StatusNotFound case ErrorNonUniqueID: code = http.StatusBadRequest case ErrorInvalidTransactionKey: code = http.StatusBadRequest case ErrorTransactionAlreadyRegistered: code = http.StatusBadRequest } httperror.Formatf( httpResponse, code, "Could not process device request: %s", err, ) } else if deviceResponse != nil { if err := EncodeResponse(httpResponse, deviceResponse, mh.Encoders); err != nil { mh.logger().Error("Error while writing transaction response: %s", err) } } // if deviceReponse == nil, that just means the request was not something that represented // the start of a transaction. For example, events do not carry a transaction key because // they do not expect responses. }
identifier_body
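The handlers.go rows above define the Alice-style constructors (Timeout, UseID.FromPath) and the MessageHandler, but no server wiring appears in these cells. A minimal sketch of how they might be composed with gorilla/mux follows; the route path, and the idea that the caller already has a Router implementation, an Options value, and a DecoderPool, are assumptions for illustration rather than anything the source shows.

package example

import (
	"net/http"

	"github.com/Comcast/webpa-common/device"
	"github.com/Comcast/webpa-common/wrp"
	"github.com/gorilla/mux"
)

// newDeviceMux chains the Alice-style constructors around a MessageHandler.
func newDeviceMux(router device.Router, o *device.Options, decoders *wrp.DecoderPool) http.Handler {
	mh := &device.MessageHandler{
		Router:   router,   // required field, per the struct docs above
		Decoders: decoders, // required field; Encoders stays nil, so the Router's format is echoed back
	}

	r := mux.NewRouter()
	// Timeout bounds every request context; UseID.FromPath pulls the device
	// identifier out of the {deviceId} path variable before mh ever runs.
	r.Handle("/device/{deviceId}/message",
		device.Timeout(o)(device.UseID.FromPath("deviceId")(mh)),
	).Methods("POST")

	return r
}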
handlers.go
package device import ( "bytes" "context" "net/http" "strconv" "sync" "sync/atomic" "time" "github.com/Comcast/webpa-common/httperror" "github.com/Comcast/webpa-common/logging" "github.com/Comcast/webpa-common/wrp" "github.com/gorilla/mux" ) const ( DefaultMessageTimeout time.Duration = 2 * time.Minute DefaultRefreshInterval time.Duration = 10 * time.Second DefaultListBacklog uint32 = 150 ) // Timeout returns an Alice-style constructor which enforces a timeout for all device request contexts. func Timeout(o *Options) func(http.Handler) http.Handler { timeout := o.requestTimeout() return func(delegate http.Handler) http.Handler { return http.HandlerFunc(func(response http.ResponseWriter, request *http.Request) { ctx, cancel := context.WithTimeout(request.Context(), timeout) defer cancel() delegate.ServeHTTP(response, request.WithContext(ctx)) }) } } // IDFromRequest is a strategy type for extracting the device identifier from an HTTP request type IDFromRequest func(*http.Request) (ID, error) // UseID is a collection of Alice-style constructors that all insert the device ID // into the delegate's request Context using various strategies. var UseID = struct { // F is a configurable constructor that allows an arbitrary IDFromRequest strategy F func(IDFromRequest) func(http.Handler) http.Handler // FromHeader uses the device name header to extract the device identifier. // This constructor isn't configurable, and is used as-is: device.UseID.FromHeader. FromHeader func(http.Handler) http.Handler // FromPath is a configurable constructor that extracts the device identifier // from the URI path using the supplied variable name. This constructor is // configurable: device.UseID.FromPath("deviceId"). FromPath func(string) func(http.Handler) http.Handler }{ F: useID, FromHeader: useID( func(request *http.Request) (ID, error) { deviceName := request.Header.Get(DeviceNameHeader) if len(deviceName) == 0 { return invalidID, ErrorMissingDeviceNameHeader } return ParseID(deviceName) }, ), FromPath: func(variableName string) func(http.Handler) http.Handler { return useID( func(request *http.Request) (ID, error) { vars := mux.Vars(request) if vars == nil { return invalidID, ErrorMissingPathVars } deviceName := vars[variableName] if len(deviceName) == 0 { return invalidID, ErrorMissingDeviceNameVar } return ParseID(deviceName) }, ) }, } // useID is the general purpose creator for an Alice-style constructor that passes the ID // to the delegate via the request Context. This internal function is exported via UseID.F. func useID(f IDFromRequest) func(http.Handler) http.Handler { return func(delegate http.Handler) http.Handler { return http.HandlerFunc(func(response http.ResponseWriter, request *http.Request) { id, err := f(request) if err != nil { httperror.Formatf( response, http.StatusBadRequest, "Could extract device id: %s", err, ) return } ctx := WithID(id, request.Context()) delegate.ServeHTTP(response, request.WithContext(ctx)) }) } } // MessageHandler is a configurable http.Handler which handles inbound WRP traffic // to be sent to devices. type MessageHandler struct { // Logger is the sink for logging output. If not set, logging will be sent to logging.DefaultLogger(). Logger logging.Logger // Decoders is the pool of wrp.Decoder objects used to decode http.Request bodies // sent to this handler. This field is required. Decoders *wrp.DecoderPool // Encoders is the optional pool of wrp.Encoder objects used to encode wrp messages sent // as HTTP responses. 
If not supplied, this handler assumes the format returned by the Router // is the format to be sent back in the HTTP response. Encoders *wrp.EncoderPool // Router is the device message Router to use. This field is required. Router Router } func (mh *MessageHandler) logger() logging.Logger { if mh.Logger != nil { return mh.Logger } return logging.DefaultLogger() } // decodeRequest transforms an HTTP request into a device request. func (mh *MessageHandler) decodeRequest(httpRequest *http.Request) (deviceRequest *Request, err error) { deviceRequest, err = DecodeRequest(httpRequest.Body, mh.Decoders) if err == nil
return } func (mh *MessageHandler) ServeHTTP(httpResponse http.ResponseWriter, httpRequest *http.Request) { deviceRequest, err := mh.decodeRequest(httpRequest) if err != nil { httperror.Formatf( httpResponse, http.StatusBadRequest, "Could not decode WRP message: %s", err, ) return } // deviceRequest carries the context through the routing infrastructure if deviceResponse, err := mh.Router.Route(deviceRequest); err != nil { code := http.StatusInternalServerError switch err { case ErrorInvalidDeviceName: code = http.StatusBadRequest case ErrorDeviceNotFound: code = http.StatusNotFound case ErrorNonUniqueID: code = http.StatusBadRequest case ErrorInvalidTransactionKey: code = http.StatusBadRequest case ErrorTransactionAlreadyRegistered: code = http.StatusBadRequest } httperror.Formatf( httpResponse, code, "Could not process device request: %s", err, ) } else if deviceResponse != nil { if err := EncodeResponse(httpResponse, deviceResponse, mh.Encoders); err != nil { mh.logger().Error("Error while writing transaction response: %s", err) } } // if deviceReponse == nil, that just means the request was not something that represented // the start of a transaction. For example, events do not carry a transaction key because // they do not expect responses. } type ConnectHandler struct { Logger logging.Logger Connector Connector ResponseHeader http.Header } func (ch *ConnectHandler) logger() logging.Logger { if ch.Logger != nil { return ch.Logger } return logging.DefaultLogger() } func (ch *ConnectHandler) ServeHTTP(response http.ResponseWriter, request *http.Request) { if device, err := ch.Connector.Connect(response, request, ch.ResponseHeader); err != nil { ch.logger().Error("Failed to connect device: %s", err) } else { ch.logger().Debug("Connected device: %s", device.ID()) } } // ConnectedDeviceListener listens for connection and disconnection events and produces // a JSON document containing information about connected devices. It produces this document // on a certain interval. type ConnectedDeviceListener struct { // RefreshInterval is the time interval at which the cached JSON device list is updated. // If this field is nonpositive, DefaultRefreshInterval is used. RefreshInterval time.Duration // Tick is a factory function that produces a ticker channel and a stop function. // If not set, time.Ticker is used and the stop function is ticker.Stop. Tick func(time.Duration) (<-chan time.Time, func()) lock sync.Mutex initializeOnce sync.Once devices map[Key][]byte changeCount uint32 updates chan []byte shutdown chan struct{} } func (l *ConnectedDeviceListener) refreshInterval() time.Duration { if l.RefreshInterval > 0 { return l.RefreshInterval } return DefaultRefreshInterval } // newTick returns a ticker channel and a stop function for cleanup. If tick is set, // that function is used. Otherwise, a time.Ticker is created and (ticker.C, ticker.Stop) is returned. 
func (l *ConnectedDeviceListener) newTick() (<-chan time.Time, func()) { refreshInterval := l.refreshInterval() if l.Tick != nil { return l.Tick(refreshInterval) } ticker := time.NewTicker(refreshInterval) return ticker.C, ticker.Stop } func (l *ConnectedDeviceListener) onDeviceEvent(e *Event) { switch e.Type { case Connect: l.lock.Lock() defer l.lock.Unlock() l.changeCount++ l.devices[e.Device.Key()] = []byte(e.Device.String()) case Disconnect: l.lock.Lock() defer l.lock.Unlock() l.changeCount++ delete(l.devices, e.Device.Key()) } } func (l *ConnectedDeviceListener) refresh() { l.lock.Lock() defer l.lock.Unlock() if l.changeCount > 0 { l.changeCount = 0 var ( output = bytes.NewBufferString(`{"devices":[`) needsComma bool comma = []byte(`,`) ) for _, deviceJSON := range l.devices { if needsComma { output.Write(comma) } output.Write(deviceJSON) needsComma = true } output.WriteString(`]}`) l.updates <- output.Bytes() } } // Stop stops updates coming from this listener. func (l *ConnectedDeviceListener) Stop() { l.lock.Lock() defer l.lock.Unlock() if l.shutdown != nil { close(l.shutdown) close(l.updates) l.shutdown = nil l.updates = nil } } // Listen starts listening for changes to the set of connected devices. The returned Listener may // be placed into an Options. This method is idempotent, and may be called to restart this handler // after Stop is called. If this method is called multiple times without calling Stop, it simply // returns the same Listener and output channel. // // The returned channel will received updated JSON device list documents. This channel can be // used with ListHandler.Consume. func (l *ConnectedDeviceListener) Listen() (Listener, <-chan []byte) { l.lock.Lock() defer l.lock.Unlock() l.initializeOnce.Do(func() { l.devices = make(map[Key][]byte, 1000) }) if l.shutdown == nil { l.shutdown = make(chan struct{}) l.updates = make(chan []byte, 1) // spawn the monitor goroutine go func(shutdown <-chan struct{}) { refreshC, refreshStop := l.newTick() defer refreshStop() for { select { case <-shutdown: return case <-refreshC: l.refresh() } } }(l.shutdown) } return l.onDeviceEvent, l.updates } // ListHandler is an HTTP handler which can take updated JSON device lists. type ListHandler struct { initializeOnce sync.Once cachedJSON atomic.Value } // Consume spawns a goroutine that processes updated JSON from the given channel. // This method can be called multiple times with different update sources. Typically, // this method is called once to consume updates from a ConnectedDeviceListener. func (lh *ListHandler) Consume(updates <-chan []byte) { lh.initializeOnce.Do(func() { lh.cachedJSON.Store([]byte(`{"devices":[]}`)) }) go func() { for updatedJson := range updates { lh.cachedJSON.Store(updatedJson) } }() } // ServeHTTP emits the cached JSON into the response. If Listen has not been called yet, // or if for any reason there is no cached JSON, this handler returns http.StatusServiceUnavailable. func (lh *ListHandler) ServeHTTP(response http.ResponseWriter, request *http.Request) { if jsonResponse, _ := lh.cachedJSON.Load().([]byte); len(jsonResponse) > 0 { response.Header().Set("Content-Type", "application/json") response.Header().Set("Content-Length", strconv.Itoa(len(jsonResponse))) response.Write(jsonResponse) } else { response.WriteHeader(http.StatusServiceUnavailable) } }
{ deviceRequest = deviceRequest.WithContext(httpRequest.Context()) }
conditional_block
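The Listen documentation in the rows above says its output channel "can be used with ListHandler.Consume", and that the returned Listener may be placed into an Options. A short sketch of that wiring, under the assumption that registering the Listener with the connection manager happens elsewhere:

package example

import (
	"net/http"
	"time"

	"github.com/Comcast/webpa-common/device"
)

// serveDeviceList connects a ConnectedDeviceListener to a ListHandler:
// Consume caches each refreshed JSON document, and the handler's ServeHTTP
// replays the most recent cache to callers.
func serveDeviceList() (device.Listener, http.Handler) {
	l := &device.ConnectedDeviceListener{
		RefreshInterval: 5 * time.Second, // nonpositive would fall back to DefaultRefreshInterval
	}

	// onEvent must be registered so it sees every Connect/Disconnect event;
	// updates carries the periodically refreshed {"devices":[...]} documents.
	onEvent, updates := l.Listen()

	lh := new(device.ListHandler)
	lh.Consume(updates)

	return onEvent, lh
}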
handlers.go
package device import ( "bytes" "context" "net/http" "strconv" "sync" "sync/atomic" "time" "github.com/Comcast/webpa-common/httperror" "github.com/Comcast/webpa-common/logging" "github.com/Comcast/webpa-common/wrp" "github.com/gorilla/mux" ) const ( DefaultMessageTimeout time.Duration = 2 * time.Minute DefaultRefreshInterval time.Duration = 10 * time.Second DefaultListBacklog uint32 = 150 ) // Timeout returns an Alice-style constructor which enforces a timeout for all device request contexts. func Timeout(o *Options) func(http.Handler) http.Handler { timeout := o.requestTimeout() return func(delegate http.Handler) http.Handler { return http.HandlerFunc(func(response http.ResponseWriter, request *http.Request) { ctx, cancel := context.WithTimeout(request.Context(), timeout) defer cancel() delegate.ServeHTTP(response, request.WithContext(ctx)) }) } } // IDFromRequest is a strategy type for extracting the device identifier from an HTTP request type IDFromRequest func(*http.Request) (ID, error) // UseID is a collection of Alice-style constructors that all insert the device ID // into the delegate's request Context using various strategies. var UseID = struct { // F is a configurable constructor that allows an arbitrary IDFromRequest strategy F func(IDFromRequest) func(http.Handler) http.Handler // FromHeader uses the device name header to extract the device identifier. // This constructor isn't configurable, and is used as-is: device.UseID.FromHeader. FromHeader func(http.Handler) http.Handler // FromPath is a configurable constructor that extracts the device identifier // from the URI path using the supplied variable name. This constructor is // configurable: device.UseID.FromPath("deviceId"). FromPath func(string) func(http.Handler) http.Handler }{ F: useID, FromHeader: useID( func(request *http.Request) (ID, error) { deviceName := request.Header.Get(DeviceNameHeader) if len(deviceName) == 0 { return invalidID, ErrorMissingDeviceNameHeader } return ParseID(deviceName) }, ), FromPath: func(variableName string) func(http.Handler) http.Handler { return useID( func(request *http.Request) (ID, error) { vars := mux.Vars(request) if vars == nil { return invalidID, ErrorMissingPathVars } deviceName := vars[variableName] if len(deviceName) == 0 { return invalidID, ErrorMissingDeviceNameVar } return ParseID(deviceName) }, ) }, } // useID is the general purpose creator for an Alice-style constructor that passes the ID // to the delegate via the request Context. This internal function is exported via UseID.F. func useID(f IDFromRequest) func(http.Handler) http.Handler { return func(delegate http.Handler) http.Handler { return http.HandlerFunc(func(response http.ResponseWriter, request *http.Request) { id, err := f(request) if err != nil { httperror.Formatf( response, http.StatusBadRequest, "Could extract device id: %s", err, ) return } ctx := WithID(id, request.Context()) delegate.ServeHTTP(response, request.WithContext(ctx)) }) } } // MessageHandler is a configurable http.Handler which handles inbound WRP traffic // to be sent to devices. type MessageHandler struct { // Logger is the sink for logging output. If not set, logging will be sent to logging.DefaultLogger(). Logger logging.Logger // Decoders is the pool of wrp.Decoder objects used to decode http.Request bodies // sent to this handler. This field is required. Decoders *wrp.DecoderPool // Encoders is the optional pool of wrp.Encoder objects used to encode wrp messages sent // as HTTP responses. 
If not supplied, this handler assumes the format returned by the Router // is the format to be sent back in the HTTP response. Encoders *wrp.EncoderPool // Router is the device message Router to use. This field is required. Router Router } func (mh *MessageHandler) logger() logging.Logger { if mh.Logger != nil { return mh.Logger } return logging.DefaultLogger() } // decodeRequest transforms an HTTP request into a device request. func (mh *MessageHandler) decodeRequest(httpRequest *http.Request) (deviceRequest *Request, err error) { deviceRequest, err = DecodeRequest(httpRequest.Body, mh.Decoders) if err == nil { deviceRequest = deviceRequest.WithContext(httpRequest.Context()) } return } func (mh *MessageHandler) ServeHTTP(httpResponse http.ResponseWriter, httpRequest *http.Request) { deviceRequest, err := mh.decodeRequest(httpRequest) if err != nil { httperror.Formatf( httpResponse, http.StatusBadRequest, "Could not decode WRP message: %s", err, ) return } // deviceRequest carries the context through the routing infrastructure if deviceResponse, err := mh.Router.Route(deviceRequest); err != nil { code := http.StatusInternalServerError switch err { case ErrorInvalidDeviceName: code = http.StatusBadRequest case ErrorDeviceNotFound: code = http.StatusNotFound case ErrorNonUniqueID: code = http.StatusBadRequest case ErrorInvalidTransactionKey: code = http.StatusBadRequest case ErrorTransactionAlreadyRegistered: code = http.StatusBadRequest } httperror.Formatf( httpResponse, code, "Could not process device request: %s", err, ) } else if deviceResponse != nil { if err := EncodeResponse(httpResponse, deviceResponse, mh.Encoders); err != nil { mh.logger().Error("Error while writing transaction response: %s", err) } } // if deviceReponse == nil, that just means the request was not something that represented // the start of a transaction. For example, events do not carry a transaction key because // they do not expect responses. } type ConnectHandler struct { Logger logging.Logger Connector Connector ResponseHeader http.Header } func (ch *ConnectHandler) logger() logging.Logger { if ch.Logger != nil { return ch.Logger } return logging.DefaultLogger() } func (ch *ConnectHandler) ServeHTTP(response http.ResponseWriter, request *http.Request) { if device, err := ch.Connector.Connect(response, request, ch.ResponseHeader); err != nil { ch.logger().Error("Failed to connect device: %s", err) } else { ch.logger().Debug("Connected device: %s", device.ID()) } } // ConnectedDeviceListener listens for connection and disconnection events and produces // a JSON document containing information about connected devices. It produces this document // on a certain interval. type ConnectedDeviceListener struct { // RefreshInterval is the time interval at which the cached JSON device list is updated. // If this field is nonpositive, DefaultRefreshInterval is used. RefreshInterval time.Duration // Tick is a factory function that produces a ticker channel and a stop function. // If not set, time.Ticker is used and the stop function is ticker.Stop. Tick func(time.Duration) (<-chan time.Time, func()) lock sync.Mutex initializeOnce sync.Once devices map[Key][]byte changeCount uint32 updates chan []byte shutdown chan struct{} } func (l *ConnectedDeviceListener) refreshInterval() time.Duration { if l.RefreshInterval > 0 { return l.RefreshInterval } return DefaultRefreshInterval } // newTick returns a ticker channel and a stop function for cleanup. If tick is set, // that function is used. 
// newTick returns a ticker channel and a stop function for cleanup. If Tick is set,
// that function is used. Otherwise, a time.Ticker is created and (ticker.C, ticker.Stop) is returned.
func (l *ConnectedDeviceListener) newTick() (<-chan time.Time, func()) {
	refreshInterval := l.refreshInterval()
	if l.Tick != nil {
		return l.Tick(refreshInterval)
	}

	ticker := time.NewTicker(refreshInterval)
	return ticker.C, ticker.Stop
}

func (l *ConnectedDeviceListener) onDeviceEvent(e *Event) {
	switch e.Type {
	case Connect:
		l.lock.Lock()
		defer l.lock.Unlock()
		l.changeCount++
		l.devices[e.Device.Key()] = []byte(e.Device.String())
	case Disconnect:
		l.lock.Lock()
		defer l.lock.Unlock()
		l.changeCount++
		delete(l.devices, e.Device.Key())
	}
}

func (l *ConnectedDeviceListener) refresh() {
	l.lock.Lock()
	defer l.lock.Unlock()

	if l.changeCount > 0 {
		l.changeCount = 0

		var (
			output     = bytes.NewBufferString(`{"devices":[`)
			needsComma bool
			comma      = []byte(`,`)
		)

		for _, deviceJSON := range l.devices {
			if needsComma {
				output.Write(comma)
			}

			output.Write(deviceJSON)
			needsComma = true
		}

		output.WriteString(`]}`)
		l.updates <- output.Bytes()
	}
}

// Stop stops updates coming from this listener.
func (l *ConnectedDeviceListener) Stop() {
	l.lock.Lock()
	defer l.lock.Unlock()

	if l.shutdown != nil {
		close(l.shutdown)
		close(l.updates)
		l.shutdown = nil
		l.updates = nil
	}
}

// Listen starts listening for changes to the set of connected devices. The returned Listener may
// be placed into an Options. This method is idempotent, and may be called to restart this handler
// after Stop is called. If this method is called multiple times without calling Stop, it simply
// returns the same Listener and output channel.
//
// The returned channel will receive updated JSON device list documents. This channel can be
// used with ListHandler.Consume.
func (l *ConnectedDeviceListener) Listen() (Listener, <-chan []byte) {
	l.lock.Lock()
	defer l.lock.Unlock()

	l.initializeOnce.Do(func() {
		l.devices = make(map[Key][]byte, 1000)
	})

	if l.shutdown == nil {
		l.shutdown = make(chan struct{})
		l.updates = make(chan []byte, 1)

		// spawn the monitor goroutine
		go func(shutdown <-chan struct{}) {
			refreshC, refreshStop := l.newTick()
			defer refreshStop()

			for {
				select {
				case <-shutdown:
					return
				case <-refreshC:
					l.refresh()
				}
			}
		}(l.shutdown)
	}

	return l.onDeviceEvent, l.updates
}
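// Test sketch (hypothetical): because the ticker is injected through Tick,
// a test can drive refreshes deterministically. This sketch verifies that a
// tick with no recorded device events produces no document; names are
// illustrative:
//
//	tickC := make(chan time.Time, 1)
//	l := &ConnectedDeviceListener{
//		Tick: func(time.Duration) (<-chan time.Time, func()) {
//			return tickC, func() {} // the test owns the channel; stop is a no-op
//		},
//	}
//
//	_, updates := l.Listen()
//	defer l.Stop()
//
//	tickC <- time.Now() // force a refresh
//	select {
//	case doc := <-updates:
//		t.Fatalf("unexpected document: %s", doc)
//	case <-time.After(100 * time.Millisecond):
//		// expected: refresh is skipped when nothing changed
//	}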
// ListHandler is an HTTP handler which can take updated JSON device lists.
type ListHandler struct {
	initializeOnce sync.Once
	cachedJSON     atomic.Value
}

// Consume spawns a goroutine that processes updated JSON from the given channel.
// This method can be called multiple times with different update sources. Typically,
// this method is called once to consume updates from a ConnectedDeviceListener.
func (lh *ListHandler) Consume(updates <-chan []byte) {
	lh.initializeOnce.Do(func() {
		lh.cachedJSON.Store([]byte(`{"devices":[]}`))
	})

	go func() {
		for updatedJSON := range updates {
			lh.cachedJSON.Store(updatedJSON)
		}
	}()
}

// ServeHTTP emits the cached JSON into the response. If Consume has not been called yet,
// or if for any reason there is no cached JSON, this handler returns http.StatusServiceUnavailable.
func (lh *ListHandler) ServeHTTP(response http.ResponseWriter, request *http.Request) {
	if jsonResponse, _ := lh.cachedJSON.Load().([]byte); len(jsonResponse) > 0 {
		response.Header().Set("Content-Type", "application/json")
		response.Header().Set("Content-Length", strconv.Itoa(len(jsonResponse)))
		response.Write(jsonResponse)
	} else {
		response.WriteHeader(http.StatusServiceUnavailable)
	}
}
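// Wiring sketch (hypothetical): the listener's output channel feeds the
// handler's cache. The Listener returned by Listen() would be registered
// with the device manager via Options (not shown); the route and port are
// illustrative:
//
//	connectedList := new(ConnectedDeviceListener)
//	listener, updates := connectedList.Listen()
//	_ = listener // registered via Options in real code
//
//	listHandler := new(ListHandler)
//	listHandler.Consume(updates)
//
//	http.Handle("/devices", listHandler)
//	http.ListenAndServe(":8080", nil)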
server.py
#!/usr/bin/python3
# -*- encoding: utf-8 -*-

"""An API for the Critical Pāli Dictionary"""

import argparse
import configparser
import datetime
import json
import logging
import os.path
import re

import flask
from flask import request, current_app
import sqlalchemy
from sqlalchemy.sql import text
import flask_sqlalchemy
from werkzeug.routing import Map, Rule

LANG = 'pi-Latn-x-iso'
MAX_RESULTS = 100

re_integer_arg = re.compile (r'^[0-9]+$')
re_normalize_headword = re.compile (r'^[-\[\(√°~]*(?:<sup>\d+</sup>)?(.*?)[-°~\)\]]*$')


class MySQLEngine (object):
    """ Database Interface """

    def __init__ (self, **kwargs):
        args = self.get_connection_params (kwargs)

        self.url = 'mysql+pymysql://{user}:{password}@{host}:{port}/{database}'.format (**args)

        logger.log (logging.INFO,
                    'MySQLEngine: Connecting to mysql+pymysql://{user}:password@{host}:{port}/{database}'.format (**args))

        self.engine = sqlalchemy.create_engine (self.url + '?charset=utf8mb4&sql_mode=ANSI',
                                                pool_recycle = 300)

    def get_connection_params (self, kwargs = {}):
        """ Get connection parameters from .my.cnf file. """

        config = configparser.ConfigParser ()

        if 'MYSQL_CONF' in kwargs:
            config.read (('/etc/my.cnf', os.path.expanduser (kwargs['MYSQL_CONF'])))
        else:
            config.read (('/etc/my.cnf', os.path.expanduser ('~/.my.cnf')))

        section = config[kwargs.get ('MYSQL_GROUP', 'mysql')]
        from_my_cnf = {
            'host'     : section.get ('host', 'localhost').strip ('"'),
            'port'     : section.get ('port', '3306').strip ('"'),
            'database' : section.get ('database', '').strip ('"'),
            'user'     : section.get ('user', '').strip ('"'),
            'password' : section.get ('password', '').strip ('"'),
        }
        return from_my_cnf


def execute (conn, sql, parameters, debug_level = logging.DEBUG):
    start_time = datetime.datetime.now ()
    result = conn.execute (text (sql.strip ()), parameters)
    logger.log (debug_level, '%d rows in %.3fs',
                result.rowcount, (datetime.datetime.now () - start_time).total_seconds ())
    return result


def clip (i, min_, max_):
    return max (min (int (i), max_), min_)


def arg (name, default, regex, msg = None):
    arg = request.args.get (name, default)
    if not regex.match (arg):
        if msg is None:
            msg = 'Invalid %s parameter' % name
        # abort () needs an HTTP status code; pass the message as the description
        flask.abort (400, msg)
    return arg


cpd_iso_trans = str.maketrans ('âêîôû', 'aeiou')

def normalize_iso (text):
    """Normalize to ISO 15919

    CPD transliteration is almost ISO 15919, but uses uppercase for proper
    names and 'â' instead of 'a' to signal a syncope 'a' + 'a'.  We have to
    replace all 'â's because they definitely do not conform to ISO.  We get
    away with serving uppercase letters in proper names because it is an
    easy fix on the client's side.
    """
    return text.translate (cpd_iso_trans)


def make_headword (row, lang = LANG):
    """ row is: headword_id, text, article_id """

    normalized = text = normalize_iso (row[1])
    m = re_normalize_headword.match (normalized)
    if m:
        normalized = m.group (1).lower ()
    return {
        'articles_url'    : 'v1/articles/%d' % row[2],
        'headwords_url'   : 'v1/headwords/%d' % row[0],
        'lang'            : lang,
        'normalized_text' : normalized,
        'text'            : text,
    }


def make_json_response (obj):
    resp = flask.Response (json.dumps (obj, indent=2, sort_keys=True),
                           mimetype='application/json')
    resp.headers['Access-Control-Allow-Origin'] = '*'
    return resp


def make_headwords_response (res, limit = MAX_RESULTS, lang = LANG):
    return make_json_response ({
        'limit' : limit,
        'data'  : [ make_headword (row, lang) for row in res ]
    })


# need this before first @app.endpoint declaration
app = flask.Flask (__name__)
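# Illustration (safe to run in a REPL after importing this module):
# normalize_iso maps the CPD circumflex vowels to plain ISO vowels, and
# make_headword strips CPD markup to build 'normalized_text'.  The sample
# headword and ids below are made up:
#
#   normalize_iso ('rûpa')                                            # -> 'rupa'
#   make_headword ((17, '<sup>2</sup>kāma-', 42))['normalized_text']  # -> 'kāma'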
""" info = { 'name' : app.config['APPLICATION_NAME'], 'short_name' : app.config['APPLICATION_SHORT_NAME'], 'main_page_url' : app.config['APPLICATION_MAIN_URL'], # 'css_url' : app.config.get ('APPLICATION_CSS_URL', ''), 'css' : 'span.smalltext { font-size: smaller }', 'supported_langs_query' : [ LANG ], } return make_json_response (info) @app.endpoint ('headwords') def headwords (): """ Endpoint. Retrieve a list of headword IDs. This implements the search query and wordlist. """ q = request.args.get ('q') fulltext = request.args.get ('fulltext') offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) where = '' if (not q) and (not fulltext): # Retrieve full list of headwords with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword ORDER BY sortkeyword, n, no LIMIT :limit OFFSET :offset """, { 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) if q: q = q.replace ('-', '') q = q.replace ('%', '') q = q.replace ('?', '_') q = q.replace ('*', '%') where = "(keyword LIKE :q) AND" if not fulltext: # easy out with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword WHERE keyword LIKE :q ORDER BY sortkeyword, n, no LIMIT :limit OFFSET :offset """, { 'q' : q, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT DISTINCT k.id, k.webkeyword COLLATE utf8mb4_bin AS webkeyword, k.no FROM keyword k, article a WHERE {where} (MATCH (a.idxtext) AGAINST (:fulltext IN BOOLEAN MODE)) AND a.no = k.no ORDER BY k.sortkeyword, k.n, k.no LIMIT :limit OFFSET :offset """.format (where = where), { 'q' : q, 'fulltext' : fulltext, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) @app.endpoint ('headwords_id') def headwords_id (_id): """ Retrieve a headword. """ with current_app.config.dba.engine.begin () as conn: res = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE id = :id """, { 'id' : _id }) return make_headwords_response (res) @app.endpoint ('headwords_id_context') def headwords_id_context (_id): """ Retrieve a list of headwords around a given headword. """ limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, """ SELECT keyword, sortkeyword FROM keyword WHERE id = :id """, { 'id' : _id }) keyword, sortkeyword = res.fetchone () res1 = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE sortkeyword < :sortkeyword ORDER BY sortkeyword DESC, n DESC, no DESC LIMIT :limit """, { 'sortkeyword' : sortkeyword, 'limit' : limit }) res2 = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE sortkeyword >= :sortkeyword ORDER BY sortkeyword, n, no LIMIT :limit """, { 'sortkeyword' : sortkeyword, 'limit' : limit + 1 }) res = [] for row in reversed (res1.fetchall ()): res.append (row[:3]) for row in res2: res.append (row[:3]) return make_headwords_response (res, limit) def make_article (row, lang = LANG): """ row is: article_id """ return { 'articles_url' : 'v1/articles/%d' % row[0], } def make_articles_response (res, limit = MAX_RESULTS, lang = LANG): return make_json_response ({ 'limit' : limit, 'data' : [ make_article (row, lang) for row in res ] }) @app.endpoint ('articles') def articles (): """ Endpoint. 
@app.endpoint ('articles')
def articles ():
    """ Endpoint.  Retrieve a list of articles. """

    offset = int (arg ('offset', '0', re_integer_arg))
    limit  = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS)

    with current_app.config.dba.engine.begin () as conn:
        res = execute (conn, r"""
        SELECT no
        FROM article
        ORDER BY no
        LIMIT :limit
        OFFSET :offset
        """, { 'offset' : offset, 'limit' : limit })
        return make_articles_response (res, limit)


@app.endpoint ('articles_id')
def articles_id (_id = None):
    """ Endpoint.  Retrieve an article. """

    with current_app.config.dba.engine.begin () as conn:
        res = execute (conn, r"""
        SELECT no
        FROM article
        WHERE no = :id
        """, { 'id' : _id })
        return make_articles_response (res)


@app.endpoint ('articles_id_formats')
def articles_id_formats (_id):
    """ Endpoint.  Retrieve an article's available formats. """

    canonical_url = app.config['APPLICATION_MAIN_URL'] + 'search?article_id='

    with current_app.config.dba.engine.begin () as conn:
        res = execute (conn, r"""
        SELECT webtext
        FROM article
        WHERE no=:no
        """, { 'no' : _id })

        return make_json_response ([
            {
                'mimetype'   : 'text/x-html-literal',
                'lang'       : LANG,
                'embeddable' : True,
                'text'       : normalize_iso ('<div>%s</div>' % res.fetchone ()[0]),
            },
            {
                'mimetype'  : 'text/html',
                'lang'      : LANG,
                'canonical' : True,
                'urls'      : [ canonical_url + str (_id) ],
            }
        ])


@app.endpoint ('articles_id_headwords')
def articles_id_headwords (_id):
    """ Endpoint.  Retrieve the list of headwords for an article. """

    offset = int (arg ('offset', '0', re_integer_arg))
    limit  = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS)

    with current_app.config.dba.engine.begin () as conn:
        res = execute (conn, r"""
        SELECT id, webkeyword, no
        FROM keyword
        WHERE no = :id
        ORDER BY sortkeyword
        LIMIT :limit
        OFFSET :offset
        """, { 'id' : _id, 'offset' : offset, 'limit' : limit })
        return make_headwords_response (res, limit)


#
# main
#

parser = argparse.ArgumentParser (description='A simple API for dictionaries')

parser.add_argument ('-v', '--verbose', dest='verbose', action='count',
                     help='increase output verbosity', default=0)
parser.add_argument ('-c', '--config-file', dest='config_file', action='append', required=True,
                     metavar='CONFIG_FILE',
                     help="a config file (repeat for more than one, later ones overwrite)")

args = parser.parse_args ()
args.start_time = datetime.datetime.now ()

LOG_LEVELS = {
    0: logging.CRITICAL,
    1: logging.ERROR,
    2: logging.WARN,
    3: logging.INFO,
    4: logging.DEBUG,
}
# fall back to DEBUG (not CRITICAL) when more -v flags are given than mapped
args.log_level = LOG_LEVELS.get (args.verbose + 1, logging.DEBUG)

logging.basicConfig (format = '%(asctime)s - %(levelname)s - %(message)s')
logging.getLogger ('sqlalchemy.engine').setLevel (args.log_level)
logging.getLogger ('server').setLevel (args.log_level)

logger = logging.getLogger ('server')

for config_file in args.config_file:
    app.config.from_pyfile (config_file)

app.config.dba = MySQLEngine (**app.config)
app.config['SQLALCHEMY_DATABASE_URI'] = app.config.dba.url
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['server_start_time'] = str (int (args.start_time.timestamp ()))

app.url_map = Map ([
    Rule ('/v1',                              endpoint = 'info'),
    Rule ('/v1/headwords',                    endpoint = 'headwords'),
    Rule ('/v1/headwords/<int:_id>',          endpoint = 'headwords_id'),
    Rule ('/v1/headwords/<int:_id>/context',  endpoint = 'headwords_id_context'),
    Rule ('/v1/articles',                     endpoint = 'articles'),
    Rule ('/v1/articles/<int:_id>',           endpoint = 'articles_id'),
    Rule ('/v1/articles/<int:_id>/formats',   endpoint = 'articles_id_formats'),
    Rule ('/v1/articles/<int:_id>/headwords', endpoint = 'articles_id_headwords'),
])
dba = flask_sqlalchemy.SQLAlchemy ()
dba.init_app (app)

port = app.config.get ('APPLICATION_PORT', 5000)
path = app.config.get ('APPLICATION_ROOT', '')

logger.log (logging.INFO, "'{name}' is now served from localhost:{port}{path}/v1".format (
    name = app.config['APPLICATION_NAME'],
    port = port,
    path = path))

if __name__ == "__main__":
    from werkzeug.serving import run_simple

    if path == '':
        run_simple ('localhost', port, app)
    else:
        from werkzeug.wsgi import DispatcherMiddleware
        application = DispatcherMiddleware (flask.Flask ('dummy_app_for_root'), {
            app.config['APPLICATION_ROOT'] : app,
        })
        run_simple ('localhost', port, application)
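# A hypothetical config file for the -c option (loaded via app.config.from_pyfile).
# File name and values are illustrative; the keys are the ones read above:
#
#   APPLICATION_NAME       = 'Critical Pāli Dictionary'
#   APPLICATION_SHORT_NAME = 'CPD'
#   APPLICATION_MAIN_URL   = 'https://cpd.example.org/'
#   APPLICATION_PORT       = 5000
#   APPLICATION_ROOT       = ''            # or '/cpd' to mount under a prefix
#   MYSQL_CONF             = '~/.my.cnf'   # optional alternate MySQL config file
#   MYSQL_GROUP            = 'mysql'       # section within the MySQL config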
random_line_split
server.py
#!/usr/bin/python3 # -*- encoding: utf-8 -*- """An API for the Critical Pāli Dictionary""" import argparse import configparser import datetime import json import logging import os.path import re import flask from flask import request, current_app import sqlalchemy from sqlalchemy.sql import text import flask_sqlalchemy from werkzeug.routing import Map, Rule LANG = 'pi-Latn-x-iso' MAX_RESULTS = 100 re_integer_arg = re.compile (r'^[0-9]+$') re_normalize_headword = re.compile (r'^[-\[\(√°~]*(?:<sup>\d+</sup>)?(.*?)[-°~\)\]]*$') class MySQLEngine (object): """ Database Interface """ def __init__ (self, **kwargs): args = self.get_connection_params (kwargs) self.url = 'mysql+pymysql://{user}:{password}@{host}:{port}/{database}'.format (**args) logger.log (logging.INFO, 'MySQLEngine: Connecting to mysql+pymysql://{user}:password@{host}:{port}/{database}'.format (**args)) self.engine = sqlalchemy.create_engine (self.url + '?charset=utf8mb4&sql_mode=ANSI', pool_recycle = 300) def get_connection_params (self, kwargs = {}): """ Get connection parameters from .my.cnf file. """ config = configparser.ConfigParser () if 'MYSQL_CONF' in kwargs: config.read (('/etc/my.cnf', os.path.expanduser (kwargs['MYSQL_CONF']))) else: config.read (('/etc/my.cnf', os.path.expanduser ('~/.my.cnf'))) section = config[kwargs.get ('MYSQL_GROUP', 'mysql')] from_my_cnf = { 'host' : section.get ('host', 'localhost').strip ('"'), 'port' : section.get ('port', '3306').strip ('"'), 'database' : section.get ('database', '').strip ('"'), 'user' : section.get ('user', '').strip ('"'), 'password' : section.get ('password', '').strip ('"'), } return from_my_cnf def execute (conn, sql, parameters, debug_level = logging.DEBUG): start_time = datetime.datetime.now () result = conn.execute (text (sql.strip ()), parameters) logger.log (debug_level, '%d rows in %.3fs', result.rowcount, (datetime.datetime.now () - start_time).total_seconds ()) return result def clip (i, min_, max_): return max (min (int (i), max_), min_) def arg (name, default, regex, msg = None): arg = request.args.get (name, default) if not regex.match (arg): if ms
return arg cpd_iso_trans = str.maketrans ('âêîôû', 'aeiou') def normalize_iso (text): """Normalize to ISO 15919 CPD transliteration is almost ISO 15919, but uses uppercase for proper names and 'â' instead of 'a' to signal a syncope 'a' + 'a'. We have to replace all 'â's because they definitely do not conform to ISO. We get away with serving uppercase letters in proper names because it is an easy fix on the client's side. """ return text.translate (cpd_iso_trans) def make_headword (row, lang = LANG): """ row is: headword_id, text, article_id """ normalized = text = normalize_iso (row[1]) m = re_normalize_headword.match (normalized) if m: normalized = m.group (1).lower () return { 'articles_url' : 'v1/articles/%d' % row[2], 'headwords_url' : 'v1/headwords/%d' % row[0], 'lang' : lang, 'normalized_text' : normalized, 'text' : text, } def make_json_response (obj): resp = flask.Response (json.dumps (obj, indent=2, sort_keys=True), mimetype='application/json') resp.headers['Access-Control-Allow-Origin'] = '*' return resp def make_headwords_response (res, limit = MAX_RESULTS, lang = LANG): return make_json_response ({ 'limit' : limit, 'data' : [ make_headword (row, lang) for row in res ] }) # need this before first @app.endpoint declaration app = flask.Flask (__name__) @app.endpoint ('info') def info (): """ Endpoint. The root of the application. """ info = { 'name' : app.config['APPLICATION_NAME'], 'short_name' : app.config['APPLICATION_SHORT_NAME'], 'main_page_url' : app.config['APPLICATION_MAIN_URL'], # 'css_url' : app.config.get ('APPLICATION_CSS_URL', ''), 'css' : 'span.smalltext { font-size: smaller }', 'supported_langs_query' : [ LANG ], } return make_json_response (info) @app.endpoint ('headwords') def headwords (): """ Endpoint. Retrieve a list of headword IDs. This implements the search query and wordlist. """ q = request.args.get ('q') fulltext = request.args.get ('fulltext') offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) where = '' if (not q) and (not fulltext): # Retrieve full list of headwords with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword ORDER BY sortkeyword, n, no LIMIT :limit OFFSET :offset """, { 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) if q: q = q.replace ('-', '') q = q.replace ('%', '') q = q.replace ('?', '_') q = q.replace ('*', '%') where = "(keyword LIKE :q) AND" if not fulltext: # easy out with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword WHERE keyword LIKE :q ORDER BY sortkeyword, n, no LIMIT :limit OFFSET :offset """, { 'q' : q, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT DISTINCT k.id, k.webkeyword COLLATE utf8mb4_bin AS webkeyword, k.no FROM keyword k, article a WHERE {where} (MATCH (a.idxtext) AGAINST (:fulltext IN BOOLEAN MODE)) AND a.no = k.no ORDER BY k.sortkeyword, k.n, k.no LIMIT :limit OFFSET :offset """.format (where = where), { 'q' : q, 'fulltext' : fulltext, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) @app.endpoint ('headwords_id') def headwords_id (_id): """ Retrieve a headword. 
""" with current_app.config.dba.engine.begin () as conn: res = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE id = :id """, { 'id' : _id }) return make_headwords_response (res) @app.endpoint ('headwords_id_context') def headwords_id_context (_id): """ Retrieve a list of headwords around a given headword. """ limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, """ SELECT keyword, sortkeyword FROM keyword WHERE id = :id """, { 'id' : _id }) keyword, sortkeyword = res.fetchone () res1 = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE sortkeyword < :sortkeyword ORDER BY sortkeyword DESC, n DESC, no DESC LIMIT :limit """, { 'sortkeyword' : sortkeyword, 'limit' : limit }) res2 = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE sortkeyword >= :sortkeyword ORDER BY sortkeyword, n, no LIMIT :limit """, { 'sortkeyword' : sortkeyword, 'limit' : limit + 1 }) res = [] for row in reversed (res1.fetchall ()): res.append (row[:3]) for row in res2: res.append (row[:3]) return make_headwords_response (res, limit) def make_article (row, lang = LANG): """ row is: article_id """ return { 'articles_url' : 'v1/articles/%d' % row[0], } def make_articles_response (res, limit = MAX_RESULTS, lang = LANG): return make_json_response ({ 'limit' : limit, 'data' : [ make_article (row, lang) for row in res ] }) @app.endpoint ('articles') def articles (): """ Endpoint. Retrieve a list of articles. """ offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT no FROM article ORDER BY no LIMIT :limit OFFSET :offset """, { 'offset' : offset, 'limit' : limit }) return make_articles_response (res, limit) @app.endpoint ('articles_id') def articles_id (_id = None): """ Endpoint. Retrieve an article. """ with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT no FROM article WHERE no = :id """, { 'id' : _id }) return make_articles_response (res) @app.endpoint ('articles_id_formats') def articles_id_formats (_id): """ Endpoint. Retrieve an article's available formats. """ canonical_url = app.config['APPLICATION_MAIN_URL'] + 'search?article_id=' with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT webtext FROM article WHERE no=:no """, { 'no' : _id }) return make_json_response ([ { 'mimetype' : 'text/x-html-literal', 'lang' : LANG, 'embeddable' : True, 'text' : normalize_iso ('<div>%s</div>' % res.fetchone ()[0]), }, { 'mimetype' : 'text/html', 'lang' : LANG, 'canonical' : True, 'urls' : [ canonical_url + str (_id) ], } ]) @app.endpoint ('articles_id_headwords') def articles_id_headwords (_id): """ Endpoint. Retrieve the list of headwords for an article. 
""" offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword WHERE no = :id ORDER BY sortkeyword LIMIT :limit OFFSET :offset """, { 'id' : _id, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) # # main # parser = argparse.ArgumentParser (description='A simple API for dictionares') parser.add_argument ('-v', '--verbose', dest='verbose', action='count', help='increase output verbosity', default=0) parser.add_argument ('-c', '--config-file', dest='config_file', action='append', required=True, metavar='CONFIG_FILE', help="a config file (repeat for more than one, later ones overwrite)") args = parser.parse_args () args.start_time = datetime.datetime.now () LOG_LEVELS = { 0: logging.CRITICAL, 1: logging.ERROR, 2: logging.WARN, 3: logging.INFO, 4: logging.DEBUG } args.log_level = LOG_LEVELS.get (args.verbose + 1, logging.CRITICAL) logging.basicConfig (format = '%(asctime)s - %(levelname)s - %(message)s') logging.getLogger ('sqlalchemy.engine').setLevel (args.log_level) logging.getLogger ('server').setLevel (args.log_level) logger = logging.getLogger ('server') for config_file in args.config_file: app.config.from_pyfile (config_file) app.config.dba = MySQLEngine (**app.config) app.config['SQLALCHEMY_DATABASE_URI'] = app.config.dba.url app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False app.config['server_start_time'] = str (int (args.start_time.timestamp ())) app.url_map = Map ([ Rule ('/v1', endpoint = 'info'), Rule ('/v1/headwords', endpoint = 'headwords'), Rule ('/v1/headwords/<int:_id>', endpoint = 'headwords_id'), Rule ('/v1/headwords/<int:_id>/context', endpoint = 'headwords_id_context'), Rule ('/v1/articles', endpoint = 'articles'), Rule ('/v1/articles/<int:_id>', endpoint = 'articles_id'), Rule ('/v1/articles/<int:_id>/formats', endpoint = 'articles_id_formats'), Rule ('/v1/articles/<int:_id>/headwords', endpoint = 'articles_id_headwords'), ]) dba = flask_sqlalchemy.SQLAlchemy () dba.init_app (app) port = app.config.get ('APPLICATION_PORT', 5000) path = app.config.get ('APPLICATION_ROOT', '') logger.log (logging.INFO, "'{name}' is now served from localhost:{port}{path}/v1".format ( name = app.config['APPLICATION_NAME'], port = port, path = path)) if __name__ == "__main__": from werkzeug.serving import run_simple if path == '': run_simple ('localhost', port, app) else: from werkzeug.wsgi import DispatcherMiddleware application = DispatcherMiddleware (flask.Flask ('dummy_app_for_root'), { app.config['APPLICATION_ROOT'] : app, }) run_simple ('localhost', port, application)
g is None: msg = 'Invalid %s parameter' % name flask.abort (msg)
conditional_block
server.py
#!/usr/bin/python3 # -*- encoding: utf-8 -*- """An API for the Critical Pāli Dictionary""" import argparse import configparser import datetime import json import logging import os.path import re import flask from flask import request, current_app import sqlalchemy from sqlalchemy.sql import text import flask_sqlalchemy from werkzeug.routing import Map, Rule LANG = 'pi-Latn-x-iso' MAX_RESULTS = 100 re_integer_arg = re.compile (r'^[0-9]+$') re_normalize_headword = re.compile (r'^[-\[\(√°~]*(?:<sup>\d+</sup>)?(.*?)[-°~\)\]]*$') class MySQL
ect): """ Database Interface """ def __init__ (self, **kwargs): args = self.get_connection_params (kwargs) self.url = 'mysql+pymysql://{user}:{password}@{host}:{port}/{database}'.format (**args) logger.log (logging.INFO, 'MySQLEngine: Connecting to mysql+pymysql://{user}:password@{host}:{port}/{database}'.format (**args)) self.engine = sqlalchemy.create_engine (self.url + '?charset=utf8mb4&sql_mode=ANSI', pool_recycle = 300) def get_connection_params (self, kwargs = {}): """ Get connection parameters from .my.cnf file. """ config = configparser.ConfigParser () if 'MYSQL_CONF' in kwargs: config.read (('/etc/my.cnf', os.path.expanduser (kwargs['MYSQL_CONF']))) else: config.read (('/etc/my.cnf', os.path.expanduser ('~/.my.cnf'))) section = config[kwargs.get ('MYSQL_GROUP', 'mysql')] from_my_cnf = { 'host' : section.get ('host', 'localhost').strip ('"'), 'port' : section.get ('port', '3306').strip ('"'), 'database' : section.get ('database', '').strip ('"'), 'user' : section.get ('user', '').strip ('"'), 'password' : section.get ('password', '').strip ('"'), } return from_my_cnf def execute (conn, sql, parameters, debug_level = logging.DEBUG): start_time = datetime.datetime.now () result = conn.execute (text (sql.strip ()), parameters) logger.log (debug_level, '%d rows in %.3fs', result.rowcount, (datetime.datetime.now () - start_time).total_seconds ()) return result def clip (i, min_, max_): return max (min (int (i), max_), min_) def arg (name, default, regex, msg = None): arg = request.args.get (name, default) if not regex.match (arg): if msg is None: msg = 'Invalid %s parameter' % name flask.abort (msg) return arg cpd_iso_trans = str.maketrans ('âêîôû', 'aeiou') def normalize_iso (text): """Normalize to ISO 15919 CPD transliteration is almost ISO 15919, but uses uppercase for proper names and 'â' instead of 'a' to signal a syncope 'a' + 'a'. We have to replace all 'â's because they definitely do not conform to ISO. We get away with serving uppercase letters in proper names because it is an easy fix on the client's side. """ return text.translate (cpd_iso_trans) def make_headword (row, lang = LANG): """ row is: headword_id, text, article_id """ normalized = text = normalize_iso (row[1]) m = re_normalize_headword.match (normalized) if m: normalized = m.group (1).lower () return { 'articles_url' : 'v1/articles/%d' % row[2], 'headwords_url' : 'v1/headwords/%d' % row[0], 'lang' : lang, 'normalized_text' : normalized, 'text' : text, } def make_json_response (obj): resp = flask.Response (json.dumps (obj, indent=2, sort_keys=True), mimetype='application/json') resp.headers['Access-Control-Allow-Origin'] = '*' return resp def make_headwords_response (res, limit = MAX_RESULTS, lang = LANG): return make_json_response ({ 'limit' : limit, 'data' : [ make_headword (row, lang) for row in res ] }) # need this before first @app.endpoint declaration app = flask.Flask (__name__) @app.endpoint ('info') def info (): """ Endpoint. The root of the application. """ info = { 'name' : app.config['APPLICATION_NAME'], 'short_name' : app.config['APPLICATION_SHORT_NAME'], 'main_page_url' : app.config['APPLICATION_MAIN_URL'], # 'css_url' : app.config.get ('APPLICATION_CSS_URL', ''), 'css' : 'span.smalltext { font-size: smaller }', 'supported_langs_query' : [ LANG ], } return make_json_response (info) @app.endpoint ('headwords') def headwords (): """ Endpoint. Retrieve a list of headword IDs. This implements the search query and wordlist. 
""" q = request.args.get ('q') fulltext = request.args.get ('fulltext') offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) where = '' if (not q) and (not fulltext): # Retrieve full list of headwords with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword ORDER BY sortkeyword, n, no LIMIT :limit OFFSET :offset """, { 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) if q: q = q.replace ('-', '') q = q.replace ('%', '') q = q.replace ('?', '_') q = q.replace ('*', '%') where = "(keyword LIKE :q) AND" if not fulltext: # easy out with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword WHERE keyword LIKE :q ORDER BY sortkeyword, n, no LIMIT :limit OFFSET :offset """, { 'q' : q, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT DISTINCT k.id, k.webkeyword COLLATE utf8mb4_bin AS webkeyword, k.no FROM keyword k, article a WHERE {where} (MATCH (a.idxtext) AGAINST (:fulltext IN BOOLEAN MODE)) AND a.no = k.no ORDER BY k.sortkeyword, k.n, k.no LIMIT :limit OFFSET :offset """.format (where = where), { 'q' : q, 'fulltext' : fulltext, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) @app.endpoint ('headwords_id') def headwords_id (_id): """ Retrieve a headword. """ with current_app.config.dba.engine.begin () as conn: res = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE id = :id """, { 'id' : _id }) return make_headwords_response (res) @app.endpoint ('headwords_id_context') def headwords_id_context (_id): """ Retrieve a list of headwords around a given headword. """ limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, """ SELECT keyword, sortkeyword FROM keyword WHERE id = :id """, { 'id' : _id }) keyword, sortkeyword = res.fetchone () res1 = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE sortkeyword < :sortkeyword ORDER BY sortkeyword DESC, n DESC, no DESC LIMIT :limit """, { 'sortkeyword' : sortkeyword, 'limit' : limit }) res2 = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE sortkeyword >= :sortkeyword ORDER BY sortkeyword, n, no LIMIT :limit """, { 'sortkeyword' : sortkeyword, 'limit' : limit + 1 }) res = [] for row in reversed (res1.fetchall ()): res.append (row[:3]) for row in res2: res.append (row[:3]) return make_headwords_response (res, limit) def make_article (row, lang = LANG): """ row is: article_id """ return { 'articles_url' : 'v1/articles/%d' % row[0], } def make_articles_response (res, limit = MAX_RESULTS, lang = LANG): return make_json_response ({ 'limit' : limit, 'data' : [ make_article (row, lang) for row in res ] }) @app.endpoint ('articles') def articles (): """ Endpoint. Retrieve a list of articles. """ offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT no FROM article ORDER BY no LIMIT :limit OFFSET :offset """, { 'offset' : offset, 'limit' : limit }) return make_articles_response (res, limit) @app.endpoint ('articles_id') def articles_id (_id = None): """ Endpoint. Retrieve an article. 
""" with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT no FROM article WHERE no = :id """, { 'id' : _id }) return make_articles_response (res) @app.endpoint ('articles_id_formats') def articles_id_formats (_id): """ Endpoint. Retrieve an article's available formats. """ canonical_url = app.config['APPLICATION_MAIN_URL'] + 'search?article_id=' with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT webtext FROM article WHERE no=:no """, { 'no' : _id }) return make_json_response ([ { 'mimetype' : 'text/x-html-literal', 'lang' : LANG, 'embeddable' : True, 'text' : normalize_iso ('<div>%s</div>' % res.fetchone ()[0]), }, { 'mimetype' : 'text/html', 'lang' : LANG, 'canonical' : True, 'urls' : [ canonical_url + str (_id) ], } ]) @app.endpoint ('articles_id_headwords') def articles_id_headwords (_id): """ Endpoint. Retrieve the list of headwords for an article. """ offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword WHERE no = :id ORDER BY sortkeyword LIMIT :limit OFFSET :offset """, { 'id' : _id, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) # # main # parser = argparse.ArgumentParser (description='A simple API for dictionares') parser.add_argument ('-v', '--verbose', dest='verbose', action='count', help='increase output verbosity', default=0) parser.add_argument ('-c', '--config-file', dest='config_file', action='append', required=True, metavar='CONFIG_FILE', help="a config file (repeat for more than one, later ones overwrite)") args = parser.parse_args () args.start_time = datetime.datetime.now () LOG_LEVELS = { 0: logging.CRITICAL, 1: logging.ERROR, 2: logging.WARN, 3: logging.INFO, 4: logging.DEBUG } args.log_level = LOG_LEVELS.get (args.verbose + 1, logging.CRITICAL) logging.basicConfig (format = '%(asctime)s - %(levelname)s - %(message)s') logging.getLogger ('sqlalchemy.engine').setLevel (args.log_level) logging.getLogger ('server').setLevel (args.log_level) logger = logging.getLogger ('server') for config_file in args.config_file: app.config.from_pyfile (config_file) app.config.dba = MySQLEngine (**app.config) app.config['SQLALCHEMY_DATABASE_URI'] = app.config.dba.url app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False app.config['server_start_time'] = str (int (args.start_time.timestamp ())) app.url_map = Map ([ Rule ('/v1', endpoint = 'info'), Rule ('/v1/headwords', endpoint = 'headwords'), Rule ('/v1/headwords/<int:_id>', endpoint = 'headwords_id'), Rule ('/v1/headwords/<int:_id>/context', endpoint = 'headwords_id_context'), Rule ('/v1/articles', endpoint = 'articles'), Rule ('/v1/articles/<int:_id>', endpoint = 'articles_id'), Rule ('/v1/articles/<int:_id>/formats', endpoint = 'articles_id_formats'), Rule ('/v1/articles/<int:_id>/headwords', endpoint = 'articles_id_headwords'), ]) dba = flask_sqlalchemy.SQLAlchemy () dba.init_app (app) port = app.config.get ('APPLICATION_PORT', 5000) path = app.config.get ('APPLICATION_ROOT', '') logger.log (logging.INFO, "'{name}' is now served from localhost:{port}{path}/v1".format ( name = app.config['APPLICATION_NAME'], port = port, path = path)) if __name__ == "__main__": from werkzeug.serving import run_simple if path == '': run_simple ('localhost', port, app) else: from werkzeug.wsgi import DispatcherMiddleware application = DispatcherMiddleware 
(flask.Flask ('dummy_app_for_root'), { app.config['APPLICATION_ROOT'] : app, }) run_simple ('localhost', port, application)
Engine (obj
identifier_name
server.py
#!/usr/bin/python3 # -*- encoding: utf-8 -*- """An API for the Critical Pāli Dictionary""" import argparse import configparser import datetime import json import logging import os.path import re import flask from flask import request, current_app import sqlalchemy from sqlalchemy.sql import text import flask_sqlalchemy from werkzeug.routing import Map, Rule LANG = 'pi-Latn-x-iso' MAX_RESULTS = 100 re_integer_arg = re.compile (r'^[0-9]+$') re_normalize_headword = re.compile (r'^[-\[\(√°~]*(?:<sup>\d+</sup>)?(.*?)[-°~\)\]]*$') class MySQLEngine (object): """ Database Interface """ def __init__ (self, **kwargs): args = self.get_connection_params (kwargs) self.url = 'mysql+pymysql://{user}:{password}@{host}:{port}/{database}'.format (**args) logger.log (logging.INFO, 'MySQLEngine: Connecting to mysql+pymysql://{user}:password@{host}:{port}/{database}'.format (**args)) self.engine = sqlalchemy.create_engine (self.url + '?charset=utf8mb4&sql_mode=ANSI', pool_recycle = 300) def get_connection_params (self, kwargs = {}): """ Get connection parameters from .my.cnf file. """ config = configparser.ConfigParser () if 'MYSQL_CONF' in kwargs: config.read (('/etc/my.cnf', os.path.expanduser (kwargs['MYSQL_CONF']))) else: config.read (('/etc/my.cnf', os.path.expanduser ('~/.my.cnf'))) section = config[kwargs.get ('MYSQL_GROUP', 'mysql')] from_my_cnf = { 'host' : section.get ('host', 'localhost').strip ('"'), 'port' : section.get ('port', '3306').strip ('"'), 'database' : section.get ('database', '').strip ('"'), 'user' : section.get ('user', '').strip ('"'), 'password' : section.get ('password', '').strip ('"'), } return from_my_cnf def execute (conn, sql, parameters, debug_level = logging.DEBUG): start_time = datetime.datetime.now () result = conn.execute (text (sql.strip ()), parameters) logger.log (debug_level, '%d rows in %.3fs', result.rowcount, (datetime.datetime.now () - start_time).total_seconds ()) return result def clip (i, min_, max_): return max (min (int (i), max_), min_) def arg (name, default, regex, msg = None): arg = request.args.get (name, default) if not regex.match (arg): if msg is None: msg = 'Invalid %s parameter' % name flask.abort (msg) return arg cpd_iso_trans = str.maketrans ('âêîôû', 'aeiou') def normalize_iso (text): """Normalize to ISO 15919 CPD transliteration is almost ISO 15919, but uses uppercase for proper names and 'â' instead of 'a' to signal a syncope 'a' + 'a'. We have to replace all 'â's because they definitely do not conform to ISO. We get away with serving uppercase letters in proper names because it is an easy fix on the client's side. """ return text.translate (cpd_iso_trans) def make_headword (row, lang = LANG): """ row is: headword_id, text, article_id """ normalized = text = normalize_iso (row[1]) m = re_normalize_headword.match (normalized) if m: normalized = m.group (1).lower () return { 'articles_url' : 'v1/articles/%d' % row[2], 'headwords_url' : 'v1/headwords/%d' % row[0], 'lang' : lang, 'normalized_text' : normalized, 'text' : text, } def make_json_response (obj): resp = flask.Response (json.dumps (obj, indent=2, sort_keys=True), mimetype='application/json') resp.headers['Access-Control-Allow-Origin'] = '*' return resp def make_headwords_response (res, limit = MAX_RESULTS, lang = LANG): return make_json_response ({ 'limit' : limit, 'data' : [ make_headword (row, lang) for row in res ] }) # need this before first @app.endpoint declaration app = flask.Flask (__name__) @app.endpoint ('info') def info (): """ Endpoint. The root of the application. 
""" info = { 'name' : app.config['APPLICATION_NAME'], 'short_name' : app.config['APPLICATION_SHORT_NAME'], 'main_page_url' : app.config['APPLICATION_MAIN_URL'], # 'css_url' : app.config.get ('APPLICATION_CSS_URL', ''), 'css' : 'span.smalltext { font-size: smaller }', 'supported_langs_query' : [ LANG ], } return make_json_response (info) @app.endpoint ('headwords') def headwords (): """ Endpoint. Retrieve a list of headword IDs. This implements the search query and wordlist. """ q = request.args.get ('q') fulltext = request.args.get ('fulltext') offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) where = '' if (not q) and (not fulltext): # Retrieve full list of headwords with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword ORDER BY sortkeyword, n, no LIMIT :limit OFFSET :offset """, { 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) if q: q = q.replace ('-', '') q = q.replace ('%', '') q = q.replace ('?', '_') q = q.replace ('*', '%') where = "(keyword LIKE :q) AND" if not fulltext: # easy out with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword WHERE keyword LIKE :q ORDER BY sortkeyword, n, no LIMIT :limit OFFSET :offset """, { 'q' : q, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT DISTINCT k.id, k.webkeyword COLLATE utf8mb4_bin AS webkeyword, k.no FROM keyword k, article a WHERE {where} (MATCH (a.idxtext) AGAINST (:fulltext IN BOOLEAN MODE)) AND a.no = k.no ORDER BY k.sortkeyword, k.n, k.no LIMIT :limit OFFSET :offset """.format (where = where), { 'q' : q, 'fulltext' : fulltext, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) @app.endpoint ('headwords_id') def headwords_id (_id): """ Retrieve a headword. """ with current_app.config.dba.engine.begin () as conn: res = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE id = :id """, { 'id' : _id }) return make_headwords_response (res) @app.endpoint ('headwords_id_context') def headwords_id_context (_id): """ Retrieve a list of headwords around a given headword. """ limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, """ SELECT keyword, sortkeyword FROM keyword WHERE id = :id """, { 'id' : _id }) keyword, sortkeyword = res.fetchone () res1 = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE sortkeyword < :sortkeyword ORDER BY sortkeyword DESC, n DESC, no DESC LIMIT :limit """, { 'sortkeyword' : sortkeyword, 'limit' : limit }) res2 = execute (conn, """ SELECT id, webkeyword, no FROM keyword WHERE sortkeyword >= :sortkeyword ORDER BY sortkeyword, n, no LIMIT :limit """, { 'sortkeyword' : sortkeyword, 'limit' : limit + 1 }) res = [] for row in reversed (res1.fetchall ()): res.append (row[:3]) for row in res2: res.append (row[:3]) return make_headwords_response (res, limit) def make_article (row, lang = LANG): """ row is: article_id """ return { 'articles_url' : 'v1/articles/%d' % row[0], } def make_articles_response (res, limit = MAX_RESULTS, lang = LANG): return make_json_response ({ 'limit' : limit, 'data' : [ make_article (row, lang) for row in res ] }) @app.endpoint ('articles') def articles (): """ Endpoint
oint ('articles_id') def articles_id (_id = None): """ Endpoint. Retrieve an article. """ with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT no FROM article WHERE no = :id """, { 'id' : _id }) return make_articles_response (res) @app.endpoint ('articles_id_formats') def articles_id_formats (_id): """ Endpoint. Retrieve an article's available formats. """ canonical_url = app.config['APPLICATION_MAIN_URL'] + 'search?article_id=' with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT webtext FROM article WHERE no=:no """, { 'no' : _id }) return make_json_response ([ { 'mimetype' : 'text/x-html-literal', 'lang' : LANG, 'embeddable' : True, 'text' : normalize_iso ('<div>%s</div>' % res.fetchone ()[0]), }, { 'mimetype' : 'text/html', 'lang' : LANG, 'canonical' : True, 'urls' : [ canonical_url + str (_id) ], } ]) @app.endpoint ('articles_id_headwords') def articles_id_headwords (_id): """ Endpoint. Retrieve the list of headwords for an article. """ offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT id, webkeyword, no FROM keyword WHERE no = :id ORDER BY sortkeyword LIMIT :limit OFFSET :offset """, { 'id' : _id, 'offset' : offset, 'limit' : limit }) return make_headwords_response (res, limit) # # main # parser = argparse.ArgumentParser (description='A simple API for dictionares') parser.add_argument ('-v', '--verbose', dest='verbose', action='count', help='increase output verbosity', default=0) parser.add_argument ('-c', '--config-file', dest='config_file', action='append', required=True, metavar='CONFIG_FILE', help="a config file (repeat for more than one, later ones overwrite)") args = parser.parse_args () args.start_time = datetime.datetime.now () LOG_LEVELS = { 0: logging.CRITICAL, 1: logging.ERROR, 2: logging.WARN, 3: logging.INFO, 4: logging.DEBUG } args.log_level = LOG_LEVELS.get (args.verbose + 1, logging.CRITICAL) logging.basicConfig (format = '%(asctime)s - %(levelname)s - %(message)s') logging.getLogger ('sqlalchemy.engine').setLevel (args.log_level) logging.getLogger ('server').setLevel (args.log_level) logger = logging.getLogger ('server') for config_file in args.config_file: app.config.from_pyfile (config_file) app.config.dba = MySQLEngine (**app.config) app.config['SQLALCHEMY_DATABASE_URI'] = app.config.dba.url app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False app.config['server_start_time'] = str (int (args.start_time.timestamp ())) app.url_map = Map ([ Rule ('/v1', endpoint = 'info'), Rule ('/v1/headwords', endpoint = 'headwords'), Rule ('/v1/headwords/<int:_id>', endpoint = 'headwords_id'), Rule ('/v1/headwords/<int:_id>/context', endpoint = 'headwords_id_context'), Rule ('/v1/articles', endpoint = 'articles'), Rule ('/v1/articles/<int:_id>', endpoint = 'articles_id'), Rule ('/v1/articles/<int:_id>/formats', endpoint = 'articles_id_formats'), Rule ('/v1/articles/<int:_id>/headwords', endpoint = 'articles_id_headwords'), ]) dba = flask_sqlalchemy.SQLAlchemy () dba.init_app (app) port = app.config.get ('APPLICATION_PORT', 5000) path = app.config.get ('APPLICATION_ROOT', '') logger.log (logging.INFO, "'{name}' is now served from localhost:{port}{path}/v1".format ( name = app.config['APPLICATION_NAME'], port = port, path = path)) if __name__ == "__main__": from werkzeug.serving import run_simple if path == '': run_simple ('localhost', port, app) else: 
from werkzeug.wsgi import DispatcherMiddleware application = DispatcherMiddleware (flask.Flask ('dummy_app_for_root'), { app.config['APPLICATION_ROOT'] : app, }) run_simple ('localhost', port, application)
. Retrieve a list of articles. """ offset = int (arg ('offset', '0', re_integer_arg)) limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS) with current_app.config.dba.engine.begin () as conn: res = execute (conn, r""" SELECT no FROM article ORDER BY no LIMIT :limit OFFSET :offset """, { 'offset' : offset, 'limit' : limit }) return make_articles_response (res, limit) @app.endp
identifier_body
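A short aside on the search endpoint above: before the LIKE query runs, the user's wildcards are translated into SQL LIKE syntax and stray pattern characters are stripped. A minimal standalone sketch of that translation (the helper name `to_like_pattern` is ours, not part of the original server):

def to_like_pattern (q):
    """Translate user wildcards into a SQL LIKE pattern, as `headwords` does."""
    q = q.replace ('-', '')   # hyphens are ignored in headword lookups
    q = q.replace ('%', '')   # strip raw LIKE wildcards from user input
    q = q.replace ('?', '_')  # '?' matches exactly one character
    q = q.replace ('*', '%')  # '*' matches any run of characters
    return q

assert to_like_pattern ('head*word?') == 'head%word_'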
Train.py
# -*- coding: utf-8 -*- """ # Part1: Training """ # Python 2/3 compatibility: __future__ imports must precede all other statements from __future__ import print_function, division import os import numpy as np import pandas as pd # Commented out IPython magic to ensure Python compatibility. # %matplotlib inline import itertools import time import matplotlib.pyplot as plt # Colors from Colorbrewer Paired_12 colors = [[31, 120, 180], [51, 160, 44]] colors = [(r / 255, g / 255, b / 255) for (r, g, b) in colors] def plot_losses(train_history, val_history): x = np.arange(1, len(train_history) + 1) plt.figure(figsize=(8, 6)) plt.plot(x, train_history, color=colors[0], label="Training loss", linewidth=2) plt.plot(x, val_history, color=colors[1], label="Validation loss", linewidth=2) plt.xlabel('Epoch') plt.ylabel('Loss') plt.legend(loc='upper right') plt.title("Evolution of the training and validation loss") plt.show() data = pd.read_csv(os.getcwd() +"/cleaned_train.csv", sep=",") Ids = data.values[:,1] classes = np.unique(Ids) len(classes) #!/usr/bin/env python3 import csv import os from PIL import Image import torch from torch.utils.data import Dataset def image_loader(path): with open(path, 'rb') as f: img = Image.open(f) return img.convert('RGB') def load_labels(path): with open(path, newline='') as csvfile:
headers = next(reader) return [{ headers[column_index]: row[column_index] for column_index in range(len(row)) } for row in reader] class CustomDataset(Dataset): def __init__(self, root, split='train', incr=None, transform=None): self.root = root = os.path.expanduser(root) category = 'id' self.category = category self.split = split self.incr = incr if incr is None: labels = load_labels(os.path.join(root, f'cleaned_{split}.csv')) else: labels = load_labels(os.path.join(root, split+str(incr)+'.csv')) self.entries = [ (label_entry['Image'], int(label_entry[category])) for label_entry in labels if os.path.exists( os.path.join(self.root, f'{split}/{split}', label_entry['Image'])) ] self.transform = transform def __len__(self): return len(self.entries) def __getitem__(self, index): image_filename, label = self.entries[index] image_filepath = os.path.join(self.root, f'{self.split}/{self.split}', image_filename) image = image_loader(image_filepath) if self.transform is not None: image = self.transform(image) return image, label """#### Data Augmentation & Data Normalization""" import torch import torch.nn as nn import torch.nn.init as init import torch.nn.functional as F import torchvision import torchvision.transforms as transforms num_workers = 2 # Data augmentation and normalization for training # Just normalization for validation transforms_train = transforms.Compose([ transforms.Resize([224,224]), # Resizing the image transforms.RandomHorizontalFlip(), # Flip the data horizontally transforms.RandomVerticalFlip(), # Flip the data vertically transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) transforms_val = transforms.Compose([ transforms.Resize([224,224]), transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) transforms_oversampling = transforms.Compose([ transforms.Resize([230,230]), transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.RandomRotation(15), transforms.CenterCrop(224), transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)) ]) train_set = CustomDataset(root = os.getcwd()+'/', transform=transforms_train) for i in range(1,10): for j in range(i,10): train_set += CustomDataset(root = os.getcwd()+'/', incr=i, transform=transforms_oversampling) train_loader = torch.utils.data.DataLoader(train_set, batch_size=128, shuffle=True, num_workers=num_workers) val_set = CustomDataset(root = os.getcwd()+'/', transform=transforms_val) val_loader = torch.utils.data.DataLoader(val_set, batch_size=128, shuffle=True, num_workers=num_workers) len(train_set) len(classes) def imshow(inp): """Imshow for Tensor.""" inp = inp.numpy().transpose((1, 2, 0)) mean = np.array([0.485, 0.456, 0.406]) std = np.array([0.229, 0.224, 0.225]) inp = std * inp + mean inp = np.clip(inp, 0, 1) plt.imshow(inp) plt.pause(0.001) # pause a bit so that plots are updated # Get a batch of training data inputs, labels = next(iter(train_loader)) # Make a grid from batch out = torchvision.utils.make_grid(inputs[:4]) # print labels print(' '.join('{:>10}'.format(classes[labels[j]]) for j in range(4))) # image show imshow(out) """#### Models""" def _weights_init(m): classname = m.__class__.__name__ if isinstance(m, nn.Linear) or isinstance(m, nn.Conv2d): init.kaiming_normal_(m.weight) class LambdaLayer(nn.Module): def __init__(self, lambd): 
super(LambdaLayer, self).__init__() self.lambd = lambd def forward(self, x): return self.lambd(x) class BasicBlock(nn.Module): expansion = 1 def __init__(self, in_planes, planes, stride=1, option='A'): super(BasicBlock, self).__init__() self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(planes) self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False) self.bn2 = nn.BatchNorm2d(planes) self.shortcut = nn.Sequential() if stride != 1 or in_planes != planes: if option == 'A': """ For CIFAR10 ResNet paper uses option A. """ self.shortcut = LambdaLayer(lambda x: F.pad(x[:, :, ::2, ::2], (0, 0, 0, 0, planes//4, planes//4), "constant", 0)) elif option == 'B': self.shortcut = nn.Sequential( nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False), nn.BatchNorm2d(self.expansion * planes) ) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out += self.shortcut(x) out = F.relu(out) return out class ResNet(nn.Module): def __init__(self, block, num_blocks, num_classes=4246): super(ResNet, self).__init__() self.in_planes = 16 self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(16) self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2) self.linear = nn.Linear(64, num_classes) self.apply(_weights_init) def _make_layer(self, block, planes, num_blocks, stride): strides = [stride] + [1]*(num_blocks-1) layers = [] for stride in strides: layers.append(block(self.in_planes, planes, stride)) self.in_planes = planes * block.expansion return nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out = self.layer2(out) out = self.layer3(out) out = F.avg_pool2d(out, out.size()[3]) out = out.view(out.size(0), -1) out = self.linear(out) return out import torchvision.models as models device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # net = models.resnet152(pretrained=False) #Method 2 # net = models.resnet50(pretrained=False) #Method 3 net = models.resnet50(pretrained=True) #Method 4, best one num_ftrs = net.fc.in_features net.fc = nn.Linear(num_ftrs, 4246) # net = ResNet(BasicBlock, [3, 3, 3]) #Method 1 net = net.to(device) """#### Loss function & Optimizer""" import torch.optim as optim def createLossAndOptimizer(net, learning_rate): # it combines softmax with negative log likelihood loss criterion = nn.CrossEntropyLoss() optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9, weight_decay=1e-4) #optimizer = optim.Adam(net.parameters(), lr=learning_rate) return criterion, optimizer """#### Training Model Batch size = 128, number of epochs = 20, starting learning rate = 0.01 Save the trained model """ def train(net, batch_size, n_epochs, learning_rate): """ Train a neural network and print statistics of the training :param net: (PyTorch Neural Network) :param batch_size: (int) :param n_epochs: (int) Number of iterations on the training set :param learning_rate: (float) learning rate used by the optimizer """ print("===== HYPERPARAMETERS =====") print("batch_size=", batch_size) print("n_epochs=", n_epochs) print("Starting learning_rate=", learning_rate) print("=" * 30) n_minibatches = len(train_loader) criterion, optimizer = createLossAndOptimizer(net, learning_rate) # 
Init variables used for plotting the loss train_history = [] val_history = [] training_start_time = time.time() best_error = np.inf best_model_path = os.getcwd()+"/best_model.pth" # Move model to gpu if possible net = net.to(device) scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[17,19], gamma=0.1) # Decay LR by a factor of 0.1 after epochs 17 and 19 for epoch in range(n_epochs): # loop over the dataset multiple times running_loss = 0.0 print_every = n_minibatches // 10 start_time = time.time() total_train_loss = 0 for i, (inputs, labels) in enumerate(train_loader): # Move tensors to correct device inputs, labels = inputs.to(device), labels.to(device) # zero the parameter gradients optimizer.zero_grad() # forward + backward + optimize with torch.set_grad_enabled(True): outputs = net(inputs) loss = criterion(outputs, labels) loss.backward() optimizer.step() # print statistics running_loss += loss.item() total_train_loss += loss.item() # print every 10th of epoch if (i + 1) % (print_every + 1) == 0: print("Epoch {}, {:d}% \t train_loss: {:.2f} took: {:.2f}s".format( epoch + 1, int(100 * (i + 1) / n_minibatches), running_loss / print_every, time.time() - start_time)) running_loss = 0.0 start_time = time.time() train_history.append(total_train_loss / len(train_loader)) total_val_loss = 0 # Do a pass on the validation set # We don't need to compute gradient, # we save memory and computation using torch.no_grad() with torch.no_grad(): for inputs, labels in val_loader: # Move tensors to correct device inputs, labels = inputs.to(device), labels.to(device) # Forward pass predictions = net(inputs) val_loss = criterion(predictions, labels) total_val_loss += val_loss.item() val_history.append(total_val_loss / len(val_loader)) # Save model that performs best on validation set if total_val_loss < best_error: best_error = total_val_loss torch.save(net.state_dict(), best_model_path) print("Validation loss = {:.2f}".format(total_val_loss / len(val_loader))) scheduler.step() print("Training Finished, took {:.2f}s".format(time.time() - training_start_time)) # Load best model net.load_state_dict(torch.load(best_model_path)) return train_history, val_history # train the model train_history, val_history = train(net, batch_size=128, n_epochs=20, learning_rate=0.01) plot_losses(train_history, val_history) """See next part for testing and prediction """
reader = csv.reader(csvfile)
random_line_split
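Aside: `load_labels` above builds one dict per CSV row by zipping the header with each row by hand. The standard library's csv.DictReader provides the same mapping; a minimal equivalent sketch (the name `load_labels_dictreader` is ours; behaviour differs only on ragged rows, which DictReader pads with None):

import csv

def load_labels_dictreader (path):
    # one dict per data row, keyed by the header row
    with open (path, newline='') as csvfile:
        return [dict (row) for row in csv.DictReader (csvfile)]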
Train.py
# -*- coding: utf-8 -*- """ # Part1: Training """ import os import numpy as np import pandas as pd # Commented out IPython magic to ensure Python compatibility. # %matplotlib inline # Python 2/3 compatibility from __future__ import print_function, division import itertools import time import numpy as np import matplotlib.pyplot as plt # Colors from Colorbrewer Paired_12 colors = [[31, 120, 180], [51, 160, 44]] colors = [(r / 255, g / 255, b / 255) for (r, g, b) in colors] def plot_losses(train_history, val_history): x = np.arange(1, len(train_history) + 1) plt.figure(figsize=(8, 6)) plt.plot(x, train_history, color=colors[0], label="Training loss", linewidth=2) plt.plot(x, val_history, color=colors[1], label="Validation loss", linewidth=2) plt.xlabel('Epoch') plt.ylabel('Loss') plt.legend(loc='upper right') plt.title("Evolution of the training and validation loss") plt.show() data = pd.read_csv(os.getcwd() +"/cleaned_train.csv", sep=",") Ids = data.values[:,1] classes = np.unique(Ids) len(classes) #!/usr/bin/env python3 import csv import os from PIL import Image import torch from torch.utils.data import Dataset def image_loader(path): with open(path, 'rb') as f: img = Image.open(f) return img.convert('RGB') def load_labels(path): with open(path, newline='') as csvfile: reader = csv.reader(csvfile) headers = next(reader) return [{ headers[column_index]: row[column_index] for column_index in range(len(row)) } for row in reader] class CustomDataset(Dataset): def __init__(self, root, split='train', incr=None, transform=None): self.root = root = os.path.expanduser(root) category = 'id' self.category = category self.split = split self.incr = incr if incr is None: labels = load_labels(os.path.join(root, f'cleaned_{split}.csv')) else: labels = load_labels(os.path.join(root, split+str(incr)+'.csv')) self.entries = [ (label_entry['Image'], int(label_entry[category])) for label_entry in labels if os.path.exists( os.path.join(self.root, f'{split}/{split}', label_entry['Image'])) ] self.transform = transform def __len__(self): return len(self.entries) def __getitem__(self, index): image_filename, label = self.entries[index] image_filepath = os.path.join(self.root, f'{self.split}/{self.split}', image_filename) image = image_loader(image_filepath) if self.transform is not None: image = self.transform(image) return image, label """#### Data Augmentation & Data Normalization""" import torch import torch.nn as nn import torch.nn.init as init import torch.nn.functional as F import torchvision import torchvision.transforms as transforms num_workers = 2 # Data augmentation and normalization for training # Just normalization for validation transforms_train = transforms.Compose([ transforms.Resize([224,224]), # Resizing the image transforms.RandomHorizontalFlip(), # Flip the data horizontally transforms.RandomVerticalFlip(), # Flip the data vertically transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) transforms_val = transforms.Compose([ transforms.Resize([224,224]), transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) transforms_oversampling = transforms.Compose([ transforms.Resize([230,230]), transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.RandomRotation(15), transforms.CenterCrop(224), transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), 
std=(0.229, 0.224, 0.225)) ]) train_set = CustomDataset(root = os.getcwd()+'/', transform=transforms_train) for i in range(1,10): for j in range(i,10): train_set += CustomDataset(root = os.getcwd()+'/', incr=i, transform=transforms_oversampling) train_loader = torch.utils.data.DataLoader(train_set, batch_size=128, shuffle=True, num_workers=num_workers) val_set = CustomDataset(root = os.getcwd()+'/', transform=transforms_val) val_loader = torch.utils.data.DataLoader(val_set, batch_size=128, shuffle=True, num_workers=num_workers) len(train_set) len(classes) def imshow(inp): """Imshow for Tensor.""" inp = inp.numpy().transpose((1, 2, 0)) mean = np.array([0.485, 0.456, 0.406]) std = np.array([0.229, 0.224, 0.225]) inp = std * inp + mean inp = np.clip(inp, 0, 1) plt.imshow(inp) plt.pause(0.001) # pause a bit so that plots are updated # Get a batch of training data inputs, labels = next(iter(train_loader)) # Make a grid from batch out = torchvision.utils.make_grid(inputs[:4]) # print labels print(' '.join('{:>10}'.format(classes[labels[j]]) for j in range(4))) # image show imshow(out) """#### Models""" def _weights_init(m): classname = m.__class__.__name__ if isinstance(m, nn.Linear) or isinstance(m, nn.Conv2d): init.kaiming_normal_(m.weight) class LambdaLayer(nn.Module): def __init__(self, lambd): super(LambdaLayer, self).__init__() self.lambd = lambd def forward(self, x): return self.lambd(x) class BasicBlock(nn.Module): expansion = 1 def __init__(self, in_planes, planes, stride=1, option='A'): super(BasicBlock, self).__init__() self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(planes) self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False) self.bn2 = nn.BatchNorm2d(planes) self.shortcut = nn.Sequential() if stride != 1 or in_planes != planes:
def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out += self.shortcut(x) out = F.relu(out) return out class ResNet(nn.Module): def __init__(self, block, num_blocks, num_classes=4246): super(ResNet, self).__init__() self.in_planes = 16 self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(16) self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2) self.linear = nn.Linear(64, num_classes) self.apply(_weights_init) def _make_layer(self, block, planes, num_blocks, stride): strides = [stride] + [1]*(num_blocks-1) layers = [] for stride in strides: layers.append(block(self.in_planes, planes, stride)) self.in_planes = planes * block.expansion return nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out = self.layer2(out) out = self.layer3(out) out = F.avg_pool2d(out, out.size()[3]) out = out.view(out.size(0), -1) out = self.linear(out) return out import torchvision.models as models device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # net = models.resnet152(pretrained=False) #Method 2 # net = models.resnet50(pretrained=False) #Method 3 net = models.resnet50(pretrained=True) #Method 4, best one num_ftrs = net.fc.in_features net.fc = nn.Linear(num_ftrs, 4246) # net = ResNet(BasicBlock, [3, 3, 3]) #Method 1 net = net.to(device) """#### Loss function & Optimizer""" import torch.optim as optim def createLossAndOptimizer(net, learning_rate): # it combines softmax with negative log likelihood loss criterion = nn.CrossEntropyLoss() optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9, weight_decay=1e-4) #optimizer = optim.Adam(net.parameters(), lr=learning_rate) return criterion, optimizer """#### Training Model Batch size = 128, number of epochs = 20, starting learning rate = 0.01 Save the trained model """ def train(net, batch_size, n_epochs, learning_rate): """ Train a neural network and print statistics of the training :param net: (PyTorch Neural Network) :param batch_size: (int) :param n_epochs: (int) Number of iterations on the training set :param learning_rate: (float) learning rate used by the optimizer """ print("===== HYPERPARAMETERS =====") print("batch_size=", batch_size) print("n_epochs=", n_epochs) print("Starting learning_rate=", learning_rate) print("=" * 30) n_minibatches = len(train_loader) criterion, optimizer = createLossAndOptimizer(net, learning_rate) # Init variables used for plotting the loss train_history = [] val_history = [] training_start_time = time.time() best_error = np.inf best_model_path = os.getcwd()+"/best_model.pth" # Move model to gpu if possible net = net.to(device) scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[17,19], gamma=0.1) # Decay LR by a factor of 0.1 after epoch 3 and 7 for epoch in range(n_epochs): # loop over the dataset multiple times running_loss = 0.0 print_every = n_minibatches // 10 start_time = time.time() total_train_loss = 0 for i, (inputs, labels) in enumerate(train_loader): # Move tensors to correct device inputs, labels = inputs.to(device), labels.to(device) # zero the parameter gradients optimizer.zero_grad() # forward + backward + optimize with torch.set_grad_enabled(True): outputs = net(inputs) loss = criterion(outputs, labels) loss.backward() optimizer.step() # print statistics running_loss += 
loss.item() total_train_loss += loss.item() # print every 10th of epoch if (i + 1) % (print_every + 1) == 0: print("Epoch {}, {:d}% \t train_loss: {:.2f} took: {:.2f}s".format( epoch + 1, int(100 * (i + 1) / n_minibatches), running_loss / print_every, time.time() - start_time)) running_loss = 0.0 start_time = time.time() train_history.append(total_train_loss / len(train_loader)) total_val_loss = 0 # Do a pass on the validation set # We don't need to compute gradient, # we save memory and computation using torch.no_grad() with torch.no_grad(): for inputs, labels in val_loader: # Move tensors to correct device inputs, labels = inputs.to(device), labels.to(device) # Forward pass predictions = net(inputs) val_loss = criterion(predictions, labels) total_val_loss += val_loss.item() val_history.append(total_val_loss / len(val_loader)) # Save model that performs best on validation set if total_val_loss < best_error: best_error = total_val_loss torch.save(net.state_dict(), best_model_path) print("Validation loss = {:.2f}".format(total_val_loss / len(val_loader))) scheduler.step() print("Training Finished, took {:.2f}s".format(time.time() - training_start_time)) # Load best model net.load_state_dict(torch.load(best_model_path)) return train_history, val_history # train the model train_history, val_history = train(net, batch_size=128, n_epochs=20, learning_rate=0.01) plot_losses(train_history, val_history) """See next part for testing and prediction """
if option == 'A': """ For CIFAR10 ResNet paper uses option A. """ self.shortcut = LambdaLayer(lambda x: F.pad(x[:, :, ::2, ::2], (0, 0, 0, 0, planes//4, planes//4), "constant", 0)) elif option == 'B': self.shortcut = nn.Sequential( nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False), nn.BatchNorm2d(self.expansion * planes) )
conditional_block
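Aside on the option 'A' shortcut above: `x[:, :, ::2, ::2]` halves the spatial dimensions and `F.pad` zero-pads the channel axis, so the identity branch matches the strided main branch without adding parameters. A small shape check, assuming `in_planes=16` and `planes=32`:

import torch
import torch.nn.functional as F

x = torch.randn (4, 16, 32, 32)                  # (N, in_planes, H, W)
# the pad tuple is (W_left, W_right, H_top, H_bottom, C_front, C_back)
y = F.pad (x[:, :, ::2, ::2], (0, 0, 0, 0, 32 // 4, 32 // 4), "constant", 0)
assert y.shape == (4, 32, 16, 16)                # 16 + 8 + 8 channels, spatial halved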
Train.py
# -*- coding: utf-8 -*- """ # Part1: Training """ import os import numpy as np import pandas as pd # Commented out IPython magic to ensure Python compatibility. # %matplotlib inline # Python 2/3 compatibility from __future__ import print_function, division import itertools import time import numpy as np import matplotlib.pyplot as plt # Colors from Colorbrewer Paired_12 colors = [[31, 120, 180], [51, 160, 44]] colors = [(r / 255, g / 255, b / 255) for (r, g, b) in colors] def plot_losses(train_history, val_history): x = np.arange(1, len(train_history) + 1) plt.figure(figsize=(8, 6)) plt.plot(x, train_history, color=colors[0], label="Training loss", linewidth=2) plt.plot(x, val_history, color=colors[1], label="Validation loss", linewidth=2) plt.xlabel('Epoch') plt.ylabel('Loss') plt.legend(loc='upper right') plt.title("Evolution of the training and validation loss") plt.show() data = pd.read_csv(os.getcwd() +"/cleaned_train.csv", sep=",") Ids = data.values[:,1] classes = np.unique(Ids) len(classes) #!/usr/bin/env python3 import csv import os from PIL import Image import torch from torch.utils.data import Dataset def image_loader(path): with open(path, 'rb') as f: img = Image.open(f) return img.convert('RGB') def load_labels(path): with open(path, newline='') as csvfile: reader = csv.reader(csvfile) headers = next(reader) return [{ headers[column_index]: row[column_index] for column_index in range(len(row)) } for row in reader] class CustomDataset(Dataset): def __init__(self, root, split='train', incr=None, transform=None): self.root = root = os.path.expanduser(root) category = 'id' self.category = category self.split = split self.incr = incr if incr is None: labels = load_labels(os.path.join(root, f'cleaned_{split}.csv')) else: labels = load_labels(os.path.join(root, split+str(incr)+'.csv')) self.entries = [ (label_entry['Image'], int(label_entry[category])) for label_entry in labels if os.path.exists( os.path.join(self.root, f'{split}/{split}', label_entry['Image'])) ] self.transform = transform def __len__(self): return len(self.entries) def __getitem__(self, index): image_filename, label = self.entries[index] image_filepath = os.path.join(self.root, f'{self.split}/{self.split}', image_filename) image = image_loader(image_filepath) if self.transform is not None: image = self.transform(image) return image, label """#### Data Augmentation & Data Normalization""" import torch import torch.nn as nn import torch.nn.init as init import torch.nn.functional as F import torchvision import torchvision.transforms as transforms num_workers = 2 # Data augmentation and normalization for training # Just normalization for validation transforms_train = transforms.Compose([ transforms.Resize([224,224]), # Resizing the image transforms.RandomHorizontalFlip(), # Flip the data horizontally transforms.RandomVerticalFlip(), # Flip the data vertically transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) transforms_val = transforms.Compose([ transforms.Resize([224,224]), transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) transforms_oversampling = transforms.Compose([ transforms.Resize([230,230]), transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.RandomRotation(15), transforms.CenterCrop(224), transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), 
std=(0.229, 0.224, 0.225)) ]) train_set = CustomDataset(root = os.getcwd()+'/', transform=transforms_train) for i in range(1,10): for j in range(i,10): train_set += CustomDataset(root = os.getcwd()+'/', incr=i, transform=transforms_oversampling) train_loader = torch.utils.data.DataLoader(train_set, batch_size=128, shuffle=True, num_workers=num_workers) val_set = CustomDataset(root = os.getcwd()+'/', transform=transforms_val) val_loader = torch.utils.data.DataLoader(val_set, batch_size=128, shuffle=True, num_workers=num_workers) len(train_set) len(classes) def imshow(inp): """Imshow for Tensor.""" inp = inp.numpy().transpose((1, 2, 0)) mean = np.array([0.485, 0.456, 0.406]) std = np.array([0.229, 0.224, 0.225]) inp = std * inp + mean inp = np.clip(inp, 0, 1) plt.imshow(inp) plt.pause(0.001) # pause a bit so that plots are updated # Get a batch of training data inputs, labels = next(iter(train_loader)) # Make a grid from batch out = torchvision.utils.make_grid(inputs[:4]) # print labels print(' '.join('{:>10}'.format(classes[labels[j]]) for j in range(4))) # image show imshow(out) """#### Models""" def _weights_init(m): classname = m.__class__.__name__ if isinstance(m, nn.Linear) or isinstance(m, nn.Conv2d): init.kaiming_normal_(m.weight) class LambdaLayer(nn.Module): def __init__(self, lambd): super(LambdaLayer, self).__init__() self.lambd = lambd def forward(self, x): return self.lambd(x) class BasicBlock(nn.Module): expansion = 1 def __init__(self, in_planes, planes, stride=1, option='A'): super(BasicBlock, self).__init__() self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(planes) self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False) self.bn2 = nn.BatchNorm2d(planes) self.shortcut = nn.Sequential() if stride != 1 or in_planes != planes: if option == 'A': """ For CIFAR10 ResNet paper uses option A. 
""" self.shortcut = LambdaLayer(lambda x: F.pad(x[:, :, ::2, ::2], (0, 0, 0, 0, planes//4, planes//4), "constant", 0)) elif option == 'B': self.shortcut = nn.Sequential( nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False), nn.BatchNorm2d(self.expansion * planes) ) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out += self.shortcut(x) out = F.relu(out) return out class ResNet(nn.Module): def __init__(self, block, num_blocks, num_classes=4246): super(ResNet, self).__init__() self.in_planes = 16 self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(16) self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2) self.linear = nn.Linear(64, num_classes) self.apply(_weights_init) def _make_layer(self, block, planes, num_blocks, stride): strides = [stride] + [1]*(num_blocks-1) layers = [] for stride in strides: layers.append(block(self.in_planes, planes, stride)) self.in_planes = planes * block.expansion return nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out = self.layer2(out) out = self.layer3(out) out = F.avg_pool2d(out, out.size()[3]) out = out.view(out.size(0), -1) out = self.linear(out) return out import torchvision.models as models device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # net = models.resnet152(pretrained=False) #Method 2 # net = models.resnet50(pretrained=False) #Method 3 net = models.resnet50(pretrained=True) #Method 4, best one num_ftrs = net.fc.in_features net.fc = nn.Linear(num_ftrs, 4246) # net = ResNet(BasicBlock, [3, 3, 3]) #Method 1 net = net.to(device) """#### Loss function & Optimizer""" import torch.optim as optim def createLossAndOptimizer(net, learning_rate): # it combines softmax with negative log likelihood loss criterion = nn.CrossEntropyLoss() optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9, weight_decay=1e-4) #optimizer = optim.Adam(net.parameters(), lr=learning_rate) return criterion, optimizer """#### Training Model Batch size = 128, number of epochs = 20, starting learning rate = 0.01 Save the trained model """ def train(net, batch_size, n_epochs, learning_rate):
# train the model train_history, val_history = train(net, batch_size=128, n_epochs=20, learning_rate=0.01) plot_losses(train_history, val_history) """See next part for testing and prediction """
""" Train a neural network and print statistics of the training :param net: (PyTorch Neural Network) :param batch_size: (int) :param n_epochs: (int) Number of iterations on the training set :param learning_rate: (float) learning rate used by the optimizer """ print("===== HYPERPARAMETERS =====") print("batch_size=", batch_size) print("n_epochs=", n_epochs) print("Starting learning_rate=", learning_rate) print("=" * 30) n_minibatches = len(train_loader) criterion, optimizer = createLossAndOptimizer(net, learning_rate) # Init variables used for plotting the loss train_history = [] val_history = [] training_start_time = time.time() best_error = np.inf best_model_path = os.getcwd()+"/best_model.pth" # Move model to gpu if possible net = net.to(device) scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[17,19], gamma=0.1) # Decay LR by a factor of 0.1 after epoch 3 and 7 for epoch in range(n_epochs): # loop over the dataset multiple times running_loss = 0.0 print_every = n_minibatches // 10 start_time = time.time() total_train_loss = 0 for i, (inputs, labels) in enumerate(train_loader): # Move tensors to correct device inputs, labels = inputs.to(device), labels.to(device) # zero the parameter gradients optimizer.zero_grad() # forward + backward + optimize with torch.set_grad_enabled(True): outputs = net(inputs) loss = criterion(outputs, labels) loss.backward() optimizer.step() # print statistics running_loss += loss.item() total_train_loss += loss.item() # print every 10th of epoch if (i + 1) % (print_every + 1) == 0: print("Epoch {}, {:d}% \t train_loss: {:.2f} took: {:.2f}s".format( epoch + 1, int(100 * (i + 1) / n_minibatches), running_loss / print_every, time.time() - start_time)) running_loss = 0.0 start_time = time.time() train_history.append(total_train_loss / len(train_loader)) total_val_loss = 0 # Do a pass on the validation set # We don't need to compute gradient, # we save memory and computation using torch.no_grad() with torch.no_grad(): for inputs, labels in val_loader: # Move tensors to correct device inputs, labels = inputs.to(device), labels.to(device) # Forward pass predictions = net(inputs) val_loss = criterion(predictions, labels) total_val_loss += val_loss.item() val_history.append(total_val_loss / len(val_loader)) # Save model that performs best on validation set if total_val_loss < best_error: best_error = total_val_loss torch.save(net.state_dict(), best_model_path) print("Validation loss = {:.2f}".format(total_val_loss / len(val_loader))) scheduler.step() print("Training Finished, took {:.2f}s".format(time.time() - training_start_time)) # Load best model net.load_state_dict(torch.load(best_model_path)) return train_history, val_history
identifier_body
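Aside: the training loop above calls `scheduler.step()` once per epoch, so with `milestones=[17, 19]` and `gamma=0.1` the learning rate drops after epochs 17 and 19. A tiny sketch that prints the resulting schedule, assuming the same base rate of 0.01:

import torch
import torch.optim as optim

params = [torch.nn.Parameter (torch.zeros (1))]
opt = optim.SGD (params, lr=0.01)
sched = optim.lr_scheduler.MultiStepLR (opt, milestones=[17, 19], gamma=0.1)
for epoch in range (1, 21):
    print (epoch, opt.param_groups[0]['lr'])     # 0.01 through epoch 17,
    sched.step ()                                # 0.001 for 18-19, then 0.0001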
Train.py
# -*- coding: utf-8 -*- """ # Part1: Training """ import os import numpy as np import pandas as pd # Commented out IPython magic to ensure Python compatibility. # %matplotlib inline # Python 2/3 compatibility from __future__ import print_function, division import itertools import time import numpy as np import matplotlib.pyplot as plt # Colors from Colorbrewer Paired_12 colors = [[31, 120, 180], [51, 160, 44]] colors = [(r / 255, g / 255, b / 255) for (r, g, b) in colors] def plot_losses(train_history, val_history): x = np.arange(1, len(train_history) + 1) plt.figure(figsize=(8, 6)) plt.plot(x, train_history, color=colors[0], label="Training loss", linewidth=2) plt.plot(x, val_history, color=colors[1], label="Validation loss", linewidth=2) plt.xlabel('Epoch') plt.ylabel('Loss') plt.legend(loc='upper right') plt.title("Evolution of the training and validation loss") plt.show() data = pd.read_csv(os.getcwd() +"/cleaned_train.csv", sep=",") Ids = data.values[:,1] classes = np.unique(Ids) len(classes) #!/usr/bin/env python3 import csv import os from PIL import Image import torch from torch.utils.data import Dataset def image_loader(path): with open(path, 'rb') as f: img = Image.open(f) return img.convert('RGB') def load_labels(path): with open(path, newline='') as csvfile: reader = csv.reader(csvfile) headers = next(reader) return [{ headers[column_index]: row[column_index] for column_index in range(len(row)) } for row in reader] class CustomDataset(Dataset): def __init__(self, root, split='train', incr=None, transform=None): self.root = root = os.path.expanduser(root) category = 'id' self.category = category self.split = split self.incr = incr if incr is None: labels = load_labels(os.path.join(root, f'cleaned_{split}.csv')) else: labels = load_labels(os.path.join(root, split+str(incr)+'.csv')) self.entries = [ (label_entry['Image'], int(label_entry[category])) for label_entry in labels if os.path.exists( os.path.join(self.root, f'{split}/{split}', label_entry['Image'])) ] self.transform = transform def __len__(self): return len(self.entries) def __getitem__(self, index): image_filename, label = self.entries[index] image_filepath = os.path.join(self.root, f'{self.split}/{self.split}', image_filename) image = image_loader(image_filepath) if self.transform is not None: image = self.transform(image) return image, label """#### Data Augmentation & Data Normalization""" import torch import torch.nn as nn import torch.nn.init as init import torch.nn.functional as F import torchvision import torchvision.transforms as transforms num_workers = 2 # Data augmentation and normalization for training # Just normalization for validation transforms_train = transforms.Compose([ transforms.Resize([224,224]), # Resizing the image transforms.RandomHorizontalFlip(), # Flip the data horizontally transforms.RandomVerticalFlip(), # Flip the data vertically transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) transforms_val = transforms.Compose([ transforms.Resize([224,224]), transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) transforms_oversampling = transforms.Compose([ transforms.Resize([230,230]), transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.RandomRotation(15), transforms.CenterCrop(224), transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), 
std=(0.229, 0.224, 0.225)) ]) train_set = CustomDataset(root = os.getcwd()+'/', transform=transforms_train) for i in range(1,10): for j in range(i,10): train_set += CustomDataset(root = os.getcwd()+'/', incr=i, transform=transforms_oversampling) train_loader = torch.utils.data.DataLoader(train_set, batch_size=128, shuffle=True, num_workers=num_workers) val_set = CustomDataset(root = os.getcwd()+'/', transform=transforms_val) val_loader = torch.utils.data.DataLoader(val_set, batch_size=128, shuffle=True, num_workers=num_workers) len(train_set) len(classes) def imshow(inp): """Imshow for Tensor.""" inp = inp.numpy().transpose((1, 2, 0)) mean = np.array([0.485, 0.456, 0.406]) std = np.array([0.229, 0.224, 0.225]) inp = std * inp + mean inp = np.clip(inp, 0, 1) plt.imshow(inp) plt.pause(0.001) # pause a bit so that plots are updated # Get a batch of training data inputs, labels = next(iter(train_loader)) # Make a grid from batch out = torchvision.utils.make_grid(inputs[:4]) # print labels print(' '.join('{:>10}'.format(classes[labels[j]]) for j in range(4))) # image show imshow(out) """#### Models""" def _weights_init(m): classname = m.__class__.__name__ if isinstance(m, nn.Linear) or isinstance(m, nn.Conv2d): init.kaiming_normal_(m.weight) class LambdaLayer(nn.Module): def __init__(self, lambd): super(LambdaLayer, self).__init__() self.lambd = lambd def forward(self, x): return self.lambd(x) class BasicBlock(nn.Module): expansion = 1 def __init__(self, in_planes, planes, stride=1, option='A'): super(BasicBlock, self).__init__() self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(planes) self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False) self.bn2 = nn.BatchNorm2d(planes) self.shortcut = nn.Sequential() if stride != 1 or in_planes != planes: if option == 'A': """ For CIFAR10 ResNet paper uses option A. 
""" self.shortcut = LambdaLayer(lambda x: F.pad(x[:, :, ::2, ::2], (0, 0, 0, 0, planes//4, planes//4), "constant", 0)) elif option == 'B': self.shortcut = nn.Sequential( nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False), nn.BatchNorm2d(self.expansion * planes) ) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out += self.shortcut(x) out = F.relu(out) return out class ResNet(nn.Module): def __init__(self, block, num_blocks, num_classes=4246): super(ResNet, self).__init__() self.in_planes = 16 self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(16) self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2) self.linear = nn.Linear(64, num_classes) self.apply(_weights_init) def _make_layer(self, block, planes, num_blocks, stride): strides = [stride] + [1]*(num_blocks-1) layers = [] for stride in strides: layers.append(block(self.in_planes, planes, stride)) self.in_planes = planes * block.expansion return nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out = self.layer2(out) out = self.layer3(out) out = F.avg_pool2d(out, out.size()[3]) out = out.view(out.size(0), -1) out = self.linear(out) return out import torchvision.models as models device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # net = models.resnet152(pretrained=False) #Method 2 # net = models.resnet50(pretrained=False) #Method 3 net = models.resnet50(pretrained=True) #Method 4, best one num_ftrs = net.fc.in_features net.fc = nn.Linear(num_ftrs, 4246) # net = ResNet(BasicBlock, [3, 3, 3]) #Method 1 net = net.to(device) """#### Loss function & Optimizer""" import torch.optim as optim def createLossAndOptimizer(net, learning_rate): # it combines softmax with negative log likelihood loss criterion = nn.CrossEntropyLoss() optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9, weight_decay=1e-4) #optimizer = optim.Adam(net.parameters(), lr=learning_rate) return criterion, optimizer """#### Training Model Batch size = 128, number of epochs = 20, starting learning rate = 0.01 Save the trained model """ def
(net, batch_size, n_epochs, learning_rate): """ Train a neural network and print statistics of the training :param net: (PyTorch Neural Network) :param batch_size: (int) :param n_epochs: (int) Number of iterations on the training set :param learning_rate: (float) learning rate used by the optimizer """ print("===== HYPERPARAMETERS =====") print("batch_size=", batch_size) print("n_epochs=", n_epochs) print("Starting learning_rate=", learning_rate) print("=" * 30) n_minibatches = len(train_loader) criterion, optimizer = createLossAndOptimizer(net, learning_rate) # Init variables used for plotting the loss train_history = [] val_history = [] training_start_time = time.time() best_error = np.inf best_model_path = os.getcwd()+"/best_model.pth" # Move model to gpu if possible net = net.to(device) scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[17,19], gamma=0.1) # Decay LR by a factor of 0.1 after epoch 3 and 7 for epoch in range(n_epochs): # loop over the dataset multiple times running_loss = 0.0 print_every = n_minibatches // 10 start_time = time.time() total_train_loss = 0 for i, (inputs, labels) in enumerate(train_loader): # Move tensors to correct device inputs, labels = inputs.to(device), labels.to(device) # zero the parameter gradients optimizer.zero_grad() # forward + backward + optimize with torch.set_grad_enabled(True): outputs = net(inputs) loss = criterion(outputs, labels) loss.backward() optimizer.step() # print statistics running_loss += loss.item() total_train_loss += loss.item() # print every 10th of epoch if (i + 1) % (print_every + 1) == 0: print("Epoch {}, {:d}% \t train_loss: {:.2f} took: {:.2f}s".format( epoch + 1, int(100 * (i + 1) / n_minibatches), running_loss / print_every, time.time() - start_time)) running_loss = 0.0 start_time = time.time() train_history.append(total_train_loss / len(train_loader)) total_val_loss = 0 # Do a pass on the validation set # We don't need to compute gradient, # we save memory and computation using torch.no_grad() with torch.no_grad(): for inputs, labels in val_loader: # Move tensors to correct device inputs, labels = inputs.to(device), labels.to(device) # Forward pass predictions = net(inputs) val_loss = criterion(predictions, labels) total_val_loss += val_loss.item() val_history.append(total_val_loss / len(val_loader)) # Save model that performs best on validation set if total_val_loss < best_error: best_error = total_val_loss torch.save(net.state_dict(), best_model_path) print("Validation loss = {:.2f}".format(total_val_loss / len(val_loader))) scheduler.step() print("Training Finished, took {:.2f}s".format(time.time() - training_start_time)) # Load best model net.load_state_dict(torch.load(best_model_path)) return train_history, val_history # train the model train_history, val_history = train(net, batch_size=128, n_epochs=20, learning_rate=0.01) plot_losses(train_history, val_history) """See next part for testing and prediction """
train
identifier_name
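Aside: `train` checkpoints the weights with the lowest validation loss to best_model.pth and reloads them before returning. Loading that file again later, e.g. for the test part, could look like the sketch below (assuming the same resnet50 head with 4246 classes; `map_location` keeps the load working on CPU-only hosts):

import os
import torch
import torch.nn as nn
import torchvision.models as models

net = models.resnet50 (pretrained=False)
net.fc = nn.Linear (net.fc.in_features, 4246)          # same head as in training
state = torch.load (os.getcwd () + "/best_model.pth", map_location="cpu")
net.load_state_dict (state)
net.eval ()   # disable dropout, use running BatchNorm statistics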
PAD.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ photoelectron angular distributions (PADs) for isotropically oriented ensembles of molecules in the gas phase Initial bound and final continuum orbitals are obtained by the multiple scattering (MS) and the continuum multiple scattering (CMS) methods, respectively. """ from __future__ import print_function from DFTB import AtomicData from DFTB.MolecularIntegrals import settings from DFTB.MultipleScattering.MuffinTin import MuffinTinPotential, minimize_golden, save_cubefile import numpy as np from scipy import signal class PhotoelectronAngularDistribution:
def save_pads(pke,pad, pol, tbl_file, units="eV-Mb"): """ A table with the PAD is written to `tbl_file`. It contains the 4 columns PKE SIGMA BETA1 BETA_2 which define the PAD(th) at each energy according to .. code-block:: none PAD(th) = SIMGA/(4pi) [ 1 + BETA P (cos(th)) + BETA P (cos(th)) ] 1 1 2 2 For each orbital a block separated by a newline is written. """ npke,norb,dummy = pad.shape sigma = pad[:,:,0] beta1 = pad[:,:,1] beta2 = pad[:,:,2] fh = open(tbl_file, "w") pol2str = {0 : "0 (linear)", -1 : "-1 (left)", +1: "+1 (right)"} print( """ # # photoelectron angular distributions (PAD) for an isotropic ensemble # # PAD(th) = SIMGA/(4pi) [ 1 + BETA P (cos(th)) + BETA P (cos(th)) ] # 1 1 2 2 # # light polarization = %s # """ % pol2str[pol], file=fh) if units == "eV-Mb": pke = pke * AtomicData.hartree_to_eV # convert cross section sigma from bohr^2 to Mb sigma = sigma * AtomicData.bohr2_to_megabarn header = "# PKE/eV SIGMA/Mb BETA1 BETA2" else: header = "# PKE/Hartree SIGMA/bohr^2 BETA1 BETA2" for b in range(0, norb): print( "# photoionization from orbital %d" % (b+1), file=fh) print( header, file=fh) block = np.vstack((pke, sigma[:,b], beta1[:,b], beta2[:,b])).transpose() np.savetxt(fh, block, fmt=["%e","%e","%+10.7f", "%+10.7f"]) print( "" , file=fh) print( "PAD written to '%s'" % tbl_file ) fh.close() def plot_pads(pke,pad, pol, units='eV-Mb'): """ plot PAD parameters sigma, beta1 and beta2 as functions of PKE for different orbitals """ npke,norb,dummy = pad.shape sigma = pad[:,:,0] beta1 = pad[:,:,1] beta2 = pad[:,:,2] import matplotlib matplotlib.rc('xtick', labelsize=17) matplotlib.rc('ytick', labelsize=17) matplotlib.rc('legend', fontsize=17) matplotlib.rc('axes', labelsize=17) import matplotlib.pyplot as plt fig, axes = plt.subplots(1,3) plt.title("polarization = %s" % pol) if units == "eV-Mb": pke = pke * AtomicData.hartree_to_eV # convert cross section sigma from bohr^2 to Mb sigma = sigma * AtomicData.bohr2_to_megabarn for ax in [0,1,2]: axes[ax].set_xlabel("PKE / eV") axes[0].set_ylabel(r"$\sigma$ / Mb") else: for ax in [0,1,2]: axes[ax].set_xlabel("PKE / Hartree") axes[0].set_ylabel(r"$\sigma$ / bohr$^2$") # plot sigma in log-scale axes[0].set_yscale("log") axes[1].set_ylabel(r"$\beta_1$") axes[2].set_ylabel(r"$\beta_2$") axes[2].set_ylim((-1.1,2.1)) for b in range(0, norb): l, = axes[0].plot(pke, sigma[:,b], lw=2) axes[1].plot(pke, beta1[:,b], lw=2, color=l.get_color(), label=r"Orb. %d" % (b+1)) axes[2].plot(pke, beta2[:,b], lw=2, color=l.get_color()) axes[1].legend(loc="upper center") plt.subplots_adjust(wspace=0.5) plt.show() ################################################## # # # Testing # # # ################################################## def test_hmi_pads(): """ compute PADs for ionization from 1sigma orbital of the hydrogen molecular ion (H2^+) """ # choose resolution of multicenter grids settings.radial_grid_factor = 10 # controls size of radial grid settings.lebedev_order = 23 # controls size of angular grid # The radius of H2+ is set to R=2 bohr atomlist = [ (1, (0.0, 0.0, -1.0)), (1, (0.0, 0.0, +1.0)) ] # The overall charge of the HMI is +1, however the single electron will # feel the nuclear attraction from both protons, therefore chargeIII=+2. muffin = MuffinTinPotential(atomlist, lmax=10, charge=+1, chargeIII=+2, potential_type="molecular", Nr=2000, debug=0) # We search for bound state in the energy range [-1.2, -0.5] (in Hartree), # since we only want to find the 1\sigma_g and 1\sigma_u orbitals. 
search_energies = np.linspace(-1.2, -0.5, 20) bound_orbitals = muffin.find_eigenstates(search_energies) pad = PhotoelectronAngularDistribution(muffin) pke_energies = np.linspace(0.1,400.0,100) / AtomicData.hartree_to_eV pad.compute_pads(bound_orbitals, pke_energies, pol=0, pad_file="/tmp/pad.dat") if __name__ == "__main__": test_hmi_pads()
""" Parameters ---------- muffin : instance of `MuffinTinPotential` muffin tin potential, which provides the continuum orbitals """ def __init__(self, muffin): self.muffin = muffin def compute_pads(self, bound_orbitals, pke, pol=0, pad_file="/tmp/pad.dat", units="eV-Mb"): """ compute photoelectron angular distributions (PADs) for isotropic ensemble as a function of kinetic energy of the photoelectron using the CMS method Parameters ---------- bound_orbitals : list of callables `bound_orbitals[b](x,y,z)` should evaluate the `b`-th orbital on a cartesian grid pke : np.1darray (shape (npke,)) photokinetic energies (in Hartree) for which the PADs should be calculated pol : int, optional polarization of light, 0 (linear), -1 (left), +1 (right) pad_file : str, optional path to file for storing table with PADs units : str, optional units of energies and cross sections in PAD table * 'eV-Mb' : PKE in eV, sigma in megabarn * 'au' : PKE in Hartree, sigma in bohr^2 Returns ------- pad : np.ndarray (shape (npke,norb,3)) `pad[k,b,:]` contains the three parameters `sigma`,`beta1` and `beta2` for ionization from orbital `b` into the continuum at energy `pke[k]`. """ print( " " ) print( " *******************" ) print( " * PADs *" ) print( " *******************" ) print( " " ) # number of bound orbitals norb = len(bound_orbitals) # number of energies npke = len(pke) # find values of bound orbitals on a Becke grid for numerical integration grid, orbs = self.muffin.evaluate_orbitals(bound_orbitals) # compute orientation-averaged PAD for each energy pad = np.zeros((npke,norb,3)) for i,energy in enumerate(pke): if (self.muffin.debug > -1): print( "%3.1d of %3.1d PKE = %e Hartree ( %e eV )" % (i+1,npke, energy, energy * AtomicData.hartree_to_eV) ) pad[i,:,:] = self.muffin.photoelectron_distribution(energy, grid, orbs, pol=pol) plot_pads(pke, pad, pol) save_pads(pke, pad, pol, pad_file, units=units) # save intermediate variables for locating resonances self._pke = pke self._pad = pad self._pol = pol self._grid = grid self._orbs = orbs return pad def find_resonances(self, sigma_thresh=1.0): """ identify resonances as peaks in the photoionization cross section Resonances are highly peaked local maxima of sigma(E) which exceed a certain threshold. First the local maxima are identified in the curve sigma(E) that was calculated in the a previous call to `calculate_pads(...)`. The energetic positions of the maxima are refined by bisection. The kinetic energy grid use to compute sigma(E) has to be fine enough to obtain the initial guesses Parameters ---------- sigma_thresh : float, optional Maxima in the photoionization cross section are considered to be resonances if they exceed a threshold, sigma > sigma_thresh (in magebarn) Returns ------- resonances : dict `resonances[i]` contains a list of continuum orbitals at the resonances for ionization from initial orbital `i`. The energy of the resonance can be accessed `resonances[i].energy`. """ assert hasattr(self, "_pke"), "`find_resonances(...)` must be preceded by call to `calculate_pads(...)`." 
# retrieve data from previous PAD calculation pke = self._pke pad = self._pad pol = self._pol grid = self._grid orbs = self._orbs print( " " ) print( " **********************" ) print( " * Resonances *" ) print( " **********************" ) print( " " ) npke, norb, dummy = pad.shape # `energies[i]` is a list of photoelectron kinetic energy at resonance # for ionization from orbital `i` energies = {} # `sigmas[i]` contains list of values of sigma at resonances sigmas = {} # `resonances[i] contains list of continuum orbitals at resonances # (instances of `CMSWavefunction`) resonances = {} for i in range(0, norb): # Instead of maximizing sigma we minimize (-1)*sigma. # find indices of local minima minima = signal.argrelmin(-pad[:,i,0])[0].tolist() # and indices of local maxima maxima = signal.argrelmax(-pad[:,i,0])[0].tolist() if len(minima) == 0: # No local maximum of sigma, which is a local minimum of (-1)*sigma, # so no resonance continue if len(maxima) == 0: # No maximum was found, bracket minimum by end points maxima += [0,-1] # Each local minimum should be bracketed by two local maxima if pke[minima[0]] < pke[maxima[0]]: # first extremum is a minimum, so # maxima[j-1] < minima[j] < maxima[j] maxima = [0] + maxima # After prepending the first point, we have # maxima[j ] < minima[j] < maxima[j+1] if pke[minima[-1]] > pke[maxima[-1]]: # last extremum is a minimum, which is not bracketed # by two maxima maxima = maxima + [-1] # After appending last point, we have # maxima[i ] < minima[i] < maxima[i+1] # for all minima assert len(minima) == len(maxima)-1 def func(energy): # compute (-1) x photoionization cross section for initial orbital # with index `i` at energy `energy` pad_i = self.muffin.photoelectron_distribution(energy, grid, orbs[i:i+1,:], pol=pol) sigma_i = pad_i[0,0] return (-1)*sigma_i # list of photoelectron kinetic energy at resonance energies[i] = [] # values of sigma at resonances sigmas[i] = [] # list of continuum orbitals at resonances (instances of `CMSWavefunction`) resonances[i] = [] # Refine energy at each local minimum of func(E) (= maximum of sigma) for j in range(0, len(minima)): # initial guess emin0 = pke[minima[j]] # maxima that bracket this local minimum emax_lower = pke[maxima[j] ] emax_upper = pke[maxima[j+1]] assert emax_lower < emin0 < emax_upper # We search for a minimum around emin0 by Golden search # (https://en.wikipedia.org/wiki/Golden-section_search) # which assumes that there is a single minimum in the interval [l,u] alpha = 0.2 l = (1.0-alpha)*emin0 + alpha*emax_lower u = (1.0-alpha)*emin0 + alpha*emax_upper # find minimum of func(E) = -log(cond(M(E))) in the interval [l,u] try: emin = minimize_golden(func, l, u) except StopIteration: continue fmin = func(emin) assert self.muffin.energy == emin sigma_max = -fmin if (sigma_max < sigma_thresh): # sigma at maximum is too small to classify as a resonance continue resonances[i] += self.muffin.eigenchannel_analysis() energies[i].append(emin) sigmas[i].append(sigma_max) if len(resonances.keys()) > 0: print( " -----------------------------------------------------------------------------" ) print( " Orbital Resonance Energy Sigma " ) print( " Hartree eV Mb " ) print( " -----------------------------------------------------------------------------" ) else: print( " no resonances found with sigma > %e Mb" % sigma_thresh ) for i in sorted(resonances.keys()): for j,res in enumerate(resonances[i]): print( " %4.1d %4.1d %12.8f %12.8f %6.4e" % (i+1, j+1, res.energy, res.energy * AtomicData.hartree_to_eV, 
res.tags["sigma"] * AtomicData.bohr2_to_megabarn) ) print( "" ) return resonances
identifier_body
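Aside: the `(sigma, beta1, beta2)` triple stored per energy and orbital fully determines the angular distribution in the formula from the docstrings above. A minimal sketch that evaluates PAD(theta) from one such triple, using scipy's Legendre polynomials (the function name `pad_of_theta` is ours, not part of the original module):

import numpy as np
from scipy.special import eval_legendre

def pad_of_theta (sigma, beta1, beta2, theta):
    """PAD(th) = sigma/(4 pi) [ 1 + beta1 P1(cos th) + beta2 P2(cos th) ]"""
    c = np.cos (theta)
    return sigma / (4.0 * np.pi) * (1.0 + beta1 * eval_legendre (1, c)
                                        + beta2 * eval_legendre (2, c))

theta = np.linspace (0.0, np.pi, 181)
dist = pad_of_theta (1.0, 0.0, 2.0, theta)   # beta2 = 2 gives the pure cos^2 shape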
PAD.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ photoangular distributions for isotropically oriented ensembles of molecules in the gas phase Initial bound and final continuum orbitals are obtained by the multiple scattering (MS) and the continuum multiple scattering (CMS) methods, respectively. """ from __future__ import print_function from DFTB import AtomicData from DFTB.MolecularIntegrals import settings from DFTB.MultipleScattering.MuffinTin import MuffinTinPotential, minimize_golden, save_cubefile import numpy as np from scipy import signal class PhotoelectronAngularDistribution: """ Parameters ---------- muffin : instance of `MuffinTinPotential` muffin tin potential, which provides the continuum orbitals """ def __init__(self, muffin): self.muffin = muffin def compute_pads(self, bound_orbitals, pke, pol=0, pad_file="/tmp/pad.dat", units="eV-Mb"): """ compute photoelectron angular distributions (PADs) for isotropic ensemble as a function of kinetic energy of the photoelectron using the CMS method Parameters ---------- bound_orbitals : list of callables `bound_orbitals[b](x,y,z)` should evaluate the `b`-th orbital on a cartesian grid pke : np.1darray (shape (npke,)) photokinetic energies (in Hartree) for which the PADs should be calculated pol : int, optional polarization of light, 0 (linear), -1 (left), +1 (right) pad_file : str, optional path to file for storing table with PADs units : str, optional units of energies and cross sections in PAD table * 'eV-Mb' : PKE in eV, sigma in megabarn * 'au' : PKE in Hartree, sigma in bohr^2 Returns ------- pad : np.ndarray (shape (npke,norb,3)) `pad[k,b,:]` contains the three parameters `sigma`,`beta1` and `beta2` for ionization from orbital `b` into the continuum at energy `pke[k]`. """ print( " " ) print( " *******************" ) print( " * PADs *" ) print( " *******************" ) print( " " ) # number of bound orbitals norb = len(bound_orbitals) # number of energies npke = len(pke) # find values of bound orbitals on a Becke grid for numerical integration grid, orbs = self.muffin.evaluate_orbitals(bound_orbitals) # compute orientation-averaged PAD for each energy pad = np.zeros((npke,norb,3)) for i,energy in enumerate(pke): if (self.muffin.debug > -1): print( "%3.1d of %3.1d PKE = %e Hartree ( %e eV )" % (i+1,npke, energy, energy * AtomicData.hartree_to_eV) ) pad[i,:,:] = self.muffin.photoelectron_distribution(energy, grid, orbs, pol=pol) plot_pads(pke, pad, pol) save_pads(pke, pad, pol, pad_file, units=units) # save intermediate variables for locating resonances self._pke = pke self._pad = pad self._pol = pol self._grid = grid self._orbs = orbs return pad def find_resonances(self, sigma_thresh=1.0): """ identify resonances as peaks in the photoionization cross section Resonances are highly peaked local maxima of sigma(E) which exceed a certain threshold. First the local maxima are identified in the curve sigma(E) that was calculated in the a previous call to `calculate_pads(...)`. The energetic positions of the maxima are refined by bisection. The kinetic energy grid use to compute sigma(E) has to be fine enough to obtain the initial guesses Parameters ---------- sigma_thresh : float, optional Maxima in the photoionization cross section are considered to be resonances if they exceed a threshold, sigma > sigma_thresh (in magebarn) Returns ------- resonances : dict `resonances[i]` contains a list of continuum orbitals at the resonances for ionization from initial orbital `i`. 
The energy of the resonance can be accessed `resonances[i].energy`. """ assert hasattr(self, "_pke"), "`find_resonances(...)` must be preceded by call to `calculate_pads(...)`." # retrieve data from previous PAD calculation pke = self._pke pad = self._pad pol = self._pol grid = self._grid orbs = self._orbs print( " " ) print( " **********************" ) print( " * Resonances *" ) print( " **********************" ) print( " " ) npke, norb, dummy = pad.shape # `energies[i]` is a list of photoelectron kinetic energy at resonance # for ionization from orbital `i` energies = {} # `sigmas[i]` contains list of values of sigma at resonances sigmas = {} # `resonances[i] contains list of continuum orbitals at resonances # (instances of `CMSWavefunction`) resonances = {} for i in range(0, norb): # Instead of maximizing sigma we minimize (-1)*sigma. # find indices of local minima minima = signal.argrelmin(-pad[:,i,0])[0].tolist() # and indices of local maxima maxima = signal.argrelmax(-pad[:,i,0])[0].tolist() if len(minima) == 0: # No local maximum of sigma, which is a local minimum of (-1)*sigma, # so no resonance continue if len(maxima) == 0: # No maximum was found, bracket minimum by end points maxima += [0,-1] # Each local minimum should be bracketed by two local maxima if pke[minima[0]] < pke[maxima[0]]: # first extremum is a minimum, so # maxima[j-1] < minima[j] < maxima[j] maxima = [0] + maxima # After prepending the first point, we have
# last extremum is a minimum, which is not bracketed # by two maxima maxima = maxima + [-1] # After appending last point, we have # maxima[i ] < minima[i] < maxima[i+1] # for all minima assert len(minima) == len(maxima)-1 def func(energy): # compute (-1) x photoionization cross section for initial orbital # with index `i` at energy `energy` pad_i = self.muffin.photoelectron_distribution(energy, grid, orbs[i:i+1,:], pol=pol) sigma_i = pad_i[0,0] return (-1)*sigma_i # list of photoelectron kinetic energy at resonance energies[i] = [] # values of sigma at resonances sigmas[i] = [] # list of continuum orbitals at resonances (instances of `CMSWavefunction`) resonances[i] = [] # Refine energy at each local minimum of func(E) (= maximum of sigma) for j in range(0, len(minima)): # initial guess emin0 = pke[minima[j]] # maxima that bracket this local minimum emax_lower = pke[maxima[j] ] emax_upper = pke[maxima[j+1]] assert emax_lower < emin0 < emax_upper # We search for a minimum around emin0 by Golden search # (https://en.wikipedia.org/wiki/Golden-section_search) # which assumes that there is a single minimum in the interval [l,u] alpha = 0.2 l = (1.0-alpha)*emin0 + alpha*emax_lower u = (1.0-alpha)*emin0 + alpha*emax_upper # find minimum of func(E) = -log(cond(M(E))) in the interval [l,u] try: emin = minimize_golden(func, l, u) except StopIteration: continue fmin = func(emin) assert self.muffin.energy == emin sigma_max = -fmin if (sigma_max < sigma_thresh): # sigma at maximum is too small to classify as a resonance continue resonances[i] += self.muffin.eigenchannel_analysis() energies[i].append(emin) sigmas[i].append(sigma_max) if len(resonances.keys()) > 0: print( " -----------------------------------------------------------------------------" ) print( " Orbital Resonance Energy Sigma " ) print( " Hartree eV Mb " ) print( " -----------------------------------------------------------------------------" ) else: print( " no resonances found with sigma > %e Mb" % sigma_thresh ) for i in sorted(resonances.keys()): for j,res in enumerate(resonances[i]): print( " %4.1d %4.1d %12.8f %12.8f %6.4e" % (i+1, j+1, res.energy, res.energy * AtomicData.hartree_to_eV, res.tags["sigma"] * AtomicData.bohr2_to_megabarn) ) print( "" ) return resonances def save_pads(pke,pad, pol, tbl_file, units="eV-Mb"): """ A table with the PAD is written to `tbl_file`. It contains the 4 columns PKE SIGMA BETA1 BETA_2 which define the PAD(th) at each energy according to .. code-block:: none PAD(th) = SIMGA/(4pi) [ 1 + BETA P (cos(th)) + BETA P (cos(th)) ] 1 1 2 2 For each orbital a block separated by a newline is written. 
""" npke,norb,dummy = pad.shape sigma = pad[:,:,0] beta1 = pad[:,:,1] beta2 = pad[:,:,2] fh = open(tbl_file, "w") pol2str = {0 : "0 (linear)", -1 : "-1 (left)", +1: "+1 (right)"} print( """ # # photoelectron angular distributions (PAD) for an isotropic ensemble # # PAD(th) = SIMGA/(4pi) [ 1 + BETA P (cos(th)) + BETA P (cos(th)) ] # 1 1 2 2 # # light polarization = %s # """ % pol2str[pol], file=fh) if units == "eV-Mb": pke = pke * AtomicData.hartree_to_eV # convert cross section sigma from bohr^2 to Mb sigma = sigma * AtomicData.bohr2_to_megabarn header = "# PKE/eV SIGMA/Mb BETA1 BETA2" else: header = "# PKE/Hartree SIGMA/bohr^2 BETA1 BETA2" for b in range(0, norb): print( "# photoionization from orbital %d" % (b+1), file=fh) print( header, file=fh) block = np.vstack((pke, sigma[:,b], beta1[:,b], beta2[:,b])).transpose() np.savetxt(fh, block, fmt=["%e","%e","%+10.7f", "%+10.7f"]) print( "" , file=fh) print( "PAD written to '%s'" % tbl_file ) fh.close() def plot_pads(pke,pad, pol, units='eV-Mb'): """ plot PAD parameters sigma, beta1 and beta2 as functions of PKE for different orbitals """ npke,norb,dummy = pad.shape sigma = pad[:,:,0] beta1 = pad[:,:,1] beta2 = pad[:,:,2] import matplotlib matplotlib.rc('xtick', labelsize=17) matplotlib.rc('ytick', labelsize=17) matplotlib.rc('legend', fontsize=17) matplotlib.rc('axes', labelsize=17) import matplotlib.pyplot as plt fig, axes = plt.subplots(1,3) plt.title("polarization = %s" % pol) if units == "eV-Mb": pke = pke * AtomicData.hartree_to_eV # convert cross section sigma from bohr^2 to Mb sigma = sigma * AtomicData.bohr2_to_megabarn for ax in [0,1,2]: axes[ax].set_xlabel("PKE / eV") axes[0].set_ylabel(r"$\sigma$ / Mb") else: for ax in [0,1,2]: axes[ax].set_xlabel("PKE / Hartree") axes[0].set_ylabel(r"$\sigma$ / bohr$^2$") # plot sigma in log-scale axes[0].set_yscale("log") axes[1].set_ylabel(r"$\beta_1$") axes[2].set_ylabel(r"$\beta_2$") axes[2].set_ylim((-1.1,2.1)) for b in range(0, norb): l, = axes[0].plot(pke, sigma[:,b], lw=2) axes[1].plot(pke, beta1[:,b], lw=2, color=l.get_color(), label=r"Orb. %d" % (b+1)) axes[2].plot(pke, beta2[:,b], lw=2, color=l.get_color()) axes[1].legend(loc="upper center") plt.subplots_adjust(wspace=0.5) plt.show() ################################################## # # # Testing # # # ################################################## def test_hmi_pads(): """ compute PADs for ionization from 1sigma orbital of the hydrogen molecular ion (H2^+) """ # choose resolution of multicenter grids settings.radial_grid_factor = 10 # controls size of radial grid settings.lebedev_order = 23 # controls size of angular grid # The radius of H2+ is set to R=2 bohr atomlist = [ (1, (0.0, 0.0, -1.0)), (1, (0.0, 0.0, +1.0)) ] # The overall charge of the HMI is +1, however the single electron will # feel the nuclear attraction from both protons, therefore chargeIII=+2. muffin = MuffinTinPotential(atomlist, lmax=10, charge=+1, chargeIII=+2, potential_type="molecular", Nr=2000, debug=0) # We search for bound state in the energy range [-1.2, -0.5] (in Hartree), # since we only want to find the 1\sigma_g and 1\sigma_u orbitals. search_energies = np.linspace(-1.2, -0.5, 20) bound_orbitals = muffin.find_eigenstates(search_energies) pad = PhotoelectronAngularDistribution(muffin) pke_energies = np.linspace(0.1,400.0,100) / AtomicData.hartree_to_eV pad.compute_pads(bound_orbitals, pke_energies, pol=0, pad_file="/tmp/pad.dat") if __name__ == "__main__": test_hmi_pads()
# maxima[j ] < minima[j] < maxima[j+1] if pke[minima[-1]] > pke[maxima[-1]]:
random_line_split
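The `random_line_split` record above centers on the peak-bracketing step of `find_resonances`: peaks of sigma(E) are located as local minima of -sigma(E) with `scipy.signal.argrelmin`, the valleys between them as local maxima, and end points are added so every peak sits between two bracketing indices. A self-contained sketch on a toy two-peak curve (the curve is illustrative, not CMS output):

import numpy as np
from scipy import signal

pke = np.linspace(0.1, 2.0, 200)
# Toy cross section with peaks near E = 0.7 and E = 1.4
sigma = 1.0 / ((pke - 0.7)**2 + 0.01) + 0.5 / ((pke - 1.4)**2 + 0.02)

minima = signal.argrelmin(-sigma)[0].tolist()   # peaks of sigma
maxima = signal.argrelmax(-sigma)[0].tolist()   # valleys of sigma
# Pad with the end points so each peak is bracketed, as in the record.
if not maxima or pke[minima[0]] < pke[maxima[0]]:
    maxima = [0] + maxima
if pke[minima[-1]] > pke[maxima[-1]]:
    maxima = maxima + [-1]
assert len(minima) == len(maxima) - 1

for j, m in enumerate(minima):
    print("peak at E=%.3f bracketed by [%.3f, %.3f]"
          % (pke[m], pke[maxima[j]], pke[maxima[j + 1]]))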
PAD.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ photoangular distributions for isotropically oriented ensembles of molecules in the gas phase Initial bound and final continuum orbitals are obtained by the multiple scattering (MS) and the continuum multiple scattering (CMS) methods, respectively. """ from __future__ import print_function from DFTB import AtomicData from DFTB.MolecularIntegrals import settings from DFTB.MultipleScattering.MuffinTin import MuffinTinPotential, minimize_golden, save_cubefile import numpy as np from scipy import signal class PhotoelectronAngularDistribution: """ Parameters ---------- muffin : instance of `MuffinTinPotential` muffin tin potential, which provides the continuum orbitals """ def __init__(self, muffin): self.muffin = muffin def compute_pads(self, bound_orbitals, pke, pol=0, pad_file="/tmp/pad.dat", units="eV-Mb"): """ compute photoelectron angular distributions (PADs) for isotropic ensemble as a function of kinetic energy of the photoelectron using the CMS method Parameters ---------- bound_orbitals : list of callables `bound_orbitals[b](x,y,z)` should evaluate the `b`-th orbital on a cartesian grid pke : np.1darray (shape (npke,)) photokinetic energies (in Hartree) for which the PADs should be calculated pol : int, optional polarization of light, 0 (linear), -1 (left), +1 (right) pad_file : str, optional path to file for storing table with PADs units : str, optional units of energies and cross sections in PAD table * 'eV-Mb' : PKE in eV, sigma in megabarn * 'au' : PKE in Hartree, sigma in bohr^2 Returns ------- pad : np.ndarray (shape (npke,norb,3)) `pad[k,b,:]` contains the three parameters `sigma`,`beta1` and `beta2` for ionization from orbital `b` into the continuum at energy `pke[k]`. """ print( " " ) print( " *******************" ) print( " * PADs *" ) print( " *******************" ) print( " " ) # number of bound orbitals norb = len(bound_orbitals) # number of energies npke = len(pke) # find values of bound orbitals on a Becke grid for numerical integration grid, orbs = self.muffin.evaluate_orbitals(bound_orbitals) # compute orientation-averaged PAD for each energy pad = np.zeros((npke,norb,3)) for i,energy in enumerate(pke): if (self.muffin.debug > -1): print( "%3.1d of %3.1d PKE = %e Hartree ( %e eV )" % (i+1,npke, energy, energy * AtomicData.hartree_to_eV) ) pad[i,:,:] = self.muffin.photoelectron_distribution(energy, grid, orbs, pol=pol) plot_pads(pke, pad, pol) save_pads(pke, pad, pol, pad_file, units=units) # save intermediate variables for locating resonances self._pke = pke self._pad = pad self._pol = pol self._grid = grid self._orbs = orbs return pad def find_resonances(self, sigma_thresh=1.0): """ identify resonances as peaks in the photoionization cross section Resonances are highly peaked local maxima of sigma(E) which exceed a certain threshold. First the local maxima are identified in the curve sigma(E) that was calculated in the a previous call to `calculate_pads(...)`. The energetic positions of the maxima are refined by bisection. The kinetic energy grid use to compute sigma(E) has to be fine enough to obtain the initial guesses Parameters ---------- sigma_thresh : float, optional Maxima in the photoionization cross section are considered to be resonances if they exceed a threshold, sigma > sigma_thresh (in magebarn) Returns ------- resonances : dict `resonances[i]` contains a list of continuum orbitals at the resonances for ionization from initial orbital `i`. 
The energy of the resonance can be accessed `resonances[i].energy`. """ assert hasattr(self, "_pke"), "`find_resonances(...)` must be preceded by call to `calculate_pads(...)`." # retrieve data from previous PAD calculation pke = self._pke pad = self._pad pol = self._pol grid = self._grid orbs = self._orbs print( " " ) print( " **********************" ) print( " * Resonances *" ) print( " **********************" ) print( " " ) npke, norb, dummy = pad.shape # `energies[i]` is a list of photoelectron kinetic energy at resonance # for ionization from orbital `i` energies = {} # `sigmas[i]` contains list of values of sigma at resonances sigmas = {} # `resonances[i] contains list of continuum orbitals at resonances # (instances of `CMSWavefunction`) resonances = {} for i in range(0, norb): # Instead of maximizing sigma we minimize (-1)*sigma. # find indices of local minima minima = signal.argrelmin(-pad[:,i,0])[0].tolist() # and indices of local maxima maxima = signal.argrelmax(-pad[:,i,0])[0].tolist() if len(minima) == 0: # No local maximum of sigma, which is a local minimum of (-1)*sigma, # so no resonance continue if len(maxima) == 0: # No maximum was found, bracket minimum by end points maxima += [0,-1] # Each local minimum should be bracketed by two local maxima if pke[minima[0]] < pke[maxima[0]]: # first extremum is a minimum, so # maxima[j-1] < minima[j] < maxima[j] maxima = [0] + maxima # After prepending the first point, we have # maxima[j ] < minima[j] < maxima[j+1] if pke[minima[-1]] > pke[maxima[-1]]: # last extremum is a minimum, which is not bracketed # by two maxima maxima = maxima + [-1] # After appending last point, we have # maxima[i ] < minima[i] < maxima[i+1] # for all minima assert len(minima) == len(maxima)-1 def
(energy): # compute (-1) x photoionization cross section for initial orbital # with index `i` at energy `energy` pad_i = self.muffin.photoelectron_distribution(energy, grid, orbs[i:i+1,:], pol=pol) sigma_i = pad_i[0,0] return (-1)*sigma_i # list of photoelectron kinetic energy at resonance energies[i] = [] # values of sigma at resonances sigmas[i] = [] # list of continuum orbitals at resonances (instances of `CMSWavefunction`) resonances[i] = [] # Refine energy at each local minimum of func(E) (= maximum of sigma) for j in range(0, len(minima)): # initial guess emin0 = pke[minima[j]] # maxima that bracket this local minimum emax_lower = pke[maxima[j] ] emax_upper = pke[maxima[j+1]] assert emax_lower < emin0 < emax_upper # We search for a minimum around emin0 by Golden search # (https://en.wikipedia.org/wiki/Golden-section_search) # which assumes that there is a single minimum in the interval [l,u] alpha = 0.2 l = (1.0-alpha)*emin0 + alpha*emax_lower u = (1.0-alpha)*emin0 + alpha*emax_upper # find minimum of func(E) = -log(cond(M(E))) in the interval [l,u] try: emin = minimize_golden(func, l, u) except StopIteration: continue fmin = func(emin) assert self.muffin.energy == emin sigma_max = -fmin if (sigma_max < sigma_thresh): # sigma at maximum is too small to classify as a resonance continue resonances[i] += self.muffin.eigenchannel_analysis() energies[i].append(emin) sigmas[i].append(sigma_max) if len(resonances.keys()) > 0: print( " -----------------------------------------------------------------------------" ) print( " Orbital Resonance Energy Sigma " ) print( " Hartree eV Mb " ) print( " -----------------------------------------------------------------------------" ) else: print( " no resonances found with sigma > %e Mb" % sigma_thresh ) for i in sorted(resonances.keys()): for j,res in enumerate(resonances[i]): print( " %4.1d %4.1d %12.8f %12.8f %6.4e" % (i+1, j+1, res.energy, res.energy * AtomicData.hartree_to_eV, res.tags["sigma"] * AtomicData.bohr2_to_megabarn) ) print( "" ) return resonances def save_pads(pke,pad, pol, tbl_file, units="eV-Mb"): """ A table with the PAD is written to `tbl_file`. It contains the 4 columns PKE SIGMA BETA1 BETA_2 which define the PAD(th) at each energy according to .. code-block:: none PAD(th) = SIMGA/(4pi) [ 1 + BETA P (cos(th)) + BETA P (cos(th)) ] 1 1 2 2 For each orbital a block separated by a newline is written. 
""" npke,norb,dummy = pad.shape sigma = pad[:,:,0] beta1 = pad[:,:,1] beta2 = pad[:,:,2] fh = open(tbl_file, "w") pol2str = {0 : "0 (linear)", -1 : "-1 (left)", +1: "+1 (right)"} print( """ # # photoelectron angular distributions (PAD) for an isotropic ensemble # # PAD(th) = SIMGA/(4pi) [ 1 + BETA P (cos(th)) + BETA P (cos(th)) ] # 1 1 2 2 # # light polarization = %s # """ % pol2str[pol], file=fh) if units == "eV-Mb": pke = pke * AtomicData.hartree_to_eV # convert cross section sigma from bohr^2 to Mb sigma = sigma * AtomicData.bohr2_to_megabarn header = "# PKE/eV SIGMA/Mb BETA1 BETA2" else: header = "# PKE/Hartree SIGMA/bohr^2 BETA1 BETA2" for b in range(0, norb): print( "# photoionization from orbital %d" % (b+1), file=fh) print( header, file=fh) block = np.vstack((pke, sigma[:,b], beta1[:,b], beta2[:,b])).transpose() np.savetxt(fh, block, fmt=["%e","%e","%+10.7f", "%+10.7f"]) print( "" , file=fh) print( "PAD written to '%s'" % tbl_file ) fh.close() def plot_pads(pke,pad, pol, units='eV-Mb'): """ plot PAD parameters sigma, beta1 and beta2 as functions of PKE for different orbitals """ npke,norb,dummy = pad.shape sigma = pad[:,:,0] beta1 = pad[:,:,1] beta2 = pad[:,:,2] import matplotlib matplotlib.rc('xtick', labelsize=17) matplotlib.rc('ytick', labelsize=17) matplotlib.rc('legend', fontsize=17) matplotlib.rc('axes', labelsize=17) import matplotlib.pyplot as plt fig, axes = plt.subplots(1,3) plt.title("polarization = %s" % pol) if units == "eV-Mb": pke = pke * AtomicData.hartree_to_eV # convert cross section sigma from bohr^2 to Mb sigma = sigma * AtomicData.bohr2_to_megabarn for ax in [0,1,2]: axes[ax].set_xlabel("PKE / eV") axes[0].set_ylabel(r"$\sigma$ / Mb") else: for ax in [0,1,2]: axes[ax].set_xlabel("PKE / Hartree") axes[0].set_ylabel(r"$\sigma$ / bohr$^2$") # plot sigma in log-scale axes[0].set_yscale("log") axes[1].set_ylabel(r"$\beta_1$") axes[2].set_ylabel(r"$\beta_2$") axes[2].set_ylim((-1.1,2.1)) for b in range(0, norb): l, = axes[0].plot(pke, sigma[:,b], lw=2) axes[1].plot(pke, beta1[:,b], lw=2, color=l.get_color(), label=r"Orb. %d" % (b+1)) axes[2].plot(pke, beta2[:,b], lw=2, color=l.get_color()) axes[1].legend(loc="upper center") plt.subplots_adjust(wspace=0.5) plt.show() ################################################## # # # Testing # # # ################################################## def test_hmi_pads(): """ compute PADs for ionization from 1sigma orbital of the hydrogen molecular ion (H2^+) """ # choose resolution of multicenter grids settings.radial_grid_factor = 10 # controls size of radial grid settings.lebedev_order = 23 # controls size of angular grid # The radius of H2+ is set to R=2 bohr atomlist = [ (1, (0.0, 0.0, -1.0)), (1, (0.0, 0.0, +1.0)) ] # The overall charge of the HMI is +1, however the single electron will # feel the nuclear attraction from both protons, therefore chargeIII=+2. muffin = MuffinTinPotential(atomlist, lmax=10, charge=+1, chargeIII=+2, potential_type="molecular", Nr=2000, debug=0) # We search for bound state in the energy range [-1.2, -0.5] (in Hartree), # since we only want to find the 1\sigma_g and 1\sigma_u orbitals. search_energies = np.linspace(-1.2, -0.5, 20) bound_orbitals = muffin.find_eigenstates(search_energies) pad = PhotoelectronAngularDistribution(muffin) pke_energies = np.linspace(0.1,400.0,100) / AtomicData.hartree_to_eV pad.compute_pads(bound_orbitals, pke_energies, pol=0, pad_file="/tmp/pad.dat") if __name__ == "__main__": test_hmi_pads()
func
identifier_name
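The `identifier_name` record above fills in the name of the closure `func(energy)` that is handed to `minimize_golden(func, l, u)`. DFTB's `minimize_golden` is not shown in this dump; the sketch below is a generic golden-section minimizer with the same call shape, used here to refine a peak position by minimizing the negated curve:

import math

def golden_minimize(func, l, u, tol=1e-8, maxiter=200):
    # Locate the minimum of a unimodal func on [l, u] by golden-section search.
    invphi = (math.sqrt(5.0) - 1.0) / 2.0   # 1/phi, about 0.618
    a, b = l, u
    c = b - invphi * (b - a)
    d = a + invphi * (b - a)
    fc, fd = func(c), func(d)
    for _ in range(maxiter):
        if abs(b - a) < tol:
            break
        if fc < fd:
            b, d, fd = d, c, fc          # minimum lies in [a, d]
            c = b - invphi * (b - a)
            fc = func(c)
        else:
            a, c, fc = c, d, fd          # minimum lies in [c, b]
            d = a + invphi * (b - a)
            fd = func(d)
    return 0.5 * (a + b)

# Refine the position of a peak of sigma(E) by minimizing -sigma(E).
emin = golden_minimize(lambda e: -1.0 / ((e - 0.7)**2 + 0.01), 0.5, 0.9)
print("refined peak position: %.6f" % emin)   # ~0.700000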
PAD.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ photoangular distributions for isotropically oriented ensembles of molecules in the gas phase Initial bound and final continuum orbitals are obtained by the multiple scattering (MS) and the continuum multiple scattering (CMS) methods, respectively. """ from __future__ import print_function from DFTB import AtomicData from DFTB.MolecularIntegrals import settings from DFTB.MultipleScattering.MuffinTin import MuffinTinPotential, minimize_golden, save_cubefile import numpy as np from scipy import signal class PhotoelectronAngularDistribution: """ Parameters ---------- muffin : instance of `MuffinTinPotential` muffin tin potential, which provides the continuum orbitals """ def __init__(self, muffin): self.muffin = muffin def compute_pads(self, bound_orbitals, pke, pol=0, pad_file="/tmp/pad.dat", units="eV-Mb"): """ compute photoelectron angular distributions (PADs) for isotropic ensemble as a function of kinetic energy of the photoelectron using the CMS method Parameters ---------- bound_orbitals : list of callables `bound_orbitals[b](x,y,z)` should evaluate the `b`-th orbital on a cartesian grid pke : np.1darray (shape (npke,)) photokinetic energies (in Hartree) for which the PADs should be calculated pol : int, optional polarization of light, 0 (linear), -1 (left), +1 (right) pad_file : str, optional path to file for storing table with PADs units : str, optional units of energies and cross sections in PAD table * 'eV-Mb' : PKE in eV, sigma in megabarn * 'au' : PKE in Hartree, sigma in bohr^2 Returns ------- pad : np.ndarray (shape (npke,norb,3)) `pad[k,b,:]` contains the three parameters `sigma`,`beta1` and `beta2` for ionization from orbital `b` into the continuum at energy `pke[k]`. """ print( " " ) print( " *******************" ) print( " * PADs *" ) print( " *******************" ) print( " " ) # number of bound orbitals norb = len(bound_orbitals) # number of energies npke = len(pke) # find values of bound orbitals on a Becke grid for numerical integration grid, orbs = self.muffin.evaluate_orbitals(bound_orbitals) # compute orientation-averaged PAD for each energy pad = np.zeros((npke,norb,3)) for i,energy in enumerate(pke): if (self.muffin.debug > -1): print( "%3.1d of %3.1d PKE = %e Hartree ( %e eV )" % (i+1,npke, energy, energy * AtomicData.hartree_to_eV) ) pad[i,:,:] = self.muffin.photoelectron_distribution(energy, grid, orbs, pol=pol) plot_pads(pke, pad, pol) save_pads(pke, pad, pol, pad_file, units=units) # save intermediate variables for locating resonances self._pke = pke self._pad = pad self._pol = pol self._grid = grid self._orbs = orbs return pad def find_resonances(self, sigma_thresh=1.0): """ identify resonances as peaks in the photoionization cross section Resonances are highly peaked local maxima of sigma(E) which exceed a certain threshold. First the local maxima are identified in the curve sigma(E) that was calculated in the a previous call to `calculate_pads(...)`. The energetic positions of the maxima are refined by bisection. The kinetic energy grid use to compute sigma(E) has to be fine enough to obtain the initial guesses Parameters ---------- sigma_thresh : float, optional Maxima in the photoionization cross section are considered to be resonances if they exceed a threshold, sigma > sigma_thresh (in magebarn) Returns ------- resonances : dict `resonances[i]` contains a list of continuum orbitals at the resonances for ionization from initial orbital `i`. 
The energy of the resonance can be accessed `resonances[i].energy`. """ assert hasattr(self, "_pke"), "`find_resonances(...)` must be preceded by call to `calculate_pads(...)`." # retrieve data from previous PAD calculation pke = self._pke pad = self._pad pol = self._pol grid = self._grid orbs = self._orbs print( " " ) print( " **********************" ) print( " * Resonances *" ) print( " **********************" ) print( " " ) npke, norb, dummy = pad.shape # `energies[i]` is a list of photoelectron kinetic energy at resonance # for ionization from orbital `i` energies = {} # `sigmas[i]` contains list of values of sigma at resonances sigmas = {} # `resonances[i] contains list of continuum orbitals at resonances # (instances of `CMSWavefunction`) resonances = {} for i in range(0, norb): # Instead of maximizing sigma we minimize (-1)*sigma. # find indices of local minima minima = signal.argrelmin(-pad[:,i,0])[0].tolist() # and indices of local maxima maxima = signal.argrelmax(-pad[:,i,0])[0].tolist() if len(minima) == 0: # No local maximum of sigma, which is a local minimum of (-1)*sigma, # so no resonance continue if len(maxima) == 0: # No maximum was found, bracket minimum by end points maxima += [0,-1] # Each local minimum should be bracketed by two local maxima if pke[minima[0]] < pke[maxima[0]]: # first extremum is a minimum, so # maxima[j-1] < minima[j] < maxima[j] maxima = [0] + maxima # After prepending the first point, we have # maxima[j ] < minima[j] < maxima[j+1] if pke[minima[-1]] > pke[maxima[-1]]: # last extremum is a minimum, which is not bracketed # by two maxima maxima = maxima + [-1] # After appending last point, we have # maxima[i ] < minima[i] < maxima[i+1] # for all minima assert len(minima) == len(maxima)-1 def func(energy): # compute (-1) x photoionization cross section for initial orbital # with index `i` at energy `energy` pad_i = self.muffin.photoelectron_distribution(energy, grid, orbs[i:i+1,:], pol=pol) sigma_i = pad_i[0,0] return (-1)*sigma_i # list of photoelectron kinetic energy at resonance energies[i] = [] # values of sigma at resonances sigmas[i] = [] # list of continuum orbitals at resonances (instances of `CMSWavefunction`) resonances[i] = [] # Refine energy at each local minimum of func(E) (= maximum of sigma) for j in range(0, len(minima)): # initial guess emin0 = pke[minima[j]] # maxima that bracket this local minimum emax_lower = pke[maxima[j] ] emax_upper = pke[maxima[j+1]] assert emax_lower < emin0 < emax_upper # We search for a minimum around emin0 by Golden search # (https://en.wikipedia.org/wiki/Golden-section_search) # which assumes that there is a single minimum in the interval [l,u] alpha = 0.2 l = (1.0-alpha)*emin0 + alpha*emax_lower u = (1.0-alpha)*emin0 + alpha*emax_upper # find minimum of func(E) = -log(cond(M(E))) in the interval [l,u] try: emin = minimize_golden(func, l, u) except StopIteration: continue fmin = func(emin) assert self.muffin.energy == emin sigma_max = -fmin if (sigma_max < sigma_thresh): # sigma at maximum is too small to classify as a resonance continue resonances[i] += self.muffin.eigenchannel_analysis() energies[i].append(emin) sigmas[i].append(sigma_max) if len(resonances.keys()) > 0: print( " -----------------------------------------------------------------------------" ) print( " Orbital Resonance Energy Sigma " ) print( " Hartree eV Mb " ) print( " -----------------------------------------------------------------------------" ) else: print( " no resonances found with sigma > %e Mb" % sigma_thresh ) for i in 
sorted(resonances.keys()): for j,res in enumerate(resonances[i]):
print( "" ) return resonances def save_pads(pke,pad, pol, tbl_file, units="eV-Mb"): """ A table with the PAD is written to `tbl_file`. It contains the 4 columns PKE SIGMA BETA1 BETA_2 which define the PAD(th) at each energy according to .. code-block:: none PAD(th) = SIMGA/(4pi) [ 1 + BETA P (cos(th)) + BETA P (cos(th)) ] 1 1 2 2 For each orbital a block separated by a newline is written. """ npke,norb,dummy = pad.shape sigma = pad[:,:,0] beta1 = pad[:,:,1] beta2 = pad[:,:,2] fh = open(tbl_file, "w") pol2str = {0 : "0 (linear)", -1 : "-1 (left)", +1: "+1 (right)"} print( """ # # photoelectron angular distributions (PAD) for an isotropic ensemble # # PAD(th) = SIMGA/(4pi) [ 1 + BETA P (cos(th)) + BETA P (cos(th)) ] # 1 1 2 2 # # light polarization = %s # """ % pol2str[pol], file=fh) if units == "eV-Mb": pke = pke * AtomicData.hartree_to_eV # convert cross section sigma from bohr^2 to Mb sigma = sigma * AtomicData.bohr2_to_megabarn header = "# PKE/eV SIGMA/Mb BETA1 BETA2" else: header = "# PKE/Hartree SIGMA/bohr^2 BETA1 BETA2" for b in range(0, norb): print( "# photoionization from orbital %d" % (b+1), file=fh) print( header, file=fh) block = np.vstack((pke, sigma[:,b], beta1[:,b], beta2[:,b])).transpose() np.savetxt(fh, block, fmt=["%e","%e","%+10.7f", "%+10.7f"]) print( "" , file=fh) print( "PAD written to '%s'" % tbl_file ) fh.close() def plot_pads(pke,pad, pol, units='eV-Mb'): """ plot PAD parameters sigma, beta1 and beta2 as functions of PKE for different orbitals """ npke,norb,dummy = pad.shape sigma = pad[:,:,0] beta1 = pad[:,:,1] beta2 = pad[:,:,2] import matplotlib matplotlib.rc('xtick', labelsize=17) matplotlib.rc('ytick', labelsize=17) matplotlib.rc('legend', fontsize=17) matplotlib.rc('axes', labelsize=17) import matplotlib.pyplot as plt fig, axes = plt.subplots(1,3) plt.title("polarization = %s" % pol) if units == "eV-Mb": pke = pke * AtomicData.hartree_to_eV # convert cross section sigma from bohr^2 to Mb sigma = sigma * AtomicData.bohr2_to_megabarn for ax in [0,1,2]: axes[ax].set_xlabel("PKE / eV") axes[0].set_ylabel(r"$\sigma$ / Mb") else: for ax in [0,1,2]: axes[ax].set_xlabel("PKE / Hartree") axes[0].set_ylabel(r"$\sigma$ / bohr$^2$") # plot sigma in log-scale axes[0].set_yscale("log") axes[1].set_ylabel(r"$\beta_1$") axes[2].set_ylabel(r"$\beta_2$") axes[2].set_ylim((-1.1,2.1)) for b in range(0, norb): l, = axes[0].plot(pke, sigma[:,b], lw=2) axes[1].plot(pke, beta1[:,b], lw=2, color=l.get_color(), label=r"Orb. %d" % (b+1)) axes[2].plot(pke, beta2[:,b], lw=2, color=l.get_color()) axes[1].legend(loc="upper center") plt.subplots_adjust(wspace=0.5) plt.show() ################################################## # # # Testing # # # ################################################## def test_hmi_pads(): """ compute PADs for ionization from 1sigma orbital of the hydrogen molecular ion (H2^+) """ # choose resolution of multicenter grids settings.radial_grid_factor = 10 # controls size of radial grid settings.lebedev_order = 23 # controls size of angular grid # The radius of H2+ is set to R=2 bohr atomlist = [ (1, (0.0, 0.0, -1.0)), (1, (0.0, 0.0, +1.0)) ] # The overall charge of the HMI is +1, however the single electron will # feel the nuclear attraction from both protons, therefore chargeIII=+2. muffin = MuffinTinPotential(atomlist, lmax=10, charge=+1, chargeIII=+2, potential_type="molecular", Nr=2000, debug=0) # We search for bound state in the energy range [-1.2, -0.5] (in Hartree), # since we only want to find the 1\sigma_g and 1\sigma_u orbitals. 
search_energies = np.linspace(-1.2, -0.5, 20) bound_orbitals = muffin.find_eigenstates(search_energies) pad = PhotoelectronAngularDistribution(muffin) pke_energies = np.linspace(0.1,400.0,100) / AtomicData.hartree_to_eV pad.compute_pads(bound_orbitals, pke_energies, pol=0, pad_file="/tmp/pad.dat") if __name__ == "__main__": test_hmi_pads()
print( " %4.1d %4.1d %12.8f %12.8f %6.4e" % (i+1, j+1, res.energy, res.energy * AtomicData.hartree_to_eV, res.tags["sigma"] * AtomicData.bohr2_to_megabarn) )
conditional_block
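The `conditional_block` record above also shows how the golden search window is prepared: the initial guess `emin0` is pulled a fraction `alpha = 0.2` toward each bracketing maximum, which keeps the interval strictly inside the bracket. A worked instance with placeholder numbers:

# Placeholder energies in Hartree, chosen for illustration only.
alpha = 0.2
emin0, emax_lower, emax_upper = 0.70, 0.40, 1.10
l = (1.0 - alpha) * emin0 + alpha * emax_lower   # 0.64
u = (1.0 - alpha) * emin0 + alpha * emax_upper   # 0.78
assert emax_lower < l < emin0 < u < emax_upper
print(l, u)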
IssuanceHelpers.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { JoseBuilder, Subtle } from 'verifiablecredentials-crypto-sdk-typescript'; import TestSetup from './TestSetup'; import { DidDocument } from '@decentralized-identity/did-common-typescript'; import ClaimToken, { TokenType } from '../lib/verifiable_credential/ClaimToken'; import ValidationOptions from '../lib/options/ValidationOptions'; import { KeyReference, IExpectedBase, IExpectedSelfIssued, IExpectedIdToken, IExpectedSiop, IExpectedVerifiablePresentation, IExpectedVerifiableCredential, JsonWebSignatureToken, TokenPayload, createJwkThumbprint } from '../lib/index'; import VerifiableCredentialConstants from '../lib/verifiable_credential/VerifiableCredentialConstants'; export class IssuanceHelpers { public static readonly jti: string = 'testJti'; /** * Create siop request */ public static async createSiopRequestWithPayload(setup: TestSetup, siop: any, key: any): Promise<ClaimToken> { const claimToken = await IssuanceHelpers.signAToken(setup, siop, '', key); return claimToken; } /** * Create siop request */ public static async createSiopRequest(setup: TestSetup, key: any, contract: string | undefined, nonce: string, attestations: any): Promise<ClaimToken> { let siop: TokenPayload = { nonce, contract, attestations, iss: 'https://self-issued.me', aud: setup.AUDIENCE, jti: IssuanceHelpers.jti, sub_jwk: key, sub: createJwkThumbprint(key), did: setup.defaultUserDid } if(setup.siopMutator){ siop = setup.siopMutator(siop); } return IssuanceHelpers.createSiopRequestWithPayload(setup, siop, key); } /** * Create a verifiable credentiaL * @param claims Credential claims */ public static createSelfIssuedToken(claims: TokenPayload): ClaimToken { const header = { alg: "none", typ: 'JWT' }; const jwt = JsonWebSignatureToken.encode(header, claims); return new ClaimToken(TokenType.selfIssued, jwt, ''); } /** * Create a verifiable credential * @param claims Token claims */ public static async createVc(setup: TestSetup, credentialSubject: TokenPayload, configuration: string, jwkPrivate: any, jwkPublic: any): Promise<ClaimToken> { // Set the mock because we will resolve the signing key as did await this.resolverMock(setup, setup.defaultIssuerDid, jwkPrivate, jwkPublic); const statusUrl = 'https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/status'; // Status mock setup.fetchMock.post(statusUrl, {}, { overwriteRoutes: true }); let vcTemplate = { "jti": "urn:pic:80a509d2-99d4-4d6c-86a7-7b2636944080", "vc": { "@context": [ "https://www.w3.org/2018/credentials/v1", "https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/contracts/test/schema" ], "type": [ "VerifiableCredential", "DrivingLicense" ], "credentialSubject": { }, "credentialStatus": { "id": `${statusUrl}`, "type": "PortableIdentityCardServiceCredentialStatus2020" } }, iss: `${setup.defaultIssuerDid}`, sub: `${setup.defaultUserDid}` }; vcTemplate.vc.credentialSubject = credentialSubject; return IssuanceHelpers.signAToken(setup, vcTemplate, configuration, jwkPrivate); } /** * Create a verifiable presentation * @param claims Token claims */ public static async createVp(setup: TestSetup, vcs: ClaimToken[], jwkPrivate: any): 
Promise<ClaimToken> { let vpTemplate = { "jti": "baab2cdccb38408d8f1179071fe37dbe", "scope": "openid did_authn verify", "vp": { "@context": [ "https://www.w3.org/2018/credentials/v1" ], "type": [ "VerifiablePresentation" ], "verifiableCredential": [] }, iss: `${setup.defaultUserDid}`, aud: `${setup.defaultIssuerDid}`, }; for (let inx = 0; inx < vcs.length; inx++) { (vpTemplate.vp.verifiableCredential as string[]).push(<string>vcs[inx].rawToken); } return IssuanceHelpers.signAToken(setup, vpTemplate, '', jwkPrivate); } /** * Generate a signing keys and set the configuration mock */ public static async generateSigningKey(_setup: TestSetup, kid: string): Promise<[any, any]> { const generator = new Subtle(); const key: any = await generator.generateKey( <any>{ name: "RSASSA-PKCS1-v1_5", modulusLength: 2048, publicExponent: new Uint8Array([0x01, 0x00, 0x01]), hash: { name: "SHA-256" }, }, true, ["sign", "verify"]); const jwkPublic = await generator.exportKey('jwk', key.publicKey); const jwkPrivate = await generator.exportKey('jwk', key.privateKey); (<any>jwkPrivate).kid = (<any>jwkPublic).kid = kid; return [jwkPrivate, jwkPublic]; } // Generate a signing keys and set the configuration mock public static async generateSigningKeyAndSetConfigurationMock(setup: TestSetup, kid: string, configuration?: string, issuer?: string): Promise<[any, any, string]> { // setup http mock configuration = configuration || setup.defaultIdTokenConfiguration; issuer = issuer || setup.tokenIssuer; const jwks = setup.defaultIdTokenJwksConfiguration; setup.fetchMock.get(configuration, { "jwks_uri": `${jwks}`, "issuer": `${issuer}` }, { overwriteRoutes: true }); const [jwkPrivate, jwkPublic] = await IssuanceHelpers.generateSigningKey(setup, kid); setup.fetchMock.get(jwks, `{"keys": [${JSON.stringify(jwkPublic)}]}`, { overwriteRoutes: true }); return [jwkPrivate, jwkPublic, configuration]; } // Set resolver mock public static async resolverMock(setup: TestSetup, did: string, jwkPrivate?: any, jwkPublic?: any): Promise<[DidDocument, any, any]> { // setup http mock if (!jwkPrivate && !jwkPublic) { [jwkPrivate, jwkPublic] = await IssuanceHelpers.generateSigningKey(setup, `${did}#signing`); } const didDocument = { didDocument: new DidDocument({ "@context": "https://w3id.org/did/v1", id: did, publicKey: <any>[{ id: jwkPublic.kid, type: 'RsaVerificationKey2018', controller: did, publicKeyJwk: jwkPublic }] }) }; (didDocument.didDocument as any)['@context'] = 'https://w3id.org/did/v1'; // Resolver mock const resolverUrl = `${setup.resolverUrl}/${did}`; setup.fetchMock.get(resolverUrl, didDocument, { overwriteRoutes: true }); return [didDocument.didDocument, jwkPrivate, jwkPublic]; } // Sign a token public static async signAToken(setup: TestSetup, payload: object, configuration: string, jwkPrivate: any): Promise<ClaimToken> { const keyId = new KeyReference(jwkPrivate.kid); await setup.keyStore.save(keyId, <any>jwkPrivate); setup.validatorOptions.crypto.builder.useSigningKeyReference(keyId); setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).builder.useKid(keyId.keyReference); const signature = await setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).sign(payload); const token = await setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).serialize(); let claimToken = ClaimToken.create(token, configuration); return claimToken; } public static async createRequest( setup: TestSetup, tokenDescription: TokenType, issuance: boolean, idTokenIssuer?: string, idTokenAudience?: string, idTokenExp?: number): 
Promise<[ClaimToken, ValidationOptions, any]> { const options = new ValidationOptions(setup.validatorOptions, tokenDescription); const [didJwkPrivate, didJwkPublic] = await IssuanceHelpers.generateSigningKey(setup, setup.defaulUserDidKid); const [tokenJwkPrivate, tokenJwkPublic, tokenConfiguration] = await IssuanceHelpers.generateSigningKeyAndSetConfigurationMock(setup, setup.defaulIssuerDidKid); const [didDocument, jwkPrivate2, jwkPublic2] = await IssuanceHelpers.resolverMock(setup, setup.defaultUserDid, didJwkPrivate, didJwkPublic); const idTokenPayload = { upn: '[email protected]', name: 'Jules Winnfield', iss: idTokenIssuer ?? setup.tokenIssuer, aud: idTokenAudience ?? setup.tokenAudience, exp: idTokenExp ?? Math.trunc(Date.now() / 1000) + 10000, }; const idToken = await IssuanceHelpers.signAToken( setup, idTokenPayload, tokenConfiguration, tokenJwkPrivate); const vcConfiguration = 'https://vcexample.com/schema'; const vcPayload = { givenName: 'Jules', familyName: 'Winnfield' }; const vc = await IssuanceHelpers.createVc( setup, vcPayload, vcConfiguration, tokenJwkPrivate, tokenJwkPublic); const vp = await IssuanceHelpers.createVp(setup, [vc], didJwkPrivate); const si = IssuanceHelpers.createSelfIssuedToken({ name: 'jules', birthDate: new Date().toString() }); let attestations: { [claim: string]: any }; if (issuance) { attestations = { selfIssued: si.rawToken, idTokens: {}, presentations: {} }; attestations.idTokens[setup.defaultIdTokenConfiguration] = idToken.rawToken; attestations.presentations['DrivingLicense'] = vp.rawToken; } else
const contract = 'https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/contracts/test/schema'; const request = await IssuanceHelpers.createSiopRequest( setup, didJwkPrivate, issuance ? contract : undefined, '', attestations ); const vcContractIssuers: { [credentialType: string]: string[] } = {}; vcContractIssuers['DrivingLicense'] = [setup.defaultIssuerDid]; const idTokenConfiguration: string[] =[setup.defaultIdTokenConfiguration]; const expected: IExpectedBase[] = [ <IExpectedSelfIssued>{ type: TokenType.selfIssued }, <IExpectedIdToken>{ type: TokenType.idToken, configuration: idTokenConfiguration, audience: setup.AUDIENCE }, <IExpectedSiop>{ type: TokenType.siopIssuance, audience: setup.AUDIENCE }, <IExpectedSiop>{ type: TokenType.siopPresentationAttestation, audience: setup.AUDIENCE }, <IExpectedVerifiablePresentation>{ type: TokenType.verifiablePresentationJwt, didAudience: setup.defaultIssuerDid }, <IExpectedVerifiableCredential>{ type: TokenType.verifiableCredential, contractIssuers: vcContractIssuers } ]; const siopRequest = { didJwkPrivate, didJwkPublic, tokenJwkPrivate, didDocument, contract, attestations, tokenConfiguration, idToken, vp, vc, si, expected, jti: IssuanceHelpers.jti } return [request, options, siopRequest]; } }
{
  attestations = { presentations: {} };
  attestations.presentations['DrivingLicense'] = vp.rawToken;
}
conditional_block
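The `conditional_block` record above captures the branch that assembles the `attestations` payload: an issuance SIOP carries self-issued claims, id tokens and presentations, while a presentation-only SIOP carries presentations alone. A Python paraphrase of that branch; field names follow the record, values are placeholders:

def build_attestations(issuance, si_token, id_token, vp_token, id_token_configuration):
    # Mirrors the if/else on `issuance` in the TypeScript record.
    if issuance:
        attestations = {
            'selfIssued': si_token,
            'idTokens': {id_token_configuration: id_token},
            'presentations': {'DrivingLicense': vp_token},
        }
    else:
        attestations = {
            'presentations': {'DrivingLicense': vp_token},
        }
    return attestations

print(build_attestations(False, 'si.jwt', 'id.jwt', 'vp.jwt', 'https://config.example'))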
IssuanceHelpers.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { JoseBuilder, Subtle } from 'verifiablecredentials-crypto-sdk-typescript'; import TestSetup from './TestSetup'; import { DidDocument } from '@decentralized-identity/did-common-typescript'; import ClaimToken, { TokenType } from '../lib/verifiable_credential/ClaimToken'; import ValidationOptions from '../lib/options/ValidationOptions'; import { KeyReference, IExpectedBase, IExpectedSelfIssued, IExpectedIdToken, IExpectedSiop, IExpectedVerifiablePresentation, IExpectedVerifiableCredential, JsonWebSignatureToken, TokenPayload, createJwkThumbprint } from '../lib/index'; import VerifiableCredentialConstants from '../lib/verifiable_credential/VerifiableCredentialConstants'; export class IssuanceHelpers { public static readonly jti: string = 'testJti'; /** * Create siop request */ public static async createSiopRequestWithPayload(setup: TestSetup, siop: any, key: any): Promise<ClaimToken> { const claimToken = await IssuanceHelpers.signAToken(setup, siop, '', key); return claimToken; } /** * Create siop request */ public static async createSiopRequest(setup: TestSetup, key: any, contract: string | undefined, nonce: string, attestations: any): Promise<ClaimToken> { let siop: TokenPayload = { nonce, contract, attestations, iss: 'https://self-issued.me', aud: setup.AUDIENCE, jti: IssuanceHelpers.jti, sub_jwk: key, sub: createJwkThumbprint(key), did: setup.defaultUserDid } if(setup.siopMutator){ siop = setup.siopMutator(siop); } return IssuanceHelpers.createSiopRequestWithPayload(setup, siop, key); } /** * Create a verifiable credentiaL * @param claims Credential claims */ public static createSelfIssuedToken(claims: TokenPayload): ClaimToken { const header = { alg: "none", typ: 'JWT' }; const jwt = JsonWebSignatureToken.encode(header, claims); return new ClaimToken(TokenType.selfIssued, jwt, ''); } /** * Create a verifiable credential * @param claims Token claims */ public static async createVc(setup: TestSetup, credentialSubject: TokenPayload, configuration: string, jwkPrivate: any, jwkPublic: any): Promise<ClaimToken> { // Set the mock because we will resolve the signing key as did await this.resolverMock(setup, setup.defaultIssuerDid, jwkPrivate, jwkPublic); const statusUrl = 'https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/status'; // Status mock setup.fetchMock.post(statusUrl, {}, { overwriteRoutes: true }); let vcTemplate = { "jti": "urn:pic:80a509d2-99d4-4d6c-86a7-7b2636944080", "vc": { "@context": [ "https://www.w3.org/2018/credentials/v1", "https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/contracts/test/schema" ], "type": [ "VerifiableCredential", "DrivingLicense" ], "credentialSubject": { }, "credentialStatus": { "id": `${statusUrl}`, "type": "PortableIdentityCardServiceCredentialStatus2020" } }, iss: `${setup.defaultIssuerDid}`, sub: `${setup.defaultUserDid}` }; vcTemplate.vc.credentialSubject = credentialSubject; return IssuanceHelpers.signAToken(setup, vcTemplate, configuration, jwkPrivate); } /** * Create a verifiable presentation * @param claims Token claims */ public static async createVp(setup: TestSetup, vcs: ClaimToken[], jwkPrivate: any): 
Promise<ClaimToken> { let vpTemplate = { "jti": "baab2cdccb38408d8f1179071fe37dbe", "scope": "openid did_authn verify", "vp": { "@context": [ "https://www.w3.org/2018/credentials/v1" ], "type": [ "VerifiablePresentation" ], "verifiableCredential": [] }, iss: `${setup.defaultUserDid}`, aud: `${setup.defaultIssuerDid}`, }; for (let inx = 0; inx < vcs.length; inx++) { (vpTemplate.vp.verifiableCredential as string[]).push(<string>vcs[inx].rawToken); } return IssuanceHelpers.signAToken(setup, vpTemplate, '', jwkPrivate); } /** * Generate a signing keys and set the configuration mock */ public static async generateSigningKey(_setup: TestSetup, kid: string): Promise<[any, any]> { const generator = new Subtle(); const key: any = await generator.generateKey( <any>{ name: "RSASSA-PKCS1-v1_5", modulusLength: 2048, publicExponent: new Uint8Array([0x01, 0x00, 0x01]), hash: { name: "SHA-256" }, }, true, ["sign", "verify"]); const jwkPublic = await generator.exportKey('jwk', key.publicKey); const jwkPrivate = await generator.exportKey('jwk', key.privateKey); (<any>jwkPrivate).kid = (<any>jwkPublic).kid = kid; return [jwkPrivate, jwkPublic]; } // Generate a signing keys and set the configuration mock public static async generateSigningKeyAndSetConfigurationMock(setup: TestSetup, kid: string, configuration?: string, issuer?: string): Promise<[any, any, string]> { // setup http mock configuration = configuration || setup.defaultIdTokenConfiguration; issuer = issuer || setup.tokenIssuer; const jwks = setup.defaultIdTokenJwksConfiguration; setup.fetchMock.get(configuration, { "jwks_uri": `${jwks}`, "issuer": `${issuer}` }, { overwriteRoutes: true }); const [jwkPrivate, jwkPublic] = await IssuanceHelpers.generateSigningKey(setup, kid); setup.fetchMock.get(jwks, `{"keys": [${JSON.stringify(jwkPublic)}]}`, { overwriteRoutes: true }); return [jwkPrivate, jwkPublic, configuration]; } // Set resolver mock public static async resolverMock(setup: TestSetup, did: string, jwkPrivate?: any, jwkPublic?: any): Promise<[DidDocument, any, any]> { // setup http mock if (!jwkPrivate && !jwkPublic) { [jwkPrivate, jwkPublic] = await IssuanceHelpers.generateSigningKey(setup, `${did}#signing`); } const didDocument = { didDocument: new DidDocument({ "@context": "https://w3id.org/did/v1", id: did, publicKey: <any>[{ id: jwkPublic.kid, type: 'RsaVerificationKey2018', controller: did, publicKeyJwk: jwkPublic }] }) }; (didDocument.didDocument as any)['@context'] = 'https://w3id.org/did/v1'; // Resolver mock const resolverUrl = `${setup.resolverUrl}/${did}`; setup.fetchMock.get(resolverUrl, didDocument, { overwriteRoutes: true }); return [didDocument.didDocument, jwkPrivate, jwkPublic]; } // Sign a token public static async signAToken(setup: TestSetup, payload: object, configuration: string, jwkPrivate: any): Promise<ClaimToken> { const keyId = new KeyReference(jwkPrivate.kid); await setup.keyStore.save(keyId, <any>jwkPrivate); setup.validatorOptions.crypto.builder.useSigningKeyReference(keyId); setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).builder.useKid(keyId.keyReference); const signature = await setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).sign(payload); const token = await setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).serialize(); let claimToken = ClaimToken.create(token, configuration); return claimToken; } public static async createRequest( setup: TestSetup, tokenDescription: TokenType, issuance: boolean, idTokenIssuer?: string, idTokenAudience?: string, idTokenExp?: number): 
Promise<[ClaimToken, ValidationOptions, any]> { const options = new ValidationOptions(setup.validatorOptions, tokenDescription); const [didJwkPrivate, didJwkPublic] = await IssuanceHelpers.generateSigningKey(setup, setup.defaulUserDidKid); const [tokenJwkPrivate, tokenJwkPublic, tokenConfiguration] = await IssuanceHelpers.generateSigningKeyAndSetConfigurationMock(setup, setup.defaulIssuerDidKid); const [didDocument, jwkPrivate2, jwkPublic2] = await IssuanceHelpers.resolverMock(setup, setup.defaultUserDid, didJwkPrivate, didJwkPublic); const idTokenPayload = { upn: '[email protected]', name: 'Jules Winnfield', iss: idTokenIssuer ?? setup.tokenIssuer, aud: idTokenAudience ?? setup.tokenAudience, exp: idTokenExp ?? Math.trunc(Date.now() / 1000) + 10000, }; const idToken = await IssuanceHelpers.signAToken( setup, idTokenPayload, tokenConfiguration, tokenJwkPrivate); const vcConfiguration = 'https://vcexample.com/schema'; const vcPayload = { givenName: 'Jules', familyName: 'Winnfield'
tokenJwkPrivate, tokenJwkPublic); const vp = await IssuanceHelpers.createVp(setup, [vc], didJwkPrivate); const si = IssuanceHelpers.createSelfIssuedToken({ name: 'jules', birthDate: new Date().toString() }); let attestations: { [claim: string]: any }; if (issuance) { attestations = { selfIssued: si.rawToken, idTokens: {}, presentations: {} }; attestations.idTokens[setup.defaultIdTokenConfiguration] = idToken.rawToken; attestations.presentations['DrivingLicense'] = vp.rawToken; } else { attestations = { presentations: {} }; attestations.presentations['DrivingLicense'] = vp.rawToken; } const contract = 'https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/contracts/test/schema'; const request = await IssuanceHelpers.createSiopRequest( setup, didJwkPrivate, issuance ? contract : undefined, '', attestations ); const vcContractIssuers: { [credentialType: string]: string[] } = {}; vcContractIssuers['DrivingLicense'] = [setup.defaultIssuerDid]; const idTokenConfiguration: string[] =[setup.defaultIdTokenConfiguration]; const expected: IExpectedBase[] = [ <IExpectedSelfIssued>{ type: TokenType.selfIssued }, <IExpectedIdToken>{ type: TokenType.idToken, configuration: idTokenConfiguration, audience: setup.AUDIENCE }, <IExpectedSiop>{ type: TokenType.siopIssuance, audience: setup.AUDIENCE }, <IExpectedSiop>{ type: TokenType.siopPresentationAttestation, audience: setup.AUDIENCE }, <IExpectedVerifiablePresentation>{ type: TokenType.verifiablePresentationJwt, didAudience: setup.defaultIssuerDid }, <IExpectedVerifiableCredential>{ type: TokenType.verifiableCredential, contractIssuers: vcContractIssuers } ]; const siopRequest = { didJwkPrivate, didJwkPublic, tokenJwkPrivate, didDocument, contract, attestations, tokenConfiguration, idToken, vp, vc, si, expected, jti: IssuanceHelpers.jti } return [request, options, siopRequest]; } }
};
const vc = await IssuanceHelpers.createVc(
  setup,
  vcPayload,
  vcConfiguration,
random_line_split
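The `random_line_split` record above cuts through the `createVc` call; the payload that call signs is the `vcTemplate` visible in the prefix. Transcribed below as a plain dict for reference, with the fixture URLs and dids replaced by hypothetical placeholders:

def make_vc_payload(credential_subject, status_url, issuer_did, user_did):
    # Shape of the verifiable-credential JWT payload from the record.
    return {
        "jti": "urn:pic:80a509d2-99d4-4d6c-86a7-7b2636944080",
        "vc": {
            "@context": ["https://www.w3.org/2018/credentials/v1"],
            "type": ["VerifiableCredential", "DrivingLicense"],
            "credentialSubject": credential_subject,
            "credentialStatus": {
                "id": status_url,
                "type": "PortableIdentityCardServiceCredentialStatus2020",
            },
        },
        "iss": issuer_did,
        "sub": user_did,
    }

payload = make_vc_payload({"givenName": "Jules", "familyName": "Winnfield"},
                          "https://status.example/api", "did:example:issuer",
                          "did:example:user")
print(payload["vc"]["type"])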
IssuanceHelpers.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { JoseBuilder, Subtle } from 'verifiablecredentials-crypto-sdk-typescript'; import TestSetup from './TestSetup'; import { DidDocument } from '@decentralized-identity/did-common-typescript'; import ClaimToken, { TokenType } from '../lib/verifiable_credential/ClaimToken'; import ValidationOptions from '../lib/options/ValidationOptions'; import { KeyReference, IExpectedBase, IExpectedSelfIssued, IExpectedIdToken, IExpectedSiop, IExpectedVerifiablePresentation, IExpectedVerifiableCredential, JsonWebSignatureToken, TokenPayload, createJwkThumbprint } from '../lib/index'; import VerifiableCredentialConstants from '../lib/verifiable_credential/VerifiableCredentialConstants'; export class IssuanceHelpers { public static readonly jti: string = 'testJti'; /** * Create siop request */ public static async createSiopRequestWithPayload(setup: TestSetup, siop: any, key: any): Promise<ClaimToken> { const claimToken = await IssuanceHelpers.signAToken(setup, siop, '', key); return claimToken; } /** * Create siop request */ public static async createSiopRequest(setup: TestSetup, key: any, contract: string | undefined, nonce: string, attestations: any): Promise<ClaimToken> { let siop: TokenPayload = { nonce, contract, attestations, iss: 'https://self-issued.me', aud: setup.AUDIENCE, jti: IssuanceHelpers.jti, sub_jwk: key, sub: createJwkThumbprint(key), did: setup.defaultUserDid } if(setup.siopMutator){ siop = setup.siopMutator(siop); } return IssuanceHelpers.createSiopRequestWithPayload(setup, siop, key); } /** * Create a verifiable credentiaL * @param claims Credential claims */ public static createSelfIssuedToken(claims: TokenPayload): ClaimToken { const header = { alg: "none", typ: 'JWT' }; const jwt = JsonWebSignatureToken.encode(header, claims); return new ClaimToken(TokenType.selfIssued, jwt, ''); } /** * Create a verifiable credential * @param claims Token claims */ public static async createVc(setup: TestSetup, credentialSubject: TokenPayload, configuration: string, jwkPrivate: any, jwkPublic: any): Promise<ClaimToken> { // Set the mock because we will resolve the signing key as did await this.resolverMock(setup, setup.defaultIssuerDid, jwkPrivate, jwkPublic); const statusUrl = 'https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/status'; // Status mock setup.fetchMock.post(statusUrl, {}, { overwriteRoutes: true }); let vcTemplate = { "jti": "urn:pic:80a509d2-99d4-4d6c-86a7-7b2636944080", "vc": { "@context": [ "https://www.w3.org/2018/credentials/v1", "https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/contracts/test/schema" ], "type": [ "VerifiableCredential", "DrivingLicense" ], "credentialSubject": { }, "credentialStatus": { "id": `${statusUrl}`, "type": "PortableIdentityCardServiceCredentialStatus2020" } }, iss: `${setup.defaultIssuerDid}`, sub: `${setup.defaultUserDid}` }; vcTemplate.vc.credentialSubject = credentialSubject; return IssuanceHelpers.signAToken(setup, vcTemplate, configuration, jwkPrivate); } /** * Create a verifiable presentation * @param claims Token claims */ public static async createVp(setup: TestSetup, vcs: ClaimToken[], jwkPrivate: any): 
Promise<ClaimToken> { let vpTemplate = { "jti": "baab2cdccb38408d8f1179071fe37dbe", "scope": "openid did_authn verify", "vp": { "@context": [ "https://www.w3.org/2018/credentials/v1" ], "type": [ "VerifiablePresentation" ], "verifiableCredential": [] }, iss: `${setup.defaultUserDid}`, aud: `${setup.defaultIssuerDid}`, }; for (let inx = 0; inx < vcs.length; inx++) { (vpTemplate.vp.verifiableCredential as string[]).push(<string>vcs[inx].rawToken); } return IssuanceHelpers.signAToken(setup, vpTemplate, '', jwkPrivate); } /** * Generate a signing keys and set the configuration mock */ public static async generateSigningKey(_setup: TestSetup, kid: string): Promise<[any, any]> { const generator = new Subtle(); const key: any = await generator.generateKey( <any>{ name: "RSASSA-PKCS1-v1_5", modulusLength: 2048, publicExponent: new Uint8Array([0x01, 0x00, 0x01]), hash: { name: "SHA-256" }, }, true, ["sign", "verify"]); const jwkPublic = await generator.exportKey('jwk', key.publicKey); const jwkPrivate = await generator.exportKey('jwk', key.privateKey); (<any>jwkPrivate).kid = (<any>jwkPublic).kid = kid; return [jwkPrivate, jwkPublic]; } // Generate a signing keys and set the configuration mock public static async generateSigningKeyAndSetConfigurationMock(setup: TestSetup, kid: string, configuration?: string, issuer?: string): Promise<[any, any, string]> { // setup http mock configuration = configuration || setup.defaultIdTokenConfiguration; issuer = issuer || setup.tokenIssuer; const jwks = setup.defaultIdTokenJwksConfiguration; setup.fetchMock.get(configuration, { "jwks_uri": `${jwks}`, "issuer": `${issuer}` }, { overwriteRoutes: true }); const [jwkPrivate, jwkPublic] = await IssuanceHelpers.generateSigningKey(setup, kid); setup.fetchMock.get(jwks, `{"keys": [${JSON.stringify(jwkPublic)}]}`, { overwriteRoutes: true }); return [jwkPrivate, jwkPublic, configuration]; } // Set resolver mock public static async resolverMock(setup: TestSetup, did: string, jwkPrivate?: any, jwkPublic?: any): Promise<[DidDocument, any, any]> { // setup http mock if (!jwkPrivate && !jwkPublic) { [jwkPrivate, jwkPublic] = await IssuanceHelpers.generateSigningKey(setup, `${did}#signing`); } const didDocument = { didDocument: new DidDocument({ "@context": "https://w3id.org/did/v1", id: did, publicKey: <any>[{ id: jwkPublic.kid, type: 'RsaVerificationKey2018', controller: did, publicKeyJwk: jwkPublic }] }) }; (didDocument.didDocument as any)['@context'] = 'https://w3id.org/did/v1'; // Resolver mock const resolverUrl = `${setup.resolverUrl}/${did}`; setup.fetchMock.get(resolverUrl, didDocument, { overwriteRoutes: true }); return [didDocument.didDocument, jwkPrivate, jwkPublic]; } // Sign a token public static async signAToken(setup: TestSetup, payload: object, configuration: string, jwkPrivate: any): Promise<ClaimToken>
public static async createRequest( setup: TestSetup, tokenDescription: TokenType, issuance: boolean, idTokenIssuer?: string, idTokenAudience?: string, idTokenExp?: number): Promise<[ClaimToken, ValidationOptions, any]> { const options = new ValidationOptions(setup.validatorOptions, tokenDescription); const [didJwkPrivate, didJwkPublic] = await IssuanceHelpers.generateSigningKey(setup, setup.defaulUserDidKid); const [tokenJwkPrivate, tokenJwkPublic, tokenConfiguration] = await IssuanceHelpers.generateSigningKeyAndSetConfigurationMock(setup, setup.defaulIssuerDidKid); const [didDocument, jwkPrivate2, jwkPublic2] = await IssuanceHelpers.resolverMock(setup, setup.defaultUserDid, didJwkPrivate, didJwkPublic); const idTokenPayload = { upn: '[email protected]', name: 'Jules Winnfield', iss: idTokenIssuer ?? setup.tokenIssuer, aud: idTokenAudience ?? setup.tokenAudience, exp: idTokenExp ?? Math.trunc(Date.now() / 1000) + 10000, }; const idToken = await IssuanceHelpers.signAToken( setup, idTokenPayload, tokenConfiguration, tokenJwkPrivate); const vcConfiguration = 'https://vcexample.com/schema'; const vcPayload = { givenName: 'Jules', familyName: 'Winnfield' }; const vc = await IssuanceHelpers.createVc( setup, vcPayload, vcConfiguration, tokenJwkPrivate, tokenJwkPublic); const vp = await IssuanceHelpers.createVp(setup, [vc], didJwkPrivate); const si = IssuanceHelpers.createSelfIssuedToken({ name: 'jules', birthDate: new Date().toString() }); let attestations: { [claim: string]: any }; if (issuance) { attestations = { selfIssued: si.rawToken, idTokens: {}, presentations: {} }; attestations.idTokens[setup.defaultIdTokenConfiguration] = idToken.rawToken; attestations.presentations['DrivingLicense'] = vp.rawToken; } else { attestations = { presentations: {} }; attestations.presentations['DrivingLicense'] = vp.rawToken; } const contract = 'https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/contracts/test/schema'; const request = await IssuanceHelpers.createSiopRequest( setup, didJwkPrivate, issuance ? contract : undefined, '', attestations ); const vcContractIssuers: { [credentialType: string]: string[] } = {}; vcContractIssuers['DrivingLicense'] = [setup.defaultIssuerDid]; const idTokenConfiguration: string[] =[setup.defaultIdTokenConfiguration]; const expected: IExpectedBase[] = [ <IExpectedSelfIssued>{ type: TokenType.selfIssued }, <IExpectedIdToken>{ type: TokenType.idToken, configuration: idTokenConfiguration, audience: setup.AUDIENCE }, <IExpectedSiop>{ type: TokenType.siopIssuance, audience: setup.AUDIENCE }, <IExpectedSiop>{ type: TokenType.siopPresentationAttestation, audience: setup.AUDIENCE }, <IExpectedVerifiablePresentation>{ type: TokenType.verifiablePresentationJwt, didAudience: setup.defaultIssuerDid }, <IExpectedVerifiableCredential>{ type: TokenType.verifiableCredential, contractIssuers: vcContractIssuers } ]; const siopRequest = { didJwkPrivate, didJwkPublic, tokenJwkPrivate, didDocument, contract, attestations, tokenConfiguration, idToken, vp, vc, si, expected, jti: IssuanceHelpers.jti } return [request, options, siopRequest]; } }
{
    // Store the signing JWK so the crypto builder can resolve it by key reference.
    const keyId = new KeyReference(jwkPrivate.kid);
    await setup.keyStore.save(keyId, <any>jwkPrivate);
    setup.validatorOptions.crypto.builder.useSigningKeyReference(keyId);
    setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).builder.useKid(keyId.keyReference);
    // sign() computes the signature on the JWT protocol; serialize() then emits the compact token.
    const signature = await setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).sign(payload);
    const token = await setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).serialize();
    let claimToken = ClaimToken.create(token, configuration);
    return claimToken;
  }
identifier_body
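The middle above restores the body of `signAToken`. As a rough orientation for how the reassembled helper is meant to be driven, here is a minimal sketch; it assumes `TestSetup` is default-constructible, and the payload and kid values are illustrative:

```typescript
import TestSetup from './TestSetup';
import { IssuanceHelpers } from './IssuanceHelpers';

// Hypothetical driver; TestSetup and IssuanceHelpers come from the record above.
async function demoSignAToken(): Promise<void> {
  const setup = new TestSetup();
  // Create an RSA key pair whose kid links the JWK to the test DID.
  const [jwkPrivate] = await IssuanceHelpers.generateSigningKey(setup, `${setup.defaultUserDid}#signing`);
  // An empty configuration string yields a plain signed ClaimToken.
  const token = await IssuanceHelpers.signAToken(setup, { hello: 'world' }, '', jwkPrivate);
  console.log(<string>token.rawToken);
}
```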
IssuanceHelpers.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { JoseBuilder, Subtle } from 'verifiablecredentials-crypto-sdk-typescript'; import TestSetup from './TestSetup'; import { DidDocument } from '@decentralized-identity/did-common-typescript'; import ClaimToken, { TokenType } from '../lib/verifiable_credential/ClaimToken'; import ValidationOptions from '../lib/options/ValidationOptions'; import { KeyReference, IExpectedBase, IExpectedSelfIssued, IExpectedIdToken, IExpectedSiop, IExpectedVerifiablePresentation, IExpectedVerifiableCredential, JsonWebSignatureToken, TokenPayload, createJwkThumbprint } from '../lib/index'; import VerifiableCredentialConstants from '../lib/verifiable_credential/VerifiableCredentialConstants'; export class IssuanceHelpers { public static readonly jti: string = 'testJti'; /** * Create siop request */ public static async createSiopRequestWithPayload(setup: TestSetup, siop: any, key: any): Promise<ClaimToken> { const claimToken = await IssuanceHelpers.signAToken(setup, siop, '', key); return claimToken; } /** * Create siop request */ public static async createSiopRequest(setup: TestSetup, key: any, contract: string | undefined, nonce: string, attestations: any): Promise<ClaimToken> { let siop: TokenPayload = { nonce, contract, attestations, iss: 'https://self-issued.me', aud: setup.AUDIENCE, jti: IssuanceHelpers.jti, sub_jwk: key, sub: createJwkThumbprint(key), did: setup.defaultUserDid } if(setup.siopMutator){ siop = setup.siopMutator(siop); } return IssuanceHelpers.createSiopRequestWithPayload(setup, siop, key); } /** * Create a verifiable credentiaL * @param claims Credential claims */ public static createSelfIssuedToken(claims: TokenPayload): ClaimToken { const header = { alg: "none", typ: 'JWT' }; const jwt = JsonWebSignatureToken.encode(header, claims); return new ClaimToken(TokenType.selfIssued, jwt, ''); } /** * Create a verifiable credential * @param claims Token claims */ public static async createVc(setup: TestSetup, credentialSubject: TokenPayload, configuration: string, jwkPrivate: any, jwkPublic: any): Promise<ClaimToken> { // Set the mock because we will resolve the signing key as did await this.resolverMock(setup, setup.defaultIssuerDid, jwkPrivate, jwkPublic); const statusUrl = 'https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/status'; // Status mock setup.fetchMock.post(statusUrl, {}, { overwriteRoutes: true }); let vcTemplate = { "jti": "urn:pic:80a509d2-99d4-4d6c-86a7-7b2636944080", "vc": { "@context": [ "https://www.w3.org/2018/credentials/v1", "https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/contracts/test/schema" ], "type": [ "VerifiableCredential", "DrivingLicense" ], "credentialSubject": { }, "credentialStatus": { "id": `${statusUrl}`, "type": "PortableIdentityCardServiceCredentialStatus2020" } }, iss: `${setup.defaultIssuerDid}`, sub: `${setup.defaultUserDid}` }; vcTemplate.vc.credentialSubject = credentialSubject; return IssuanceHelpers.signAToken(setup, vcTemplate, configuration, jwkPrivate); } /** * Create a verifiable presentation * @param claims Token claims */ public static async createVp(setup: TestSetup, vcs: ClaimToken[], jwkPrivate: any): 
Promise<ClaimToken> { let vpTemplate = { "jti": "baab2cdccb38408d8f1179071fe37dbe", "scope": "openid did_authn verify", "vp": { "@context": [ "https://www.w3.org/2018/credentials/v1" ], "type": [ "VerifiablePresentation" ], "verifiableCredential": [] }, iss: `${setup.defaultUserDid}`, aud: `${setup.defaultIssuerDid}`, }; for (let inx = 0; inx < vcs.length; inx++) { (vpTemplate.vp.verifiableCredential as string[]).push(<string>vcs[inx].rawToken); } return IssuanceHelpers.signAToken(setup, vpTemplate, '', jwkPrivate); } /** * Generate a signing keys and set the configuration mock */ public static async generateSigningKey(_setup: TestSetup, kid: string): Promise<[any, any]> { const generator = new Subtle(); const key: any = await generator.generateKey( <any>{ name: "RSASSA-PKCS1-v1_5", modulusLength: 2048, publicExponent: new Uint8Array([0x01, 0x00, 0x01]), hash: { name: "SHA-256" }, }, true, ["sign", "verify"]); const jwkPublic = await generator.exportKey('jwk', key.publicKey); const jwkPrivate = await generator.exportKey('jwk', key.privateKey); (<any>jwkPrivate).kid = (<any>jwkPublic).kid = kid; return [jwkPrivate, jwkPublic]; } // Generate a signing keys and set the configuration mock public static async generateSigningKeyAndSetConfigurationMock(setup: TestSetup, kid: string, configuration?: string, issuer?: string): Promise<[any, any, string]> { // setup http mock configuration = configuration || setup.defaultIdTokenConfiguration; issuer = issuer || setup.tokenIssuer; const jwks = setup.defaultIdTokenJwksConfiguration; setup.fetchMock.get(configuration, { "jwks_uri": `${jwks}`, "issuer": `${issuer}` }, { overwriteRoutes: true }); const [jwkPrivate, jwkPublic] = await IssuanceHelpers.generateSigningKey(setup, kid); setup.fetchMock.get(jwks, `{"keys": [${JSON.stringify(jwkPublic)}]}`, { overwriteRoutes: true }); return [jwkPrivate, jwkPublic, configuration]; } // Set resolver mock public static async resolverMock(setup: TestSetup, did: string, jwkPrivate?: any, jwkPublic?: any): Promise<[DidDocument, any, any]> { // setup http mock if (!jwkPrivate && !jwkPublic) { [jwkPrivate, jwkPublic] = await IssuanceHelpers.generateSigningKey(setup, `${did}#signing`); } const didDocument = { didDocument: new DidDocument({ "@context": "https://w3id.org/did/v1", id: did, publicKey: <any>[{ id: jwkPublic.kid, type: 'RsaVerificationKey2018', controller: did, publicKeyJwk: jwkPublic }] }) }; (didDocument.didDocument as any)['@context'] = 'https://w3id.org/did/v1'; // Resolver mock const resolverUrl = `${setup.resolverUrl}/${did}`; setup.fetchMock.get(resolverUrl, didDocument, { overwriteRoutes: true }); return [didDocument.didDocument, jwkPrivate, jwkPublic]; } // Sign a token public static async
(setup: TestSetup, payload: object, configuration: string, jwkPrivate: any): Promise<ClaimToken> { const keyId = new KeyReference(jwkPrivate.kid); await setup.keyStore.save(keyId, <any>jwkPrivate); setup.validatorOptions.crypto.builder.useSigningKeyReference(keyId); setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).builder.useKid(keyId.keyReference); const signature = await setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).sign(payload); const token = await setup.validatorOptions.crypto.signingProtocol(JoseBuilder.JWT).serialize(); let claimToken = ClaimToken.create(token, configuration); return claimToken; } public static async createRequest( setup: TestSetup, tokenDescription: TokenType, issuance: boolean, idTokenIssuer?: string, idTokenAudience?: string, idTokenExp?: number): Promise<[ClaimToken, ValidationOptions, any]> { const options = new ValidationOptions(setup.validatorOptions, tokenDescription); const [didJwkPrivate, didJwkPublic] = await IssuanceHelpers.generateSigningKey(setup, setup.defaulUserDidKid); const [tokenJwkPrivate, tokenJwkPublic, tokenConfiguration] = await IssuanceHelpers.generateSigningKeyAndSetConfigurationMock(setup, setup.defaulIssuerDidKid); const [didDocument, jwkPrivate2, jwkPublic2] = await IssuanceHelpers.resolverMock(setup, setup.defaultUserDid, didJwkPrivate, didJwkPublic); const idTokenPayload = { upn: '[email protected]', name: 'Jules Winnfield', iss: idTokenIssuer ?? setup.tokenIssuer, aud: idTokenAudience ?? setup.tokenAudience, exp: idTokenExp ?? Math.trunc(Date.now() / 1000) + 10000, }; const idToken = await IssuanceHelpers.signAToken( setup, idTokenPayload, tokenConfiguration, tokenJwkPrivate); const vcConfiguration = 'https://vcexample.com/schema'; const vcPayload = { givenName: 'Jules', familyName: 'Winnfield' }; const vc = await IssuanceHelpers.createVc( setup, vcPayload, vcConfiguration, tokenJwkPrivate, tokenJwkPublic); const vp = await IssuanceHelpers.createVp(setup, [vc], didJwkPrivate); const si = IssuanceHelpers.createSelfIssuedToken({ name: 'jules', birthDate: new Date().toString() }); let attestations: { [claim: string]: any }; if (issuance) { attestations = { selfIssued: si.rawToken, idTokens: {}, presentations: {} }; attestations.idTokens[setup.defaultIdTokenConfiguration] = idToken.rawToken; attestations.presentations['DrivingLicense'] = vp.rawToken; } else { attestations = { presentations: {} }; attestations.presentations['DrivingLicense'] = vp.rawToken; } const contract = 'https://portableidentitycards.azure-api.net/42b39d9d-0cdd-4ae0-b251-b7b39a561f91/api/portable/v1.0/contracts/test/schema'; const request = await IssuanceHelpers.createSiopRequest( setup, didJwkPrivate, issuance ? 
contract : undefined, '', attestations ); const vcContractIssuers: { [credentialType: string]: string[] } = {}; vcContractIssuers['DrivingLicense'] = [setup.defaultIssuerDid]; const idTokenConfiguration: string[] =[setup.defaultIdTokenConfiguration]; const expected: IExpectedBase[] = [ <IExpectedSelfIssued>{ type: TokenType.selfIssued }, <IExpectedIdToken>{ type: TokenType.idToken, configuration: idTokenConfiguration, audience: setup.AUDIENCE }, <IExpectedSiop>{ type: TokenType.siopIssuance, audience: setup.AUDIENCE }, <IExpectedSiop>{ type: TokenType.siopPresentationAttestation, audience: setup.AUDIENCE }, <IExpectedVerifiablePresentation>{ type: TokenType.verifiablePresentationJwt, didAudience: setup.defaultIssuerDid }, <IExpectedVerifiableCredential>{ type: TokenType.verifiableCredential, contractIssuers: vcContractIssuers } ]; const siopRequest = { didJwkPrivate, didJwkPublic, tokenJwkPrivate, didDocument, contract, attestations, tokenConfiguration, idToken, vp, vc, si, expected, jti: IssuanceHelpers.jti } return [request, options, siopRequest]; } }
signAToken
identifier_name
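This duplicate record elides only the method name restored above as `signAToken`. For context, a hedged sketch of how the id-token mocks in this file compose; the kid string is made up and the payload minimal:

```typescript
// Sketch only; every name here comes from the IssuanceHelpers record above.
async function mockIdTokenIssuer(setup: TestSetup): Promise<string> {
  // Registers the OpenID configuration and JWKS mocks, then returns the keys.
  const [jwkPrivate, , configuration] =
    await IssuanceHelpers.generateSigningKeyAndSetConfigurationMock(setup, 'example-kid');
  const idToken = await IssuanceHelpers.signAToken(
    setup, { upn: 'test@example.com' }, configuration, jwkPrivate);
  return <string>idToken.rawToken;
}
```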
ui.rs
//! Implements how the user interfaces with the application. pub(crate) use crate::num::{Length, NonNegativeI32}; use crate::{fmt, Debug, Display, Formatter, Mrc, TryFrom, TryFromIntError}; use pancurses::Input; use std::cell::RefCell; use std::error; use std::rc::Rc; /// The [`Result`] returned by functions of this module. pub type Outcome = Result<(), Error>; /// The type specified by all grid index values /// /// Specified by [`pancurses`]. pub(crate) type IndexType = i32; /// The type of all grid index values. pub type Index = NonNegativeI32; /// The character that represents the `Backspace` key. pub const BACKSPACE: char = '\u{08}'; /// The character that represents the `Enter` key. pub(crate) const ENTER: char = '\n'; // Currently ESC is set to Ctrl-C to allow manual testing within vim terminal where ESC is already // mapped. /// The character that represents the `Esc` key. pub const ESC: char = ''; /// Represents the default color. const DEFAULT_COLOR: i16 = -1; /// Describes possible errors during ui functions. #[derive(Clone, Copy, Debug)] pub enum Error { /// Describes an error due to no user interface being created. NoUi, /// Describes a possible error during call to `endwin()`. Endwin, /// Describes a possible error during call to `flash()`. Flash, /// Describes a possible error during call to `init_pair()`. InitPair, /// Describes a possible error during call to `noecho()`. Noecho, /// Describes a possible error during call to `start_color()`. StartColor, /// Describes a possible error during call to `use_default_colors()`. UseDefaultColors, /// Describes a possible error during call to `waddch()`. Waddch, /// Describes a possible error during call to `waddstr()`. Waddstr, /// Describes a possible error during call to `wchgat()`. Wchgat, /// Describes a possible error during call to `wclear()`. Wclear, /// Describes a possible error during call to `wcleartoeol()`. Wcleartoeol, /// Describes a possible error during call to `wdelch()`. Wdelch, /// Describes a possible error during call to `winsch()`. Winsch, /// Describes a possible error during call to `wmove()`. Wmove, /// Describes a possible error during call to `nodelay()`. Nodelay, } impl Error { /// Returns the function that caused the current `Error`. fn get_function(&self) -> &str { match self { Error::Endwin => "endwin", Error::Flash => "flash", Error::InitPair => "init_pair", Error::Noecho => "noecho", Error::StartColor => "start_color", Error::UseDefaultColors => "use_default_colors", Error::Waddch => "waddch", Error::Waddstr => "waddstr", Error::Wchgat => "wchgat", Error::Wclear => "wclear", Error::Wcleartoeol => "wcleartoeol", Error::Wdelch => "wdelch", Error::Winsch => "winsch", Error::Wmove => "wmove", Error::Nodelay => "nodelay", Error::NoUi => "", } } } impl Display for Error { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Error::NoUi => write!(f, "No UserInterface was created."), _ => write!(f, "Failed while calling {}().", self.get_function()), } } } impl error::Error for Error {} /// Signifies a specific cell in the grid. #[derive(Clone, Copy, Eq, Debug, Default, Hash, Ord, PartialEq, PartialOrd)] pub struct Address { /// The index of the row that contains the cell (starts at 0). row: Index, /// The index of the column that contains the cell (starts at 0). column: Index, } impl Address { /// Creates a new `Address` with a given row and column. #[inline] pub fn new(row: Index, column: Index) -> Self { Self { row, column } } /// Returns the column of `self`. 
/// /// Used with [`pancurses`]. /// /// [`pancurses`]: ../../pancurses/index.html fn x(self) -> IndexType { IndexType::from(self.column) } /// Returns the row of `self`. /// /// Used with [`pancurses`]. /// /// [`pancurses`]: ../../pancurses/index.html fn y(self) -> IndexType { IndexType::from(self.row) } } impl Display for Address { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "({}, {})", self.row, self.column) } } /// Signifies a modification to the grid. #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub enum Change { /// Removes the previous cell, moving all subsequent cells to the left. Backspace, /// Clears all cells. Clear, /// Sets the color of a given number of cells. Format(Length, Color), /// Inserts a cell containing a character, moving all subsequent cells to the right. Insert(char), /// Does nothing. Nothing, /// Writes the characters of a string in sequence and clears all subsequent cells. Row(String), /// Flashes the display. Flash, } impl Default for Change { #[inline] fn default() -> Self { Change::Nothing } } impl Display for Change {
Change::Clear => write!(f, "Clear"), Change::Format(n, color) => write!(f, "Format {} cells to {}", n, color), Change::Insert(input) => write!(f, "Insert '{}'", input), Change::Nothing => write!(f, "Nothing"), Change::Row(row_str) => write!(f, "Write row '{}'", row_str), Change::Flash => write!(f, "Flash"), } } } /// Signifies a color. // Order must be kept as defined to match pancurses. #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum Color { /// The default foreground on the default background. Default, /// The default foreground on a red background. Red, /// The default foreground on a green background. Green, /// The default foreground on a yellow background. Yellow, /// The default foreground on a blue background. Blue, } impl Color { /// Converts `self` to a `color-pair` as specified in [`pancurses`]. fn cp(self) -> i16 { self as i16 } } impl Display for Color { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Color::Default => write!(f, "Default"), Color::Red => write!(f, "Red"), Color::Green => write!(f, "Green"), Color::Yellow => write!(f, "Yellow"), Color::Blue => write!(f, "Blue"), } } } /// Signifies a [`Change`] to make to an [`Address`]. /// /// [`Change`]: enum.Change.html /// [`Address`]: struct.Address.html #[derive(Clone, Debug, Default, Eq, Hash, PartialEq)] pub struct Edit { /// The [`Change`] to be made. change: Change, /// The [`Address`] on which the [`Change`] is intended. address: Option<Address>, } impl Edit { /// Creates a new `Edit`. #[inline] pub fn new(address: Option<Address>, change: Change) -> Self { Self { address, change } } } /// The interface between the user and the application. /// /// All output is displayed in a grid of cells. Each cell contains one character and can change its /// background color. pub trait UserInterface: Debug { /// Sets up the user interface for use. fn init(&self) -> Outcome; /// Closes the user interface. fn close(&self) -> Outcome; /// Returns the number of cells that make up the height of the grid. fn grid_height(&self) -> Result<Index, TryFromIntError>; /// Applies the edit to the output. fn apply(&self, edit: Edit) -> Outcome; /// Flashes the output. fn flash(&self) -> Outcome; /// Returns the input from the user. /// /// Returns [`None`] if no character input is provided. fn receive_input(&self) -> Option<Input>; } /// The user interface provided by a terminal. #[derive(Debug)] pub struct Terminal { /// The window that interfaces with the application. window: pancurses::Window, } impl Terminal { /// Creates a new `Terminal`. #[inline] pub fn new() -> Mrc<Self> { Rc::new(RefCell::new(Self { // Must call initscr() first. window: pancurses::initscr(), })) } /// Converts given result of ui function to a [`Outcome`]. fn process(result: i32, error: Error) -> Outcome { if result == pancurses::OK { Ok(()) } else { Err(error) } } /// Overwrites the block at cursor with a character. fn add_char(&self, c: char) -> Outcome { Self::process(self.window.addch(c), Error::Waddch) } /// Writes a string starting at the cursor. fn add_str(&self, s: String) -> Outcome { Self::process(self.window.addstr(s), Error::Waddstr) } /// Clears the entire window. fn clear_all(&self) -> Outcome { Self::process(self.window.clear(), Error::Wclear) } /// Clears all blocks from the cursor to the end of the row. fn clear_to_row_end(&self) -> Outcome { Self::process(self.window.clrtoeol(), Error::Wcleartoeol) } /// Defines [`Color`] as having a background color. 
fn define_color(&self, color: Color, background: i16) -> Outcome {
        Self::process(
            pancurses::init_pair(color.cp(), DEFAULT_COLOR, background),
            Error::InitPair,
        )
    }

    /// Deletes the character at the cursor.
    ///
    /// All subsequent characters are shifted to the left and a blank block is added at the end.
    fn delete_char(&self) -> Outcome {
        Self::process(self.window.delch(), Error::Wdelch)
    }

    /// Disables echoing received characters on the screen.
    fn disable_echo(&self) -> Outcome {
        Self::process(pancurses::noecho(), Error::Noecho)
    }

    /// Sets user interface to not wait for an input.
    fn enable_nodelay(&self) -> Outcome {
        Self::process(self.window.nodelay(true), Error::Nodelay)
    }

    /// Sets the color of the next specified number of blocks from the cursor.
    fn format(&self, length: Length, color: Color) -> Outcome {
        Self::process(
            self.window
                .chgat(i32::from(length), pancurses::A_NORMAL, color.cp()),
            Error::Wchgat,
        )
    }

    /// Inserts a character at the cursor, shifting all subsequent blocks to the right.
    fn insert_char(&self, c: char) -> Outcome {
        Self::process(self.window.insch(c), Error::Winsch)
    }

    /// Moves the cursor to an [`Address`].
    fn move_to(&self, address: Address) -> Outcome {
        Self::process(self.window.mv(address.y(), address.x()), Error::Wmove)
    }

    /// Initializes color processing.
    ///
    /// Must be called before any other color manipulation routine is called.
    fn start_color(&self) -> Outcome {
        Self::process(pancurses::start_color(), Error::StartColor)
    }

    /// Initializes the default colors.
    fn use_default_colors(&self) -> Outcome {
        Self::process(pancurses::use_default_colors(), Error::UseDefaultColors)
    }
}

impl UserInterface for Terminal {
    #[inline]
    fn init(&self) -> Outcome {
        self.start_color()?;
        self.use_default_colors()?;
        self.disable_echo()?;
        self.enable_nodelay()?;
        self.define_color(Color::Red, pancurses::COLOR_RED)?;
        self.define_color(Color::Blue, pancurses::COLOR_BLUE)?;
        Ok(())
    }

    #[inline]
    fn close(&self) -> Outcome {
        Self::process(pancurses::endwin(), Error::Endwin)
    }

    #[inline]
    fn flash(&self) -> Outcome {
        Self::process(pancurses::flash(), Error::Flash)
    }

    #[inline]
    fn apply(&self, edit: Edit) -> Outcome {
        if let Some(address) = edit.address {
            self.move_to(address)?;
        }

        match edit.change {
            Change::Backspace => {
                // Add BACKSPACE (move cursor 1 cell to the left) and delete that character.
                self.add_char(BACKSPACE)?;
                self.delete_char()
            }
            Change::Clear => self.clear_all(),
            Change::Format(n, color) => self.format(n, color),
            Change::Insert(c) => self.insert_char(c),
            Change::Nothing => Ok(()),
            Change::Row(s) => {
                self.add_str(s)?;
                self.clear_to_row_end()
            }
            Change::Flash => self.flash(),
        }
    }

    // TODO: Store this value and update when size is changed.
    #[inline]
    fn grid_height(&self) -> Result<Index, TryFromIntError> {
        Index::try_from(self.window.get_max_y())
    }

    #[inline]
    fn receive_input(&self) -> Option<Input> {
        self.window.getch()
    }
}
#[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Change::Backspace => write!(f, "Backspace"),
random_line_split
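The random split above lands inside `impl Display for Change`, whose opening lines form the middle. A small sketch of what that impl provides, using only variants that need no extra types (the values are illustrative):

```rust
// Illustrative only; Change and its Display impl come from the ui.rs record above.
fn log_changes() {
    let changes = vec![
        Change::Clear,
        Change::Insert('x'),
        Change::Row(String::from("status: ok")),
        Change::Flash,
    ];
    for change in &changes {
        // Each line uses the Display impl completed by this record's middle.
        println!("applying: {}", change);
    }
}
```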
ui.rs
//! Implements how the user interfaces with the application. pub(crate) use crate::num::{Length, NonNegativeI32}; use crate::{fmt, Debug, Display, Formatter, Mrc, TryFrom, TryFromIntError}; use pancurses::Input; use std::cell::RefCell; use std::error; use std::rc::Rc; /// The [`Result`] returned by functions of this module. pub type Outcome = Result<(), Error>; /// The type specified by all grid index values /// /// Specified by [`pancurses`]. pub(crate) type IndexType = i32; /// The type of all grid index values. pub type Index = NonNegativeI32; /// The character that represents the `Backspace` key. pub const BACKSPACE: char = '\u{08}'; /// The character that represents the `Enter` key. pub(crate) const ENTER: char = '\n'; // Currently ESC is set to Ctrl-C to allow manual testing within vim terminal where ESC is already // mapped. /// The character that represents the `Esc` key. pub const ESC: char = ''; /// Represents the default color. const DEFAULT_COLOR: i16 = -1; /// Describes possible errors during ui functions. #[derive(Clone, Copy, Debug)] pub enum Error { /// Describes an error due to no user interface being created. NoUi, /// Describes a possible error during call to `endwin()`. Endwin, /// Describes a possible error during call to `flash()`. Flash, /// Describes a possible error during call to `init_pair()`. InitPair, /// Describes a possible error during call to `noecho()`. Noecho, /// Describes a possible error during call to `start_color()`. StartColor, /// Describes a possible error during call to `use_default_colors()`. UseDefaultColors, /// Describes a possible error during call to `waddch()`. Waddch, /// Describes a possible error during call to `waddstr()`. Waddstr, /// Describes a possible error during call to `wchgat()`. Wchgat, /// Describes a possible error during call to `wclear()`. Wclear, /// Describes a possible error during call to `wcleartoeol()`. Wcleartoeol, /// Describes a possible error during call to `wdelch()`. Wdelch, /// Describes a possible error during call to `winsch()`. Winsch, /// Describes a possible error during call to `wmove()`. Wmove, /// Describes a possible error during call to `nodelay()`. Nodelay, } impl Error { /// Returns the function that caused the current `Error`. fn get_function(&self) -> &str { match self { Error::Endwin => "endwin", Error::Flash => "flash", Error::InitPair => "init_pair", Error::Noecho => "noecho", Error::StartColor => "start_color", Error::UseDefaultColors => "use_default_colors", Error::Waddch => "waddch", Error::Waddstr => "waddstr", Error::Wchgat => "wchgat", Error::Wclear => "wclear", Error::Wcleartoeol => "wcleartoeol", Error::Wdelch => "wdelch", Error::Winsch => "winsch", Error::Wmove => "wmove", Error::Nodelay => "nodelay", Error::NoUi => "", } } } impl Display for Error { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Error::NoUi => write!(f, "No UserInterface was created."), _ => write!(f, "Failed while calling {}().", self.get_function()), } } } impl error::Error for Error {} /// Signifies a specific cell in the grid. #[derive(Clone, Copy, Eq, Debug, Default, Hash, Ord, PartialEq, PartialOrd)] pub struct Address { /// The index of the row that contains the cell (starts at 0). row: Index, /// The index of the column that contains the cell (starts at 0). column: Index, } impl Address { /// Creates a new `Address` with a given row and column. #[inline] pub fn new(row: Index, column: Index) -> Self { Self { row, column } } /// Returns the column of `self`. 
/// /// Used with [`pancurses`]. /// /// [`pancurses`]: ../../pancurses/index.html fn x(self) -> IndexType { IndexType::from(self.column) } /// Returns the row of `self`. /// /// Used with [`pancurses`]. /// /// [`pancurses`]: ../../pancurses/index.html fn y(self) -> IndexType { IndexType::from(self.row) } } impl Display for Address { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "({}, {})", self.row, self.column) } } /// Signifies a modification to the grid. #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub enum Change { /// Removes the previous cell, moving all subsequent cells to the left. Backspace, /// Clears all cells. Clear, /// Sets the color of a given number of cells. Format(Length, Color), /// Inserts a cell containing a character, moving all subsequent cells to the right. Insert(char), /// Does nothing. Nothing, /// Writes the characters of a string in sequence and clears all subsequent cells. Row(String), /// Flashes the display. Flash, } impl Default for Change { #[inline] fn default() -> Self { Change::Nothing } } impl Display for Change { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Change::Backspace => write!(f, "Backspace"), Change::Clear => write!(f, "Clear"), Change::Format(n, color) => write!(f, "Format {} cells to {}", n, color), Change::Insert(input) => write!(f, "Insert '{}'", input), Change::Nothing => write!(f, "Nothing"), Change::Row(row_str) => write!(f, "Write row '{}'", row_str), Change::Flash => write!(f, "Flash"), } } } /// Signifies a color. // Order must be kept as defined to match pancurses. #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum Color { /// The default foreground on the default background. Default, /// The default foreground on a red background. Red, /// The default foreground on a green background. Green, /// The default foreground on a yellow background. Yellow, /// The default foreground on a blue background. Blue, } impl Color { /// Converts `self` to a `color-pair` as specified in [`pancurses`]. fn cp(self) -> i16 { self as i16 } } impl Display for Color { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result
}

/// Signifies a [`Change`] to make to an [`Address`].
///
/// [`Change`]: enum.Change.html
/// [`Address`]: struct.Address.html
#[derive(Clone, Debug, Default, Eq, Hash, PartialEq)]
pub struct Edit {
    /// The [`Change`] to be made.
    change: Change,
    /// The [`Address`] on which the [`Change`] is intended.
    address: Option<Address>,
}

impl Edit {
    /// Creates a new `Edit`.
    #[inline]
    pub fn new(address: Option<Address>, change: Change) -> Self {
        Self { address, change }
    }
}

/// The interface between the user and the application.
///
/// All output is displayed in a grid of cells. Each cell contains one character and can change its
/// background color.
pub trait UserInterface: Debug {
    /// Sets up the user interface for use.
    fn init(&self) -> Outcome;
    /// Closes the user interface.
    fn close(&self) -> Outcome;
    /// Returns the number of cells that make up the height of the grid.
    fn grid_height(&self) -> Result<Index, TryFromIntError>;
    /// Applies the edit to the output.
    fn apply(&self, edit: Edit) -> Outcome;
    /// Flashes the output.
    fn flash(&self) -> Outcome;
    /// Returns the input from the user.
    ///
    /// Returns [`None`] if no character input is provided.
    fn receive_input(&self) -> Option<Input>;
}

/// The user interface provided by a terminal.
#[derive(Debug)]
pub struct Terminal {
    /// The window that interfaces with the application.
    window: pancurses::Window,
}

impl Terminal {
    /// Creates a new `Terminal`.
    #[inline]
    pub fn new() -> Mrc<Self> {
        Rc::new(RefCell::new(Self {
            // Must call initscr() first.
            window: pancurses::initscr(),
        }))
    }

    /// Converts given result of ui function to a [`Outcome`].
    fn process(result: i32, error: Error) -> Outcome {
        if result == pancurses::OK {
            Ok(())
        } else {
            Err(error)
        }
    }

    /// Overwrites the block at cursor with a character.
    fn add_char(&self, c: char) -> Outcome {
        Self::process(self.window.addch(c), Error::Waddch)
    }

    /// Writes a string starting at the cursor.
    fn add_str(&self, s: String) -> Outcome {
        Self::process(self.window.addstr(s), Error::Waddstr)
    }

    /// Clears the entire window.
    fn clear_all(&self) -> Outcome {
        Self::process(self.window.clear(), Error::Wclear)
    }

    /// Clears all blocks from the cursor to the end of the row.
    fn clear_to_row_end(&self) -> Outcome {
        Self::process(self.window.clrtoeol(), Error::Wcleartoeol)
    }

    /// Defines [`Color`] as having a background color.
    fn define_color(&self, color: Color, background: i16) -> Outcome {
        Self::process(
            pancurses::init_pair(color.cp(), DEFAULT_COLOR, background),
            Error::InitPair,
        )
    }

    /// Deletes the character at the cursor.
    ///
    /// All subsequent characters are shifted to the left and a blank block is added at the end.
    fn delete_char(&self) -> Outcome {
        Self::process(self.window.delch(), Error::Wdelch)
    }

    /// Disables echoing received characters on the screen.
    fn disable_echo(&self) -> Outcome {
        Self::process(pancurses::noecho(), Error::Noecho)
    }

    /// Sets user interface to not wait for an input.
    fn enable_nodelay(&self) -> Outcome {
        Self::process(self.window.nodelay(true), Error::Nodelay)
    }

    /// Sets the color of the next specified number of blocks from the cursor.
    fn format(&self, length: Length, color: Color) -> Outcome {
        Self::process(
            self.window
                .chgat(i32::from(length), pancurses::A_NORMAL, color.cp()),
            Error::Wchgat,
        )
    }

    /// Inserts a character at the cursor, shifting all subsequent blocks to the right.
    fn insert_char(&self, c: char) -> Outcome {
        Self::process(self.window.insch(c), Error::Winsch)
    }

    /// Moves the cursor to an [`Address`].
fn move_to(&self, address: Address) -> Outcome { Self::process(self.window.mv(address.y(), address.x()), Error::Wmove) } /// Initializes color processing. /// /// Must be called before any other color manipulation routine is called. fn start_color(&self) -> Outcome { Self::process(pancurses::start_color(), Error::StartColor) } /// Initializes the default colors. fn use_default_colors(&self) -> Outcome { Self::process(pancurses::use_default_colors(), Error::UseDefaultColors) } } impl UserInterface for Terminal { #[inline] fn init(&self) -> Outcome { self.start_color()?; self.use_default_colors()?; self.disable_echo()?; self.enable_nodelay()?; self.define_color(Color::Red, pancurses::COLOR_RED)?; self.define_color(Color::Blue, pancurses::COLOR_BLUE)?; Ok(()) } #[inline] fn close(&self) -> Outcome { Self::process(pancurses::endwin(), Error::Endwin) } #[inline] fn flash(&self) -> Outcome { Self::process(pancurses::flash(), Error::Flash) } #[inline] fn apply(&self, edit: Edit) -> Outcome { if let Some(address) = edit.address { self.move_to(address)?; } match edit.change { Change::Backspace => { // Add BACKSPACE (move cursor 1 cell to the left) and delete that character. self.add_char(BACKSPACE)?; self.delete_char() } Change::Clear => self.clear_all(), Change::Format(n, color) => self.format(n, color), Change::Insert(c) => self.insert_char(c), Change::Nothing => Ok(()), Change::Row(s) => { self.add_str(s)?; self.clear_to_row_end() } Change::Flash => self.flash(), } } // TODO: Store this value and update when size is changed. #[inline] fn grid_height(&self) -> Result<Index, TryFromIntError> { Index::try_from(self.window.get_max_y()) } #[inline] fn receive_input(&self) -> Option<Input> { self.window.getch() } }
{ match self { Color::Default => write!(f, "Default"), Color::Red => write!(f, "Red"), Color::Green => write!(f, "Green"), Color::Yellow => write!(f, "Yellow"), Color::Blue => write!(f, "Blue"), } }
identifier_body
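Here the middle is the body of `Display for Color`. Since `cp` is private, a check like the following would have to live inside the module itself; the pairing below follows from the enum's declaration order (a sketch, not a test from the original crate):

```rust
// Hypothetical in-module test; Color comes from the ui.rs record above.
#[cfg(test)]
mod color_pairing {
    use super::Color;

    #[test]
    fn display_and_pair_index_agree() {
        // Display text matches the variant name...
        assert_eq!(Color::Red.to_string(), "Red");
        // ...and cp() is just the declaration-order discriminant as i16.
        assert_eq!(Color::Default.cp(), 0);
        assert_eq!(Color::Red.cp(), 1);
    }
}
```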
ui.rs
//! Implements how the user interfaces with the application. pub(crate) use crate::num::{Length, NonNegativeI32}; use crate::{fmt, Debug, Display, Formatter, Mrc, TryFrom, TryFromIntError}; use pancurses::Input; use std::cell::RefCell; use std::error; use std::rc::Rc; /// The [`Result`] returned by functions of this module. pub type Outcome = Result<(), Error>; /// The type specified by all grid index values /// /// Specified by [`pancurses`]. pub(crate) type IndexType = i32; /// The type of all grid index values. pub type Index = NonNegativeI32; /// The character that represents the `Backspace` key. pub const BACKSPACE: char = '\u{08}'; /// The character that represents the `Enter` key. pub(crate) const ENTER: char = '\n'; // Currently ESC is set to Ctrl-C to allow manual testing within vim terminal where ESC is already // mapped. /// The character that represents the `Esc` key. pub const ESC: char = ''; /// Represents the default color. const DEFAULT_COLOR: i16 = -1; /// Describes possible errors during ui functions. #[derive(Clone, Copy, Debug)] pub enum Error { /// Describes an error due to no user interface being created. NoUi, /// Describes a possible error during call to `endwin()`. Endwin, /// Describes a possible error during call to `flash()`. Flash, /// Describes a possible error during call to `init_pair()`. InitPair, /// Describes a possible error during call to `noecho()`. Noecho, /// Describes a possible error during call to `start_color()`. StartColor, /// Describes a possible error during call to `use_default_colors()`. UseDefaultColors, /// Describes a possible error during call to `waddch()`. Waddch, /// Describes a possible error during call to `waddstr()`. Waddstr, /// Describes a possible error during call to `wchgat()`. Wchgat, /// Describes a possible error during call to `wclear()`. Wclear, /// Describes a possible error during call to `wcleartoeol()`. Wcleartoeol, /// Describes a possible error during call to `wdelch()`. Wdelch, /// Describes a possible error during call to `winsch()`. Winsch, /// Describes a possible error during call to `wmove()`. Wmove, /// Describes a possible error during call to `nodelay()`. Nodelay, } impl Error { /// Returns the function that caused the current `Error`. fn get_function(&self) -> &str { match self { Error::Endwin => "endwin", Error::Flash => "flash", Error::InitPair => "init_pair", Error::Noecho => "noecho", Error::StartColor => "start_color", Error::UseDefaultColors => "use_default_colors", Error::Waddch => "waddch", Error::Waddstr => "waddstr", Error::Wchgat => "wchgat", Error::Wclear => "wclear", Error::Wcleartoeol => "wcleartoeol", Error::Wdelch => "wdelch", Error::Winsch => "winsch", Error::Wmove => "wmove", Error::Nodelay => "nodelay", Error::NoUi => "", } } } impl Display for Error { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Error::NoUi => write!(f, "No UserInterface was created."), _ => write!(f, "Failed while calling {}().", self.get_function()), } } } impl error::Error for Error {} /// Signifies a specific cell in the grid. #[derive(Clone, Copy, Eq, Debug, Default, Hash, Ord, PartialEq, PartialOrd)] pub struct Address { /// The index of the row that contains the cell (starts at 0). row: Index, /// The index of the column that contains the cell (starts at 0). column: Index, } impl Address { /// Creates a new `Address` with a given row and column. #[inline] pub fn new(row: Index, column: Index) -> Self { Self { row, column } } /// Returns the column of `self`. 
/// /// Used with [`pancurses`]. /// /// [`pancurses`]: ../../pancurses/index.html fn x(self) -> IndexType { IndexType::from(self.column) } /// Returns the row of `self`. /// /// Used with [`pancurses`]. /// /// [`pancurses`]: ../../pancurses/index.html fn y(self) -> IndexType { IndexType::from(self.row) } } impl Display for Address { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "({}, {})", self.row, self.column) } } /// Signifies a modification to the grid. #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub enum Change { /// Removes the previous cell, moving all subsequent cells to the left. Backspace, /// Clears all cells. Clear, /// Sets the color of a given number of cells. Format(Length, Color), /// Inserts a cell containing a character, moving all subsequent cells to the right. Insert(char), /// Does nothing. Nothing, /// Writes the characters of a string in sequence and clears all subsequent cells. Row(String), /// Flashes the display. Flash, } impl Default for Change { #[inline] fn default() -> Self { Change::Nothing } } impl Display for Change { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Change::Backspace => write!(f, "Backspace"), Change::Clear => write!(f, "Clear"), Change::Format(n, color) => write!(f, "Format {} cells to {}", n, color), Change::Insert(input) => write!(f, "Insert '{}'", input), Change::Nothing => write!(f, "Nothing"), Change::Row(row_str) => write!(f, "Write row '{}'", row_str), Change::Flash => write!(f, "Flash"), } } } /// Signifies a color. // Order must be kept as defined to match pancurses. #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum Color { /// The default foreground on the default background. Default, /// The default foreground on a red background. Red, /// The default foreground on a green background. Green, /// The default foreground on a yellow background. Yellow, /// The default foreground on a blue background. Blue, } impl Color { /// Converts `self` to a `color-pair` as specified in [`pancurses`]. fn cp(self) -> i16 { self as i16 } } impl Display for Color { #[inline] fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Color::Default => write!(f, "Default"), Color::Red => write!(f, "Red"), Color::Green => write!(f, "Green"), Color::Yellow => write!(f, "Yellow"), Color::Blue => write!(f, "Blue"), } } } /// Signifies a [`Change`] to make to an [`Address`]. /// /// [`Change`]: enum.Change.html /// [`Address`]: struct.Address.html #[derive(Clone, Debug, Default, Eq, Hash, PartialEq)] pub struct Edit { /// The [`Change`] to be made. change: Change, /// The [`Address`] on which the [`Change`] is intended. address: Option<Address>, } impl Edit { /// Creates a new `Edit`. #[inline] pub fn new(address: Option<Address>, change: Change) -> Self { Self { address, change } } } /// The interface between the user and the application. /// /// All output is displayed in a grid of cells. Each cell contains one character and can change its /// background color. pub trait UserInterface: Debug { /// Sets up the user interface for use. fn init(&self) -> Outcome; /// Closes the user interface. fn close(&self) -> Outcome; /// Returns the number of cells that make up the height of the grid. fn grid_height(&self) -> Result<Index, TryFromIntError>; /// Applies the edit to the output. fn apply(&self, edit: Edit) -> Outcome; /// Flashes the output. fn flash(&self) -> Outcome; /// Returns the input from the user. /// /// Returns [`None`] if no character input is provided. 
fn receive_input(&self) -> Option<Input>;
}

/// The user interface provided by a terminal.
#[derive(Debug)]
pub struct Terminal {
    /// The window that interfaces with the application.
    window: pancurses::Window,
}

impl Terminal {
    /// Creates a new `Terminal`.
    #[inline]
    pub fn new() -> Mrc<Self> {
        Rc::new(RefCell::new(Self {
            // Must call initscr() first.
            window: pancurses::initscr(),
        }))
    }

    /// Converts given result of ui function to a [`Outcome`].
    fn process(result: i32, error: Error) -> Outcome {
        if result == pancurses::OK {
            Ok(())
        } else {
            Err(error)
        }
    }

    /// Overwrites the block at cursor with a character.
    fn add_char(&self, c: char) -> Outcome {
        Self::process(self.window.addch(c), Error::Waddch)
    }

    /// Writes a string starting at the cursor.
    fn add_str(&self, s: String) -> Outcome {
        Self::process(self.window.addstr(s), Error::Waddstr)
    }

    /// Clears the entire window.
    fn clear_all(&self) -> Outcome {
        Self::process(self.window.clear(), Error::Wclear)
    }

    /// Clears all blocks from the cursor to the end of the row.
    fn clear_to_row_end(&self) -> Outcome {
        Self::process(self.window.clrtoeol(), Error::Wcleartoeol)
    }

    /// Defines [`Color`] as having a background color.
    fn define_color(&self, color: Color, background: i16) -> Outcome {
        Self::process(
            pancurses::init_pair(color.cp(), DEFAULT_COLOR, background),
            Error::InitPair,
        )
    }

    /// Deletes the character at the cursor.
    ///
    /// All subsequent characters are shifted to the left and a blank block is added at the end.
    fn
(&self) -> Outcome { Self::process(self.window.delch(), Error::Wdelch) } /// Disables echoing received characters on the screen. fn disable_echo(&self) -> Outcome { Self::process(pancurses::noecho(), Error::Noecho) } /// Sets user interface to not wait for an input. fn enable_nodelay(&self) -> Outcome { Self::process(self.window.nodelay(true), Error::Nodelay) } /// Sets the color of the next specified number of blocks from the cursor. fn format(&self, length: Length, color: Color) -> Outcome { Self::process( self.window .chgat(i32::from(length), pancurses::A_NORMAL, color.cp()), Error::Wchgat, ) } /// Inserts a character at the cursor, shifting all subsequent blocks to the right. fn insert_char(&self, c: char) -> Outcome { Self::process(self.window.insch(c), Error::Winsch) } /// Moves the cursor to an [`Address`]. fn move_to(&self, address: Address) -> Outcome { Self::process(self.window.mv(address.y(), address.x()), Error::Wmove) } /// Initializes color processing. /// /// Must be called before any other color manipulation routine is called. fn start_color(&self) -> Outcome { Self::process(pancurses::start_color(), Error::StartColor) } /// Initializes the default colors. fn use_default_colors(&self) -> Outcome { Self::process(pancurses::use_default_colors(), Error::UseDefaultColors) } } impl UserInterface for Terminal { #[inline] fn init(&self) -> Outcome { self.start_color()?; self.use_default_colors()?; self.disable_echo()?; self.enable_nodelay()?; self.define_color(Color::Red, pancurses::COLOR_RED)?; self.define_color(Color::Blue, pancurses::COLOR_BLUE)?; Ok(()) } #[inline] fn close(&self) -> Outcome { Self::process(pancurses::endwin(), Error::Endwin) } #[inline] fn flash(&self) -> Outcome { Self::process(pancurses::flash(), Error::Flash) } #[inline] fn apply(&self, edit: Edit) -> Outcome { if let Some(address) = edit.address { self.move_to(address)?; } match edit.change { Change::Backspace => { // Add BACKSPACE (move cursor 1 cell to the left) and delete that character. self.add_char(BACKSPACE)?; self.delete_char() } Change::Clear => self.clear_all(), Change::Format(n, color) => self.format(n, color), Change::Insert(c) => self.insert_char(c), Change::Nothing => Ok(()), Change::Row(s) => { self.add_str(s)?; self.clear_to_row_end() } Change::Flash => self.flash(), } } // TODO: Store this value and update when size is changed. #[inline] fn grid_height(&self) -> Result<Index, TryFromIntError> { Index::try_from(self.window.get_max_y()) } #[inline] fn receive_input(&self) -> Option<Input> { self.window.getch() } }
delete_char
identifier_name
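This copy elides only the name `delete_char`, restored above. As a complement, a sketch of how `Edit` values feed `UserInterface::apply`; the driver function is hypothetical, but every call matches a signature in the record:

```rust
// Hypothetical driver over the UserInterface trait from the record above.
fn redraw_row(ui: &dyn UserInterface, text: &str) -> Outcome {
    // No address means "apply at the current cursor position".
    ui.apply(Edit::new(None, Change::Row(String::from(text))))?;
    ui.flash()
}
```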
command.py
"""common/command.py Classes and utility functions for working with and creating ZeroBot commands. """ from __future__ import annotations from argparse import ArgumentParser, _SubParsersAction from dataclasses import dataclass, field from functools import partial from typing import Any, Optional, Union from ZeroBot.common.abc import Channel, Message, User from ZeroBot.common.enums import HelpType from ZeroBot.exceptions import CommandAlreadyRegistered, CommandParseError from ZeroBot.module import Module from ZeroBot.util import gen_repr __all__ = ["CommandHelp", "CommandParser", "ParsedCommand"] class _NoExitArgumentParser(ArgumentParser): """Modified `argparse.ArgumentParser` that doesn't exit on errors.""" # NOTE: Python 3.9 will add an `exit_on_error` parameter that will stop # argparse from exiting instead of having to override exit and error. def exit(self, status=0, message=None): pass def error(self, message): raise CommandParseError(message, cmd_name=self.prog) class CommandParser(_NoExitArgumentParser): """Definition and parser for ZeroBot commands. Creation of a `CommandParser` object necessarily entails defining the command itself: its name, what arguments and options it accepts, how they behave, etc. It is both the blueprint and interpreter for a command. Attributes ---------- name : str, optional The name of the command, i.e. how the command will be invoked. May be omitted, but this only makes sense when creating a parent parser for another parser. description : str, optional A short description of the command. May be omitted. usage : str, optional The text shown as the "usage" line in the command's help text. If omitted, it will be automatically generated by `argparse`. kwargs Any extra keyword arguments are passed to the underlying `argparse.ArgumentParser` constructor. Notes ----- Under the hood, `CommandParser` is simply a wrapper around an `argparse.ArgumentParser` with some ZeroBot-related members. """ def __init__( self, name: Optional[str] = None, description: Optional[str] = None, usage: Optional[str] = None, **kwargs ): # NOTE: Might be able to make use of formatter_class if need be if not name: name = kwargs.get("name", kwargs.get("prog")) kwargs.update( { "prog": name, "description": description, "usage": usage, "add_help": False, } ) super().__init__(**kwargs) self.name = name self._module = None # More minimal default argument grouping blank_group = self.add_argument_group() self._optionals = blank_group self._positionals = blank_group def __repr__(self): attrs = ["name", "description", "module"] return gen_repr(self, attrs) def __str__(self): return self.name def make_adder(self, *args, **kwargs): """Helper shortcut for creating subcommands. Accepts arguments for `add_subparsers`, creating a new subparser and returning a partial function wrapping `add_subcommand` for the new subparser. If the `dest` argument isn't specified, it defaults to `'subcmd'`. Example ------- cmd_foo = CommandParser('foo', 'Does foo stuff') foo_adder = cmd_foo.make_adder(metavar='OPERATION', required=True) bar_subcmd = foo_adder('bar', description='Does bar stuff to foo') """ kwargs.setdefault("dest", "subcmd") subp = self.add_subparsers(*args, **kwargs) return partial(self.add_subcommand, subp) @staticmethod def add_subcommand( subp: _SubParsersAction, name: str, description: Optional[str] = None, **kwargs ) -> "CommandParser": """Helper method for adding subcommands. Wrapper around `add_parser` that simplifies adding subcommands to ZeroBot commands. 
The same string is used for both the `description` and `help` parameters of `add_parser`. Parameters ---------- subp : Result of calling the `add_subparsers` method. The subparser object returned from the `add_subparsers` method. name : str The name of the subcommand. description : str, optional A short description of the command. May be omitted. The `help` parameter will be set to this value automatically. kwargs Extra arguments to pass to the `CommandParser` constructor. """ desc_help = {"description": description, "help": description} return subp.add_parser(name, **desc_help, **kwargs) @property def module(self) -> Optional[Module]: """The module that this command is registered to. Will return `None` if this command has not yet been registered. """ return self._module @dataclass class ParsedCommand: """A successfully parsed command with invoker and destination info. ZeroBot's `Core` will send these as the payload of `module_command_*` events. Attributes ---------- name : str The command name. args : dict A dictionary of the resultant parsed arguments and options and their values. parser : CommandParser The parser that created this instance. msg : Message The original message encompassing the command. invoker source subcmd """ name: str args: dict[str, Any] parser: CommandParser msg: Message def __post_init__(self): # pylint: disable=protected-access try: action = self.parser._actions[0] if isinstance(action, _SubParsersAction): name_map = action.choices canon_parser = name_map[self.args[action.dest]] self._subcmd = canon_parser.name.split()[-1] else: self._subcmd = None except (KeyError, IndexError): self._subcmd = None @property def invoker(self) -> User: """The User that invoked the command.""" return self.msg.source @property def source(self) -> Union[User, Channel]: """Where the command was sent from. Can be either directly from a user, or from a user within a channel. """ return self.msg.destination @property def subcmd(self) -> Optional[str]: """The invoked subcommand name, if one was invoked. For subcommands with aliases, the name returned is always the canonical name that the aliases are associated with. For this reason, this attribute should be preferred to extracting the subcommand name from `ParsedCommand.args`. """ return self._subcmd def nested_subcmd(self, depth: int = 2) -> Optional[str]: """Get the name of a nested subcommand. Like the `subcmd` property, the name returned is always the canonical name for the subcommand. The `depth` parameter determines how many levels of nesting to traverse; the default of ``2`` gets the first nested subcommand. As a consequence, a value of ``1`` is the same as `subcmd`. """ # pylint: disable=protected-access current = 0 subparser = self.parser try: while current < depth:
return subparser.name.split()[-1]
        except (IndexError, KeyError, TypeError):
            return None


@dataclass
class CommandHelp:
    """Encapsulates the result of a command help request.

    ZeroBot's `Core` will create and pass these to `core_command_help`
    callbacks.

    Attributes
    ----------
    type : HelpType
        An enum type representing the type of help request.
    name : str, optional
        The command or module name that the help is about.
    aliases : list, optional
        If applicable, a list of aliases for this command.
    description : str, optional
        The command or module description.
    usage : str, optional
        The "usage" string for the command.
    args : dict, optional
        A dictionary mapping each positional argument name to a tuple of its
        help string and a boolean flag denoting whether or not the argument
        represents a subcommand. Only set when `type` is `CMD`.
    opts : dict, optional
        A dictionary mapping a tuple of option names representing a particular
        option to a tuple of the option's value name and its help string.
    cmds : dict, optional
        A dictionary mapping module names to another dictionary of command
        names and their help strings. Only set when `type` is `MOD` or `ALL`.
    subcmds : dict, optional
        If applicable, a dictionary of subcommand names and their own
        `CommandHelp` objects.
    parent : CommandHelp
        Only set when `type` is `NO_SUCH_SUBCMD`, and refers to the parent
        `CommandHelp` object.
    """

    type: HelpType
    name: str = None
    description: str = None
    usage: str = None
    aliases: list[str] = field(default_factory=list)
    args: dict[str, tuple[Optional[str], bool]] = field(default_factory=dict)
    opts: dict[tuple[str, ...], Optional[tuple[str, Optional[str]]]] = field(default_factory=dict)
    cmds: dict[str, dict[str, str]] = field(default_factory=dict)
    subcmds: dict[str, "CommandHelp"] = field(default_factory=dict, repr=False)
    parent: "CommandHelp" = None

    def get_subcmd(self, name: str) -> "CommandHelp":
        """Get the `CommandHelp` object for the given subcommand.

        `name` may be an alias, in which case it is resolved to the
        appropriate subcommand.
        """
        try:
            return self.subcmds[name]
        except KeyError:
            # Try looking up by alias
            for sub_name, sub_help in self.subcmds.items():
                for alias in sub_help.aliases:
                    if name == alias:
                        return self.subcmds[sub_name]
            raise
action = subparser._actions[0] if isinstance(action, _SubParsersAction): subparser = action.choices[self.args[action.dest]] current += 1 else: return None
conditional_block
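The conditional block above is the subparser-walking step inside `nested_subcmd`. A sketch of the kind of parser tree it traverses, built with this file's own helpers (the command names are illustrative):

```python
# Illustrative parser tree; CommandParser comes from the record above.
cmd = CommandParser("remind", "Manage reminders")
adder = cmd.make_adder(metavar="OPERATION", required=True)
add_cmd = adder("add", description="Add a reminder")
# A second level of nesting; nested_subcmd(depth=2) resolves names at this level.
when = add_cmd.make_adder(metavar="WHEN", required=True)
when("daily", description="Repeat every day")
when("once", description="Fire a single time")
```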
command.py
"""common/command.py Classes and utility functions for working with and creating ZeroBot commands. """ from __future__ import annotations from argparse import ArgumentParser, _SubParsersAction from dataclasses import dataclass, field from functools import partial from typing import Any, Optional, Union from ZeroBot.common.abc import Channel, Message, User from ZeroBot.common.enums import HelpType from ZeroBot.exceptions import CommandAlreadyRegistered, CommandParseError from ZeroBot.module import Module from ZeroBot.util import gen_repr __all__ = ["CommandHelp", "CommandParser", "ParsedCommand"] class _NoExitArgumentParser(ArgumentParser): """Modified `argparse.ArgumentParser` that doesn't exit on errors.""" # NOTE: Python 3.9 will add an `exit_on_error` parameter that will stop # argparse from exiting instead of having to override exit and error. def exit(self, status=0, message=None): pass def error(self, message): raise CommandParseError(message, cmd_name=self.prog) class CommandParser(_NoExitArgumentParser): """Definition and parser for ZeroBot commands. Creation of a `CommandParser` object necessarily entails defining the command itself: its name, what arguments and options it accepts, how they behave, etc. It is both the blueprint and interpreter for a command. Attributes ---------- name : str, optional The name of the command, i.e. how the command will be invoked. May be omitted, but this only makes sense when creating a parent parser for another parser. description : str, optional A short description of the command. May be omitted. usage : str, optional The text shown as the "usage" line in the command's help text. If omitted, it will be automatically generated by `argparse`. kwargs Any extra keyword arguments are passed to the underlying `argparse.ArgumentParser` constructor. Notes ----- Under the hood, `CommandParser` is simply a wrapper around an `argparse.ArgumentParser` with some ZeroBot-related members. """ def __init__( self, name: Optional[str] = None, description: Optional[str] = None, usage: Optional[str] = None, **kwargs ): # NOTE: Might be able to make use of formatter_class if need be if not name: name = kwargs.get("name", kwargs.get("prog")) kwargs.update( { "prog": name, "description": description, "usage": usage, "add_help": False, } ) super().__init__(**kwargs) self.name = name self._module = None # More minimal default argument grouping blank_group = self.add_argument_group() self._optionals = blank_group self._positionals = blank_group def __repr__(self): attrs = ["name", "description", "module"] return gen_repr(self, attrs) def __str__(self): return self.name def make_adder(self, *args, **kwargs): """Helper shortcut for creating subcommands. Accepts arguments for `add_subparsers`, creating a new subparser and returning a partial function wrapping `add_subcommand` for the new subparser. If the `dest` argument isn't specified, it defaults to `'subcmd'`. Example ------- cmd_foo = CommandParser('foo', 'Does foo stuff') foo_adder = cmd_foo.make_adder(metavar='OPERATION', required=True) bar_subcmd = foo_adder('bar', description='Does bar stuff to foo') """ kwargs.setdefault("dest", "subcmd") subp = self.add_subparsers(*args, **kwargs) return partial(self.add_subcommand, subp) @staticmethod def add_subcommand( subp: _SubParsersAction, name: str, description: Optional[str] = None, **kwargs ) -> "CommandParser": """Helper method for adding subcommands. Wrapper around `add_parser` that simplifies adding subcommands to ZeroBot commands. 
The same string is used for both the `description` and `help` parameters of `add_parser`. Parameters ---------- subp : Result of calling the `add_subparsers` method. The subparser object returned from the `add_subparsers` method. name : str The name of the subcommand. description : str, optional A short description of the command. May be omitted. The `help` parameter will be set to this value automatically. kwargs Extra arguments to pass to the `CommandParser` constructor. """ desc_help = {"description": description, "help": description} return subp.add_parser(name, **desc_help, **kwargs) @property def module(self) -> Optional[Module]: """The module that this command is registered to. Will return `None` if this command has not yet been registered. """ return self._module @dataclass class ParsedCommand: """A successfully parsed command with invoker and destination info. ZeroBot's `Core` will send these as the payload of `module_command_*` events. Attributes ---------- name : str The command name. args : dict A dictionary of the resultant parsed arguments and options and their values. parser : CommandParser The parser that created this instance. msg : Message The original message encompassing the command. invoker source subcmd """ name: str args: dict[str, Any] parser: CommandParser msg: Message def __post_init__(self): # pylint: disable=protected-access try: action = self.parser._actions[0] if isinstance(action, _SubParsersAction): name_map = action.choices canon_parser = name_map[self.args[action.dest]] self._subcmd = canon_parser.name.split()[-1] else: self._subcmd = None except (KeyError, IndexError): self._subcmd = None @property def invoker(self) -> User: """The User that invoked the command.""" return self.msg.source @property def source(self) -> Union[User, Channel]: """Where the command was sent from. Can be either directly from a user, or from a user within a channel. """ return self.msg.destination @property def subcmd(self) -> Optional[str]: """The invoked subcommand name, if one was invoked. For subcommands with aliases, the name returned is always the canonical name that the aliases are associated with. For this reason, this attribute should be preferred to extracting the subcommand name from `ParsedCommand.args`. """ return self._subcmd def nested_subcmd(self, depth: int = 2) -> Optional[str]: """Get the name of a nested subcommand. Like the `subcmd` property, the name returned is always the canonical name for the subcommand. The `depth` parameter determines how many levels of nesting to traverse; the default of ``2`` gets the first nested subcommand. As a consequence, a value of ``1`` is the same as `subcmd`. """ # pylint: disable=protected-access current = 0 subparser = self.parser try: while current < depth: action = subparser._actions[0] if isinstance(action, _SubParsersAction): subparser = action.choices[self.args[action.dest]] current += 1 else: return None return subparser.name.split()[-1] except (IndexError, KeyError, TypeError): return None @dataclass class CommandHelp: """Encapsulates the result of a command help request. ZeroBot's `Core` will create and pass these to `core_command_help` callbacks. Attributes ---------- type : HelpType An enum type representing the type of help request. name : str, optional The command or module name that the help is about. aliases : list, optional If applicable, a list of aliases for this command. 
description : str, optional The command or module description. usage : str, optional The "usage" string for the command. args : dict, optional A dictionary mapping each positional argument name to a tuple of its help string and a boolean flag denoting whether the argument represents a subcommand. Only set when `type` is `CMD`. opts : dict, optional A dictionary mapping a tuple of option names representing a particular option to a tuple of the option's value name and its help string. cmds : dict, optional A dictionary mapping module names to another dictionary of command names and their help strings. Only set when `type` is `MOD` or `ALL`. subcmds : dict, optional If applicable, a dictionary of subcommand names and their own `CommandHelp` objects. parent : CommandHelp Only set when `type` is `NO_SUCH_SUBCMD`, and refers to the parent `CommandHelp` object. """ type: HelpType name: str = None description: str = None usage: str = None aliases: list[str] = field(default_factory=list) args: dict[str, tuple[Optional[str], bool]] = field(default_factory=dict) opts: dict[tuple[str, ...], Optional[tuple[str, Optional[str]]]] = field(default_factory=dict) cmds: dict[str, dict[str, str]] = field(default_factory=dict) subcmds: dict[str, "CommandHelp"] = field(default_factory=dict, repr=False) parent: "CommandHelp" = None def
(self, name: str) -> "CommandHelp": """Get the `CommandHelp` object for the given subcommand. `name` may be an alias, in which case it is resolved to the appropriate subcommand. """ try: return self.subcmds[name] except KeyError: # Try looking up by alias for sub_name, sub_help in self.subcmds.items(): for alias in sub_help.aliases: if name == alias: return self.subcmds[sub_name] raise
get_subcmd
identifier_name
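A minimal usage sketch for the command.py record above, not part of the source file: it assumes the module path ZeroBot.common.command, relies on argparse's standard `aliases` keyword reaching `add_parser` through `add_subcommand`'s `**kwargs`, and takes `HelpType.CMD` from the docstring above. It shows `get_subcmd` resolving an alias to the canonical `CommandHelp` entry.

    from ZeroBot.common.command import CommandHelp, CommandParser  # assumed module path
    from ZeroBot.common.enums import HelpType

    cmd_foo = CommandParser("foo", "Does foo stuff")
    foo_adder = cmd_foo.make_adder(metavar="OPERATION", required=True)
    # argparse's add_parser accepts an `aliases` keyword; add_subcommand forwards it.
    foo_adder("bar", description="Does bar stuff to foo", aliases=["b"])

    # get_subcmd falls back to scanning each subcommand's aliases on a KeyError.
    bar_help = CommandHelp(type=HelpType.CMD, name="bar", aliases=["b"])
    foo_help = CommandHelp(type=HelpType.CMD, name="foo", subcmds={"bar": bar_help})
    assert foo_help.get_subcmd("b") is bar_help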
command.py
"""common/command.py Classes and utility functions for working with and creating ZeroBot commands. """ from __future__ import annotations from argparse import ArgumentParser, _SubParsersAction from dataclasses import dataclass, field from functools import partial from typing import Any, Optional, Union from ZeroBot.common.abc import Channel, Message, User from ZeroBot.common.enums import HelpType from ZeroBot.exceptions import CommandAlreadyRegistered, CommandParseError from ZeroBot.module import Module from ZeroBot.util import gen_repr __all__ = ["CommandHelp", "CommandParser", "ParsedCommand"] class _NoExitArgumentParser(ArgumentParser): """Modified `argparse.ArgumentParser` that doesn't exit on errors.""" # NOTE: Python 3.9 will add an `exit_on_error` parameter that will stop # argparse from exiting instead of having to override exit and error. def exit(self, status=0, message=None): pass def error(self, message): raise CommandParseError(message, cmd_name=self.prog) class CommandParser(_NoExitArgumentParser): """Definition and parser for ZeroBot commands. Creation of a `CommandParser` object necessarily entails defining the command itself: its name, what arguments and options it accepts, how they behave, etc. It is both the blueprint and interpreter for a command. Attributes ---------- name : str, optional The name of the command, i.e. how the command will be invoked. May be omitted, but this only makes sense when creating a parent parser for another parser. description : str, optional A short description of the command. May be omitted. usage : str, optional The text shown as the "usage" line in the command's help text. If omitted, it will be automatically generated by `argparse`. kwargs Any extra keyword arguments are passed to the underlying `argparse.ArgumentParser` constructor. Notes ----- Under the hood, `CommandParser` is simply a wrapper around an `argparse.ArgumentParser` with some ZeroBot-related members. """ def __init__( self, name: Optional[str] = None, description: Optional[str] = None, usage: Optional[str] = None, **kwargs ): # NOTE: Might be able to make use of formatter_class if need be if not name: name = kwargs.get("name", kwargs.get("prog")) kwargs.update( { "prog": name, "description": description, "usage": usage, "add_help": False, } ) super().__init__(**kwargs) self.name = name self._module = None # More minimal default argument grouping blank_group = self.add_argument_group() self._optionals = blank_group self._positionals = blank_group def __repr__(self): attrs = ["name", "description", "module"] return gen_repr(self, attrs) def __str__(self): return self.name def make_adder(self, *args, **kwargs): """Helper shortcut for creating subcommands. Accepts arguments for `add_subparsers`, creating a new subparser and returning a partial function wrapping `add_subcommand` for the new subparser. If the `dest` argument isn't specified, it defaults to `'subcmd'`. Example ------- cmd_foo = CommandParser('foo', 'Does foo stuff') foo_adder = cmd_foo.make_adder(metavar='OPERATION', required=True) bar_subcmd = foo_adder('bar', description='Does bar stuff to foo') """ kwargs.setdefault("dest", "subcmd") subp = self.add_subparsers(*args, **kwargs) return partial(self.add_subcommand, subp) @staticmethod def add_subcommand( subp: _SubParsersAction, name: str, description: Optional[str] = None, **kwargs ) -> "CommandParser": """Helper method for adding subcommands. Wrapper around `add_parser` that simplifies adding subcommands to ZeroBot commands. 
The same string is used for both the `description` and `help` parameters of `add_parser`. Parameters ---------- subp : Result of calling the `add_subparsers` method. The subparser object returned from the `add_subparsers` method. name : str The name of the subcommand. description : str, optional A short description of the command. May be omitted. The `help` parameter will be set to this value automatically. kwargs Extra arguments to pass to the `CommandParser` constructor. """ desc_help = {"description": description, "help": description} return subp.add_parser(name, **desc_help, **kwargs) @property def module(self) -> Optional[Module]: """The module that this command is registered to. Will return `None` if this command has not yet been registered. """ return self._module @dataclass class ParsedCommand: """A successfully parsed command with invoker and destination info. ZeroBot's `Core` will send these as the payload of `module_command_*` events. Attributes ---------- name : str The command name. args : dict A dictionary of the resultant parsed arguments and options and their values. parser : CommandParser The parser that created this instance. msg : Message The original message encompassing the command. invoker source subcmd """ name: str args: dict[str, Any] parser: CommandParser msg: Message def __post_init__(self): # pylint: disable=protected-access try: action = self.parser._actions[0] if isinstance(action, _SubParsersAction):
except (KeyError, IndexError): self._subcmd = None @property def invoker(self) -> User: """The User that invoked the command.""" return self.msg.source @property def source(self) -> Union[User, Channel]: """Where the command was sent from. Can be either directly from a user, or from a user within a channel. """ return self.msg.destination @property def subcmd(self) -> Optional[str]: """The invoked subcommand name, if one was invoked. For subcommands with aliases, the name returned is always the canonical name that the aliases are associated with. For this reason, this attribute should be preferred to extracting the subcommand name from `ParsedCommand.args`. """ return self._subcmd def nested_subcmd(self, depth: int = 2) -> Optional[str]: """Get the name of a nested subcommand. Like the `subcmd` property, the name returned is always the canonical name for the subcommand. The `depth` parameter determines how many levels of nesting to traverse; the default of ``2`` gets the first nested subcommand. As a consequence, a value of ``1`` is the same as `subcmd`. """ # pylint: disable=protected-access current = 0 subparser = self.parser try: while current < depth: action = subparser._actions[0] if isinstance(action, _SubParsersAction): subparser = action.choices[self.args[action.dest]] current += 1 else: return None return subparser.name.split()[-1] except (IndexError, KeyError, TypeError): return None @dataclass class CommandHelp: """Encapsulates the result of a command help request. ZeroBot's `Core` will create and pass these to `core_command_help` callbacks. Attributes ---------- type : HelpType An enum type representing the type of help request. name : str, optional The command or module name that the help is about. aliases : list, optional If applicable, a list of aliases for this command. description : str, optional The command or module description. usage : str, optional The "usage" string for the command. args : dict, optional A dictionary mapping each positional argument name to a tuple of its help string and a boolean flag denoting whether the argument represents a subcommand. Only set when `type` is `CMD`. opts : dict, optional A dictionary mapping a tuple of option names representing a particular option to a tuple of the option's value name and its help string. cmds : dict, optional A dictionary mapping module names to another dictionary of command names and their help strings. Only set when `type` is `MOD` or `ALL`. subcmds : dict, optional If applicable, a dictionary of subcommand names and their own `CommandHelp` objects. parent : CommandHelp Only set when `type` is `NO_SUCH_SUBCMD`, and refers to the parent `CommandHelp` object. """ type: HelpType name: str = None description: str = None usage: str = None aliases: list[str] = field(default_factory=list) args: dict[str, tuple[Optional[str], bool]] = field(default_factory=dict) opts: dict[tuple[str, ...], Optional[tuple[str, Optional[str]]]] = field(default_factory=dict) cmds: dict[str, dict[str, str]] = field(default_factory=dict) subcmds: dict[str, "CommandHelp"] = field(default_factory=dict, repr=False) parent: "CommandHelp" = None def get_subcmd(self, name: str) -> "CommandHelp": """Get the `CommandHelp` object for the given subcommand. `name` may be an alias, in which case it is resolved to the appropriate subcommand.
""" try: return self.subcmds[name] except KeyError: # Try looking up by alias for sub_name, sub_help in self.subcmds.items(): for alias in sub_help.aliases: if name == alias: return self.subcmds[sub_name] raise
name_map = action.choices canon_parser = name_map[self.args[action.dest]] self._subcmd = canon_parser.name.split()[-1] else: self._subcmd = None
random_line_split
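The completed middle above canonicalizes the invoked subcommand: argparse registers every alias in the subparsers action's `choices` pointing at the same parser object, and that parser's `prog` (hence its `name`, via the fallback in `CommandParser.__init__`) is typically the space-joined "parent child" string, so `name.split()[-1]` recovers the canonical name. A short sketch of that lookup, continuing the `cmd_foo` example above and touching the same private attributes the source itself uses:

    subp = cmd_foo._actions[0]        # the _SubParsersAction created by make_adder
    parser_b = subp.choices["b"]      # the alias entry points at the "bar" parser
    print(parser_b.name)              # "foo bar": argparse's default child prog
    print(parser_b.name.split()[-1])  # "bar": the canonical subcommand name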
command.py
"""common/command.py Classes and utility functions for working with and creating ZeroBot commands. """ from __future__ import annotations from argparse import ArgumentParser, _SubParsersAction from dataclasses import dataclass, field from functools import partial from typing import Any, Optional, Union from ZeroBot.common.abc import Channel, Message, User from ZeroBot.common.enums import HelpType from ZeroBot.exceptions import CommandAlreadyRegistered, CommandParseError from ZeroBot.module import Module from ZeroBot.util import gen_repr __all__ = ["CommandHelp", "CommandParser", "ParsedCommand"] class _NoExitArgumentParser(ArgumentParser): """Modified `argparse.ArgumentParser` that doesn't exit on errors.""" # NOTE: Python 3.9 will add an `exit_on_error` parameter that will stop # argparse from exiting instead of having to override exit and error. def exit(self, status=0, message=None): pass def error(self, message): raise CommandParseError(message, cmd_name=self.prog) class CommandParser(_NoExitArgumentParser): """Definition and parser for ZeroBot commands. Creation of a `CommandParser` object necessarily entails defining the command itself: its name, what arguments and options it accepts, how they behave, etc. It is both the blueprint and interpreter for a command. Attributes ---------- name : str, optional The name of the command, i.e. how the command will be invoked. May be omitted, but this only makes sense when creating a parent parser for another parser. description : str, optional A short description of the command. May be omitted. usage : str, optional The text shown as the "usage" line in the command's help text. If omitted, it will be automatically generated by `argparse`. kwargs Any extra keyword arguments are passed to the underlying `argparse.ArgumentParser` constructor. Notes ----- Under the hood, `CommandParser` is simply a wrapper around an `argparse.ArgumentParser` with some ZeroBot-related members. """ def __init__( self, name: Optional[str] = None, description: Optional[str] = None, usage: Optional[str] = None, **kwargs ): # NOTE: Might be able to make use of formatter_class if need be if not name: name = kwargs.get("name", kwargs.get("prog")) kwargs.update( { "prog": name, "description": description, "usage": usage, "add_help": False, } ) super().__init__(**kwargs) self.name = name self._module = None # More minimal default argument grouping blank_group = self.add_argument_group() self._optionals = blank_group self._positionals = blank_group def __repr__(self): attrs = ["name", "description", "module"] return gen_repr(self, attrs) def __str__(self): return self.name def make_adder(self, *args, **kwargs): """Helper shortcut for creating subcommands. Accepts arguments for `add_subparsers`, creating a new subparser and returning a partial function wrapping `add_subcommand` for the new subparser. If the `dest` argument isn't specified, it defaults to `'subcmd'`. Example ------- cmd_foo = CommandParser('foo', 'Does foo stuff') foo_adder = cmd_foo.make_adder(metavar='OPERATION', required=True) bar_subcmd = foo_adder('bar', description='Does bar stuff to foo') """ kwargs.setdefault("dest", "subcmd") subp = self.add_subparsers(*args, **kwargs) return partial(self.add_subcommand, subp) @staticmethod def add_subcommand( subp: _SubParsersAction, name: str, description: Optional[str] = None, **kwargs ) -> "CommandParser": """Helper method for adding subcommands. Wrapper around `add_parser` that simplifies adding subcommands to ZeroBot commands. 
The same string is used for both the `description` and `help` parameters of `add_parser`. Parameters ---------- subp : Result of calling the `add_subparsers` method. The subparser object returned from the `add_subparsers` method. name : str The name of the subcommand. description : str, optional A short description of the command. May be omitted. The `help` parameter will be set to this value automatically. kwargs Extra arguments to pass to the `CommandParser` constructor. """ desc_help = {"description": description, "help": description} return subp.add_parser(name, **desc_help, **kwargs) @property def module(self) -> Optional[Module]: """The module that this command is registered to. Will return `None` if this command has not yet been registered. """ return self._module @dataclass class ParsedCommand: """A successfully parsed command with invoker and destination info. ZeroBot's `Core` will send these as the payload of `module_command_*` events. Attributes ---------- name : str The command name. args : dict A dictionary of the resultant parsed arguments and options and their values. parser : CommandParser The parser that created this instance. msg : Message The original message encompassing the command. invoker source subcmd """ name: str args: dict[str, Any] parser: CommandParser msg: Message def __post_init__(self): # pylint: disable=protected-access try: action = self.parser._actions[0] if isinstance(action, _SubParsersAction): name_map = action.choices canon_parser = name_map[self.args[action.dest]] self._subcmd = canon_parser.name.split()[-1] else: self._subcmd = None except (KeyError, IndexError): self._subcmd = None @property def invoker(self) -> User: """The User that invoked the command.""" return self.msg.source @property def source(self) -> Union[User, Channel]: """Where the command was sent from. Can be either directly from a user, or from a user within a channel. """ return self.msg.destination @property def subcmd(self) -> Optional[str]:
def nested_subcmd(self, depth: int = 2) -> Optional[str]: """Get the name of a nested subcommand. Like the `subcmd` property, the name returned is always the canonical name for the subcommand. The `depth` parameter determines how many levels of nesting to traverse; the default of ``2`` gets the first nested subcommand. As a consequence, a value of ``1`` is the same as `subcmd`. """ # pylint: disable=protected-access current = 0 subparser = self.parser try: while current < depth: action = subparser._actions[0] if isinstance(action, _SubParsersAction): subparser = action.choices[self.args[action.dest]] current += 1 else: return None return subparser.name.split()[-1] except (IndexError, KeyError, TypeError): return None @dataclass class CommandHelp: """Encapsulates the result of a command help request. ZeroBot's `Core` will create and pass these to `core_command_help` callbacks. Attributes ---------- type : HelpType An enum type representing the type of help request. name : str, optional The command or module name that the help is about. aliases : list, optional If applicable, a list of aliases for this command. description : str, optional The command or module description. usage : str, optional The "usage" string for the command. args : dict, optional A dictionary mapping each positional argument name to a tuple of its help string and a boolean flag denoting whether the argument represents a subcommand. Only set when `type` is `CMD`. opts : dict, optional A dictionary mapping a tuple of option names representing a particular option to a tuple of the option's value name and its help string. cmds : dict, optional A dictionary mapping module names to another dictionary of command names and their help strings. Only set when `type` is `MOD` or `ALL`. subcmds : dict, optional If applicable, a dictionary of subcommand names and their own `CommandHelp` objects. parent : CommandHelp Only set when `type` is `NO_SUCH_SUBCMD`, and refers to the parent `CommandHelp` object. """ type: HelpType name: str = None description: str = None usage: str = None aliases: list[str] = field(default_factory=list) args: dict[str, tuple[Optional[str], bool]] = field(default_factory=dict) opts: dict[tuple[str, ...], Optional[tuple[str, Optional[str]]]] = field(default_factory=dict) cmds: dict[str, dict[str, str]] = field(default_factory=dict) subcmds: dict[str, "CommandHelp"] = field(default_factory=dict, repr=False) parent: "CommandHelp" = None def get_subcmd(self, name: str) -> "CommandHelp": """Get the `CommandHelp` object for the given subcommand. `name` may be an alias, in which case it is resolved to the appropriate subcommand. """ try: return self.subcmds[name] except KeyError: # Try looking up by alias for sub_name, sub_help in self.subcmds.items(): for alias in sub_help.aliases: if name == alias: return self.subcmds[sub_name] raise
"""The invoked subcommand name, if one was invoked. For subcommands with aliases, the name returned is always the canonical name that the aliases are associated with. For this reason, this attribute should be preferred to extracting the subcommand name from `ParsedCommand.args`. """ return self._subcmd
identifier_body
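The `nested_subcmd` method from this record walks one `_SubParsersAction` per level via `self.args[action.dest]`, so nested levels need distinct `dest` values; otherwise the inner parse overwrites the outer entry in `args` and the walk fails with a KeyError. A hedged sketch with hypothetical command names:

    cmd_db = CommandParser("db", "Database admin")
    db_adder = cmd_db.make_adder(metavar="ACTION", required=True)
    user = db_adder("user", description="Manage users")
    # A distinct dest keeps both levels' choices visible in the parsed args.
    user_adder = user.make_adder(metavar="OP", dest="user_op", required=True)
    user_adder("add", description="Add a user", aliases=["new"])
    # For an invocation like "db user new ...", Core's ParsedCommand would report
    # subcmd == "user" and nested_subcmd() == "add" (canonical, not the alias "new").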
store.go
// Copyright 2022-2023 The Parca Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package debuginfo import ( "context" "encoding/hex" "errors" "fmt" "io" "path" "time" "github.com/go-kit/log" "github.com/google/uuid" "github.com/thanos-io/objstore" "github.com/thanos-io/objstore/client" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/timestamppb" debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1" ) var ErrDebuginfoNotFound = errors.New("debuginfo not found") type CacheProvider string const ( FILESYSTEM CacheProvider = "FILESYSTEM" ) type Config struct { Bucket *client.BucketConfig `yaml:"bucket"` Cache *CacheConfig `yaml:"cache"` } type FilesystemCacheConfig struct { Directory string `yaml:"directory"` } type CacheConfig struct { Type CacheProvider `yaml:"type"` Config interface{} `yaml:"config"` } type MetadataManager interface { MarkAsDebuginfodSource(ctx context.Context, servers []string, buildID string, typ debuginfopb.DebuginfoType) error MarkAsUploading(ctx context.Context, buildID, uploadID, hash string, typ debuginfopb.DebuginfoType, startedAt *timestamppb.Timestamp) error MarkAsUploaded(ctx context.Context, buildID, uploadID string, typ debuginfopb.DebuginfoType, finishedAt *timestamppb.Timestamp) error Fetch(ctx context.Context, buildID string, typ debuginfopb.DebuginfoType) (*debuginfopb.Debuginfo, error) } type Store struct { debuginfopb.UnimplementedDebuginfoServiceServer tracer trace.Tracer logger log.Logger bucket objstore.Bucket metadata MetadataManager debuginfodClients DebuginfodClients signedUpload SignedUpload maxUploadDuration time.Duration maxUploadSize int64 timeNow func() time.Time } type SignedUploadClient interface { SignedPUT(ctx context.Context, objectKey string, size int64, expiry time.Time) (signedURL string, err error) } type SignedUpload struct { Enabled bool Client SignedUploadClient } // NewStore returns a new debug info store. func NewStore( tracer trace.Tracer, logger log.Logger, metadata MetadataManager, bucket objstore.Bucket, debuginfodClients DebuginfodClients, signedUpload SignedUpload, maxUploadDuration time.Duration, maxUploadSize int64, ) (*Store, error) { return &Store{ tracer: tracer, logger: log.With(logger, "component", "debuginfo"), bucket: bucket, metadata: metadata, debuginfodClients: debuginfodClients, signedUpload: signedUpload, maxUploadDuration: maxUploadDuration, maxUploadSize: maxUploadSize, timeNow: time.Now, }, nil } const ( ReasonDebuginfoInDebuginfod = "Debuginfo exists in debuginfod, therefore no upload is necessary." ReasonFirstTimeSeen = "First time we see this Build ID, and it does not exist in debuginfod, therefore please upload!" ReasonUploadStale = "A previous upload was started but not finished and is now stale, so it can be retried." ReasonUploadInProgress = "A previous upload is still in-progress and not stale yet (only stale uploads can be retried)." 
ReasonDebuginfoAlreadyExists = "Debuginfo already exists and is not marked as invalid, therefore no new upload is needed." ReasonDebuginfoAlreadyExistsButForced = "Debuginfo already exists and is not marked as invalid, therefore wouldn't have accepted a new upload, but accepting it because it's requested to be forced." ReasonDebuginfoInvalid = "Debuginfo already exists but is marked as invalid, therefore a new upload is needed. Hash the debuginfo and initiate the upload." ReasonDebuginfoEqual = "Debuginfo already exists and is marked as invalid, but the proposed hash is the same as the one already available, therefore the upload is not accepted as it would result in the same invalid debuginfos." ReasonDebuginfoNotEqual = "Debuginfo already exists but is marked as invalid, therefore a new upload will be accepted." ReasonDebuginfodSource = "Debuginfo is available from debuginfod already and not marked as invalid, therefore no new upload is needed." ReasonDebuginfodInvalid = "Debuginfo is available from debuginfod already but is marked as invalid, therefore a new upload is needed." ) // ShouldInitiateUpload returns whether an upload should be initiated for the // given build ID. Checking if an upload should even be initiated allows the // parca-agent to avoid extracting debuginfos unnecessarily from a binary. func (s *Store) ShouldInitiateUpload(ctx context.Context, req *debuginfopb.ShouldInitiateUploadRequest) (*debuginfopb.ShouldInitiateUploadResponse, error) { ctx, span := s.tracer.Start(ctx, "ShouldInitiateUpload") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) buildID := req.BuildId if err := validateInput(buildID); err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } dbginfo, err := s.metadata.Fetch(ctx, buildID, req.Type) if err != nil && !errors.Is(err, ErrMetadataNotFound) { return nil, status.Error(codes.Internal, err.Error()) } else if errors.Is(err, ErrMetadataNotFound) { // First time we see this Build ID. existsInDebuginfods, err := s.debuginfodClients.Exists(ctx, buildID) if err != nil { return nil, status.Error(codes.Internal, err.Error()) } if len(existsInDebuginfods) > 0 { if err := s.metadata.MarkAsDebuginfodSource(ctx, existsInDebuginfods, buildID, req.Type); err != nil { return nil, status.Error(codes.Internal, fmt.Errorf("mark Build ID to be available from debuginfod: %w", err).Error()) } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoInDebuginfod, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonFirstTimeSeen, }, nil } else { // We have seen this Build ID before and there is metadata for it. 
switch dbginfo.Source { case debuginfopb.Debuginfo_SOURCE_UPLOAD: if dbginfo.Upload == nil { return nil, status.Error(codes.Internal, "metadata inconsistency: upload is nil") } switch dbginfo.Upload.State { case debuginfopb.DebuginfoUpload_STATE_UPLOADING: if s.uploadIsStale(dbginfo.Upload) { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonUploadStale, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonUploadInProgress, }, nil case debuginfopb.DebuginfoUpload_STATE_UPLOADED: if dbginfo.Quality == nil || !dbginfo.Quality.NotValidElf { if req.Force { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoAlreadyExistsButForced, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoAlreadyExists, }, nil } if req.Hash == "" { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoInvalid, }, nil } if dbginfo.Upload.Hash == req.Hash { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoEqual, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoNotEqual, }, nil default: return nil, status.Error(codes.Internal, "metadata inconsistency: unknown upload state") } case debuginfopb.Debuginfo_SOURCE_DEBUGINFOD: if dbginfo.Quality == nil || !dbginfo.Quality.NotValidElf { // We already have debuginfo that's also not marked to be // invalid, so we don't need to upload it again. return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfodSource, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfodInvalid, }, nil default: return nil, status.Errorf(codes.Internal, "unknown debuginfo source %q", dbginfo.Source) } } } func (s *Store) InitiateUpload(ctx context.Context, req *debuginfopb.InitiateUploadRequest) (*debuginfopb.InitiateUploadResponse, error) { ctx, span := s.tracer.Start(ctx, "InitiateUpload") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) if req.Hash == "" { return nil, status.Error(codes.InvalidArgument, "hash must be set") } if req.Size == 0 { return nil, status.Error(codes.InvalidArgument, "size must be set") } // We don't want to blindly accept upload initiation requests that // shouldn't have happened. 
shouldInitiateResp, err := s.ShouldInitiateUpload(ctx, &debuginfopb.ShouldInitiateUploadRequest{ BuildId: req.BuildId, Hash: req.Hash, Force: req.Force, Type: req.Type, }) if err != nil { return nil, err } if !shouldInitiateResp.ShouldInitiateUpload { if shouldInitiateResp.Reason == ReasonDebuginfoEqual { return nil, status.Error(codes.AlreadyExists, ReasonDebuginfoEqual) } return nil, status.Errorf(codes.FailedPrecondition, "upload should not have been attempted to be initiated, a previous check should have failed with: %s", shouldInitiateResp.Reason) } if req.Size > s.maxUploadSize { return nil, status.Errorf(codes.InvalidArgument, "upload size %d exceeds maximum allowed size %d", req.Size, s.maxUploadSize) } uploadID := uuid.New().String() uploadStarted := s.timeNow() uploadExpiry := uploadStarted.Add(s.maxUploadDuration) if !s.signedUpload.Enabled { if err := s.metadata.MarkAsUploading(ctx, req.BuildId, uploadID, req.Hash, req.Type, timestamppb.New(uploadStarted)); err != nil { return nil, fmt.Errorf("mark debuginfo upload as uploading via gRPC: %w", err) } return &debuginfopb.InitiateUploadResponse{ UploadInstructions: &debuginfopb.UploadInstructions{ BuildId: req.BuildId, UploadId: uploadID, UploadStrategy: debuginfopb.UploadInstructions_UPLOAD_STRATEGY_GRPC, Type: req.Type, }, }, nil } signedURL, err := s.signedUpload.Client.SignedPUT(ctx, objectPath(req.BuildId, req.Type), req.Size, uploadExpiry) if err != nil { return nil, status.Error(codes.Internal, err.Error()) } if err := s.metadata.MarkAsUploading(ctx, req.BuildId, uploadID, req.Hash, req.Type, timestamppb.New(uploadStarted)); err != nil { return nil, fmt.Errorf("mark debuginfo upload as uploading via signed URL: %w", err) } return &debuginfopb.InitiateUploadResponse{ UploadInstructions: &debuginfopb.UploadInstructions{ BuildId: req.BuildId, UploadId: uploadID, UploadStrategy: debuginfopb.UploadInstructions_UPLOAD_STRATEGY_SIGNED_URL, SignedUrl: signedURL, Type: req.Type, }, }, nil } func (s *Store) MarkUploadFinished(ctx context.Context, req *debuginfopb.MarkUploadFinishedRequest) (*debuginfopb.MarkUploadFinishedResponse, error) { ctx, span := s.tracer.Start(ctx, "MarkUploadFinished") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) span.SetAttributes(attribute.String("upload_id", req.UploadId)) buildID := req.BuildId if err := validateInput(buildID); err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } err := s.metadata.MarkAsUploaded(ctx, buildID, req.UploadId, req.Type, timestamppb.New(s.timeNow())) if errors.Is(err, ErrDebuginfoNotFound) { return nil, status.Error(codes.NotFound, "no debuginfo metadata found for build id") } if errors.Is(err, ErrUploadMetadataNotFound) { return nil, status.Error(codes.NotFound, "no debuginfo upload metadata found for build id") } if errors.Is(err, ErrUploadIDMismatch) { return nil, status.Error(codes.InvalidArgument, "upload id mismatch") } if err != nil { return nil, status.Error(codes.Internal, err.Error()) } return &debuginfopb.MarkUploadFinishedResponse{}, nil } func (s *Store) Upload(stream debuginfopb.DebuginfoService_UploadServer) error { if s.signedUpload.Enabled { return status.Error(codes.Unimplemented, "signed URL uploads are the only supported upload strategy for this service") } req, err := stream.Recv() if err != nil { return status.Errorf(codes.Unknown, "failed to receive upload info: %q", err) } var ( buildID = req.GetInfo().BuildId uploadID = req.GetInfo().UploadId r = &UploadReader{stream: stream} typ = 
req.GetInfo().Type ) ctx, span := s.tracer.Start(stream.Context(), "Upload") defer span.End() span.SetAttributes(attribute.String("build_id", buildID)) span.SetAttributes(attribute.String("upload_id", uploadID)) if err := s.upload(ctx, buildID, uploadID, typ, r); err != nil { return err } return stream.SendAndClose(&debuginfopb.UploadResponse{ BuildId: buildID, Size: r.size, }) } func (s *Store) upload(ctx context.Context, buildID, uploadID string, typ debuginfopb.DebuginfoType, r io.Reader) error
func (s *Store) uploadIsStale(upload *debuginfopb.DebuginfoUpload) bool { return upload.StartedAt.AsTime().Add(s.maxUploadDuration + 2*time.Minute).Before(s.timeNow()) } func validateInput(id string) error { _, err := hex.DecodeString(id) if err != nil { return fmt.Errorf("failed to validate input: %w", err) } if len(id) <= 2 { return errors.New("unexpectedly short input") } return nil } func objectPath(buildID string, typ debuginfopb.DebuginfoType) string { switch typ { case debuginfopb.DebuginfoType_DEBUGINFO_TYPE_EXECUTABLE: return path.Join(buildID, "executable") case debuginfopb.DebuginfoType_DEBUGINFO_TYPE_SOURCES: return path.Join(buildID, "sources") default: return path.Join(buildID, "debuginfo") } }
{ if err := validateInput(buildID); err != nil { return status.Errorf(codes.InvalidArgument, "invalid build ID: %q", err) } dbginfo, err := s.metadata.Fetch(ctx, buildID, typ) if err != nil { if errors.Is(err, ErrMetadataNotFound) { return status.Error(codes.FailedPrecondition, "metadata not found, this indicates that the upload was not previously initiated") } return status.Error(codes.Internal, err.Error()) } if dbginfo.Upload == nil { return status.Error(codes.FailedPrecondition, "upload metadata not found, this indicates that the upload was not previously initiated") } if dbginfo.Upload.Id != uploadID { return status.Error(codes.InvalidArgument, "the upload ID does not match the one returned by the InitiateUpload call") } if err := s.bucket.Upload(ctx, objectPath(buildID, typ), r); err != nil { return status.Error(codes.Internal, fmt.Errorf("upload debuginfo: %w", err).Error()) } return nil }
identifier_body
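A small illustration of the staleness rule that gates retries in ShouldInitiateUpload: an in-progress upload only becomes retryable once maxUploadDuration plus a two-minute grace period has elapsed since it started. This is a sketch, not source code; it sits in the same debuginfo package so it can reach the unexported Store fields.

    package debuginfo

    import (
    	"fmt"
    	"time"

    	"google.golang.org/protobuf/types/known/timestamppb"

    	debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1"
    )

    func ExampleStaleness() {
    	s := &Store{maxUploadDuration: 15 * time.Minute, timeNow: time.Now}
    	upload := &debuginfopb.DebuginfoUpload{
    		StartedAt: timestamppb.New(time.Now().Add(-20 * time.Minute)),
    	}
    	fmt.Println(s.uploadIsStale(upload)) // 20m elapsed exceeds 15m + 2m grace
    	// Output: true
    }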
store.go
// Copyright 2022-2023 The Parca Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package debuginfo import ( "context" "encoding/hex" "errors" "fmt" "io" "path" "time" "github.com/go-kit/log" "github.com/google/uuid" "github.com/thanos-io/objstore" "github.com/thanos-io/objstore/client" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/timestamppb" debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1" ) var ErrDebuginfoNotFound = errors.New("debuginfo not found") type CacheProvider string const ( FILESYSTEM CacheProvider = "FILESYSTEM" ) type Config struct { Bucket *client.BucketConfig `yaml:"bucket"` Cache *CacheConfig `yaml:"cache"` } type FilesystemCacheConfig struct { Directory string `yaml:"directory"` } type CacheConfig struct { Type CacheProvider `yaml:"type"` Config interface{} `yaml:"config"` } type MetadataManager interface { MarkAsDebuginfodSource(ctx context.Context, servers []string, buildID string, typ debuginfopb.DebuginfoType) error MarkAsUploading(ctx context.Context, buildID, uploadID, hash string, typ debuginfopb.DebuginfoType, startedAt *timestamppb.Timestamp) error MarkAsUploaded(ctx context.Context, buildID, uploadID string, typ debuginfopb.DebuginfoType, finishedAt *timestamppb.Timestamp) error Fetch(ctx context.Context, buildID string, typ debuginfopb.DebuginfoType) (*debuginfopb.Debuginfo, error) } type Store struct { debuginfopb.UnimplementedDebuginfoServiceServer tracer trace.Tracer logger log.Logger bucket objstore.Bucket metadata MetadataManager debuginfodClients DebuginfodClients signedUpload SignedUpload maxUploadDuration time.Duration maxUploadSize int64 timeNow func() time.Time } type SignedUploadClient interface { SignedPUT(ctx context.Context, objectKey string, size int64, expiry time.Time) (signedURL string, err error) } type SignedUpload struct { Enabled bool Client SignedUploadClient } // NewStore returns a new debug info store. func NewStore( tracer trace.Tracer, logger log.Logger, metadata MetadataManager, bucket objstore.Bucket, debuginfodClients DebuginfodClients, signedUpload SignedUpload, maxUploadDuration time.Duration, maxUploadSize int64, ) (*Store, error) { return &Store{ tracer: tracer, logger: log.With(logger, "component", "debuginfo"), bucket: bucket, metadata: metadata, debuginfodClients: debuginfodClients, signedUpload: signedUpload, maxUploadDuration: maxUploadDuration, maxUploadSize: maxUploadSize, timeNow: time.Now, }, nil } const ( ReasonDebuginfoInDebuginfod = "Debuginfo exists in debuginfod, therefore no upload is necessary." ReasonFirstTimeSeen = "First time we see this Build ID, and it does not exist in debuginfod, therefore please upload!" ReasonUploadStale = "A previous upload was started but not finished and is now stale, so it can be retried." ReasonUploadInProgress = "A previous upload is still in-progress and not stale yet (only stale uploads can be retried)." 
ReasonDebuginfoAlreadyExists = "Debuginfo already exists and is not marked as invalid, therefore no new upload is needed." ReasonDebuginfoAlreadyExistsButForced = "Debuginfo already exists and is not marked as invalid, therefore wouldn't have accepted a new upload, but accepting it because it's requested to be forced." ReasonDebuginfoInvalid = "Debuginfo already exists but is marked as invalid, therefore a new upload is needed. Hash the debuginfo and initiate the upload." ReasonDebuginfoEqual = "Debuginfo already exists and is marked as invalid, but the proposed hash is the same as the one already available, therefore the upload is not accepted as it would result in the same invalid debuginfos." ReasonDebuginfoNotEqual = "Debuginfo already exists but is marked as invalid, therefore a new upload will be accepted." ReasonDebuginfodSource = "Debuginfo is available from debuginfod already and not marked as invalid, therefore no new upload is needed." ReasonDebuginfodInvalid = "Debuginfo is available from debuginfod already but is marked as invalid, therefore a new upload is needed." ) // ShouldInitiateUpload returns whether an upload should be initiated for the // given build ID. Checking if an upload should even be initiated allows the // parca-agent to avoid extracting debuginfos unnecessarily from a binary. func (s *Store) ShouldInitiateUpload(ctx context.Context, req *debuginfopb.ShouldInitiateUploadRequest) (*debuginfopb.ShouldInitiateUploadResponse, error) { ctx, span := s.tracer.Start(ctx, "ShouldInitiateUpload") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) buildID := req.BuildId if err := validateInput(buildID); err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } dbginfo, err := s.metadata.Fetch(ctx, buildID, req.Type) if err != nil && !errors.Is(err, ErrMetadataNotFound) { return nil, status.Error(codes.Internal, err.Error()) } else if errors.Is(err, ErrMetadataNotFound) { // First time we see this Build ID. existsInDebuginfods, err := s.debuginfodClients.Exists(ctx, buildID) if err != nil { return nil, status.Error(codes.Internal, err.Error()) } if len(existsInDebuginfods) > 0 { if err := s.metadata.MarkAsDebuginfodSource(ctx, existsInDebuginfods, buildID, req.Type); err != nil { return nil, status.Error(codes.Internal, fmt.Errorf("mark Build ID to be available from debuginfod: %w", err).Error()) } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoInDebuginfod, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonFirstTimeSeen, }, nil } else { // We have seen this Build ID before and there is metadata for it. 
switch dbginfo.Source { case debuginfopb.Debuginfo_SOURCE_UPLOAD: if dbginfo.Upload == nil { return nil, status.Error(codes.Internal, "metadata inconsistency: upload is nil") } switch dbginfo.Upload.State { case debuginfopb.DebuginfoUpload_STATE_UPLOADING: if s.uploadIsStale(dbginfo.Upload) { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonUploadStale, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonUploadInProgress, }, nil case debuginfopb.DebuginfoUpload_STATE_UPLOADED: if dbginfo.Quality == nil || !dbginfo.Quality.NotValidElf { if req.Force { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoAlreadyExistsButForced, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoAlreadyExists, }, nil } if req.Hash == "" { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoInvalid, }, nil } if dbginfo.Upload.Hash == req.Hash { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoEqual, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoNotEqual, }, nil default: return nil, status.Error(codes.Internal, "metadata inconsistency: unknown upload state") } case debuginfopb.Debuginfo_SOURCE_DEBUGINFOD: if dbginfo.Quality == nil || !dbginfo.Quality.NotValidElf { // We already have debuginfo that's also not marked to be // invalid, so we don't need to upload it again. return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfodSource, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfodInvalid, }, nil default: return nil, status.Errorf(codes.Internal, "unknown debuginfo source %q", dbginfo.Source) } } } func (s *Store) InitiateUpload(ctx context.Context, req *debuginfopb.InitiateUploadRequest) (*debuginfopb.InitiateUploadResponse, error) { ctx, span := s.tracer.Start(ctx, "InitiateUpload") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) if req.Hash == "" { return nil, status.Error(codes.InvalidArgument, "hash must be set") } if req.Size == 0 { return nil, status.Error(codes.InvalidArgument, "size must be set") } // We don't want to blindly accept upload initiation requests that // shouldn't have happened. 
shouldInitiateResp, err := s.ShouldInitiateUpload(ctx, &debuginfopb.ShouldInitiateUploadRequest{ BuildId: req.BuildId, Hash: req.Hash, Force: req.Force, Type: req.Type, }) if err != nil { return nil, err } if !shouldInitiateResp.ShouldInitiateUpload { if shouldInitiateResp.Reason == ReasonDebuginfoEqual { return nil, status.Error(codes.AlreadyExists, ReasonDebuginfoEqual) } return nil, status.Errorf(codes.FailedPrecondition, "upload should not have been attempted to be initiated, a previous check should have failed with: %s", shouldInitiateResp.Reason) } if req.Size > s.maxUploadSize { return nil, status.Errorf(codes.InvalidArgument, "upload size %d exceeds maximum allowed size %d", req.Size, s.maxUploadSize) } uploadID := uuid.New().String() uploadStarted := s.timeNow() uploadExpiry := uploadStarted.Add(s.maxUploadDuration) if !s.signedUpload.Enabled { if err := s.metadata.MarkAsUploading(ctx, req.BuildId, uploadID, req.Hash, req.Type, timestamppb.New(uploadStarted)); err != nil { return nil, fmt.Errorf("mark debuginfo upload as uploading via gRPC: %w", err) } return &debuginfopb.InitiateUploadResponse{ UploadInstructions: &debuginfopb.UploadInstructions{ BuildId: req.BuildId, UploadId: uploadID, UploadStrategy: debuginfopb.UploadInstructions_UPLOAD_STRATEGY_GRPC, Type: req.Type, }, }, nil } signedURL, err := s.signedUpload.Client.SignedPUT(ctx, objectPath(req.BuildId, req.Type), req.Size, uploadExpiry) if err != nil { return nil, status.Error(codes.Internal, err.Error()) } if err := s.metadata.MarkAsUploading(ctx, req.BuildId, uploadID, req.Hash, req.Type, timestamppb.New(uploadStarted)); err != nil { return nil, fmt.Errorf("mark debuginfo upload as uploading via signed URL: %w", err) } return &debuginfopb.InitiateUploadResponse{ UploadInstructions: &debuginfopb.UploadInstructions{ BuildId: req.BuildId, UploadId: uploadID, UploadStrategy: debuginfopb.UploadInstructions_UPLOAD_STRATEGY_SIGNED_URL, SignedUrl: signedURL, Type: req.Type, }, }, nil } func (s *Store) MarkUploadFinished(ctx context.Context, req *debuginfopb.MarkUploadFinishedRequest) (*debuginfopb.MarkUploadFinishedResponse, error) { ctx, span := s.tracer.Start(ctx, "MarkUploadFinished") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) span.SetAttributes(attribute.String("upload_id", req.UploadId)) buildID := req.BuildId if err := validateInput(buildID); err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } err := s.metadata.MarkAsUploaded(ctx, buildID, req.UploadId, req.Type, timestamppb.New(s.timeNow())) if errors.Is(err, ErrDebuginfoNotFound) { return nil, status.Error(codes.NotFound, "no debuginfo metadata found for build id") } if errors.Is(err, ErrUploadMetadataNotFound) { return nil, status.Error(codes.NotFound, "no debuginfo upload metadata found for build id") } if errors.Is(err, ErrUploadIDMismatch) { return nil, status.Error(codes.InvalidArgument, "upload id mismatch") } if err != nil { return nil, status.Error(codes.Internal, err.Error()) } return &debuginfopb.MarkUploadFinishedResponse{}, nil } func (s *Store) Upload(stream debuginfopb.DebuginfoService_UploadServer) error { if s.signedUpload.Enabled { return status.Error(codes.Unimplemented, "signed URL uploads are the only supported upload strategy for this service") } req, err := stream.Recv() if err != nil { return status.Errorf(codes.Unknown, "failed to receive upload info: %q", err) } var ( buildID = req.GetInfo().BuildId uploadID = req.GetInfo().UploadId r = &UploadReader{stream: stream} typ = 
req.GetInfo().Type ) ctx, span := s.tracer.Start(stream.Context(), "Upload") defer span.End() span.SetAttributes(attribute.String("build_id", buildID)) span.SetAttributes(attribute.String("upload_id", uploadID)) if err := s.upload(ctx, buildID, uploadID, typ, r); err != nil { return err } return stream.SendAndClose(&debuginfopb.UploadResponse{ BuildId: buildID, Size: r.size, }) } func (s *Store) upload(ctx context.Context, buildID, uploadID string, typ debuginfopb.DebuginfoType, r io.Reader) error { if err := validateInput(buildID); err != nil { return status.Errorf(codes.InvalidArgument, "invalid build ID: %q", err) } dbginfo, err := s.metadata.Fetch(ctx, buildID, typ) if err != nil { if errors.Is(err, ErrMetadataNotFound) { return status.Error(codes.FailedPrecondition, "metadata not found, this indicates that the upload was not previously initiated") } return status.Error(codes.Internal, err.Error()) } if dbginfo.Upload == nil { return status.Error(codes.FailedPrecondition, "upload metadata not found, this indicates that the upload was not previously initiated") } if dbginfo.Upload.Id != uploadID { return status.Error(codes.InvalidArgument, "the upload ID does not match the one returned by the InitiateUpload call") } if err := s.bucket.Upload(ctx, objectPath(buildID, typ), r); err != nil { return status.Error(codes.Internal, fmt.Errorf("upload debuginfo: %w", err).Error()) } return nil } func (s *Store) uploadIsStale(upload *debuginfopb.DebuginfoUpload) bool { return upload.StartedAt.AsTime().Add(s.maxUploadDuration + 2*time.Minute).Before(s.timeNow()) } func validateInput(id string) error { _, err := hex.DecodeString(id) if err != nil { return fmt.Errorf("failed to validate input: %w", err) } if len(id) <= 2 { return errors.New("unexpectedly short input") } return nil } func
(buildID string, typ debuginfopb.DebuginfoType) string { switch typ { case debuginfopb.DebuginfoType_DEBUGINFO_TYPE_EXECUTABLE: return path.Join(buildID, "executable") case debuginfopb.DebuginfoType_DEBUGINFO_TYPE_SOURCES: return path.Join(buildID, "sources") default: return path.Join(buildID, "debuginfo") } }
objectPath
identifier_name
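The completed identifier is objectPath, which fixes the bucket layout: one prefix per build ID with a well-known object name per debuginfo type. Within the package it behaves as sketched below (the build ID is hypothetical):

    fmt.Println(objectPath("f0e1d2c3", debuginfopb.DebuginfoType_DEBUGINFO_TYPE_EXECUTABLE)) // f0e1d2c3/executable
    fmt.Println(objectPath("f0e1d2c3", debuginfopb.DebuginfoType_DEBUGINFO_TYPE_SOURCES))    // f0e1d2c3/sources
    // Any other type value falls through to the default case: f0e1d2c3/debuginfo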
store.go
// Copyright 2022-2023 The Parca Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package debuginfo import ( "context" "encoding/hex" "errors" "fmt" "io" "path" "time" "github.com/go-kit/log" "github.com/google/uuid" "github.com/thanos-io/objstore" "github.com/thanos-io/objstore/client" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/timestamppb" debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1" ) var ErrDebuginfoNotFound = errors.New("debuginfo not found") type CacheProvider string const ( FILESYSTEM CacheProvider = "FILESYSTEM" ) type Config struct { Bucket *client.BucketConfig `yaml:"bucket"` Cache *CacheConfig `yaml:"cache"` } type FilesystemCacheConfig struct { Directory string `yaml:"directory"` } type CacheConfig struct { Type CacheProvider `yaml:"type"` Config interface{} `yaml:"config"` } type MetadataManager interface { MarkAsDebuginfodSource(ctx context.Context, servers []string, buildID string, typ debuginfopb.DebuginfoType) error MarkAsUploading(ctx context.Context, buildID, uploadID, hash string, typ debuginfopb.DebuginfoType, startedAt *timestamppb.Timestamp) error MarkAsUploaded(ctx context.Context, buildID, uploadID string, typ debuginfopb.DebuginfoType, finishedAt *timestamppb.Timestamp) error Fetch(ctx context.Context, buildID string, typ debuginfopb.DebuginfoType) (*debuginfopb.Debuginfo, error) } type Store struct { debuginfopb.UnimplementedDebuginfoServiceServer tracer trace.Tracer logger log.Logger bucket objstore.Bucket metadata MetadataManager debuginfodClients DebuginfodClients signedUpload SignedUpload maxUploadDuration time.Duration maxUploadSize int64 timeNow func() time.Time } type SignedUploadClient interface { SignedPUT(ctx context.Context, objectKey string, size int64, expiry time.Time) (signedURL string, err error) } type SignedUpload struct { Enabled bool Client SignedUploadClient } // NewStore returns a new debug info store. func NewStore( tracer trace.Tracer, logger log.Logger, metadata MetadataManager, bucket objstore.Bucket, debuginfodClients DebuginfodClients, signedUpload SignedUpload, maxUploadDuration time.Duration, maxUploadSize int64, ) (*Store, error) { return &Store{ tracer: tracer, logger: log.With(logger, "component", "debuginfo"), bucket: bucket, metadata: metadata, debuginfodClients: debuginfodClients, signedUpload: signedUpload, maxUploadDuration: maxUploadDuration, maxUploadSize: maxUploadSize, timeNow: time.Now, }, nil } const ( ReasonDebuginfoInDebuginfod = "Debuginfo exists in debuginfod, therefore no upload is necessary." ReasonFirstTimeSeen = "First time we see this Build ID, and it does not exist in debuginfod, therefore please upload!" ReasonUploadStale = "A previous upload was started but not finished and is now stale, so it can be retried." ReasonUploadInProgress = "A previous upload is still in-progress and not stale yet (only stale uploads can be retried)." 
ReasonDebuginfoAlreadyExists = "Debuginfo already exists and is not marked as invalid, therefore no new upload is needed." ReasonDebuginfoAlreadyExistsButForced = "Debuginfo already exists and is not marked as invalid, therefore wouldn't have accepted a new upload, but accepting it because it's requested to be forced." ReasonDebuginfoInvalid = "Debuginfo already exists but is marked as invalid, therefore a new upload is needed. Hash the debuginfo and initiate the upload." ReasonDebuginfoEqual = "Debuginfo already exists and is marked as invalid, but the proposed hash is the same as the one already available, therefore the upload is not accepted as it would result in the same invalid debuginfos." ReasonDebuginfoNotEqual = "Debuginfo already exists but is marked as invalid, therefore a new upload will be accepted." ReasonDebuginfodSource = "Debuginfo is available from debuginfod already and not marked as invalid, therefore no new upload is needed." ReasonDebuginfodInvalid = "Debuginfo is available from debuginfod already but is marked as invalid, therefore a new upload is needed." ) // ShouldInitiateUpload returns whether an upload should be initiated for the // given build ID. Checking if an upload should even be initiated allows the // parca-agent to avoid extracting debuginfos unnecessarily from a binary. func (s *Store) ShouldInitiateUpload(ctx context.Context, req *debuginfopb.ShouldInitiateUploadRequest) (*debuginfopb.ShouldInitiateUploadResponse, error) { ctx, span := s.tracer.Start(ctx, "ShouldInitiateUpload") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) buildID := req.BuildId if err := validateInput(buildID); err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } dbginfo, err := s.metadata.Fetch(ctx, buildID, req.Type) if err != nil && !errors.Is(err, ErrMetadataNotFound) { return nil, status.Error(codes.Internal, err.Error()) } else if errors.Is(err, ErrMetadataNotFound) { // First time we see this Build ID. existsInDebuginfods, err := s.debuginfodClients.Exists(ctx, buildID) if err != nil { return nil, status.Error(codes.Internal, err.Error()) } if len(existsInDebuginfods) > 0 { if err := s.metadata.MarkAsDebuginfodSource(ctx, existsInDebuginfods, buildID, req.Type); err != nil { return nil, status.Error(codes.Internal, fmt.Errorf("mark Build ID to be available from debuginfod: %w", err).Error()) } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoInDebuginfod, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonFirstTimeSeen, }, nil } else { // We have seen this Build ID before and there is metadata for it. 
switch dbginfo.Source { case debuginfopb.Debuginfo_SOURCE_UPLOAD: if dbginfo.Upload == nil { return nil, status.Error(codes.Internal, "metadata inconsistency: upload is nil") } switch dbginfo.Upload.State { case debuginfopb.DebuginfoUpload_STATE_UPLOADING: if s.uploadIsStale(dbginfo.Upload) { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonUploadStale, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonUploadInProgress, }, nil case debuginfopb.DebuginfoUpload_STATE_UPLOADED: if dbginfo.Quality == nil || !dbginfo.Quality.NotValidElf { if req.Force { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoAlreadyExistsButForced, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoAlreadyExists, }, nil } if req.Hash == "" { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoInvalid, }, nil } if dbginfo.Upload.Hash == req.Hash { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoEqual, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoNotEqual, }, nil default: return nil, status.Error(codes.Internal, "metadata inconsistency: unknown upload state") } case debuginfopb.Debuginfo_SOURCE_DEBUGINFOD: if dbginfo.Quality == nil || !dbginfo.Quality.NotValidElf { // We already have debuginfo that's also not marked to be // invalid, so we don't need to upload it again. return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfodSource, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfodInvalid, }, nil default: return nil, status.Errorf(codes.Internal, "unknown debuginfo source %q", dbginfo.Source) } } } func (s *Store) InitiateUpload(ctx context.Context, req *debuginfopb.InitiateUploadRequest) (*debuginfopb.InitiateUploadResponse, error) { ctx, span := s.tracer.Start(ctx, "InitiateUpload") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) if req.Hash == "" { return nil, status.Error(codes.InvalidArgument, "hash must be set") } if req.Size == 0 { return nil, status.Error(codes.InvalidArgument, "size must be set") } // We don't want to blindly accept upload initiation requests that // shouldn't have happened. 
shouldInitiateResp, err := s.ShouldInitiateUpload(ctx, &debuginfopb.ShouldInitiateUploadRequest{ BuildId: req.BuildId, Hash: req.Hash, Force: req.Force, Type: req.Type, }) if err != nil { return nil, err } if !shouldInitiateResp.ShouldInitiateUpload { if shouldInitiateResp.Reason == ReasonDebuginfoEqual { return nil, status.Error(codes.AlreadyExists, ReasonDebuginfoEqual) } return nil, status.Errorf(codes.FailedPrecondition, "upload should not have been attempted to be initiated, a previous check should have failed with: %s", shouldInitiateResp.Reason) } if req.Size > s.maxUploadSize { return nil, status.Errorf(codes.InvalidArgument, "upload size %d exceeds maximum allowed size %d", req.Size, s.maxUploadSize) } uploadID := uuid.New().String() uploadStarted := s.timeNow() uploadExpiry := uploadStarted.Add(s.maxUploadDuration) if !s.signedUpload.Enabled { if err := s.metadata.MarkAsUploading(ctx, req.BuildId, uploadID, req.Hash, req.Type, timestamppb.New(uploadStarted)); err != nil { return nil, fmt.Errorf("mark debuginfo upload as uploading via gRPC: %w", err) } return &debuginfopb.InitiateUploadResponse{ UploadInstructions: &debuginfopb.UploadInstructions{ BuildId: req.BuildId, UploadId: uploadID, UploadStrategy: debuginfopb.UploadInstructions_UPLOAD_STRATEGY_GRPC, Type: req.Type, }, }, nil } signedURL, err := s.signedUpload.Client.SignedPUT(ctx, objectPath(req.BuildId, req.Type), req.Size, uploadExpiry) if err != nil { return nil, status.Error(codes.Internal, err.Error()) } if err := s.metadata.MarkAsUploading(ctx, req.BuildId, uploadID, req.Hash, req.Type, timestamppb.New(uploadStarted)); err != nil { return nil, fmt.Errorf("mark debuginfo upload as uploading via signed URL: %w", err) } return &debuginfopb.InitiateUploadResponse{ UploadInstructions: &debuginfopb.UploadInstructions{ BuildId: req.BuildId, UploadId: uploadID, UploadStrategy: debuginfopb.UploadInstructions_UPLOAD_STRATEGY_SIGNED_URL, SignedUrl: signedURL, Type: req.Type, }, }, nil } func (s *Store) MarkUploadFinished(ctx context.Context, req *debuginfopb.MarkUploadFinishedRequest) (*debuginfopb.MarkUploadFinishedResponse, error) { ctx, span := s.tracer.Start(ctx, "MarkUploadFinished") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) span.SetAttributes(attribute.String("upload_id", req.UploadId)) buildID := req.BuildId if err := validateInput(buildID); err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } err := s.metadata.MarkAsUploaded(ctx, buildID, req.UploadId, req.Type, timestamppb.New(s.timeNow())) if errors.Is(err, ErrDebuginfoNotFound) { return nil, status.Error(codes.NotFound, "no debuginfo metadata found for build id") } if errors.Is(err, ErrUploadMetadataNotFound) { return nil, status.Error(codes.NotFound, "no debuginfo upload metadata found for build id") } if errors.Is(err, ErrUploadIDMismatch)
if err != nil { return nil, status.Error(codes.Internal, err.Error()) } return &debuginfopb.MarkUploadFinishedResponse{}, nil } func (s *Store) Upload(stream debuginfopb.DebuginfoService_UploadServer) error { if s.signedUpload.Enabled { return status.Error(codes.Unimplemented, "signed URL uploads are the only supported upload strategy for this service") } req, err := stream.Recv() if err != nil { return status.Errorf(codes.Unknown, "failed to receive upload info: %q", err) } var ( buildID = req.GetInfo().BuildId uploadID = req.GetInfo().UploadId r = &UploadReader{stream: stream} typ = req.GetInfo().Type ) ctx, span := s.tracer.Start(stream.Context(), "Upload") defer span.End() span.SetAttributes(attribute.String("build_id", buildID)) span.SetAttributes(attribute.String("upload_id", uploadID)) if err := s.upload(ctx, buildID, uploadID, typ, r); err != nil { return err } return stream.SendAndClose(&debuginfopb.UploadResponse{ BuildId: buildID, Size: r.size, }) } func (s *Store) upload(ctx context.Context, buildID, uploadID string, typ debuginfopb.DebuginfoType, r io.Reader) error { if err := validateInput(buildID); err != nil { return status.Errorf(codes.InvalidArgument, "invalid build ID: %q", err) } dbginfo, err := s.metadata.Fetch(ctx, buildID, typ) if err != nil { if errors.Is(err, ErrMetadataNotFound) { return status.Error(codes.FailedPrecondition, "metadata not found, this indicates that the upload was not previously initiated") } return status.Error(codes.Internal, err.Error()) } if dbginfo.Upload == nil { return status.Error(codes.FailedPrecondition, "upload metadata not found, this indicates that the upload was not previously initiated") } if dbginfo.Upload.Id != uploadID { return status.Error(codes.InvalidArgument, "the upload ID does not match the one returned by the InitiateUpload call") } if err := s.bucket.Upload(ctx, objectPath(buildID, typ), r); err != nil { return status.Error(codes.Internal, fmt.Errorf("upload debuginfo: %w", err).Error()) } return nil } func (s *Store) uploadIsStale(upload *debuginfopb.DebuginfoUpload) bool { return upload.StartedAt.AsTime().Add(s.maxUploadDuration + 2*time.Minute).Before(s.timeNow()) } func validateInput(id string) error { _, err := hex.DecodeString(id) if err != nil { return fmt.Errorf("failed to validate input: %w", err) } if len(id) <= 2 { return errors.New("unexpectedly short input") } return nil } func objectPath(buildID string, typ debuginfopb.DebuginfoType) string { switch typ { case debuginfopb.DebuginfoType_DEBUGINFO_TYPE_EXECUTABLE: return path.Join(buildID, "executable") case debuginfopb.DebuginfoType_DEBUGINFO_TYPE_SOURCES: return path.Join(buildID, "sources") default: return path.Join(buildID, "debuginfo") } }
{ return nil, status.Error(codes.InvalidArgument, "upload id mismatch") }
conditional_block
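The conditional block recovered above is one rung of the errors.Is ladder that MarkUploadFinished uses to translate metadata-layer sentinel errors into gRPC status codes. A minimal, self-contained sketch of that mapping pattern follows; the sentinel errors and the toStatus helper are hypothetical stand-ins for illustration, not the store's actual API.

package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// Hypothetical sentinels standing in for the store's metadata errors.
var (
	errNotFound   = errors.New("debuginfo not found")
	errIDMismatch = errors.New("upload id mismatch")
)

// toStatus walks an errors.Is ladder, mirroring the structure of
// MarkUploadFinished: known sentinels get precise gRPC codes, and
// anything unrecognized falls through to codes.Internal.
func toStatus(err error) error {
	switch {
	case err == nil:
		return nil
	case errors.Is(err, errNotFound):
		return status.Error(codes.NotFound, err.Error())
	case errors.Is(err, errIDMismatch):
		return status.Error(codes.InvalidArgument, err.Error())
	default:
		return status.Error(codes.Internal, err.Error())
	}
}

func main() {
	// errors.Is sees through wrapping, so annotated errors still map correctly.
	fmt.Println(toStatus(fmt.Errorf("marking upload finished: %w", errIDMismatch)))
}

The ordering matters: the specific sentinels are checked before the generic err != nil fallback, so a NotFound or InvalidArgument is never swallowed by a blanket Internal.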
store.go
// Copyright 2022-2023 The Parca Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package debuginfo import ( "context" "encoding/hex" "errors" "fmt" "io" "path" "time" "github.com/go-kit/log" "github.com/google/uuid" "github.com/thanos-io/objstore" "github.com/thanos-io/objstore/client" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/timestamppb" debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1" ) var ErrDebuginfoNotFound = errors.New("debuginfo not found") type CacheProvider string const ( FILESYSTEM CacheProvider = "FILESYSTEM" ) type Config struct { Bucket *client.BucketConfig `yaml:"bucket"` Cache *CacheConfig `yaml:"cache"` } type FilesystemCacheConfig struct { Directory string `yaml:"directory"` } type CacheConfig struct { Type CacheProvider `yaml:"type"` Config interface{} `yaml:"config"` } type MetadataManager interface { MarkAsDebuginfodSource(ctx context.Context, servers []string, buildID string, typ debuginfopb.DebuginfoType) error MarkAsUploading(ctx context.Context, buildID, uploadID, hash string, typ debuginfopb.DebuginfoType, startedAt *timestamppb.Timestamp) error MarkAsUploaded(ctx context.Context, buildID, uploadID string, typ debuginfopb.DebuginfoType, finishedAt *timestamppb.Timestamp) error Fetch(ctx context.Context, buildID string, typ debuginfopb.DebuginfoType) (*debuginfopb.Debuginfo, error) } type Store struct { debuginfopb.UnimplementedDebuginfoServiceServer tracer trace.Tracer logger log.Logger bucket objstore.Bucket metadata MetadataManager debuginfodClients DebuginfodClients signedUpload SignedUpload maxUploadDuration time.Duration maxUploadSize int64 timeNow func() time.Time } type SignedUploadClient interface { SignedPUT(ctx context.Context, objectKey string, size int64, expiry time.Time) (signedURL string, err error) } type SignedUpload struct { Enabled bool Client SignedUploadClient } // NewStore returns a new debug info store. func NewStore( tracer trace.Tracer, logger log.Logger, metadata MetadataManager, bucket objstore.Bucket, debuginfodClients DebuginfodClients, signedUpload SignedUpload, maxUploadDuration time.Duration, maxUploadSize int64, ) (*Store, error) { return &Store{ tracer: tracer, logger: log.With(logger, "component", "debuginfo"), bucket: bucket, metadata: metadata, debuginfodClients: debuginfodClients, signedUpload: signedUpload, maxUploadDuration: maxUploadDuration, maxUploadSize: maxUploadSize, timeNow: time.Now, }, nil } const ( ReasonDebuginfoInDebuginfod = "Debuginfo exists in debuginfod, therefore no upload is necessary." ReasonFirstTimeSeen = "First time we see this Build ID, and it does not exist in debuginfod, therefore please upload!" ReasonUploadStale = "A previous upload was started but not finished and is now stale, so it can be retried." ReasonUploadInProgress = "A previous upload is still in-progress and not stale yet (only stale uploads can be retried)." 
ReasonDebuginfoAlreadyExists = "Debuginfo already exists and is not marked as invalid, therefore no new upload is needed." ReasonDebuginfoAlreadyExistsButForced = "Debuginfo already exists and is not marked as invalid, therefore wouldn't have accepted a new upload, but accepting it because it's requested to be forced." ReasonDebuginfoInvalid = "Debuginfo already exists but is marked as invalid, therefore a new upload is needed. Hash the debuginfo and initiate the upload." ReasonDebuginfoEqual = "Debuginfo already exists and is marked as invalid, but the proposed hash is the same as the one already available, therefore the upload is not accepted as it would result in the same invalid debuginfos." ReasonDebuginfoNotEqual = "Debuginfo already exists but is marked as invalid, therefore a new upload will be accepted." ReasonDebuginfodSource = "Debuginfo is available from debuginfod already and not marked as invalid, therefore no new upload is needed." ReasonDebuginfodInvalid = "Debuginfo is available from debuginfod already but is marked as invalid, therefore a new upload is needed." ) // ShouldInitiateUpload returns whether an upload should be initiated for the // given build ID. Checking if an upload should even be initiated allows the // parca-agent to avoid extracting debuginfos unnecessarily from a binary. func (s *Store) ShouldInitiateUpload(ctx context.Context, req *debuginfopb.ShouldInitiateUploadRequest) (*debuginfopb.ShouldInitiateUploadResponse, error) { ctx, span := s.tracer.Start(ctx, "ShouldInitiateUpload") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) buildID := req.BuildId if err := validateInput(buildID); err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } dbginfo, err := s.metadata.Fetch(ctx, buildID, req.Type) if err != nil && !errors.Is(err, ErrMetadataNotFound) { return nil, status.Error(codes.Internal, err.Error()) } else if errors.Is(err, ErrMetadataNotFound) { // First time we see this Build ID. existsInDebuginfods, err := s.debuginfodClients.Exists(ctx, buildID) if err != nil { return nil, status.Error(codes.Internal, err.Error()) } if len(existsInDebuginfods) > 0 { if err := s.metadata.MarkAsDebuginfodSource(ctx, existsInDebuginfods, buildID, req.Type); err != nil { return nil, status.Error(codes.Internal, fmt.Errorf("mark Build ID to be available from debuginfod: %w", err).Error()) } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoInDebuginfod, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonFirstTimeSeen, }, nil } else { // We have seen this Build ID before and there is metadata for it. 
switch dbginfo.Source { case debuginfopb.Debuginfo_SOURCE_UPLOAD: if dbginfo.Upload == nil { return nil, status.Error(codes.Internal, "metadata inconsistency: upload is nil") } switch dbginfo.Upload.State { case debuginfopb.DebuginfoUpload_STATE_UPLOADING: if s.uploadIsStale(dbginfo.Upload) { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonUploadStale, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonUploadInProgress, }, nil case debuginfopb.DebuginfoUpload_STATE_UPLOADED: if dbginfo.Quality == nil || !dbginfo.Quality.NotValidElf { if req.Force { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoAlreadyExistsButForced, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoAlreadyExists, }, nil } if req.Hash == "" { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoInvalid, }, nil } if dbginfo.Upload.Hash == req.Hash { return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfoEqual, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfoNotEqual, }, nil default: return nil, status.Error(codes.Internal, "metadata inconsistency: unknown upload state") } case debuginfopb.Debuginfo_SOURCE_DEBUGINFOD: if dbginfo.Quality == nil || !dbginfo.Quality.NotValidElf { // We already have debuginfo that's also not marked to be // invalid, so we don't need to upload it again. return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: false, Reason: ReasonDebuginfodSource, }, nil } return &debuginfopb.ShouldInitiateUploadResponse{ ShouldInitiateUpload: true, Reason: ReasonDebuginfodInvalid, }, nil default: return nil, status.Errorf(codes.Internal, "unknown debuginfo source %q", dbginfo.Source) } } } func (s *Store) InitiateUpload(ctx context.Context, req *debuginfopb.InitiateUploadRequest) (*debuginfopb.InitiateUploadResponse, error) { ctx, span := s.tracer.Start(ctx, "InitiateUpload") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) if req.Hash == "" { return nil, status.Error(codes.InvalidArgument, "hash must be set") } if req.Size == 0 { return nil, status.Error(codes.InvalidArgument, "size must be set") } // We don't want to blindly accept upload initiation requests that // shouldn't have happened. shouldInitiateResp, err := s.ShouldInitiateUpload(ctx, &debuginfopb.ShouldInitiateUploadRequest{ BuildId: req.BuildId, Hash: req.Hash, Force: req.Force, Type: req.Type,
if err != nil { return nil, err } if !shouldInitiateResp.ShouldInitiateUpload { if shouldInitiateResp.Reason == ReasonDebuginfoEqual { return nil, status.Error(codes.AlreadyExists, ReasonDebuginfoEqual) } return nil, status.Errorf(codes.FailedPrecondition, "upload should not have been attempted to be initiated, a previous check should have failed with: %s", shouldInitiateResp.Reason) } if req.Size > s.maxUploadSize { return nil, status.Errorf(codes.InvalidArgument, "upload size %d exceeds maximum allowed size %d", req.Size, s.maxUploadSize) } uploadID := uuid.New().String() uploadStarted := s.timeNow() uploadExpiry := uploadStarted.Add(s.maxUploadDuration) if !s.signedUpload.Enabled { if err := s.metadata.MarkAsUploading(ctx, req.BuildId, uploadID, req.Hash, req.Type, timestamppb.New(uploadStarted)); err != nil { return nil, fmt.Errorf("mark debuginfo upload as uploading via gRPC: %w", err) } return &debuginfopb.InitiateUploadResponse{ UploadInstructions: &debuginfopb.UploadInstructions{ BuildId: req.BuildId, UploadId: uploadID, UploadStrategy: debuginfopb.UploadInstructions_UPLOAD_STRATEGY_GRPC, Type: req.Type, }, }, nil } signedURL, err := s.signedUpload.Client.SignedPUT(ctx, objectPath(req.BuildId, req.Type), req.Size, uploadExpiry) if err != nil { return nil, status.Error(codes.Internal, err.Error()) } if err := s.metadata.MarkAsUploading(ctx, req.BuildId, uploadID, req.Hash, req.Type, timestamppb.New(uploadStarted)); err != nil { return nil, fmt.Errorf("mark debuginfo upload as uploading via signed URL: %w", err) } return &debuginfopb.InitiateUploadResponse{ UploadInstructions: &debuginfopb.UploadInstructions{ BuildId: req.BuildId, UploadId: uploadID, UploadStrategy: debuginfopb.UploadInstructions_UPLOAD_STRATEGY_SIGNED_URL, SignedUrl: signedURL, Type: req.Type, }, }, nil } func (s *Store) MarkUploadFinished(ctx context.Context, req *debuginfopb.MarkUploadFinishedRequest) (*debuginfopb.MarkUploadFinishedResponse, error) { ctx, span := s.tracer.Start(ctx, "MarkUploadFinished") defer span.End() span.SetAttributes(attribute.String("build_id", req.BuildId)) span.SetAttributes(attribute.String("upload_id", req.UploadId)) buildID := req.BuildId if err := validateInput(buildID); err != nil { return nil, status.Error(codes.InvalidArgument, err.Error()) } err := s.metadata.MarkAsUploaded(ctx, buildID, req.UploadId, req.Type, timestamppb.New(s.timeNow())) if errors.Is(err, ErrDebuginfoNotFound) { return nil, status.Error(codes.NotFound, "no debuginfo metadata found for build id") } if errors.Is(err, ErrUploadMetadataNotFound) { return nil, status.Error(codes.NotFound, "no debuginfo upload metadata found for build id") } if errors.Is(err, ErrUploadIDMismatch) { return nil, status.Error(codes.InvalidArgument, "upload id mismatch") } if err != nil { return nil, status.Error(codes.Internal, err.Error()) } return &debuginfopb.MarkUploadFinishedResponse{}, nil } func (s *Store) Upload(stream debuginfopb.DebuginfoService_UploadServer) error { if s.signedUpload.Enabled { return status.Error(codes.Unimplemented, "signed URL uploads are the only supported upload strategy for this service") } req, err := stream.Recv() if err != nil { return status.Errorf(codes.Unknown, "failed to receive upload info: %q", err) } var ( buildID = req.GetInfo().BuildId uploadID = req.GetInfo().UploadId r = &UploadReader{stream: stream} typ = req.GetInfo().Type ) ctx, span := s.tracer.Start(stream.Context(), "Upload") defer span.End() span.SetAttributes(attribute.String("build_id", buildID)) 
span.SetAttributes(attribute.String("upload_id", uploadID)) if err := s.upload(ctx, buildID, uploadID, typ, r); err != nil { return err } return stream.SendAndClose(&debuginfopb.UploadResponse{ BuildId: buildID, Size: r.size, }) } func (s *Store) upload(ctx context.Context, buildID, uploadID string, typ debuginfopb.DebuginfoType, r io.Reader) error { if err := validateInput(buildID); err != nil { return status.Errorf(codes.InvalidArgument, "invalid build ID: %q", err) } dbginfo, err := s.metadata.Fetch(ctx, buildID, typ) if err != nil { if errors.Is(err, ErrMetadataNotFound) { return status.Error(codes.FailedPrecondition, "metadata not found, this indicates that the upload was not previously initiated") } return status.Error(codes.Internal, err.Error()) } if dbginfo.Upload == nil { return status.Error(codes.FailedPrecondition, "upload metadata not found, this indicates that the upload was not previously initiated") } if dbginfo.Upload.Id != uploadID { return status.Error(codes.InvalidArgument, "the upload ID does not match the one returned by the InitiateUpload call") } if err := s.bucket.Upload(ctx, objectPath(buildID, typ), r); err != nil { return status.Error(codes.Internal, fmt.Errorf("upload debuginfo: %w", err).Error()) } return nil } func (s *Store) uploadIsStale(upload *debuginfopb.DebuginfoUpload) bool { return upload.StartedAt.AsTime().Add(s.maxUploadDuration + 2*time.Minute).Before(s.timeNow()) } func validateInput(id string) error { _, err := hex.DecodeString(id) if err != nil { return fmt.Errorf("failed to validate input: %w", err) } if len(id) <= 2 { return errors.New("unexpectedly short input") } return nil } func objectPath(buildID string, typ debuginfopb.DebuginfoType) string { switch typ { case debuginfopb.DebuginfoType_DEBUGINFO_TYPE_EXECUTABLE: return path.Join(buildID, "executable") case debuginfopb.DebuginfoType_DEBUGINFO_TYPE_SOURCES: return path.Join(buildID, "sources") default: return path.Join(buildID, "debuginfo") } }
})
random_line_split
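Read end to end, the STATE_UPLOADED branches of ShouldInitiateUpload form a small decision table over validity, the proposed hash, and the Force flag. The sketch below models just the SOURCE_UPLOAD case as a pure function with simplified local types so the branch order can be exercised in isolation; the field names are illustrative stand-ins, not the actual protobuf messages.

package main

import "fmt"

// Simplified stand-in for the upload metadata consulted by the server.
type meta struct {
	uploading bool   // a previous upload is recorded as in-flight
	stale     bool   // ...and it started longer ago than the allowed duration
	invalid   bool   // the quality check marked the debuginfo as not valid ELF
	hash      string // hash recorded for the stored debuginfo
}

// shouldInitiate mirrors the SOURCE_UPLOAD branch order: stale uploads may
// be retried, in-flight ones may not; valid debuginfo is re-accepted only
// under force; invalid debuginfo is re-accepted unless the proposed hash
// would reproduce the exact same invalid artifact.
func shouldInitiate(m meta, reqHash string, force bool) (bool, string) {
	if m.uploading {
		if m.stale {
			return true, "previous upload is stale; retry"
		}
		return false, "previous upload still in progress"
	}
	if !m.invalid {
		if force {
			return true, "already valid, but force requested"
		}
		return false, "already valid"
	}
	if reqHash == "" {
		return true, "stored copy invalid; hash unknown, so upload"
	}
	if m.hash == reqHash {
		return false, "same hash would yield the same invalid debuginfo"
	}
	return true, "stored copy invalid and new hash differs"
}

func main() {
	ok, why := shouldInitiate(meta{invalid: true, hash: "abc"}, "abc", false)
	fmt.Println(ok, why) // false same hash would yield the same invalid debuginfo
}

Keeping the check pure like this also explains why InitiateUpload can simply call ShouldInitiateUpload again server-side: the decision depends only on stored metadata and the request, so re-evaluating it is cheap insurance against clients that skip the pre-check.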
server.go
// Copyright (c) 2017-2020 VMware, Inc. or its affiliates // SPDX-License-Identifier: Apache-2.0 package hub import ( "context" "encoding/json" "fmt" "io" "net" "os" "strconv" "strings" "sync" "time" "github.com/greenplum-db/gp-common-go-libs/gplog" "github.com/hashicorp/go-multierror" "github.com/pkg/errors" "golang.org/x/xerrors" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/connectivity" "google.golang.org/grpc/reflection" grpcStatus "google.golang.org/grpc/status" "github.com/greenplum-db/gpupgrade/greenplum" "github.com/greenplum-db/gpupgrade/idl" "github.com/greenplum-db/gpupgrade/upgrade" "github.com/greenplum-db/gpupgrade/utils" "github.com/greenplum-db/gpupgrade/utils/daemon" "github.com/greenplum-db/gpupgrade/utils/log" ) var DialTimeout = 3 * time.Second // Returned from Server.Start() if Server.Stop() has already been called. var ErrHubStopped = errors.New("hub is stopped") type Dialer func(ctx context.Context, target string, opts ...grpc.DialOption) (*grpc.ClientConn, error) type Server struct { *Config StateDir string agentConns []*Connection grpcDialer Dialer mu sync.Mutex server *grpc.Server lis net.Listener // This is used both as a channel to communicate from Start() to // Stop() to indicate to Stop() that it can finally terminate // and also as a flag to communicate from Stop() to Start() that // Stop() had already been called, so no need to do anything further // in Start(). // Note that when used as a flag, a nil value means that Stop() has // been called. stopped chan struct{} daemon bool } type Connection struct { Conn *grpc.ClientConn AgentClient idl.AgentClient Hostname string CancelContext func() } func New(conf *Config, grpcDialer Dialer, stateDir string) *Server { h := &Server{ Config: conf, StateDir: stateDir, stopped: make(chan struct{}, 1), grpcDialer: grpcDialer, } return h } // MakeDaemon tells the Server to disconnect its stdout/stderr streams after // successfully starting up. func (s *Server) MakeDaemon() { s.daemon = true } func (s *Server) Start() error { lis, err := net.Listen("tcp", ":"+strconv.Itoa(s.Port)) if err != nil { return xerrors.Errorf("listen on port %d: %w", s.Port, err) } // Set up an interceptor function to log any panics we get from request // handlers. interceptor := func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) { defer log.WritePanics() return handler(ctx, req) } server := grpc.NewServer(grpc.UnaryInterceptor(interceptor)) s.mu.Lock() if s.stopped == nil { // Stop() has already been called; return without serving. 
s.mu.Unlock() return ErrHubStopped } s.server = server s.lis = lis s.mu.Unlock() idl.RegisterCliToHubServer(server, s) reflection.Register(server) if s.daemon { fmt.Printf("Hub started on port %d (pid %d)\n", s.Port, os.Getpid()) daemon.Daemonize() } err = server.Serve(lis) if err != nil { err = xerrors.Errorf("serve: %w", err) } // inform Stop() that it is OK to stop now s.stopped <- struct{}{} return err } func (s *Server) StopServices(ctx context.Context, in *idl.StopServicesRequest) (*idl.StopServicesReply, error) { err := s.StopAgents() if err != nil { gplog.Debug("failed to stop agents: %#v", err) } s.Stop(false) return &idl.StopServicesReply{}, nil } // TODO: add unit tests for this; this is currently tricky due to h.AgentConns() // mutating global state func (s *Server) StopAgents() error { // FIXME: s.AgentConns() fails fast if a single agent isn't available // we need to connect to all available agents so we can stop just those _, err := s.AgentConns() if err != nil { return err } var wg sync.WaitGroup errs := make(chan error, len(s.agentConns)) for _, conn := range s.agentConns { conn := conn wg.Add(1) go func() { defer wg.Done() _, err := conn.AgentClient.StopAgent(context.Background(), &idl.StopAgentRequest{}) if err == nil { // no error means the agent did not terminate as expected errs <- xerrors.Errorf("failed to stop agent on host: %s", conn.Hostname) return } // XXX: "transport is closing" is not documented but is needed to uniquely interpret codes.Unavailable // https://github.com/grpc/grpc/blob/v1.24.0/doc/statuscodes.md errStatus := grpcStatus.Convert(err) if errStatus.Code() != codes.Unavailable || errStatus.Message() != "transport is closing" { errs <- xerrors.Errorf("failed to stop agent on host %s: %w", conn.Hostname, err) } }() } wg.Wait() close(errs) var multiErr *multierror.Error for err := range errs { multiErr = multierror.Append(multiErr, err) } return multiErr.ErrorOrNil() } func (s *Server) Stop(closeAgentConns bool) { s.mu.Lock() defer s.mu.Unlock() // StopServices calls Stop(false) because it has already closed the agentConns if closeAgentConns { s.closeAgentConns() } if s.server != nil { s.server.Stop() <-s.stopped // block until it is OK to stop } // Mark this server stopped so that a concurrent Start() doesn't try to // start things up again. s.stopped = nil } func (s *Server) RestartAgents(ctx context.Context, in *idl.RestartAgentsRequest) (*idl.RestartAgentsReply, error) { restartedHosts, err := RestartAgents(ctx, nil, AgentHosts(s.Source), s.AgentPort, s.StateDir) return &idl.RestartAgentsReply{AgentHosts: restartedHosts}, err } func RestartAgents(ctx context.Context, dialer func(context.Context, string) (net.Conn, error), hostnames []string, port int, stateDir string) ([]string, error) { var wg sync.WaitGroup restartedHosts := make(chan string, len(hostnames)) errs := make(chan error, len(hostnames)) for _, host := range hostnames { wg.Add(1) go func(host string) { defer wg.Done() address := host + ":" + strconv.Itoa(port) timeoutCtx, cancelFunc := context.WithTimeout(ctx, 3*time.Second) opts := []grpc.DialOption{ grpc.WithBlock(), grpc.WithInsecure(), grpc.FailOnNonTempDialError(true), } if dialer != nil { opts = append(opts, grpc.WithContextDialer(dialer)) } conn, err := grpc.DialContext(timeoutCtx, address, opts...) 
cancelFunc() if err == nil { err = conn.Close() if err != nil { gplog.Error("failed to close agent connection to %s: %+v", host, err) } return } gplog.Debug("failed to dial agent on %s: %+v", host, err) gplog.Info("starting agent on %s", host) agentPath, err := getAgentPath() if err != nil { errs <- err return } cmd := execCommand("ssh", host, fmt.Sprintf("bash -c \"%s agent --daemonize --port %d --state-directory %s\"", agentPath, port, stateDir)) stdout, err := cmd.Output() if err != nil { errs <- err return } gplog.Debug(string(stdout)) restartedHosts <- host }(host) } wg.Wait() close(errs) close(restartedHosts) var hosts []string for h := range restartedHosts { hosts = append(hosts, h) } var multiErr *multierror.Error for err := range errs { multiErr = multierror.Append(multiErr, err) } return hosts, multiErr.ErrorOrNil() } func (s *Server) AgentConns() ([]*Connection, error) { // Lock the mutex to protect against races with Server.Stop(). // XXX This is a *ridiculously* broad lock. Have fun waiting for the dial // timeout when calling Stop() and AgentConns() at the same time, for // instance. We should not lock around a network operation, but it seems // like the AgentConns concept is not long for this world anyway. s.mu.Lock() defer s.mu.Unlock() if s.agentConns != nil { err := EnsureConnsAreReady(s.agentConns) if err != nil { gplog.Error("ensureConnsAreReady failed: %s", err) return nil, err } return s.agentConns, nil } hostnames := AgentHosts(s.Source) for _, host := range hostnames { ctx, cancelFunc := context.WithTimeout(context.Background(), DialTimeout) conn, err := s.grpcDialer(ctx, host+":"+strconv.Itoa(s.AgentPort), grpc.WithInsecure(), grpc.WithBlock()) if err != nil { err = xerrors.Errorf("grpcDialer failed: %w", err) gplog.Error(err.Error()) cancelFunc() return nil, err } s.agentConns = append(s.agentConns, &Connection{ Conn: conn, AgentClient: idl.NewAgentClient(conn), Hostname: host, CancelContext: cancelFunc, }) } return s.agentConns, nil } func EnsureConnsAreReady(agentConns []*Connection) error { hostnames := []string{} for _, conn := range agentConns { if conn.Conn.GetState() != connectivity.Ready { hostnames = append(hostnames, conn.Hostname) } } if len(hostnames) > 0 { return fmt.Errorf("the connections to the following hosts were not ready: %s", strings.Join(hostnames, ",")) } return nil } // Closes all h.agentConns. Callers must hold the Server's mutex. // TODO: this function assumes that all h.agentConns are _not_ in a terminal // state (e.g. already closed). If one is, conn.Conn.WaitForStateChange() can block // indefinitely. func (s *Server) closeAgentConns() { for _, conn := range s.agentConns { defer conn.CancelContext() currState := conn.Conn.GetState() err := conn.Conn.Close() if err != nil { gplog.Info(fmt.Sprintf("Error closing hub to agent connection. host: %s, err: %s", conn.Hostname, err.Error())) } conn.Conn.WaitForStateChange(context.Background(), currState) } } type InitializeConfig struct { Standby greenplum.SegConfig Master greenplum.SegConfig Primaries []greenplum.SegConfig Mirrors []greenplum.SegConfig } // Config contains all the information that will be persisted to/loaded // from disk during calls to Save() and Load(). type Config struct { Source *greenplum.Cluster Target *greenplum.Cluster // TargetInitializeConfig contains all the info needed to initialize the // target cluster's master, standby, primaries and mirrors. 
TargetInitializeConfig InitializeConfig Port int AgentPort int UseLinkMode bool UpgradeID upgrade.ID // Tablespaces contains the tablespaces in the database, keyed by // dbid and tablespace oid Tablespaces greenplum.Tablespaces TablespacesMappingFilePath string } func (c *Config) Load(r io.Reader) error { dec := json.NewDecoder(r) return dec.Decode(c) } func (c *Config) Save(w io.Writer) error { enc := json.NewEncoder(w) enc.SetIndent("", " ") return enc.Encode(c) } // SaveConfig persists the hub's configuration to disk. func (s *Server) SaveConfig() (err error) { // TODO: Switch to an atomic implementation like renameio. Consider what // happens if Config.Save() panics: we'll have truncated the file // on disk and the hub will be unable to recover. For now, since we normally // only save the configuration during initialize and any configuration // errors could be fixed by reinitializing, the risk seems small. file, err := utils.System.Create(upgrade.GetConfigFile()) if err != nil { return err } defer func() { if cerr := file.Close(); cerr != nil { cerr = xerrors.Errorf("closing hub configuration: %w", cerr) err = multierror.Append(err, cerr).ErrorOrNil() } }() err = s.Config.Save(file) if err != nil { return xerrors.Errorf("saving hub configuration: %w", err) } return nil } func LoadConfig(conf *Config, path string) error { file, err := os.Open(path) if err != nil { return xerrors.Errorf("opening configuration file: %w", err) } defer file.Close() err = conf.Load(file) if err != nil { return xerrors.Errorf("reading configuration file: %w", err) } return nil } func AgentHosts(c *greenplum.Cluster) []string { uniqueHosts := make(map[string]bool) excludingMaster := func(seg *greenplum.SegConfig) bool { return !seg.IsMaster() } for _, seg := range c.SelectSegments(excludingMaster) { uniqueHosts[seg.Hostname] = true } hosts := make([]string, 0) for host := range uniqueHosts { hosts = append(hosts, host) } return hosts } func
(target *greenplum.Cluster) *idl.Message { data := make(map[string]string) data[idl.ResponseKey_target_port.String()] = strconv.Itoa(target.MasterPort()) data[idl.ResponseKey_target_master_data_directory.String()] = target.MasterDataDir() return &idl.Message{ Contents: &idl.Message_Response{ Response: &idl.Response{Data: data}, }, } }
MakeTargetClusterMessage
identifier_name
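StopAgents and RestartAgents above share one concurrency shape: a goroutine per host, channels buffered to len(hosts) so no sender ever blocks, wg.Wait() before close(), then a drain into a multierror. A minimal sketch of that fan-out/collect pattern, with a hypothetical stop callback and made-up hostnames:

package main

import (
	"fmt"
	"sync"

	"github.com/hashicorp/go-multierror"
)

// stopAll fans out one goroutine per host and aggregates failures.
// The error channel is buffered to len(hosts), so every goroutine can
// send without blocking even though draining happens only after Wait.
func stopAll(hosts []string, stop func(host string) error) error {
	errs := make(chan error, len(hosts))
	var wg sync.WaitGroup
	for _, host := range hosts {
		host := host // capture the loop variable (pre-Go 1.22 semantics)
		wg.Add(1)
		go func() {
			defer wg.Done()
			if err := stop(host); err != nil {
				errs <- fmt.Errorf("host %s: %w", host, err)
			}
		}()
	}
	wg.Wait()
	close(errs) // safe: all senders have finished

	var multiErr *multierror.Error
	for err := range errs {
		multiErr = multierror.Append(multiErr, err)
	}
	return multiErr.ErrorOrNil() // nil when nothing failed
}

func main() {
	err := stopAll([]string{"sdw1", "sdw2"}, func(host string) error {
		if host == "sdw2" {
			return fmt.Errorf("agent unreachable")
		}
		return nil
	})
	fmt.Println(err)
}

The buffered channel plus close-after-Wait sequencing is what lets the range loop terminate; an unbuffered channel here would deadlock, since nothing reads until every goroutine has returned.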