Dataset columns: content (string, 4 to 1.04M chars), lang (string, 358 classes), score (int64, 0 to 5), repo_name (string, 5 to 114 chars), repo_path (string, 4 to 229 chars), repo_licenses (sequence, 1 to 8 entries).
#![feature(unboxed_closures)]

trait Trait {}

fn f<F:Trait(isize) -> isize>(x: F) {}
//~^ ERROR this trait takes 0 generic arguments but 1 generic argument
//~| ERROR associated type `Output` not found for `Trait`

fn main() {}
Rust
2
mbc-git/rust
src/test/ui/unboxed-closures/unboxed-closure-sugar-wrong-trait.rs
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
function assert --wraps test
    if builtin test $argv
        emit assertion_success
    else
        set -l assertion_status $status
        emit assertion_error (assert.error_message $argv)
        return $assertion_status
    end
end
fish
4
codetriage-readme-bot/oh-my-fish
pkg/fish-spec/functions/assert.fish
[ "MIT" ]
<div class="row-fluid">
  <div class="span9">
    {{message}}
    <p class="from-server">Posted by <a href="#servers/{{server_id}}">{{server}}</a></p>
  </div>
  <div class="span3">
    <time class="timeago datetime" title="{{timestamp}}" datetime="{{iso}}"></time>
  </div>
</div>
Handlebars
3
zadcha/rethinkdb
admin/static/handlebars/log-entry.hbs
[ "Apache-2.0" ]
fileFormatVersion: 2
guid: 61f80571717f9114fb4c577bfd90e7b4
PrefabImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:
Unity3D Asset
1
samisuleman10/open-project-1
UOP1_Project/Assets/Prefabs/_Compositions/Nature/Stump_WithShiitake.prefab.meta
[ "Apache-2.0" ]
@import "./test/library.css";

main {
  display: none;
}
CSS
1
idanilt/parcel
packages/core/integration-tests/test/integration/sourcemap-css-existing/style.css
[ "MIT" ]
(ns lt.util.events
  "Provide DOM event related functions.")

(defn capture
  "Add function `handler` to trigger when event listener `ev` fires on `elem`.
   If `elem` is not provided then the event `ev` and its `handler` are bound
   to the document."
  ([ev handler]
   (capture js/document ev handler))
  ([elem ev handler]
   (.addEventListener elem (name ev) handler true)))
Clojure
5
sam-aldis/LightTable
src/lt/util/events.cljs
[ "MIT" ]
// run-pass
// After the work to reoptimize structs, it became possible for immediate logic to fail.
// This test verifies that it actually works.

fn main() {
    let c = |a: u8, b: u16, c: u8| {
        assert_eq!(a, 1);
        assert_eq!(b, 2);
        assert_eq!(c, 3);
    };
    c(1, 2, 3);
}
Rust
4
Eric-Arellano/rust
src/test/ui/functions-closures/closure-immediate.rs
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
#lang scribble/manual

@begin[(require "../utils.rkt"
                (for-label typed/racket/base)
                (for-label (only-in rnrs/lists-6 fold-left)))]

@title[#:tag "varargs"]{Variable-Arity Functions: Programming with Rest Arguments}

Typed Racket can handle some uses of rest arguments.

@section{Uniform Variable-Arity Functions}

In Racket, one can write a function that takes an arbitrary number of arguments as follows:

@racketmod[
racket
(define (sum . xs)
  (if (null? xs)
      0
      (+ (car xs) (apply sum (cdr xs)))))

(sum)
(sum 1 2 3 4)
(sum 1 3)]

The arguments to the function that are in excess of the non-rest arguments are converted to a list which is assigned to the rest parameter. So the examples above evaluate to @racketresult[0], @racketresult[10], and @racketresult[4].

We can define such functions in Typed Racket as well:

@racketmod[
typed/racket
(: sum (-> Number #,** Number))
(define (sum . xs)
  (if (null? xs)
      0
      (+ (car xs) (apply sum (cdr xs)))))]

This type can be assigned to the function when each element of the rest parameter is used at the same type.

@section{Non-Uniform Variable-Arity Functions}

However, the rest argument may be used as a heterogeneous list. Take this (simplified) definition of the R6RS function @racket[fold-left]:

@racketmod[
racket
(define (fold-left f i as . bss)
  (if (or (null? as) (ormap null? bss))
      i
      (apply fold-left
             f
             (apply f i (car as) (map car bss))
             (cdr as)
             (map cdr bss))))

(fold-left + 0 (list 1 2 3 4) (list 5 6 7 8))
(fold-left + 0 (list 1 2 3) (list 2 3 4) (list 3 4 5) (list 4 5 6))
(fold-left (λ (i v n s) (string-append i (vector-ref v n) s))
           ""
           (list (vector "A cat" "A dog" "A mouse")
                 (vector "tuna" "steak" "cheese"))
           (list 0 2)
           (list " does not eat " "."))]

Here the different lists that make up the rest argument @racket[bss] can be of different types, but the type of each list in @racket[bss] corresponds to the type of the corresponding argument of @racket[f]. We also know that, in order to avoid arity errors, the length of @racket[bss] must be two less than the arity of @racket[f]. The first argument to @racket[f] is the accumulator, and @racket[as] corresponds to the second argument of @racket[f].

The example uses of @racket[fold-left] evaluate to @racketresult[36], @racketresult[42], and @racketresult["A cat does not eat cheese."].

In Typed Racket, we can define @racket[fold-left] as follows:

@racketmod[
typed/racket
(: fold-left (All (C A B ...)
                  (-> (-> C A B ... B C) C (Listof A) (Listof B) ... B
                      C)))
(define (fold-left f i as . bss)
  (if (or (null? as) (ormap null? bss))
      i
      (apply fold-left
             f
             (apply f i (car as) (map car bss))
             (cdr as)
             (map cdr bss))))]

Note that the type variable @racket[B] is followed by an ellipsis. This denotes that @racket[B] is a dotted type variable which corresponds to a list of types, much as a rest argument corresponds to a list of values. When the type of @racket[fold-left] is instantiated at a list of types, then each type @racket[t] which is bound by @racket[B] (notated by the dotted pre-type @racket[t ... B]) is expanded to a number of copies of @racket[t] equal to the length of the sequence assigned to @racket[B]. Then @racket[B] in each copy is replaced with the corresponding type from the sequence.

So the type of @racket[(inst fold-left Integer Boolean String Number)] is
@racket[(-> (-> Integer Boolean String Number Integer)
            Integer (Listof Boolean) (Listof String) (Listof Number)
            Integer)].
Racket
5
SnapCracklePopGone/typed-racket
typed-racket-doc/typed-racket/scribblings/guide/varargs.scrbl
[ "Apache-2.0", "MIT" ]
sleep 5
t app appmode photo
sleep 3
t app button shutter P
sleep 3
t app button shutter R
AGS Script
1
waltersgrey/autoexechack
ContinuousPhotoHacks/3SPS/10SecsShoots10Photos/autoexec.ash
[ "MIT" ]
#!/usr/bin/env sh
# Qtlocation plugin with extra fields parsed from api response

cd /tmp
git clone https://github.com/commaai/qtlocation.git
cd qtlocation

qmake
make -j$(nproc)
Shell
2
robin-reckmann/openpilot
third_party/qt-plugins/build_qtlocation.sh
[ "MIT" ]
#include "../ngx_tcp_upstream_check.h"
#include <stdio.h>
#include <assert.h>
#include <stdlib.h>
#include <ctype.h>
#include <string.h>

#define LEN(AT, FPC) (FPC - buffer - parser->AT)
#define MARK(M,FPC) (parser->M = (FPC) - buffer)
#define PTR_TO(F) (buffer + parser->F)

/** Machine **/

%%{
    machine smtp_parser;

    action mark {MARK(mark, fpc);}

    action domain {
        if(parser->domain != NULL) {
            parser->domain(parser->data, PTR_TO(mark), LEN(mark, fpc));
        }
    }

    action greeting_text {
        if(parser->greeting_text != NULL)
            parser->greeting_text(parser->data, PTR_TO(mark), LEN(mark, fpc));
    }

    action reply_code {
        if(parser->reply_code != NULL)
            parser->reply_code(parser->data, PTR_TO(mark), LEN(mark,fpc));
    }

    action reply_text {
        if(parser->reply_text != NULL)
            parser->reply_text(parser->data, PTR_TO(mark), LEN(mark,fpc));
    }

    action done {
        if(parser->smtp_done != NULL)
            parser->smtp_done(parser->data, fpc + 1, pe - fpc - 1);
        fbreak;
    }

    #### SMTP PROTOCOL GRAMMAR

    CRLF = "\r\n";
    SP = " ";
    Let_dig = alnum;
    Ldh_str = ( alnum | "-" )* alnum;
    Snum = digit{1,3};

    #Standardized_tag = Ldh_str;
    #Not supported yet
    #General_address_literal = Standardized_tag ":" content{1,d};

    IPv4_address_literal = Snum ("." Snum){3};
    IPv6_hex = xdigit{1,4};
    IPv6_full = IPv6_hex ( ":" IPv6_hex ){7};
    IPv6_comp = (IPv6_hex (":" IPv6_hex){0,5})? "::" (IPv6_hex (":" IPv6_hex){0,5})?;
    IPv6v4_full = IPv6_hex (":" IPv6_hex){5} ":" IPv4_address_literal;
    IPv6v4_comp = (IPv6_hex (":" IPv6_hex){0,3})? "::" (IPv6_hex (":" IPv6_hex){0,3} ":")? IPv4_address_literal;
    IPv6_addr = ( IPv6_full | IPv6_comp | IPv6v4_full | IPv6v4_comp );
    IPv6_address_literal = "IPv6:" IPv6_addr;
    Sub_domain = Let_dig Ldh_str?;

    #Address_literal = "[" ( Pv4_address_literal | IPv6_address_literal | General_address_literal ) "]";
    Address_literal = "[" ( IPv4_address_literal | IPv6_address_literal ) "]";

    #It should be '+', but smtp.163.com sucks.
    #Domain = (( Sub_domain ( '.' Sub_domain )+ ) | Address_literal ) >mark %domain;
    Domain = (( Sub_domain ( '.' Sub_domain )? ) | Address_literal ) >mark %domain;

    Greeting_text = ( ascii -- ("\r" | "\n") )+ >mark %greeting_text;
    Greeting_line = "220 " Domain ( SP Greeting_text )? CRLF;

    Reply_code = ( digit+ ) >mark %reply_code;
    Ehlo_keyword = Let_dig ( Let_dig | "-" )*;
    Ehlo_param = ( ascii -- ( cntrl | SP ) )+;

    #the "=" is not in the RFC; for the reason, see also: http://www.linuxquestions.org/questions/linux-networking-3/qmail-auth-login-auth%3Dlogin-arghhhhhhhh-226524/
    Ehlo_line = ( Ehlo_keyword ( ( SP | "=" ) Ehlo_param )* ) >mark %reply_text;

    Ehlo_reply_ok = ( ( "250" Domain ( SP Greeting_text )? CRLF )
                    | ("250-" Domain ( SP Greeting_text)? CRLF
                       ( "250-" Ehlo_line CRLF )*
                       Reply_code SP Ehlo_line CRLF) );

    Reply_text = ( ascii -- ("\r" | "\n") )+ >mark %reply_text;
    General_reply_line = Reply_code ( SP Reply_text )? CRLF;

    Reply_line = ( General_reply_line | Ehlo_reply_ok );

    Response = Greeting_line Reply_line @done;

    main := Response;
}%%

/** Data **/
%% write data;

int smtp_parser_init(smtp_parser *parser) {
    int cs = 0;
    %% write init;
    parser->cs = cs;
    parser->mark = 0;
    parser->nread = 0;
    return(1);
}

/** exec **/
size_t smtp_parser_execute(smtp_parser *parser, const signed char *buffer, size_t len, size_t off) {
    const signed char *p, *pe;
    int cs = parser->cs;

    assert(off <= len && "offset past end of buffer");

    p = buffer + off;
    pe = buffer + len;

    %% write exec;

    if (!smtp_parser_has_error(parser))
        parser->cs = cs;

    parser->nread += p - (buffer + off);
    return(parser->nread);
}

int smtp_parser_finish(smtp_parser *parser) {
    if (smtp_parser_has_error(parser) ) {
        return -1;
    } else if (smtp_parser_is_finished(parser) ) {
        return 1;
    } else {
        return 0;
    }
}

int smtp_parser_has_error(smtp_parser *parser) {
    return parser->cs == smtp_parser_error;
}

int smtp_parser_is_finished(smtp_parser *parser) {
    return parser->cs >= smtp_parser_first_final;
}
Ragel in Ruby Host
4
guileen/SEnginx
3rd-party/nginx_tcp_proxy_module/parsers/smtp_response_parser.rl
[ "BSD-2-Clause" ]
{-
   Alertmanager API
   API of the Prometheus Alertmanager (https://github.com/prometheus/alertmanager)

   OpenAPI spec version: 0.0.1

   NOTE: This file is auto generated by the openapi-generator.
   https://github.com/openapitools/openapi-generator.git
   Do not edit this file manually.
-}


module Data.Receiver exposing (Receiver, decoder, encoder)

import Dict exposing (Dict)
import Json.Decode as Decode exposing (Decoder)
import Json.Decode.Pipeline exposing (optional, required)
import Json.Encode as Encode


type alias Receiver =
    { name : String
    }


decoder : Decoder Receiver
decoder =
    Decode.succeed Receiver
        |> required "name" Decode.string


encoder : Receiver -> Encode.Value
encoder model =
    Encode.object
        [ ( "name", Encode.string model.name )
        ]
Elm
4
jtlisi/alertmanager
ui/app/src/Data/Receiver.elm
[ "ECL-2.0", "Apache-2.0" ]
/*
 File: Reachability.m
 Abstract: Basic demonstration of how to use the SystemConfiguration Reachability APIs.
 Version: 2.2

 Disclaimer: IMPORTANT: This Apple software is supplied to you by Apple Inc. ("Apple") in consideration of your agreement to the following terms, and your use, installation, modification or redistribution of this Apple software constitutes acceptance of these terms. If you do not agree with these terms, please do not use, install, modify or redistribute this Apple software.

 In consideration of your agreement to abide by the following terms, and subject to these terms, Apple grants you a personal, non-exclusive license, under Apple's copyrights in this original Apple software (the "Apple Software"), to use, reproduce, modify and redistribute the Apple Software, with or without modifications, in source and/or binary forms; provided that if you redistribute the Apple Software in its entirety and without modifications, you must retain this notice and the following text and disclaimers in all such redistributions of the Apple Software. Neither the name, trademarks, service marks or logos of Apple Inc. may be used to endorse or promote products derived from the Apple Software without specific prior written permission from Apple. Except as expressly stated in this notice, no other rights or licenses, express or implied, are granted by Apple herein, including but not limited to any patent rights that may be infringed by your derivative works or by other works in which the Apple Software may be incorporated.

 The Apple Software is provided by Apple on an "AS IS" basis. APPLE MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.

 IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

 Copyright (C) 2010 Apple Inc. All Rights Reserved.
*/

#import <sys/socket.h>
#import <netinet/in.h>
#import <netinet6/in6.h>
#import <arpa/inet.h>
#import <ifaddrs.h>
#import <netdb.h>
#import <CoreFoundation/CoreFoundation.h>

#import "comm/objc/Reachability.h"

#if TARGET_OS_WATCH
//nothing
#else

#define kShouldPrintReachabilityFlags 0

static MarsReachability* gs_MarsnetReach = [[MarsReachability reachabilityForInternetConnection] retain];
static BOOL gs_Marsstartnotify = [gs_MarsnetReach MarsstartNotifier];
static MarsNetworkStatus gs_Marsstatus = [gs_MarsnetReach currentReachabilityStatus];

static void PrintReachabilityFlags(SCNetworkReachabilityFlags flags, const char* comment) {
#if kShouldPrintReachabilityFlags
#if TARGET_OS_IPHONE
    NSLog(@"Reachability Flag Status: %c%c %c%c%c%c%c%c%c %s\n",
          (flags & kSCNetworkReachabilityFlagsIsWWAN)               ? 'W' : '-',
#else
    NSLog(@"Reachability Flag Status: %c %c%c%c%c%c%c%c %s\n",
#endif
          (flags & kSCNetworkReachabilityFlagsReachable)            ? 'R' : '-',
          (flags & kSCNetworkReachabilityFlagsTransientConnection)  ? 't' : '-',
          (flags & kSCNetworkReachabilityFlagsConnectionRequired)   ? 'c' : '-',
          (flags & kSCNetworkReachabilityFlagsConnectionOnTraffic)  ? 'C' : '-',
          (flags & kSCNetworkReachabilityFlagsInterventionRequired) ? 'i' : '-',
          (flags & kSCNetworkReachabilityFlagsConnectionOnDemand)   ? 'D' : '-',
          (flags & kSCNetworkReachabilityFlagsIsLocalAddress)       ? 'l' : '-',
          (flags & kSCNetworkReachabilityFlagsIsDirect)             ? 'd' : '-',
          comment
          );
#endif
}

@implementation MarsReachability

static void ReachabilityCallback(SCNetworkReachabilityRef target, SCNetworkReachabilityFlags flags, void* info) {
#pragma unused (target, flags)
    NSCAssert(info != NULL, @"info was NULL in ReachabilityCallback");
    NSCAssert([(NSObject*) info isKindOfClass: [MarsReachability class]], @"info was wrong class in ReachabilityCallback");

    // We're on the main RunLoop, so an NSAutoreleasePool is not necessary, but is added defensively
    // in case someone uses the Reachability object in a different thread.
    NSAutoreleasePool* myPool = [[NSAutoreleasePool alloc] init];

    MarsReachability* noteObject = (MarsReachability*) info;
    gs_Marsstatus = [noteObject currentReachabilityStatus];
    // Post a notification to notify the client that the network reachability changed.
    [[NSNotificationCenter defaultCenter] postNotificationName: kReachabilityChangedNotification object: noteObject];

    [myPool release];
}

- (BOOL) MarsstartNotifier {
    BOOL retVal = NO;
    SCNetworkReachabilityContext context = {0, self, NULL, NULL, NULL};
    if(SCNetworkReachabilitySetCallback(reachabilityRef, ReachabilityCallback, &context)) {
        // use CFRunLoopGetMain instead of CFRunLoopGetCurrent, because threads created by pthread do not have runloop
        if(SCNetworkReachabilityScheduleWithRunLoop(reachabilityRef, CFRunLoopGetMain(), kCFRunLoopDefaultMode)) {
            retVal = YES;
        }
    }
    return retVal;
}

- (void) MarsstopNotifier {
    if(reachabilityRef != NULL) {
        // use CFRunLoopGetMain instead of CFRunLoopGetCurrent, because threads created by pthread do not have runloop
        SCNetworkReachabilityUnscheduleFromRunLoop(reachabilityRef, CFRunLoopGetMain(), kCFRunLoopDefaultMode);
    }
}

- (void) dealloc {
    [self MarsstopNotifier];
    if(reachabilityRef != NULL) {
        CFRelease(reachabilityRef);
    }
    [super dealloc];
}

+ (MarsReachability*) reachabilityWithHostName: (NSString*) hostName;
{
    MarsReachability* retVal = NULL;
    SCNetworkReachabilityRef reachability = SCNetworkReachabilityCreateWithName(NULL, [hostName UTF8String]);
    if(reachability != NULL) {
        retVal = [[[self alloc] init] autorelease];
        if(retVal != NULL) {
            retVal->reachabilityRef = reachability;
            retVal->localWiFiRef = NO;
        }
    }
    return retVal;
}

+ (MarsReachability*) reachabilityWithAddress: (const struct sockaddr*) hostAddress;
{
    SCNetworkReachabilityRef reachability = SCNetworkReachabilityCreateWithAddress(kCFAllocatorDefault, (const struct sockaddr*)hostAddress);
    MarsReachability* retVal = NULL;
    if(reachability != NULL) {
        retVal = [[[self alloc] init] autorelease];
        if(retVal != NULL) {
            retVal->reachabilityRef = reachability;
            retVal->localWiFiRef = NO;
        }
    }
    return retVal;
}

+ (MarsReachability*) reachabilityForInternetConnection;
{
    struct sockaddr_in zeroAddress;
    bzero(&zeroAddress, sizeof(zeroAddress));
    zeroAddress.sin_len = sizeof(zeroAddress);
    zeroAddress.sin_family = AF_INET;
    MarsReachability* netReach = [self reachabilityWithAddress: (const struct sockaddr*)&zeroAddress];
    if (NotReachable != [netReach currentReachabilityStatus])
        return netReach;

    struct sockaddr_in6 zeroAddress6;
    bzero(&zeroAddress6, sizeof(zeroAddress6));
    zeroAddress6.sin6_len = sizeof(zeroAddress6);
    zeroAddress6.sin6_family = AF_INET6;
    return [self reachabilityWithAddress: (const struct sockaddr*)&zeroAddress6];
}

+ (MarsReachability*) reachabilityForLocalWiFi;
{
    struct sockaddr_in localWifiAddress;
    bzero(&localWifiAddress, sizeof(localWifiAddress));
    localWifiAddress.sin_len = sizeof(localWifiAddress);
    localWifiAddress.sin_family = AF_INET;
    // IN_LINKLOCALNETNUM is defined in <netinet/in.h> as 169.254.0.0
    localWifiAddress.sin_addr.s_addr = htonl(IN_LINKLOCALNETNUM);
    MarsReachability* retVal = [self reachabilityWithAddress: (const struct sockaddr*)&localWifiAddress];
    if(retVal != NULL) {
        retVal->localWiFiRef = YES;
    }
    return retVal;
}

+ (MarsNetworkStatus) getCacheReachabilityStatus:(BOOL) flash {
    if(!flash) return gs_Marsstatus;
    gs_Marsstatus = [[self reachabilityForInternetConnection] currentReachabilityStatus];
    return gs_Marsstatus;
}

#pragma mark Network Flag Handling

- (MarsNetworkStatus) localWiFiStatusForFlags: (SCNetworkReachabilityFlags) flags {
    PrintReachabilityFlags(flags, "localWiFiStatusForFlags");
    MarsNetworkStatus retVal = NotReachable;
    if((flags & kSCNetworkReachabilityFlagsReachable) && (flags & kSCNetworkReachabilityFlagsIsDirect)) {
        retVal = ReachableViaWiFi;
    }
    return retVal;
}

- (MarsNetworkStatus) networkStatusForFlags: (SCNetworkReachabilityFlags) flags {
    PrintReachabilityFlags(flags, "networkStatusForFlags");

    if ((flags & kSCNetworkReachabilityFlagsReachable) == 0) {
        // if target host is not reachable
        return NotReachable;
    }

    MarsNetworkStatus retVal = NotReachable;

    if ((flags & kSCNetworkReachabilityFlagsConnectionRequired) == 0) {
        // if target host is reachable and no connection is required
        // then we'll assume (for now) that you're on Wi-Fi
        retVal = ReachableViaWiFi;
    }

    if ((((flags & kSCNetworkReachabilityFlagsConnectionOnDemand) != 0) ||
         (flags & kSCNetworkReachabilityFlagsConnectionOnTraffic) != 0)) {
        // ... and the connection is on-demand (or on-traffic) if the
        // calling application is using the CFSocketStream or higher APIs
        if ((flags & kSCNetworkReachabilityFlagsInterventionRequired) == 0) {
            // ... and no [user] intervention is needed
            retVal = ReachableViaWiFi;
        }
    }

#if TARGET_OS_IPHONE
    if ((flags & kSCNetworkReachabilityFlagsIsWWAN) == kSCNetworkReachabilityFlagsIsWWAN) {
        // ... but WWAN connections are OK if the calling application
        // is using the CFNetwork (CFSocketStream?) APIs.
        retVal = ReachableViaWWAN;
    }
#endif
    return retVal;
}

- (BOOL) connectionRequired;
{
    NSAssert(reachabilityRef != NULL, @"connectionRequired called with NULL reachabilityRef");
    SCNetworkReachabilityFlags flags;
    if (SCNetworkReachabilityGetFlags(reachabilityRef, &flags)) {
        return (flags & kSCNetworkReachabilityFlagsConnectionRequired);
    }
    return NO;
}

- (MarsNetworkStatus) currentReachabilityStatus {
    NSAssert(reachabilityRef != NULL, @"currentNetworkStatus called with NULL reachabilityRef");
    MarsNetworkStatus retVal = NotReachable;
    SCNetworkReachabilityFlags flags;
    if (SCNetworkReachabilityGetFlags(reachabilityRef, &flags)) {
        if(localWiFiRef) {
            retVal = [self localWiFiStatusForFlags: flags];
        } else {
            retVal = [self networkStatusForFlags: flags];
        }
    }
    return retVal;
}

@end
#endif

void comm_export_symbols_2(){}
Objective-C++
5
jonetomtom/mars
mars/comm/objc/Reachability.mm
[ "BSD-2-Clause", "Apache-2.0" ]
#define PY_SSIZE_T_CLEAN

#include <Python.h>
#include <stdlib.h>
#include <inttypes.h>

int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size);

static PyObject* _fuzz_run(PyObject* self, PyObject* args) {
    const char* buf;
    Py_ssize_t size;
    if (!PyArg_ParseTuple(args, "s#", &buf, &size)) {
        return NULL;
    }
    int rv = LLVMFuzzerTestOneInput((const uint8_t*)buf, size);
    if (PyErr_Occurred()) {
        return NULL;
    }
    if (rv != 0) {
        // Nonzero return codes are reserved for future use.
        PyErr_Format(
            PyExc_RuntimeError, "Nonzero return code from fuzzer: %d", rv);
        return NULL;
    }
    Py_RETURN_NONE;
}

static PyMethodDef module_methods[] = {
    {"run", (PyCFunction)_fuzz_run, METH_VARARGS, ""},
    {NULL},
};

static struct PyModuleDef _fuzzmodule = {
    PyModuleDef_HEAD_INIT,
    "_fuzz",
    NULL,
    0,
    module_methods,
    NULL,
    NULL,
    NULL,
    NULL
};

PyMODINIT_FUNC
PyInit__xxtestfuzz(void)
{
    return PyModule_Create(&_fuzzmodule);
}
C
4
shawwn/cpython
Modules/_xxtestfuzz/_xxtestfuzz.c
[ "0BSD" ]
= Changelog

This file keeps track of changes in the different request-log-analyzer releases.

When creating a pull request, please add an entry to the "Not yet released" section in this document.

== Not yet released

* Nothing yet!

== Request-log-analyzer 1.13 release cycle

=== Version 1.13.4

* Catch SyntaxErrors when eval'ing lines.
* Scrub parameter with file upload line in Rails requests, so it will parse successfully. (Michael Stark)

=== Version 1.13.3

* Allow more flexible Apache user strings
* Rspec 3
* Reintroduce class attribute inheritance to keep writing your own custom analyser based on an existing one simple.
* Rubocop cleanup

=== Version 1.13.1

* Adapt Delayed Job 4 log format changes (Michael Stark)
* Catch all errors when using eval (temporary workaround to prevent crashing)
* Fix Rubinius in Travis config.
* Enable the sender name and email address to be specified on the command line (Joshua Pettett)

=== Version 1.13.0

* Dropped Ruby 1.8 support
* Dropped ActiveRecord 2 support
* Removed OrdereredHash
* Removed force_encoding check.

== Request-log-analyzer 1.12 release cycle

=== Version 1.12.11

* Millisecond measuring units when measurements are under 1 second
* Added specific class to html report
* Allow floats for durations when using the %T directive in Apache format
* Rubinius fixes

=== Version 1.12.10

* Apache vhost combined now supported
* Removed old blog from readme
* S3 format parser more permissive, as it rejected real log files
* Redid Travis build environment & requirements
* Added MIT license to gemspec

=== Version 1.12.9

* Switched to new NGINX apache default (combined)
* Enable database testing on Travis
* Use bundler gem tasks for gemification

=== Version 1.12.8

* Rails 3 routing errors are now reported on.

=== Version 1.12.7

* Delayed Job 3 format now supported

=== Version 1.12.6

* Added NGINX support
* Analyse & report Rails 3 partials
* Added directories parsing support

=== Version 1.12.5

* Filename is now displayed in the header of the output
* Added orderedhash class for Rubinius support
* Fixed a number of Travis issues

=== Version 1.12.4

* Fixed version number being off.
* Rake tasks are now Rails 2 & 3 compatible.
* Ftools is replaced with fileutils.

=== Version 1.12.3

* Cleaned up internal API for nicer Ruby integration.
* Fixed small typo.
* Replaced relative path reference to database_console_init with standard reference

=== Version 1.12.2

* Removed Encoding.default_external as it was unnecessary and caused problems with Ruby 1.9.3-head.

=== Version 1.12.1

* Database inserter now working for ActiveRecord 3. NOTE: the minimal ActiveRecord version required is 3!
* Travis CI build support (http://travis-ci.org/#!/wvanbergen/request-log-analyzer)
* Standard error and out are now cleaned up (#107)

=== Version 1.12.0

* Loading the database console now works again.

== Request-log-analyzer 1.11 release cycle

=== Version 1.11.0

*Important:* The @--gets-memory-protection@ command line switch is gone. Ruby 1.9 offers this functionality without a performance penalty. If you want to keep using this option in Ruby 1.8, please do not upgrade to this version or copy-paste the mixin. Also, removed some half-finished features that were not in a usable state and are not going to be further developed, like using a database as a request source and the "FancyHTML" output format.

* The HAProxy file format now supports more old versions.
* The Rails 3 format now reads "parameters" lines.
* Better parsing of hostnames and IP addresses, fixing some edge cases in all file formats that use these.
* Change @autoload@ magic to use regular @require@ to make the source code more understandable.
* File format specs are refactored to be more readable.

PostgreSQL format bug: we found that for some log files, about 50% of all queries will not get parsed. We are working on a fix for the file format; in the meantime you can still use it to get a feel of the performance.

== Request-log-analyzer 1.10 release cycle

=== Version 1.10.1

* Small bugfix when using custom file formats in Ruby 1.9.2.
* Small fix for Rails 3 processing line edge case.
* Set up CI testing framework. Request-log-analyzer is now tested on Ruby 1.8.7, 1.9.2 and Ruby Enterprise Edition.

=== Version 1.10.0

* Add support for the W3C log format. @--format w3c@
* Add support for HAProxy logs. @--format haproxy@
* Add support for providing the unit used for the Apache %D directive. Default: @%D@, or @%{micro}D@, in which case the values will be interpreted as microseconds. @%{milli}D@ will interpret them as milliseconds instead. (@%T@ interprets the value as seconds.)
* Fixed parsing of Rails 3 Completed lines when another duration is specified (ex: Sphinx)

== Request-log-analyzer 1.9 release cycle

=== Version 1.9.10

* PostgreSQL format fixes
* Fix in loading custom output formats from the command line.

=== Version 1.9.9

* Added parsing of object instantiation counts to Oink file format.

=== Version 1.9.8

* Fixed issue in Delayed::Job log parser when using multiple workers.
* Added some stuff to the YAML result export.

=== Version 1.9.7

* Fixed a bug in parsing Rails 3's Started line for Ruby 1.9.2 Time#to_s
* Added benchmarks of different rubies

=== Version 1.9.6

* Small improvements to the Rails 3 file format.

=== Version 1.9.5

* Fixed an issue with the --select command line option not being parsed correctly.

=== Version 1.9.4

* Small fixes in both the Rails 2 and Rails 3 file formats.

=== Version 1.9.3

* Fixed a small date handling issue in the Rails 3 file format.

=== Version 1.9.2

* Fixed an issue with the Rails 2 file format when ActiveRecord is disabled.

=== Version 1.9.1

* Fixed a format auto-detection issue that caused a Rails log file to show up as an Oink file.

=== Version 1.9.0

* Added 90-percentile interval to numerical reports.
* Some fixes for the Rails 3 file format

== Request-log-analyzer 1.8 release cycle

=== Version 1.8.1

* Ruby 1.9.2 and Rails 3 compatibility fixes
* Rails 3 log format improvements
* Added log splitting on PID to Rails rake task @rake rla:split@ (Issue 56).
* Several smaller bugfixes

=== Version 1.8.0

* Added Oink support for Rails.
* Added @--mailsubject@ option (Issue 51).
* Fixed require exception on empty rails folder

== Request-log-analyzer 1.7 release cycle

=== Version 1.7.0

* @--silent@ option implemented. No output is given when this option is used. Ideal for cron job usage (Issue 46).
* @--gets-memory-protection@ includes a mixin to prevent Ruby gets from using huge amounts of memory when extremely long lines are encountered in a log (Issue 47). Please note that this option will suffer a 10% performance hit.

== Request-log-analyzer 1.6 release cycle

=== Version 1.6.4

* Fixed a bug that disallowed line definition names that are also method names of the Object class.

=== Version 1.6.3

* Initial support for Rails 3 logs. Add @--format rails3@ to your command line if auto-detection fails. <br /> <em>Note:</em> The log format has changed since the release of the Rails 3 beta. Only the HEAD version of Rails 3 is supported by request-log-analyzer.
* Fixed a bug in the [[Amazon S3 access log]] parser.
* Improved the graphs when using the @FancyHTML@ output.
* Added a nicer API to define capture values in a file format.

=== Version 1.6.2

* Ruby 1.9 compatibility fixes related to string encodings.

=== Version 1.6.1

* Fixed a bug in the request database schema creation.
* Minor bugfixes.

=== Version 1.6.0

* [[PostgreSQL query log]] support. (Add @--format postgresql@ to your command if auto-detection fails.) <br /> We only had a limited sample of PostgreSQL logs to implement the file format. Please support us and send in a fragment of your PostgreSQL query log file!
* Fixed Rails action cache hit detection for newer Rails versions.

== Request-log-analyzer 1.5 release cycle

=== Version 1.5.4

* Added support for [[DelayedJob log]] files. (Add @--format delayed_job@ to your command if auto-detection fails.)
* Fixed Rails 3 ActiveRecord deprecation notice.

=== Version 1.5.3

* Fixed a small bug in the Rails development file format report.
* Fixed the @:multiple => true@ option for frequency trackers
* Added a beginning for a new output format: FancyHTML, which includes charts using the Google Charts API. Add @--output FancyHTML@ to your command line to activate it.
* Improved Sinatra/Rack format auto-detection.
* Extracted regular expressions for common log fragments, like IP addresses and timestamps, to a shared module.

=== Version 1.5.2

* Support parsing IPv6 addresses as well. Thanks to "dissemble":http://github.com/dissemble for contributing the patch.
* JRuby related fixes

=== Version 1.5.1

* Fixed a crasher bug in the HTML output of traffic reports.

=== Version 1.5.0

* Added file format auto-detection: in most cases, providing the @--format@ parameter is no longer needed to select the correct file format. However, it is still possible to explicitly set the format if auto-detection fails.
* Added a [[MySQL slow query log]] parser (@--format mysql@). <br /> Thanks "Bart Zonneveld":http://github.com/bartzon for contributing this functionality.
* Mail host now configurable. Use @--mailhost <server>@ to specify this
* Added specs to test mailing functionality using a mock SMTP server (mailtrap)

== Request-log-analyzer 1.4 release cycle

=== Version 1.4.2

* Added correct content-type to emails sent via the @--mail@ option.

=== Version 1.4.1

* Fixed @--mail@ command line option which was broken since version 1.4.0.
* Added Scout & Munin plugin integration tests

=== Version 1.4.0

* Initial version of the request-log-analyzer API.
* Added standard deviation to duration and traffic reports.
* Added @--report-amount <n>@ to define the number of entries in the table in the report. Use @all@ to specify that all entries should be included. The default value is 20
* Added @--report-sort <sortings>@ options to specify how the resulting duration and traffic tables should be sorted. You can specify multiple sort fields using a comma separated list, which results in multiple tables. Supported sortings are @sum@, @mean@, @stddev@ and @hits@. The default value is @sum,mean@.
* Restructured the [[Rails request log]] parser to be more flexible about what lines to parse, using the @--rails-format <lines>@ command line switch.
** @--rails-format minimal@ will only parse _processing_ and _completed_ lines.
** @--rails-format production@ will parse exceptions and cache hits besides the minimal lines.
** @--rails-format development@ will also parse SQL queries and template renderings
** @--rails-format all@ will parse every known line type.
** @--rails-format minimal,failure@ will parse the minimal lines and exceptions. Use a comma-separated list like this to define your own line set.
* Significantly improved the test coverage of the Rails log format.

== Request-log-analyzer 1.3 release cycle

=== Version 1.3.7

* Fixed a crasher bug when parsing compressed files.
* Speed optimizations by enabling teaser checks for lines.

=== Version 1.3.6

* Speed optimizations to improve parsing performance of frequently called methods, i.e. @Tracker#should_update@, @Request#convert_value@, @Request#convert_timestamp@, @Request#convert_traffic@ and @Request#convert_duration@.
* Tweaked all file formats for increased performance
* Cleaned up and simplified some old code.

=== Version 1.3.5

* Added statistics about data traffic for the file formats that contain this information (Apache, Amazon S3, Rack)
* Added support for Rack CommonLogger logs (@--format rack@).
* Added "agent" and "referer" Apache log format constants.
* Improved request duration parsing (%D directive) from [[Apache access log]] files.
* Fixed a bug in the database inserter that caused tables to be created with wrong fields.

=== Version 1.3.4

* Added [[Amazon S3 access log]] parser. Add @--format amazon_s3@ to your command line to use it. S3 access logs are disabled by default. See "the S3 documentation on server logging":http://docs.amazonwebservices.com/AmazonS3/latest/index.html?ServerLogs.html for more information on how to enable it.

=== Version 1.3.3

_Retracted release_

=== Version 1.3.2

* Tweaking to increase performance significantly.
* Added a @--no-progress@ command line switch to disable the progress bar during parsing for an additional speed improvement.

=== Version 1.3.1

* E-mail option to automatically send you reports: add <code>--mail [email protected]</code> to your command line.
* Allow other database connections than SQLite3 by passing a connection string to the @--database@ command line option. Examples:
** SQLite3 file database: @--database filename.db@ (as before)
** Connection URI: <code>--database postgresql://rla:rla@localhost/requests</code>
** ODBC-style: @--database adapter=mysql;username=root;host=localhost;database=requests@
** Memory database: @--database adapter=sqlite3;database=:memory:@
* Keep the previous database by default; pass the @--reset-database@ option to clear the database before parsing.
* Added an IRB console to inspect a request database just like Rails's @script/console@. Run with <br />@request-log-analyzer console -d <connection string>@
* Fixed some require paths that were causing problems.

=== Version 1.3.0

_Retracted release_

== Request-log-analyzer 1.2 release cycle

=== Version 1.2.9

* Improved [[Apache access log]] format. Can now handle any kind of "Apache access log file format":http://httpd.apache.org/docs/1.3/mod/mod_log_config.html by providing the @--apache-format <format>@ command line argument.
* Allow @FileFormats@ to be constructed dynamically (used for the Apache format implementation).
* Refactored parameter passing during parsing to allow for more flexibility.

=== Version 1.2.8

_Retracted release_

=== Version 1.2.7

* Added [[Apache access log]] parser. To use, add @--format apache@ to your command line invocation
* Make the release script send the gem to RubyForge as well.

=== Version 1.2.6

* New, more robust gem release script using the ruby-git gem.
* Updated outdated information in the gemspec file

=== Version 1.2.5

* Improved Merb log format parser

=== Version 1.2.4

* More robust handling of database connections in database inserter.

=== Version 1.2.3

* Refactored database functions
* Cleaned up specs and tried some heckle
* Interrupts are handled more gracefully
* Moved munin tracker to a "separate GitHub project":http://github.com/barttenbrinke/munin-plugins-rails/

=== Version 1.2.2

_Retracted release_

=== Version 1.2.1

* Compressed logfile support
* Parsable YAML results as output (use the @--dump <filename>@ command line option)
* "Full RDoc documentation":http://wvanbergen.github.com/request-log-analyzer
* Introduced [[Changelog]]
* Updated Rails rake tasks

=== Version 1.2.0

* Ruby 1.9 support
* Rcov coverage (use @rake spec:rcov@ to generate the coverage report)
RDoc
2
will3216/request-log-analyzer
CHANGELOG.rdoc
[ "MIT" ]
print "input 'q' to quit:";
while(true){
    printf("> ");
    line = stdin.readline();
    line = line.strip().lower();
    if(line == 'q'){
        print "bye.";
        break;
    }else{
        print 'your input:', repr(line);
    }
}
COBOL
3
cau991/ssdb
deps/cpy/samples/stdin.cpy
[ "BSD-3-Clause" ]
# CNTK Dockerfile
#   CPU only
#   No 1-bit SGD
#
# To build, run from the parent with the command line:
#   docker build -t <image name> -f CNTK-CPUOnly-Image/Dockerfile .

FROM ubuntu:16.04

RUN apt-get update && apt-get install -y --no-install-recommends \
    autotools-dev \
    build-essential \
    git \
    g++-multilib \
    gcc-multilib \
    gfortran-multilib \
    libavcodec-dev \
    libavformat-dev \
    libjasper-dev \
    libjpeg-dev \
    libpng-dev \
    liblapacke-dev \
    libswscale-dev \
    libtiff-dev \
    pkg-config \
    wget \
    zlib1g-dev \
    # OPENSSL support
    libssl-dev \
    openssl \
    # Protobuf
    ca-certificates \
    curl \
    libcurl4-openssl-dev \
    unzip \
    # For Kaldi
    python-dev \
    automake \
    libtool-bin \
    autoconf \
    subversion \
    # For Kaldi's dependencies
    libapr1 libaprutil1 libltdl-dev libltdl7 libserf-1-1 libsigsegv2 libsvn1 m4 \
    # For Java Bindings
    openjdk-8-jdk \
    # For SWIG
    libpcre3-dev \
    # For graphics managed lib
    libgdiplus \
    # .NET Core SDK
    apt-transport-https && \
    # Cleanup
    rm -rf /var/lib/apt/lists/*

ARG CMAKE_DOWNLOAD_VERSION=3.11
ARG CMAKE_BUILD_VERSION=4

RUN DEBIAN_FRONTEND=noninteractive && \
    wget --no-verbose https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/cmake/$CMAKE_DOWNLOAD_VERSION/cmake-$CMAKE_DOWNLOAD_VERSION.$CMAKE_BUILD_VERSION.tar.gz && \
    tar -xzvf cmake-$CMAKE_DOWNLOAD_VERSION.$CMAKE_BUILD_VERSION.tar.gz && \
    cd cmake-$CMAKE_DOWNLOAD_VERSION.$CMAKE_BUILD_VERSION && \
    ./bootstrap --system-curl -- -DCMAKE_USE_OPENSSL=ON && \
    make -j $(nproc) install && \
    cd .. && \
    rm -rf cmake-$CMAKE_DOWNLOAD_VERSION.$CMAKE_BUILD_VERSION

ARG OPENMPI_VERSION=1.10.7
RUN wget -q -O - https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/openmpi/$OPENMPI_VERSION/openmpi-$OPENMPI_VERSION.tar.gz | tar -xzf - && \
    cd openmpi-${OPENMPI_VERSION} && \
    apt-get -y update && \
    apt-get -y -f install && \
    apt-get -y install libsysfs2 libsysfs-dev && \
    ./configure --with-verbs --with-cuda=/usr/local/cuda --prefix=/usr/local/mpi && \
    make -j $(nproc) install && \
    cd .. && \
    rm -rf openmpi-${OPENMPI_VERSION}
ENV PATH /usr/local/mpi/bin:$PATH
ENV LD_LIBRARY_PATH /usr/local/mpi/lib:$LD_LIBRARY_PATH

ARG LIBZIP_VERSION=1.1.2
RUN wget -q -O - https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/libzip/$LIBZIP_VERSION/libzip-$LIBZIP_VERSION.tar.gz | tar -xzf - && \
    cd libzip-${LIBZIP_VERSION} && \
    ./configure && \
    make -j $(nproc) install && \
    cd .. && \
    rm -rf libzip-${LIBZIP_VERSION}
ENV LD_LIBRARY_PATH /usr/local/lib:$LD_LIBRARY_PATH

ARG OPENCV_VERSION=3.1.0
RUN wget -q -O - https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/opencv/$OPENCV_VERSION/opencv-$OPENCV_VERSION.tar.gz | tar -xzf - && \
    cd opencv-${OPENCV_VERSION} && \
    cmake -DWITH_CUDA=OFF -DCMAKE_BUILD_TYPE=RELEASE -DCMAKE_INSTALL_PREFIX=/usr/local/opencv-${OPENCV_VERSION} . && \
    make -j $(nproc) install && \
    cd .. && \
    rm -rf opencv-${OPENCV_VERSION}

ARG OPENBLAS_VERSION=0.2.18
RUN wget -q -O - https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/openblas/$OPENBLAS_VERSION/OpenBLAS-$OPENBLAS_VERSION.tar.gz | tar -xzf - && \
    cd OpenBLAS-${OPENBLAS_VERSION} && \
    make -j 2 MAKE_NB_JOBS=0 USE_OPENMP=1 | tee make.log && \
    grep -qF 'OpenBLAS build complete. (BLAS CBLAS LAPACK LAPACKE)' make.log && \
    grep -qF 'Use OpenMP in the multithreading.' make.log && \
    make PREFIX=/usr/local/openblas install && \
    cd .. && \
    rm -rf OpenBLAS-${OPENBLAS_VERSION}
ENV LD_LIBRARY_PATH /usr/local/openblas/lib:$LD_LIBRARY_PATH

# Install Boost
ARG BOOST_VERSION=1.60.0
RUN BOOST_UNDERSCORE_VERSION=$(echo $BOOST_VERSION | tr . _) && \
    wget -q -O - https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/boost/$BOOST_VERSION/boost_$BOOST_UNDERSCORE_VERSION.tar.gz | tar -xzf - && \
    cd boost_${BOOST_UNDERSCORE_VERSION} && \
    ./bootstrap.sh --prefix=/usr/local/boost-${BOOST_VERSION} && \
    ./b2 -d0 -j $(nproc) install && \
    cd .. && \
    rm -rf boost_${BOOST_UNDERSCORE_VERSION}

# Install Protobuf
ARG PROTOBUF_VERSION=3.1.0
RUN PROTOBUF_STRING=protobuf-$PROTOBUF_VERSION && \
    wget -O - --no-verbose https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/protobuf/$PROTOBUF_VERSION/protobuf-$PROTOBUF_VERSION.tar.gz | tar -xzf - && \
    cd $PROTOBUF_STRING && \
    ./autogen.sh && \
    ./configure CFLAGS=-fPIC CXXFLAGS=-fPIC --disable-shared --prefix=/usr/local/$PROTOBUF_STRING && \
    make -j $(nproc) install && \
    cd .. && \
    rm -rf $PROTOBUF_STRING

# Install MKLDNN and MKLML
ARG MKLDNN_VERSION=0.14
ARG MKLDNN_LONG_VERSION=mklml_lnx_2018.0.3.20180406
RUN mkdir /usr/local/mklml && \
    wget --no-verbose -O - https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/mkl-dnn/$MKLDNN_VERSION/$MKLDNN_LONG_VERSION.tgz | \
    tar -xzf - -C /usr/local/mklml && \
    MKLDNN_STRING=mkl-dnn-${MKLDNN_VERSION} && \
    wget --no-verbose -O - https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/mkl-dnn/$MKLDNN_VERSION/mkl-dnn-$MKLDNN_VERSION.tar.gz | tar -xzf - && \
    cd ${MKLDNN_STRING} && \
    ln -s /usr/local external && \
    mkdir -p build && \
    cd build && \
    cmake .. -DCMAKE_INSTALL_PREFIX=/ && \
    make && \
    make install DESTDIR=/usr/local && \
    make install DESTDIR=/usr/local/mklml/${MKLDNN_LONG_VERSION} && \
    cd ../.. && \
    rm -rf ${MKLDNN_STRING}

# Install Kaldi
ARG KALDI_VERSION=c024e8aa
ARG KALDI_PATH=/usr/local/kaldi-$KALDI_VERSION
RUN mv /bin/sh /bin/sh.orig && \
    ln -s -f /bin/bash /bin/sh && \
    mkdir $KALDI_PATH && \
    wget --no-verbose -O - https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/kaldi/$KALDI_VERSION/kaldi-$KALDI_VERSION.tar.gz | tar -xzf - --strip-components=1 -C $KALDI_PATH && \
    cd $KALDI_PATH && \
    cd tools && \
    perl -pi -e 's/^# (OPENFST_VERSION = 1.4.1)$/\1/' Makefile && \
    ./extras/check_dependencies.sh && \
    make -j $(nproc) all && \
    cd ../src && \
    # remove Fermi support as CUDA 9 no longer works on it
    perl -pi -e 's/-gencode arch=compute_20,code=sm_20//' cudamatrix/Makefile && \
    ./configure --openblas-root=/usr/local/openblas --shared && \
    make -j $(nproc) depend && \
    make -j $(nproc) all && \
    # Remove some unneeded stuff in $KALDI_PATH to reduce size
    find $KALDI_PATH -name '*.o' -print0 | xargs -0 rm && \
    for dir in $KALDI_PATH/src/*bin; do make -C $dir clean; done && \
    mv -f /bin/sh.orig /bin/sh

## PYTHON

# Commit that will be used for Python environment creation (and later, compilation)
ARG COMMIT=master

# Swig
ARG SWIG_VERSION=3.0.10
ARG CACHEBUST=1
RUN wget -q https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/swig/$SWIG_VERSION/swig-$SWIG_VERSION.tar.gz -O - | tar xvfz - && \
    cd swig-$SWIG_VERSION && \
    # Note: we specify --without-alllang to suppress building tests and examples for specific languages.
    ./configure --prefix=/usr/local/swig-$SWIG_VERSION --without-perl5 --without-alllang && \
    make -j $(nproc) && \
    make install && \
    cd .. && \
    rm -rf swig-$SWIG_VERSION

COPY ./Patches /tmp/patches
RUN /tmp/patches/patch_swig.sh /usr/local/share/swig/3.0.10 && \
    rm -rfd /tmp/patches

# .NET Core SDK
RUN wget -q https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/packages-microsoft-prod/deb/packages-microsoft-prod.deb && \
    dpkg -i packages-microsoft-prod.deb && \
    apt-get -y -f install apt-transport-https && \
    apt-get -y update && \
    apt-get -y -f install dotnet-sdk-2.1 && \
    rm ./packages-microsoft-prod.deb

# Anaconda
ARG ANACONDA_VERSION=4.2.0
RUN wget -q https://cntkbuildstorage.blob.core.windows.net/cntk-ci-dependencies/anaconda3/$ANACONDA_VERSION/Anaconda3-$ANACONDA_VERSION-Linux-x86_64.sh && \
    bash Anaconda3-$ANACONDA_VERSION-Linux-x86_64.sh -b && \
    rm Anaconda3-$ANACONDA_VERSION-Linux-x86_64.sh

RUN CONDA_ENV_PATH=/tmp/conda-linux-cntk-py35-environment.yml; \
    wget -q https://raw.githubusercontent.com/Microsoft/CNTK/$COMMIT/Scripts/install/linux/conda-linux-cntk-py35-environment.yml -O "$CONDA_ENV_PATH" && \
    /root/anaconda3/bin/conda env create -p /root/anaconda3/envs/cntk-py35 --file "$CONDA_ENV_PATH" && \
    rm -f "$CONDA_ENV_PATH"

ENV PATH /root/anaconda3/envs/cntk-py35/bin:$PATH

WORKDIR /cntk

# Build CNTK
RUN git clone --depth=1 --recursive -b $COMMIT https://github.com/Microsoft/CNTK.git cntksrc && \
    cd cntksrc && \
    MKLML_VERSION_DETAIL=${MKLDNN_LONG_VERSION} && \
    CONFIGURE_OPTS="\
      --with-kaldi=${KALDI_PATH} \
      --with-py35-path=/root/anaconda3/envs/cntk-py35" && \
    mkdir -p build/cpu/release && \
    cd build/cpu/release && \
    ../../../configure $CONFIGURE_OPTS --with-openblas=/usr/local/openblas && \
    make -j"$(nproc)" all && \
    cd ../../.. && \
    mkdir -p build-mkl/cpu/release && \
    cd build-mkl/cpu/release && \
    ../../../configure $CONFIGURE_OPTS --with-mkl=/usr/local/mklml/${MKLML_VERSION_DETAIL} && \
    make -j"$(nproc)" all

RUN cd cntksrc/Examples/Image/DataSets/CIFAR-10 && \
    python install_cifar10.py && \
    cd ../../../..

RUN cd cntksrc/Examples/Image/DataSets/MNIST && \
    python install_mnist.py && \
    cd ../../../..

ENV PATH=/cntk/cntksrc/build/gpu/release/bin:$PATH PYTHONPATH=/cntk/cntksrc/bindings/python LD_LIBRARY_PATH=/cntk/cntksrc/bindings/python/cntk/libs:$LD_LIBRARY_PATH

# Install CNTK as the default backend for Keras
ENV KERAS_BACKEND=cntk
Dockerfile
4
shyamalschandra/CNTK
Tools/docker/CNTK-CPUOnly-Image/Dockerfile
[ "MIT" ]
light 0,0,1000
light black, white
light wave(100)*100,0,1000
scale 2
rotate
stroke 1
color red
tile 10,10,1
for i: 0 to 10 step 1
    push
        scale noise(i, wave(1000))*2
        rotate noise(10+i, wave(2000))*100, noise(100+i, wave(1233))*100, noise(100+i, wave(3331))*100
        sphereDetail 3
        sphere
    pop
end
end
Cycript
3
marcinbiegun/creativecoding-sketches
Cyril/data/code_experiments/1.cy
[ "MIT" ]
*** Settings ***
Documentation     PROBLEM:
...               You want to test the existence of a file in an AWS S3 bucket
...               without using lower level Python code or developing a custom library.
...               DISCUSSION:
...               This recipe demonstrates:
...               - using a Suite Teardown to end the test suite cleanly
...               - using keywords from an external library
...               - accessing OS Environment Variables directly using %{} syntax
...               This recipe has the following external dependencies:
...               $ pip install --upgrade robotframework-aws
...               This recipe also requires the following OS environment variables:
...               AWS_ACCESS_KEY_ID
...               AWS_SECRET_ACCESS_KEY
Suite Teardown    Delete All Sessions
Library           AWSLibrary
Force Tags        no-ci-testing

*** Variables ***
${recipe}         Recipe 14.1 AWS Simple Storage Service
${level}          Intermediate
${category}       External Library: AWSLibrary
${REGION}         us-east-1
${BUCKET}         YOUR_BUCKET_NAME_GOES_HERE
${KEY}            YOUR_FILE_PATH_GOES_HERE

*** Test Cases ***
Check Key Exists In Bucket
    Log Variables
    Create Session With Keys    ${REGION}    %{AWS_ACCESS_KEY_ID}    %{AWS_SECRET_ACCESS_KEY}
    Key Should Exist    ${BUCKET}    ${KEY}
RobotFramework
5
ka7/bat
tests/syntax-tests/highlighted/Robot Framework/recipe141_aws_simple_storage_service.robot
[ "Apache-2.0", "MIT" ]
<template>
  <recycle-list for="item in longList" switch="type">
    <cell-slot case="A">
      <text v-on:click="handler" @longpress="move">A</text>
      <text @touchend="move">B</text>
    </cell-slot>
  </recycle-list>
</template>

<script>
module.exports = {
  data () {
    return {
      longList: [
        { type: 'A' },
        { type: 'A' }
      ]
    }
  },
  methods: {
    handler () {},
    move () {}
  }
}
</script>
Vue
3
Rewats/vue
test/weex/cases/recycle-list/v-on.vue
[ "MIT" ]
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This is a Python API fuzzer for tf.raw_ops.SparseCountSparseOutput."""
import atheris
with atheris.instrument_imports():
  import sys
  from python_fuzzing import FuzzingHelper
  import tensorflow as tf


@atheris.instrument_func
def TestOneInput(input_bytes):
  """Test randomized integer fuzzing input for tf.raw_ops.SparseCountSparseOutput."""
  fh = FuzzingHelper(input_bytes)

  shape1 = fh.get_int_list(min_length=0, max_length=8, min_int=0, max_int=8)
  shape2 = fh.get_int_list(min_length=0, max_length=8, min_int=0, max_int=8)
  shape3 = fh.get_int_list(min_length=0, max_length=8, min_int=0, max_int=8)
  shape4 = fh.get_int_list(min_length=0, max_length=8, min_int=0, max_int=8)
  seed = fh.get_int()

  indices = tf.random.uniform(
      shape=shape1, minval=0, maxval=1000, dtype=tf.int64, seed=seed)
  values = tf.random.uniform(
      shape=shape2, minval=0, maxval=1000, dtype=tf.int64, seed=seed)
  dense_shape = tf.random.uniform(
      shape=shape3, minval=0, maxval=1000, dtype=tf.int64, seed=seed)
  weights = tf.random.uniform(
      shape=shape4, minval=0, maxval=1000, dtype=tf.int64, seed=seed)

  binary_output = fh.get_bool()
  minlength = fh.get_int()
  maxlength = fh.get_int()
  name = fh.get_string()

  try:
    _, _, _, = tf.raw_ops.SparseCountSparseOutput(
        indices=indices,
        values=values,
        dense_shape=dense_shape,
        weights=weights,
        binary_output=binary_output,
        minlength=minlength,
        maxlength=maxlength,
        name=name)
  except tf.errors.InvalidArgumentError:
    pass


def main():
  atheris.Setup(sys.argv, TestOneInput, enable_python_coverage=True)
  atheris.Fuzz()


if __name__ == "__main__":
  main()
Python
5
EricRemmerswaal/tensorflow
tensorflow/security/fuzzing/sparseCountSparseOutput_fuzz.py
[ "Apache-2.0" ]
instruct! data do
  name 'John Smith'
end
Ox
2
JackDanger/ox-builder
spec/support/test.ox
[ "MIT" ]
import QtQuick 2.3
import QtQuick.Controls 1.2
import QtQuick.Layouts 1.2

RowLayout {
    property string labelText: "Label"
    property string valueText: "value"

    width: parent.width

    QGCLabel {
        id: label
        text: labelText
    }
    QGCLabel {
        text: valueText
        elide: Text.ElideRight
        horizontalAlignment: Text.AlignRight
        Layout.fillWidth: true
    }
}
QML
3
uavosky/uavosky-qgroundcontrol
src/QmlControls/VehicleSummaryRow.qml
[ "Apache-2.0" ]
(:~
 : This script adds all JSON files to the
 : specified database.
 :)
declare variable $database := 'db';

(: add new files :)
for $name in file:list('.', false(), '*.json')
let $file := file:read-text($name)
let $json := json:parse($file)
return db:add($database, document { $json }, $name)
XQuery
3
JensErat/basex
basex-examples/src/main/resources/json/add-to-database.xq
[ "BSD-3-Clause" ]
CREATE DATABASE defcol;
SQL
2
imtbkcat/tidb-lightning
tests/default-columns/data/defcol-schema-create.sql
[ "Apache-2.0" ]
--- title: "How to enable AAD authentication for a storage account" author: Hong Ooi output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{AAD authentication setup} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{utf8} --- It's possible to authenticate to a storage account using an OAuth token obtained via Azure Active Directory (AAD). This has several advantages: - You don't need to pass around the storage account's access key, which is like a master password: it controls all access to the account. If it gets compromised, the account is no longer secure. - You can use role-based access control to limit which users are allowed to use the account, and what actions they can perform. - Unlike a shared access signature (SAS), AAD authentication doesn't have a hard expiry date. As long as an AAD identity (user, service principal, etc) has the correct permissions, it can always connect to the storage account. Similarly, you can easily revoke access by removing the necessary permissions from the identity. Here, we'll take you through the steps involved to configure a storage account for AAD authentication. The assumption here is that you're an administrator for your AAD tenant, or have the appropriate rights to create AAD app registrations and set role assignments on resources---if you don't know what these terms mean, you probably don't have such rights! ## Authenticate as a user Authenticating as a user is relatively straightforward: you can think of it as "logging into" the storage account with your username. This involves the following: - Create an app registration; this essentially tells Azure that the AzureStor package is allowed to access storage in your tenant - Give the app the "user_impersonation" delegated permission for storage - Assign your users the appropriate roles in the storage account ### Create an app registration You can create a new app registration using any of the usual methods. For example to create an app registration in the Azure Portal (`https://portal.azure.com/`), click on "Azure Active Directory" in the menu bar down the left, go to "App registrations" and click on "New registration". Name the app something suitable, eg "AzureStor R interface to storage". - If you want your users to be able to login with the authorization code flow, you must add a **public client/native redirect URI** of `http://localhost:1410`. This is appropriate if your users will be running R on their local PCs, with an Internet browser available. - If you want your users to be able to login with the device code flow, you must **enable the "Allow public client flows" setting** for your app. In the Portal, you can find this setting in the "Authentication" pane once the app registration is complete. This is appropriate if your users are running R in a remote session, for example in RStudio Server, Databricks, or a VM terminal window over ssh. Once the app registration has been created, note the app ID. ### Set the app permissions To enable users to authenticate to storage with this app, add the "user_impersonation" delegated permission for the Azure Storage API. In the Portal, you can set this by going to the "API permissions" pane for your app reigstration, then clicking on "Add a permission". ### Give users a role assignment in the storage account Having registered an app ID for AzureStor, you then add the appropriate role assignments for your users. These role assignments are set for the resource, _not_ the app registration. 
In the Portal, you can set these by going to your storage account resource, then clicking on "Access Control (IAM)". The main role assignments to be aware of are: - **Storage blob data reader**: read (but not write) blob containers and blobs. Because blob storage and ADLS2 storage are interoperable, this role also lets users read ADLS2 filesystems and files. - **Storage blob data contributor**: read and write blob/ADLS2 containers and files. - **Storage blob data owner**: read and write blob/ADLS2 containers and files; in addition, allow setting POSIX ACLs for ADLS2. - **Storage queue data reader**: read (but now write or delete) queues and queue messages. - **Storage queue data contributor**: read, write and delete queues and queue messages. - **Storage queue data message sender**: send (write) queue messages. - **Storage queue data message processor**: read and delete queue messages. Note that AzureStor does not provide an R interface to queue storage; for that, you can use the AzureQstor package. ### Authenticating Once this is done, your users can authenticate to storage as follows. Here. `app_id` is the ID of the app registration you've just created. ```r # obtaining a token from an R session on the local machine token <- AzureAuth::get_azure_token("https://storage.azure.com", tenant="yourtenant", app="app_id") # obtaining a token from a remote R session: RStudio Server/Databricks token <- AzureAuth::get_azure_token("https://storage.azure.com", tenant="yourtenant", app="app_id", auth_type="device_code") # use the token to login to storage (blob in this case) endp <- storage_endpoint("https://yourstorageacct.blob.core.windows.net", token=token) ``` ## Authenticate as the application In the previous section, we described how users can authenticate as themselves with AzureStor. Here, we'll describe how to authenticate as the _application_, that is, without a signed-in user. This is useful in a scenario such as a CI/CD or deployment pipeline that needs to run without user intervention. The process is as follows: - Create an app registration as before - Give the app a client secret - Assign the app's service principal the appropriate role in the storage account ### Create the app registration and give it a client secret Creating the app registration is much the same as before, except that you don't need to set a redirect URI or enable public client flows. Instead you give the app a **client secret**, which is much the same as a password (and should similarly be kept secure). In the Portal, you can set this in the "Certificates and Secrets" pane for your app registration. It's also possible to authenticate with a **client certificate (public key)**, but this is more complex and we won't go into it here. For more details, see the [Azure Active Directory documentation](https://docs.microsoft.com/en-au/azure/active-directory/develop/v2-oauth2-client-creds-grant-flow) and the [AzureAuth intro vignette](https://cran.r-project.org/package=AzureAuth/vignettes/token.html). ### Give the app's service principal a role assignment in the storage account This is again similar to assigning a user a role, except now you assign it to the service principal for your app. The same roles assignments as before can be used. 
### Authenticating To authenticate as the app, use the following code: ```r # use the app ID and client secret you noted before token <- AzureAuth::get_azure_token("https://storage.azure.com", tenant="yourtenant", app="app_id", password="client_secret") endp <- storage_endpoint("https://yourstorageacct.blob.core.windows.net", token=token) ```
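As a quick sanity check once you have a token, you can try a simple data-plane operation. The following is a minimal sketch, not part of the original vignette: it assumes the identity has at least the "Storage blob data reader" role on the account, and uses AzureStor's generic `list_storage_containers`/`list_storage_files` methods (check the package reference for the current names and signatures).

```r
library(AzureStor)

# `endp` is the endpoint object created with an AAD token, as above
conts <- list_storage_containers(endp)

# list the files in the first container, if the account has any
if (length(conts) > 0)
    print(list_storage_files(conts[[1]]))
```

If this fails with an authorization error, check the role assignment: the data-plane roles listed earlier (e.g. "Storage blob data reader") are distinct from management-plane roles like "Owner" or "Contributor", and only the former grant access to the data itself.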
RMarkdown
5
KoderKow/AzureStor
vignettes/aad.rmd
[ "MIT" ]
' ********** Copyright 2016 Roku Corp. All Rights Reserved. ********** sub init() m.top.SetFocus(true) mslb1 = m.top.findNode("mslb1") mslb1.drawingStyles = { "GothamPurpleBold": { "fontUri": "pkg:/fonts/Gotham-Bold.otf" "fontSize":36 "color": "#662d91" } "GothamBoldWhite": { "fontUri": "pkg:/fonts/Gotham-Bold.otf" "fontSize":36 "color": "#FFFAFA" } "GothamPurple": { "fontUri": "pkg:/fonts/Gotham-Medium.otf" "fontSize":36 "color": "#662d91" } "GothamPurpleLarge": { "fontUri": "pkg:/fonts/Gotham-Medium.otf" "fontSize":54 "color": "#662d91" } "GothamWhite": { "fontUri": "pkg:/fonts/Gotham-Medium.otf" "fontSize":36 "color": "#FFFAFA" } "HandprintedWhite": { "fontUri": "pkg:/fonts/vSHandprinted.otf" "fontSize":36 "color": "#FFFAFA" } "HandprintedGreen": { "fontUri": "pkg:/fonts/vSHandprinted.otf" "fontSize": 36 "color": "#00FF00FF" } "Noto": { "fontUri": "pkg:/fonts/OpenSansEmoji.ttf" "fontSize": 36 "color": "#662d91FF" } "default": { "fontSize": 12 "fontUri": "font:LargeSystemFont" "color": "#DDDDDDFF" } } mslb2 = m.top.findNode("mslb2") mslb2.drawingStyles = { "GothamPurpleBold": { "fontUri": "pkg:/fonts/Gotham-Bold.otf" "fontSize":36 "color": "#662d91" } "GothamBoldWhite": { "fontUri": "pkg:/fonts/Gotham-Bold.otf" "fontSize":36 "color": "#FFFAFA" } "GothamPurple": { "fontUri": "pkg:/fonts/Gotham-Medium.otf" "fontSize":36 "color": "#662d91" } "GothamPurpleLarge": { "fontUri": "pkg:/fonts/Gotham-Medium.otf" "fontSize":72 "color": "#662d91" } "GothamWhite": { "fontUri": "pkg:/fonts/Gotham-Medium.otf" "fontSize":36 "color": "#FFFAFA" } "HandprintedWhite": { "fontUri": "pkg:/fonts/vSHandprinted.otf" "fontSize":36 "color": "#FFFAFA" } "HandprintedGreen": { "fontUri": "pkg:/fonts/vSHandprinted.otf" "fontSize": 36 "color": "#00FF00FF" } "Noto": { "fontUri": "pkg:/fonts/OpenSansEmoji.ttf" "fontSize": 36 "color": "#662d91FF" } "default": { "fontSize": 36 "fontUri": "font:LargeSystemFont" "color": "#DDDDDDFF" } } mslb1.text = "<GothamWhite>Developers can use the new </GothamWhite><GothamBoldWhite>MultiStyleLabel </GothamBoldWhite><GothamWhite>node class to create labels with multiple </GothamWhite><HandprintedWhite>fonts,</HandprintedWhite><GothamPurple>colors, and </GothamPurple><GothamPurpleLarge>sizes.</GothamPurpleLarge>" mslb2.text = "<GothamWhite>This enables developers to, for example, bold and/or color </GothamWhite><GothamPurpleBold>important text </GothamPurpleBold><GothamWhite>within a label and display emojis </GothamWhite><GothamPurpleBold>(</GothamPurpleBold>" + "<Noto>" + chr(128250) +"</Noto>" + "<GothamPurpleBold>)</GothamPurpleBold><GothamWhite>.</GothamWhite>" End sub function doTest() as void mslb1 = m.top.findNode("mslb1") mslb2 = m.top.findNode("mslb2") if mslb1.isTextEllipsized print "mslb1.isTextEllipsized = TRUE" else print "mslb1.isTextEllipsized = FALSE" endif if mslb2.isTextEllipsized print "mslb2.isTextEllipsized = TRUE" else print "mslb2.isTextEllipsized = FALSE" endif end function function onKeyEvent(key as String, press as Boolean) as Boolean result = false if press = true if key = "OK" doTest() endif endif return result end function
Brightscript
3
khangh/samples
ux components/screen elements/renderable nodes/MultiStyleLabelExample/components/MainScene.brs
[ "MIT" ]
import Array "mo:base/Array"; module { public type Cell = Bool; public type State = { #v1 : [[var Cell]] }; public func new(size : Nat, f : (i : Nat, j : Nat) -> Cell) : State { #v1 ( Array.tabulate(size, func (i : Nat) : [var Cell] { let a : [var Cell] = Array.init(size, false); for (j in a.keys()) { a[j] := f(i,j); }; a })) } }
Modelica
4
DaveSimplifire/examples
motoko/life/versions/v1/life/State.mo
[ "Apache-2.0" ]
// Ternary and higher-order pointwise operations #include <ATen/native/PointwiseOps.h> #include <ATen/ATen.h> #include <ATen/NativeFunctions.h> #include <ATen/MemoryOverlap.h> #include <ATen/native/TensorIterator.h> #include <ATen/NamedTensorUtils.h> namespace at { namespace meta { TORCH_META_FUNC(addcmul) (const Tensor& self, const Tensor& tensor1, const Tensor& tensor2, const Scalar& value) { build_ternary_op(maybe_get_output(), self, tensor1, tensor2); } TORCH_META_FUNC(addcdiv) (const Tensor& self, const Tensor& tensor1, const Tensor& tensor2, const Scalar& value) { if (isIntegralType(tensor1.scalar_type(), /*includeBool=*/true) && isIntegralType(tensor2.scalar_type(), /*includeBool=*/true)) { TORCH_CHECK( false, "Integer division with addcdiv is no longer supported, and in a future ", "release addcdiv will perform a true division of tensor1 and tensor2. ", "The historic addcdiv behavior can be implemented as ", "(input + value * torch.trunc(tensor1 / tensor2)).to(input.dtype) ", "for integer inputs and as ", "(input + value * tensor1 / tensor2) for float inputs. ", "The future addcdiv behavior is just the latter implementation: ", "(input + value * tensor1 / tensor2), for all dtypes."); } build_ternary_op(maybe_get_output(), self, tensor1, tensor2); } } // namespace meta namespace native { TORCH_IMPL_FUNC(addcmul_out) (const Tensor& self, const Tensor& tensor1, const Tensor& tensor2, const Scalar& value, const Tensor& result) { addcmul_stub(device_type(), *this, value); } TORCH_IMPL_FUNC(addcdiv_out) (const Tensor& self, const Tensor& tensor1, const Tensor& tensor2, const Scalar& value, const Tensor& result) { addcdiv_stub(device_type(), *this, value); } DEFINE_DISPATCH(addcmul_stub); DEFINE_DISPATCH(addcdiv_stub); } // namespace native } // namespace at
C++
5
Hacky-DH/pytorch
aten/src/ATen/native/PointwiseOps.cpp
[ "Intel" ]
<?xml version='1.0' encoding='utf-8'?> <?python import time title = "A Kid Template" ?> <html xmlns="http://www.w3.org/1999/xhtml" xmlns:py="http://purl.org/kid/ns#"> <head> <title py:content="title"> This is replaced with the value of the title variable. </title> </head> <body style="color:black;background-color:white"> <div style="font-family:sans-serif;text-align:center"> <h2> Time Example 2 </h2> <p> <i> This page is a stand-alone page. </i> </p> <p> The current time is ${time.ctime()}. </p> </div> </body> </html>
Genshi
4
PeaceWorksTechnologySolutions/w4py
KidKit/Examples/Time2.kid
[ "MIT" ]
TODO MUNDO FALA DE MIM "Hello World" BRO;
Bro
0
conorpreid/hello-world
r/Rafflalang.bro
[ "MIT" ]
SUMMARY OF PM7 CALCULATION, Site No: 6541 MOPAC2016 (Version: 16.093M) Fri Apr 8 18:36:56 2016 No. of days remaining = 360 Empirical Formula: O2 = 2 atoms LARGE UHF TRIPLET Title GEOMETRY OPTIMISED USING EIGENVECTOR FOLLOWING (EF). SCF FIELD WAS ACHIEVED HEAT OF FORMATION = -9.17144 KCAL/MOL = -38.37332 KJ/MOL TOTAL ENERGY = -587.42567 EV ELECTRONIC ENERGY = -951.56226 EV CORE-CORE REPULSION = 364.13659 EV GRADIENT NORM = 0.21509 DIPOLE = 0.00000 DEBYE POINT GROUP: D*h (SZ) = 1.000000 (S**2) = 2.003405 NO. OF ALPHA ELECTRONS = 7 NO. OF BETA ELECTRONS = 5 IONIZATION POTENTIAL = 10.667813 EV ALPHA SOMO LUMO (EV) = -10.668 2.451 BETA SOMO LUMO (EV) = -15.302 0.614 MOLECULAR WEIGHT = 31.9988 COSMO AREA = 49.05 SQUARE ANGSTROMS COSMO VOLUME = 31.42 CUBIC ANGSTROMS MOLECULAR DIMENSIONS (Angstroms) Atom Atom Distance O 2 O 1 1.13095 O 2 O 1 0.00000 O 1 O 2 0.00000 SCF CALCULATIONS = 7 WALL-CLOCK TIME = 0.023 SECONDS COMPUTATION TIME = 0.011 SECONDS FINAL GEOMETRY OBTAINED LARGE UHF TRIPLET Title O -5.44209638 +1 -0.48410726 +1 -0.00000000 +1 O -4.32579361 +1 -0.30269274 +1 -0.00000000 +1
Arc
1
pstjohn/cclib
data/MOPAC/o2_trip.arc
[ "BSD-3-Clause" ]
<faces-config> <faces-config-extension> <namespace-uri>http://example.com/xsp/control</namespace-uri> <default-prefix>xlb</default-prefix> </faces-config-extension> </faces-config>
XPages
3
camac/XLogback
org.openntf.base.logback.xsp/src/main/java/META-INF/xlb.xsp-config
[ "Apache-2.0", "MIT" ]
%{-- - Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com) - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. --}% <g:if test="${flash.message || request.message}"> <g:autoLink><g:enc>${flash.message}</g:enc><g:enc>${request.message}</g:enc></g:autoLink> </g:if> <g:if test="${beanErrors ||flash.errors ||flash.error || request.error|| request.errorMessage || request.errors || flash.errorCode || request.errorCode}"> <g:autoLink><g:enc>${flash.error}</g:enc>${request.error && request.error instanceof String? enc(html:request.error): request.errorMessage && request.errorMessage instanceof String? enc(html:request.errorMessage):''}</g:autoLink> <g:if test="${flash.errors instanceof org.springframework.validation.Errors}"> <g:renderErrors bean="${flash.errors}" as="list"/> </g:if> <g:if test="${request.errors instanceof org.springframework.validation.Errors}"> <g:renderErrors bean="${request.errors}" as="list"/> </g:if> <g:if test="${request.errors instanceof java.util.Collection}"> <ul> <g:each in="${request.errors}" var="err"> <li><g:enc>${err}</g:enc></li> </g:each> </ul> </g:if> <g:if test="${beanErrors instanceof org.springframework.validation.Errors}"> <g:renderErrors bean="${beanErrors}" as="list"/> </g:if> <g:if test="${flash.errorCode ?: request.errorCode}"> <g:message code="${flash.errorCode ?: request.errorCode}" args="${flash.errorArgs ?: request.errorArgs}"/> </g:if> </g:if> <g:if test="${flash.warn || request.warn}"> <g:autoLink><g:enc>${flash.warn}</g:enc><g:enc>${request.warn}</g:enc></g:autoLink> </g:if>
Groovy Server Pages
3
kbens/rundeck
rundeckapp/grails-app/views/common/_messagesText.gsp
[ "Apache-2.0" ]
describe("foreach should support iterating through String (Iterable)", || a := "hello" b := Buffer new() for (c in a) { b append(c) } result := b toString() expect(a, result) )
ooc
4
shamanas/rock
test/compiler/control/foreach.ooc
[ "MIT" ]
@[if DEVELSPACE]@ set(@(PROJECT_NAME)_CMAKE_DIR @(CMAKE_CURRENT_SOURCE_DIR)/cmake) @[else]@ set(catkin_virtualenv_CMAKE_DIR ${@(PROJECT_NAME)_DIR}) @[end if]@ # Include cmake modules from @(PROJECT_NAME) include(${@(PROJECT_NAME)_CMAKE_DIR}/catkin_generate_virtualenv.cmake) include(${@(PROJECT_NAME)_CMAKE_DIR}/catkin_install_python.cmake)
EmberScript
3
Tobias-Fischer/catkin_virtualenv
catkin_virtualenv/cmake/catkin_virtualenv-extras.cmake.em
[ "CNRI-Python" ]
#+TITLE: Global History Tree # NOTE: Should have been called "shared history tree" instead? This data structure can be used to store the history of visited paths or URLs with a file or web browser, in a way that no "forward" element is ever forgotten. The history tree is "global" in the sense that multiple owners (e.g. tabs) can have overlapping histories. On top of that, an owner can spawn another one, /starting from one of its nodes/ (typically when you open a URL in a new tab). This global history tree structure reifies all this. * Goals ** History tree In many popular web browsers and file managers, the history is linear. This is unfortunate because it loses information such as the branches that are created when going back, then forward. Take this linear history: #+begin_src text A -> B #+end_src We are on =B=. If we go back to =A=, then to =C=, the linear history becomes #+begin_src text A -> C #+end_src We lost the information that we visited =B=. A tree history solves this issue by encoding the history as a tree-like data structure instead of a list. With a tree history, the above example would yield #+begin_src text A -> C \-> B #+end_src ** Inter-owner relationships In web browsers and file managers, content is browsed by what this library calls an /owner/ (e.g. a tab). It's common practice to create a new owner visiting content coming from an existing owner (e.g. opening a link in a new tab). This induces a relationship between owners. In particular, this type of owner relationships means that the history of related owners can be related. This library allows us to have such trees: #+begin_src lisp (X-A (X-B1 (X-C1 X-C2)) (X-B2 (X-D1 Y-D2))) #+end_src The X-prefixed nodes belong to owner X, while the Y-ones belong to Y. X current node may be X-B2, while Y current node may be Y-D2. X is said to be the =creator= of Y, and Y-D2 is the =origin= node of Y. Y owns only 1 node, while X owns 6 nodes. None of them overlaps, but wait. With current owner being X, if we go forward to Y-D2, then we own it and it becomes the new /current node/ of X. Now Y-D2 is owned both by X and Y. Similarly, Y can "go back" to X-A, which becomes its /current node/, while X-B2 becomes the /forward child/ of X-A _for_ Y. If Y now visits X-B1, it becomes the /forward child/ of X-A for Y (while X-B2 is still the forward child of X-A for X). A node may have multiple children. For each owner, the "forward child" is the default choice of which child to visit when calling the =forward= function. Observe that each node may have different /forward children/ for each of their owners. * Vocabulary notes - Data :: Whenever we refer to =data=, we mean the arbitrary content the user stores in the tree (such as URLs or paths). The data is automatically deduplicated. - Branch :: since "history tree" refers to the whole data structure, we avoid using the term "tree" otherwise to avoid any confusion. Instead we talk about "branches". Whenever we refer to a branch, we mean to whole set of inter-connected nodes up to the root (a node without parent). Note that the history tree may have multiple, non connected branches. For =entry=, =node=, =binding=, =owner=, see the documentation of the respective classes. * Integrity and garbage collection While the history tree is not immutable /per se/, it tries to retain as much information as possible. But keeping nodes forever would lead to an ever-growing tree, which is usually not desirable. So the policy is to delete all the nodes of a /branch/ only when they become owner-less. 
So this happens only on owner deletion. Everything else remains: - Bindings (ownership) cannot be removed as long as the owner is not deleted. - Nodes cannot be deleted other than by the aforementioned mechanism. * History data deletion Entries can only be deleted with =delete-data= if no node refers to the entry. This can be inconvenient if there are many nodes used by many owners which refer to the entries we would like to delete. A few options: - =delete-owner= removes an owner. If all the owners are removed from a branch, the branch is garbage-collected. If the entries that were pointed to by the branch nodes are not referenced in any other branch, the entries effectively become node-less and thus available for deletion. - =reset-owner= disowns all the nodes of a given owner and creates a new root node pointing to the current entry of the owner. This makes it possible to free nodes and entries without deleting an owner. * Concurrency This library is not thread-safe. The user is expected to use a mechanism such as mutexes to guarantee the integrity of the global history tree. Rationale: It's only too common that the user wants to persist the global history tree to disk. In this case, some form of thread-safety should already be used for the persisted file. This safety can be trivially used to guarantee the integrity of the global history tree in memory as well. * Customizable entry uniqueness The =entries= must be unique in a sense that's defined by the user. For instance, if the user wants to store #+begin_src lisp (defclass web-page () ((url :accessor url) (title))) #+end_src entries, the =title= might be irrelevant for uniqueness. Thus, to store =web-page='s by unique URL, you can create a =history= with the =url= accessor as a key: #+begin_src lisp (htree:make :key 'url) #+end_src When adding an entry with the same URL but with a different title, the existing entry's title is automatically updated to the new one, but the entry object stored in the tree remains the same. * Future work ** Hash tables vs. lists Initially it was decided to encode the set of unique entries as a hash-table for performance reasons. The reasoning was that hash-tables have constant-time access to their elements as opposed to the more practical Lisp lists, for which access is in linear time. It turns out that element access in a list is extremely fast with SBCL, and a quick benchmark shows that it's only when exceeding about 10.000.000 entries that hash tables start becoming more interesting. So maybe hash tables were not the best choice for a set that's unlikely to have more than 100.000--1.000.000 entries. Previously we explained how the uniqueness is customizable. In standard Common Lisp, hash tables accept only =eq=, =eql=, =equal= or =equalp= as test function. So to allow full customizability as in the previous example, we resort to the [[https://github.com/metawilm/cl-custom-hash-table][cl-custom-hash-table]] library. Custom hash tables have restricted the design somewhat. For instance, the =entries= hash table values are the entries themselves, so that we have a way to access the stored keys in constant time. (Indeed, when you call =(gethash my-web-page entries)=, there is no guarantee that the matched key is identical to =my-web-page=.) ** Immutability The global history tree strives to be as immutable as possible, as we explain in the sections on integrity and deletion. This helps both the developers and the users understand what's going on, which is essential for such a complex data structure. 
It could have been better to have a fully immutable data structure (in the functional programming sense), e.g. using the [[https://common-lisp.net/project/fset/Site/index.html][FSet]] library. It's unclear whether the performance penalty would be too important. We would need some benchmark here. One benefit of full immutability is that we can know precisely when the global history tree was modified (e.g. when =my-history= is reassigned to the new history value). This allows us, for instance, to serialize only on modification and thus avoid useless serializations, which may be expensive when the history grows big.
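To make the deletion policy concrete, here is a minimal sketch of the workflow described above. It is not taken from the library's own documentation: the function names follow this README, but the package nickname, owner designators and argument orders are assumptions; check the package's exported symbols before relying on them.

#+begin_src lisp
;; Sketch only: names follow this README, signatures are assumed.
(defvar *history* (htree:make :key 'url))  ; entries deduplicated by URL

;; ... owners (e.g. browser tabs) visit entries and create nodes ...

;; Deleting an owner: if its branch becomes owner-less, the branch is
;; garbage-collected, possibly leaving some entries node-less.
(htree:delete-owner *history* 'tab-1)      ; owner designator is an assumption

;; Node-less entries can then be removed explicitly.
(htree:delete-data *history* "https://example.com/old-page")
#+end_src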
Org
5
atlas-engineer/next
libraries/history-tree/README.org
[ "BSD-3-Clause" ]
// rustfmt-format_strings: true // rustfmt-max_width: 50 const foo: String = "trailing_spaces!! keep them! Amet neque. Praesent rhoncus eros non velit.";
Rust
1
mbc-git/rust
src/tools/rustfmt/tests/source/issue-1210/c.rs
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
package com.baeldung.guava.tutorial;

import com.google.common.util.concurrent.Monitor;
import org.junit.Assert;
import org.junit.Test;

public class MonitorUnitTest {

    @Test
    public void whenGuardConditionIsTrue_IsSuccessful() {
        Monitor monitor = new Monitor();
        boolean enteredInCriticalSection = false;

        Monitor.Guard guardCondition = monitor.newGuard(this::returnTrue);

        if (monitor.enterIf(guardCondition)) {
            try {
                System.out.println("Entered in critical section");
                enteredInCriticalSection = true;
            } finally {
                monitor.leave();
            }
        }

        Assert.assertTrue(enteredInCriticalSection);
    }

    @Test
    public void whenGuardConditionIsFalse_IsSuccessful() {
        Monitor monitor = new Monitor();
        boolean enteredInCriticalSection = false;

        Monitor.Guard guardCondition = monitor.newGuard(this::returnFalse);

        if (monitor.enterIf(guardCondition)) {
            try {
                System.out.println("Entered in critical section");
                enteredInCriticalSection = true;
            } finally {
                monitor.leave();
            }
        }

        Assert.assertFalse(enteredInCriticalSection);
    }

    private boolean returnTrue() {
        return true;
    }

    private boolean returnFalse() {
        return false;
    }
}
Java
4
zeesh49/tutorials
guava-modules/guava-21/src/test/java/com/baeldung/guava/tutorial/MonitorUnitTest.java
[ "MIT" ]
<vector xmlns:android="http://schemas.android.com/apk/res/android" android:width="20dp" android:height="20dp" android:viewportWidth="20" android:viewportHeight="20" android:tint="?attr/colorControlNormal"> <path android:fillColor="@android:color/white" android:pathData="M15,9V5.5C15,4.67 14.33,4 13.5,4h-9C3.67,4 3,4.67 3,5.5v9C3,15.33 3.67,16 4.5,16h9c0.83,0 1.5,-0.67 1.5,-1.5V11l2.15,2.15c0.31,0.31 0.85,0.09 0.85,-0.35V7.21c0,-0.45 -0.54,-0.67 -0.85,-0.35L15,9zM10.5,11.29v-0.54h-3v0.54c0,0.45 -0.54,0.67 -0.85,0.35l-1.29,-1.29c-0.2,-0.2 -0.2,-0.51 0,-0.71l1.29,-1.29C6.96,8.04 7.5,8.26 7.5,8.71v0.54h3V8.71c0,-0.45 0.54,-0.67 0.85,-0.35l1.29,1.29c0.2,0.2 0.2,0.51 0,0.71l-1.29,1.29C11.04,11.96 10.5,11.74 10.5,11.29z"/> </vector>
XML
3
Imudassir77/material-design-icons
android/image/switch_video/materialiconsround/black/res/drawable/round_switch_video_20.xml
[ "Apache-2.0" ]
module module3; int something;
D
0
moroten/scons
test/D/CoreScanner/Image/module3.di
[ "MIT" ]
create table t ( a int not null primary key, b varchar(10) unique );
SQL
4
cuishuang/tidb
br/tests/lightning_checkpoint_engines_order/data/disk_quota_checkpoint.t-schema.sql
[ "Apache-2.0" ]
// Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. //go:build aix || darwin || freebsd || solaris // +build aix darwin freebsd solaris package net import "syscall" const readMsgFlags = 0 func setReadMsgCloseOnExec(oob []byte) { scms, err := syscall.ParseSocketControlMessage(oob) if err != nil { return } for _, scm := range scms { if scm.Header.Level == syscall.SOL_SOCKET && scm.Header.Type == syscall.SCM_RIGHTS { fds, err := syscall.ParseUnixRights(&scm) if err != nil { continue } for _, fd := range fds { syscall.CloseOnExec(fd) } } } }
Go
3
PhilYue/go
src/net/unixsock_readmsg_cloexec.go
[ "BSD-3-Clause" ]
/* * This file is part of flex. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE. */ /* How to compile: bison --defines --output-file="parser.c" --name-prefix="test" parser.y */ %{ #include <stdio.h> #include <stdlib.h> #include <string.h> #include "config.h" #include "bison_nr_parser.h" #include "bison_nr_scanner.h" #define YYERROR_VERBOSE 1 /* #define YYPARSE_PARAM scanner */ /* #define YYLEX_PARAM scanner */ int yyerror(const char* msg); /* A dummy function. A check against seg-faults in yylval->str. */ static int process_text(char* s) { int total =0; while(*s) { total += (int) *s; ++s; } return total; } %} %pure-parser %union { int lineno; char * str; } %token <str> IDENT %token <lineno> LINENO %token EQUAL "=" %token COLON ":" %token SPACE " " %% file: line | file line ; line: LINENO COLON SPACE IDENT EQUAL IDENT { process_text($4); process_text($6); /* Check lineno. */ if( $1 != @1.first_line || $1 != testget_lineno()) { yyerror("Parse failed: Line numbers do not match."); YYABORT; } /* Recreate the line to stdout. */ printf ( "%04d: %s=%s\n", @1.first_line, $4, $6); } ; %% int yyerror(const char* msg) { fprintf(stderr,"%s\n",msg); return 0; }
Yacc
4
DemiMarie/flex
tests/bison_nr_parser.y
[ "BSD-4-Clause-UC" ]
<p:declare-step version="3.0" name="validate-with-schematron" type="schxslt:validate-with-schematron" xmlns:p="http://www.w3.org/ns/xproc" xmlns:schxslt="https://doi.org/10.5281/zenodo.1495494" xmlns:xs="http://www.w3.org/2001/XMLSchema"> <p:import href="compile-schematron.xpl"/> <p:option name="phase" as="xs:string" select="'#DEFAULT'"/> <p:input port="source"/> <p:input port="schema"/> <p:output port="result" pipe="source@validate-with-schematron"/> <p:output port="report" pipe="result@validate"/> <schxslt:compile-schematron name="compile-schematron" phase="{$phase}"> <p:with-input pipe="source@validate-with-schematron"/> </schxslt:compile-schematron> <p:xslt name="validate"> <p:with-input port="source" pipe="source@validate-with-schematron"/> <p:with-input port="stylesheet" pipe="result@compile-schematron"/> </p:xslt> </p:declare-step>
XProc
4
ahenket/schxslt
core/src/main/resources/xproc/3.0/validate-with-schematron.xpl
[ "MIT" ]
// MIR for `Test::X` 0 mir_map fn Test::X(_1: usize) -> Test { let mut _0: Test; // return place in scope 0 at $DIR/unusual-item-types.rs:16:5: 16:13 bb0: { ((_0 as X).0: usize) = move _1; // scope 0 at $DIR/unusual-item-types.rs:16:5: 16:13 discriminant(_0) = 0; // scope 0 at $DIR/unusual-item-types.rs:16:5: 16:13 return; // scope 0 at $DIR/unusual-item-types.rs:16:5: 16:13 } }
Mirah
3
Eric-Arellano/rust
src/test/mir-opt/unusual_item_types.Test-X-{constructor#0}.mir_map.0.64bit.mir
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
{ "http://example.com/vocab/indexMap": { "@value": "simple string", "@language": "en", "@index": true } }
JSONLD
0
fsteeg/json-ld-api
tests/expand/e031-in.jsonld
[ "W3C" ]
/**************************************************************************** * * (c) 2009-2019 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org> * * QGroundControl is licensed according to the terms in the file * COPYING.md in the root of the source code directory. * * @file * @author Gus Grubba <[email protected]> */ import QtQuick 2.11 import QtQuick.Controls 1.4 import QGroundControl 1.0 import QGroundControl.Controls 1.0 import QGroundControl.ScreenTools 1.0 import QGroundControl.Palette 1.0 Item { width: size height: size property real size: 50 property real percent: 0 QGCPalette { id: qgcPal } function getIcon() { if (percent < 20) return "/custom/img/menu_signal_0.svg" if (percent < 40) return "/custom/img/menu_signal_25.svg" if (percent < 60) return "/custom/img/menu_signal_50.svg" if (percent < 90) return "/custom/img/menu_signal_75.svg" return "/custom/img/menu_signal_100.svg" } QGCColoredImage { source: getIcon() fillMode: Image.PreserveAspectFit anchors.fill: parent sourceSize.height: size color: qgcPal.text } }
QML
4
uav-operation-system/qgroundcontrol
custom-example/res/Custom/Widgets/CustomSignalStrength.qml
[ "Apache-2.0" ]
#[cfg(doc)] pub struct Foo; fn main() { let f = Foo; //~ ERROR }
Rust
2
Eric-Arellano/rust
src/test/ui/cfg-rustdoc.rs
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
trait val FormatSpec primitive FormatDefault is FormatSpec primitive FormatUTF32 is FormatSpec primitive FormatBinary is FormatSpec primitive FormatBinaryBare is FormatSpec primitive FormatOctal is FormatSpec primitive FormatOctalBare is FormatSpec primitive FormatHex is FormatSpec primitive FormatHexBare is FormatSpec primitive FormatHexSmall is FormatSpec primitive FormatHexSmallBare is FormatSpec type FormatInt is ( FormatDefault | FormatUTF32 | FormatBinary | FormatBinaryBare | FormatOctal | FormatOctalBare | FormatHex | FormatHexBare | FormatHexSmall | FormatHexSmallBare ) primitive FormatExp is FormatSpec primitive FormatExpLarge is FormatSpec primitive FormatFix is FormatSpec primitive FormatFixLarge is FormatSpec primitive FormatGeneral is FormatSpec primitive FormatGeneralLarge is FormatSpec type FormatFloat is ( FormatDefault | FormatExp | FormatExpLarge | FormatFix | FormatFixLarge | FormatGeneral | FormatGeneralLarge )
Pony
3
presidentbeef/ponyc
packages/format/format_spec.pony
[ "BSD-2-Clause" ]
/*--------------------------------------------------*/ /* SAS Programming for R Users - code for exercises */ /* Copyright 2016 SAS Institute Inc. */ /*--------------------------------------------------*/ /*SP4R03s03*/ /*Part A*/ proc fcmp outlib=work.functions.newfuncs; function tier(val) $; length newval $ 6; if val < 20 then newval = 'Low'; else if val <30 then newval='Medium'; else newval='High'; return(newval); endsub; quit; /*Part B*/ options cmplib=work.functions; data sp4r.cars; set sp4r.cars; mpg_quality2=tier(mpg_average); run; /*Part C*/ proc print data=sp4r.cars (firstobs=65 obs=70); var mpg_average mpg_quality mpg_quality2; run;
SAS
4
snowdj/sas-prog-for-r-users
code/SP4R03s03.sas
[ "CC-BY-4.0" ]
/* * Copyright 2018-2021 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #include <openssl/evp.h> #include <openssl/core_names.h> #include <openssl/proverr.h> #include <openssl/err.h> #include "prov/provider_ctx.h" #include "prov/providercommon.h" #include "prov/implementations.h" #include "prov/provider_util.h" static OSSL_FUNC_kdf_newctx_fn kdf_pvk_new; static OSSL_FUNC_kdf_freectx_fn kdf_pvk_free; static OSSL_FUNC_kdf_reset_fn kdf_pvk_reset; static OSSL_FUNC_kdf_derive_fn kdf_pvk_derive; static OSSL_FUNC_kdf_settable_ctx_params_fn kdf_pvk_settable_ctx_params; static OSSL_FUNC_kdf_set_ctx_params_fn kdf_pvk_set_ctx_params; static OSSL_FUNC_kdf_gettable_ctx_params_fn kdf_pvk_gettable_ctx_params; static OSSL_FUNC_kdf_get_ctx_params_fn kdf_pvk_get_ctx_params; typedef struct { void *provctx; unsigned char *pass; size_t pass_len; unsigned char *salt; size_t salt_len; PROV_DIGEST digest; } KDF_PVK; static void kdf_pvk_init(KDF_PVK *ctx); static void *kdf_pvk_new(void *provctx) { KDF_PVK *ctx; if (!ossl_prov_is_running()) return NULL; ctx = OPENSSL_zalloc(sizeof(*ctx)); if (ctx == NULL) { ERR_raise(ERR_LIB_PROV, ERR_R_MALLOC_FAILURE); return NULL; } ctx->provctx = provctx; kdf_pvk_init(ctx); return ctx; } static void kdf_pvk_cleanup(KDF_PVK *ctx) { ossl_prov_digest_reset(&ctx->digest); OPENSSL_free(ctx->salt); OPENSSL_clear_free(ctx->pass, ctx->pass_len); OPENSSL_cleanse(ctx, sizeof(*ctx)); } static void kdf_pvk_free(void *vctx) { KDF_PVK *ctx = (KDF_PVK *)vctx; if (ctx != NULL) { kdf_pvk_cleanup(ctx); OPENSSL_free(ctx); } } static void kdf_pvk_reset(void *vctx) { KDF_PVK *ctx = (KDF_PVK *)vctx; void *provctx = ctx->provctx; kdf_pvk_cleanup(ctx); ctx->provctx = provctx; kdf_pvk_init(ctx); } static void kdf_pvk_init(KDF_PVK *ctx) { OSSL_PARAM params[2] = { OSSL_PARAM_END, OSSL_PARAM_END }; OSSL_LIB_CTX *provctx = PROV_LIBCTX_OF(ctx->provctx); params[0] = OSSL_PARAM_construct_utf8_string(OSSL_KDF_PARAM_DIGEST, SN_sha1, 0); if (!ossl_prov_digest_load_from_params(&ctx->digest, params, provctx)) /* This is an error, but there is no way to indicate such directly */ ossl_prov_digest_reset(&ctx->digest); } static int pvk_set_membuf(unsigned char **buffer, size_t *buflen, const OSSL_PARAM *p) { OPENSSL_clear_free(*buffer, *buflen); *buffer = NULL; *buflen = 0; if (p->data_size == 0) { if ((*buffer = OPENSSL_malloc(1)) == NULL) { ERR_raise(ERR_LIB_PROV, ERR_R_MALLOC_FAILURE); return 0; } } else if (p->data != NULL) { if (!OSSL_PARAM_get_octet_string(p, (void **)buffer, 0, buflen)) return 0; } return 1; } static int kdf_pvk_derive(void *vctx, unsigned char *key, size_t keylen, const OSSL_PARAM params[]) { KDF_PVK *ctx = (KDF_PVK *)vctx; const EVP_MD *md; EVP_MD_CTX *mctx; int res; if (!ossl_prov_is_running() || !kdf_pvk_set_ctx_params(ctx, params)) return 0; if (ctx->pass == NULL) { ERR_raise(ERR_LIB_PROV, PROV_R_MISSING_PASS); return 0; } if (ctx->salt == NULL) { ERR_raise(ERR_LIB_PROV, PROV_R_MISSING_SALT); return 0; } md = ossl_prov_digest_md(&ctx->digest); if (md == NULL) { ERR_raise(ERR_LIB_PROV, PROV_R_INVALID_DIGEST); return 0; } res = EVP_MD_get_size(md); if (res <= 0) { ERR_raise(ERR_LIB_PROV, PROV_R_BAD_LENGTH); return 0; } if ((size_t)res > keylen) { ERR_raise(ERR_LIB_PROV, PROV_R_LENGTH_TOO_LARGE); return 0; } mctx = EVP_MD_CTX_new(); res = mctx 
!= NULL && EVP_DigestInit_ex(mctx, md, NULL) && EVP_DigestUpdate(mctx, ctx->salt, ctx->salt_len) && EVP_DigestUpdate(mctx, ctx->pass, ctx->pass_len) && EVP_DigestFinal_ex(mctx, key, NULL); EVP_MD_CTX_free(mctx); return res; } static int kdf_pvk_set_ctx_params(void *vctx, const OSSL_PARAM params[]) { const OSSL_PARAM *p; KDF_PVK *ctx = vctx; OSSL_LIB_CTX *provctx = PROV_LIBCTX_OF(ctx->provctx); if (params == NULL) return 1; if (!ossl_prov_digest_load_from_params(&ctx->digest, params, provctx)) return 0; if ((p = OSSL_PARAM_locate_const(params, OSSL_KDF_PARAM_PASSWORD)) != NULL) if (!pvk_set_membuf(&ctx->pass, &ctx->pass_len, p)) return 0; if ((p = OSSL_PARAM_locate_const(params, OSSL_KDF_PARAM_SALT)) != NULL) { if (!pvk_set_membuf(&ctx->salt, &ctx->salt_len, p)) return 0; } return 1; } static const OSSL_PARAM *kdf_pvk_settable_ctx_params(ossl_unused void *ctx, ossl_unused void *p_ctx) { static const OSSL_PARAM known_settable_ctx_params[] = { OSSL_PARAM_utf8_string(OSSL_KDF_PARAM_PROPERTIES, NULL, 0), OSSL_PARAM_utf8_string(OSSL_KDF_PARAM_DIGEST, NULL, 0), OSSL_PARAM_octet_string(OSSL_KDF_PARAM_PASSWORD, NULL, 0), OSSL_PARAM_octet_string(OSSL_KDF_PARAM_SALT, NULL, 0), OSSL_PARAM_END }; return known_settable_ctx_params; } static int kdf_pvk_get_ctx_params(void *vctx, OSSL_PARAM params[]) { OSSL_PARAM *p; if ((p = OSSL_PARAM_locate(params, OSSL_KDF_PARAM_SIZE)) != NULL) return OSSL_PARAM_set_size_t(p, SIZE_MAX); return -2; } static const OSSL_PARAM *kdf_pvk_gettable_ctx_params(ossl_unused void *ctx, ossl_unused void *p_ctx) { static const OSSL_PARAM known_gettable_ctx_params[] = { OSSL_PARAM_size_t(OSSL_KDF_PARAM_SIZE, NULL), OSSL_PARAM_END }; return known_gettable_ctx_params; } const OSSL_DISPATCH ossl_kdf_pvk_functions[] = { { OSSL_FUNC_KDF_NEWCTX, (void(*)(void))kdf_pvk_new }, { OSSL_FUNC_KDF_FREECTX, (void(*)(void))kdf_pvk_free }, { OSSL_FUNC_KDF_RESET, (void(*)(void))kdf_pvk_reset }, { OSSL_FUNC_KDF_DERIVE, (void(*)(void))kdf_pvk_derive }, { OSSL_FUNC_KDF_SETTABLE_CTX_PARAMS, (void(*)(void))kdf_pvk_settable_ctx_params }, { OSSL_FUNC_KDF_SET_CTX_PARAMS, (void(*)(void))kdf_pvk_set_ctx_params }, { OSSL_FUNC_KDF_GETTABLE_CTX_PARAMS, (void(*)(void))kdf_pvk_gettable_ctx_params }, { OSSL_FUNC_KDF_GET_CTX_PARAMS, (void(*)(void))kdf_pvk_get_ctx_params }, { 0, NULL } };
C
4
lbbxsxlz/openssl
providers/implementations/kdfs/pvkkdf.c
[ "Apache-2.0" ]
;; ;; One Click Login v3 ;; + ROTMG Exalt ;; + https://github.com/jakcodex/muledump/wiki/One+Click+Login for more information ;; ;; If configuring via Muledump then you don't need to change anything in this file. ;; Be sure to run the script with AutoIt and choose "reinstall" if prompted. ;; Global $config = ObjCreate("Scripting.Dictionary") ;; path to RotMG Exalt.exe $config.Add("path", "%USERPROFILE%\Documents\RealmOfTheMadGod\Production\RotMG Exalt.exe"); ;; ocl operational mode (only 'exalt' is supported in ocl v3) $config.Add("mode", "exalt"); ;; account username $config.Add("username", "jakcodex-ocl-exalt-misconfigured"); ;; account password $config.Add("password", ""); ;; run in admin mode $config.Add("admin", "false"); ;; enable runtime parameter support $config.Add("params", "true") ;; enable setting admin permissions via runtime params $config.Add("adminparams", "false") ;; enforce parameter security $config.Add("paramsecurity", "true") ;; parameter separator $config.Add("paramseparator", "++++") ;; output debugging information $config.Add("debug", "false") ;; default window title $config.Add("title", "Muledump One Click Login") ;; account ign (included by request) $config.Add("ign", ""); #include <String.au3> #include <File.au3> #include <Array.au3> Global $string, $password, $username, $data, $path, $search, $file, $root $root = "HKEY_CLASSES_ROOT\muledump" $title = "Muledump One Click Login Exalt Installer" $adminRightsError = "Error - Requires Admin Privileges" & @CRLF & @CRLF & "Either edit ocl-exalt.au3 in a text editor and set 'admin' to true in the config or update your request parameters" & @CRLF & @CRLF & "For more help see:" & @CRLF & "https://github.com/jakcodex/muledump/wiki/One-Click-Login" Func _GetAdminRights() If Not IsAdmin() and $config.Item("admin") == "true" Then ShellExecute(@AutoItExe, $CmdLineRaw, "", "runas") ProcessClose(@AutoItPID) Exit EndIf EndFunc Func _error($msg='There was an error') MsgBox(0, "Error", $msg) ConsoleWrite("state:false") Exit EndFunc Func _write() RegWrite($root,"","REG_SZ","URL: muledump Protocol") RegWrite($root,"URL Protocol","REG_SZ","") RegWrite($root & "\shell") RegWrite($root & "\shell\open") RegWrite($root & "\shell\open\command","","REG_SZ", @AutoItExe & ' "' & @ScriptFullPath & '" %1') If RegRead("HKEY_CLASSES_ROOT\muledump","") Then MsgBox(64,$title,"One Click Login Exalt: installed" & @CRLF & @CRLF & "Now go to Muledump and click Setup > Settings > One Click Login to finish setup") Else MsgBox(16,$title,$adminRightsError) EndIf Exit EndFunc Func _install() $config.Item('admin') = 'true'; _GetAdminRights() Local $k $k = RegEnumKey($root, 1) If @error == 2 Then MsgBox(16,$title,$adminRightsError) Exit EndIf If @error == 1 Then _write() $k = MsgBox(6 + 32, $title, _ 'One Click Login is already installed.' & @CRLF & @CRLF & 'What would you like to do?' 
& @CRLF & @CRLF & _ '"Cancel" to do nothing' & @CRLF & _ '"Try Again" to reinstall' & @CRLF & _ '"Continue" to uninstall') If $k == 10 Then _write() If $k == 11 Then RegDelete($root) if @error <> 0 Then MsgBox(16,$title,$adminRightsError) Else MsgBox(64,$title,"One Click Login: uninstalled") EndIf EndIf Exit EndFunc Func _ProcessGetHWnd($iPid, $iOption = 1, $sTitle = "", $iTimeout = 2000) Local $aReturn[1][1] = [[0]], $aWin, $hTimer = TimerInit() While 1 ; Get list of windows $aWin = WinList($sTitle) ; Searches thru all windows For $i = 1 To $aWin[0][0] ; Found a window owned by the given PID If $iPid = WinGetProcess($aWin[$i][1]) Then ; Option 0 or 1 used If $iOption = 1 OR ($iOption = 0 And $aWin[$i][0] <> "") Then Return $aWin[$i][1] ; Option 2 is used ElseIf $iOption = 2 Then ReDim $aReturn[UBound($aReturn) + 1][2] $aReturn[0][0] += 1 $aReturn[$aReturn[0][0]][0] = $aWin[$i][0] $aReturn[$aReturn[0][0]][1] = $aWin[$i][1] EndIf EndIf Next ; If option 2 is used and there was matches then the list is returned If $iOption = 2 And $aReturn[0][0] > 0 Then Return $aReturn ; If timed out then give up If TimerDiff($hTimer) > $iTimeout Then ExitLoop ; Waits before new attempt Sleep(Opt("WinWaitDelay")) WEnd ; No matches SetError(1) Return 0 EndFunc Func _Base64Encode($sData) Local $oXml = ObjCreate("Msxml2.DOMDocument") If Not IsObj($oXml) Then SetError(1, 1, 0) EndIf Local $oElement = $oXml.createElement("b64") If Not IsObj($oElement) Then SetError(2, 2, 0) EndIf $oElement.dataType = "bin.base64" $oElement.nodeTypedValue = Binary($sData) Local $sReturn = $oElement.Text If StringLen($sReturn) = 0 Then SetError(3, 3, 0) EndIf Return $sReturn EndFunc ;==>_Base64Encode If $CmdLine[0] = 0 Then _install() ;; process the command input $data = StringReplace($CmdLine[1],"muledump:","") $data = StringSplit($data,"-") $username = _Base64Encode(_HexToString($data[1])) $password = _Base64Encode(_HexToString($data[2])) ;; if parameters were passed we will parse them into the runtime config If UBound($data) == 4 and $config.Item("params") == "true" Then $params = StringSplit($data[3], $config.Item("paramseparator")) If IsArray($params) Then Local Const $paramsLength = UBound($params) For $i = 0 To $paramsLength-1 $paramPieces = StringSplit($params[$i], "=") If IsArray($paramPieces) Then If $config.Exists($paramPieces[1]) Then If $paramPieces[1] == "paramsecurity" Then ContinueLoop If $config.Item("paramsecurity") == "true" and $paramPieces[1] == "admin" and $config.Item("adminparams") == "false" Then ContinueLoop If $paramPieces[2] == "" Then ContinueLoop $config.Item($paramPieces[1]) = $paramPieces[2] $config.Item($paramPieces[1]) = StringReplace($config.Item($paramPieces[1]), "%5C", "\") $config.Item($paramPieces[1]) = StringReplace($config.Item($paramPieces[1]), "%2F", "/") $config.Item($paramPieces[1]) = StringReplace($config.Item($paramPieces[1]), "%20", " ") EndIf EndIf Next EndIf EndIf ;; obtain admin privileges if enabled _GetAdminRights() ;; display debugging information If $config.Item("debug") == "true" Then MsgBox(0, "Config", "admin => " & $config.Item("admin") & @CRLF & "mode => " & $config.Item("mode") & @CRLF & "path => " & $config.Item("path") & @CRLF & "paths => " & $config.Item("paths") & @CRLF); EndIf ;; ; launch one-click login ;; Global $pid = 0 If $config.Item("mode") == "exalt" Then If $config.Item("params") == "true" and $config.Item("paramsecurity") == "true" Then Local $result ;; the exe has a particular filename $result = StringRegExp($config.Item("path"), 
"^[a-zA-Z]:\\[a-zA-Z0-9-_\\]*?RotMG Exalt\.exe$"); If @error or $result == 0 Then _error("Invalid path provided: " & $config.Item("path") & @CRLF & @CRLF & "If the value is correct then try disabling param security in the au3 file config.") ;; username should be valid base64 $result = StringRegExp($username, "^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$"); If @error or $result == 0 Then _error("Invalid username provided. " & @CRLF & @CRLF & "If the value is correct then try disabling param security in the au3 file config.") ;; password should be valid base64 $result = StringRegExp($password, "^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$"); If @error or $result == 0 Then _error("Invalid password provided. " & @CRLF & @CRLF & "If the value is correct then try disabling param security in the au3 file config.") EndIf $args = "data:{platform:Deca,password:" & $password & ",guid:" & $username & ",env:4}" $pid = ShellExecute($config.Item("path"), $args) If $pid > 0 And Not($config.Item("title") == "" Or $config.Item("title") == "false") Then Local $name = $config.Item("title") If Not($config.Item("ign") == "") Then $name &= " - " & $config.Item("ign") EndIf Local $win = _ProcessGetHWnd($pid) WinSetTitle($win, "", $name) Else If $config.Item("debug") == "true" Then MsgBox(0, "Error", "Failed to launch game client") EndIf EndIf Else MsgBox(0, "Error", "Invalid mode provided. Valid modes are: exalt") EndIf ;; ;; Are you looking to customize your One Click Login? This new version can be customized in Muledump! ;; Check out the wiki for more information: https://github.com/jakcodex/muledump/wiki/One-Click-Login ;; ;; Don't want to customize in Muledump? Head up to the top of the file to modify the configuration. ;;
AutoIt
4
jakcodex/muledump
lib/ocl-exalt.au3
[ "BSD-3-Clause" ]
DROP TABLE IF EXISTS employee; CREATE TABLE employee ( id bigint auto_increment primary key, title varchar(255), name varchar(255) )
SQL
4
zeesh49/tutorials
ratpack/src/main/resources/DDL.sql
[ "MIT" ]
;; test_onlisp.nu ;; more tests for Nu macros. ;; ;; Copyright (c) 2008 Jeff Buck ;; Some macro tests adapted from Paul Graham's book OnLisp (class TestOnLisp is NuTestCase (- (id) testNil is (macro nil! (var) `(set ,var nil)) (set newBody (macrox (nil! a))) (assert_equal "(set a nil)" (newBody stringValue)) (set a 5) (nil! a) (assert_equal nil a)) (- (id) testOurWhen is (macro our-when (test *body) `(if ,test (progn ,@*body))) (set n 1) (our-when (< n 5) (set a "a is defined") (set n (+ n 1)) ) (assert_equal "a is defined" a) (set n 6) (our-when (< n 5) (set b "b is defined")) (assert_throws "NuUndefinedSymbol" b)) (- (id) testOurAnd is (macro our-and (*args) (case (*args length) (0 t) (1 (car *args)) (else `(if ,(car *args) (our-and ,@(cdr *args))) ))) (assert_equal 3 (our-and 1 2 (set m 3))) ; Make sure namespace scoping is correct (assert_equal 3 m) (assert_equal nil (our-and 1 nil (set n 1))) ; Make sure short circuit boolean logic is working (assert_throws "NuUndefinedSymbol" n)) (- (id) testOurSum is (macro our-sum (*args) `(+ ,@*args)) (assert_equal 10 (our-sum 1 2 3 4))) (- (id) testOurFor is (macro myfor ((var start stop) *body) `(let ((,var ,start) (__gstop ,stop)) ;; Only evaluate stop once (while (<= ,var __gstop) ,@*body (set ,var (+ ,var 1))))) (set var 0) (myfor (i 1 10) (set var (+ var i))) (assert_equal 55 var) ;; Make sure we didn't pollute our context (assert_throws "NuUndefinedSymbol" i) ;; Make sure evals work in the parameter list (set var 0) (set n 20) (myfor (i (* 1 1) (- n 10)) (set var (+ var i))) (assert_equal 55 var) (macro inc! (n) `(set ,n (+ ,n 1))) (set var 0) (set n 9) ;; Make sure we only eval "stop" one time ;; otherwise, we'd have an infinite loop (myfor (i 1 (inc! n)) (set var (+ var i))) (assert_equal 55 var)) (- (id) testOurApply is (macro our-apply (f *data) `(eval (cons ,f ,@*data))) (assert_equal 6 (our-apply + '(1 2 3)))) (- (id) fixme_testOurLet is (macro mylet (bindings *body) `((do ,(bindings map: (do (x) (car x))) ,@*body) ,@(bindings map: (do (x) (second x))))) (assert_equal 3 (mylet ((x 1) (y 2)) (+ x y)))) (- (id) testNumericIf is (macro numeric-if (expr pos zero neg) `(let ((__expr ,expr)) (cond ((> __expr 0) ,pos) ((eq __expr 0) ,zero) (t ,neg)))) (assert_equal '(p z n) ('(1 0 -1) map: (do (n) (numeric-if n 'p 'z 'n))))))
Nu
5
mattbeshara/nu
test/test_onlisp.nu
[ "Apache-2.0" ]
-- -- basic talking to a memory cell! -- %handles: MEM = PUT GET CLS IntTerm = Get Put Close %cohandles: Console = Get Put Close %constructors: Bool = False 0 True 0 List = Nil 0 Cons 2 GPt = Gt 1 Pt 1 Ready 0 %processes: memory (x | ch => ) = hcase ch of MEM.PUT: get ch store y run memory(y|ch =>) MEM.GET: load x put ch run memory(x|ch => ) MEM.CLS: halt ch %functions: append(xs,ys) = load xs case of List.Nil: load ys ret List.Cons (x,xss): call append (xss,ys) store z List.Cons(x,z) ret ret %run ( | console => intTerm1): plug ch with [intTerm1,console]: hput intTerm1 IntTerm.Get get intTerm1 store x hput ch MEM.PUT load x put ch hput ch MEM.GET get ch store x hput intTerm1 IntTerm.Put load x put intTerm1 hput ch MEM.CLS close ch hput intTerm1 IntTerm.Close close intTerm1 hput console Console.Close halt console []: cInt 1 store x run memory(x|ch => )
AMPL
3
prashant007/AMPL
myAMPL/src/Examples/test2.ampl
[ "MIT" ]
FROM circleci/postgres:13.0-postgis ENV POSTGRES_PASSWORD="postgres"
Dockerfile
1
gh-oss-contributor/graphql-engine-1
.circleci/postgres-13.0-alpine-postgis3/Dockerfile
[ "Apache-2.0", "MIT" ]
/*############################################################################## HPCC SYSTEMS software Copyright (C) 2012 HPCC Systems®. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ############################################################################## */ unsigned8 bigUnsigned := 18446744073709551615; integer1 intTen := 10; unsigned1 uTen := 10; bigUnsigned % intTen; // -1 bigUnsigned % uTen; // -1 (unsigned8)bigUnsigned % intTen; // -1 (unsigned8)bigUnsigned % uTen; // -1 bigUnsigned % (unsigned1)intTen; // -1 bigUnsigned % (unsigned1)uTen; // -1 (unsigned8)bigUnsigned % (unsigned1)intTen; // -1 (unsigned8)bigUnsigned % (unsigned1)uTen; // -1 // casting to unsigned8 produces correct result, but casting to any other unsigned does not bigUnsigned % (unsigned8)intTen; // 5 bigUnsigned % (unsigned8)uTen; // 5 (unsigned8)bigUnsigned % (unsigned8)intTen; // 5 (unsigned8)bigUnsigned % (unsigned8)uTen; // 5
ECL
3
miguelvazq/HPCC-Platform
ecl/regress/unsigned2.ecl
[ "Apache-2.0" ]
include(joinpath(@__DIR__, "types.jl")) function get_yaml_path(platform::Platform) lib_dir = @__DIR__ buildkite_dir = dirname(lib_dir) yaml_path = joinpath(buildkite_dir, "test-$(platform.arch).yml") return yaml_path end
Julia
4
mozilla/rr
.buildkite/lib/common.jl
[ "BSD-1-Clause" ]
[{:name "if", :display-string "if if cond then", :insert-string "if cond then\n\t-- do things\nend", :doc "", :tab-triggers {:select ["cond" "-- do things" "end"]}} {:name "else", :display-string "else else end", :insert-string "else\n\t-- do things\nend", :doc "", :tab-triggers {:select ["-- do things"], :exit "end"}} {:name "elsif", :display-string "elsif elseif cond end", :insert-string "elseif cond then\n\t-- do things\nend", :doc "", :tab-triggers {:select ["cond" "-- do things" "end"]}} {:name "while", :display-string "while while cond", :insert-string "while cond do\n\t-- do things\nend", :doc "", :tab-triggers {:select ["cond" "-- do things"], :exit "end"}} {:name "repeat", :display-string "repeat repeat until cond", :insert-string "repeat\n\t-- do things\nuntil cond", :doc "", :tab-triggers {:select ["-- do things" "cond"]}} {:name "function", :display-string "function function function_name()", :insert-string "function function_name(self)\n\t-- do things\nend", :doc "", :tab-triggers {:select ["function_name" "self" "-- do things"] :types [:name :arglist :expr] :exit "end" :start "function"}} {:name "local", :display-string "local local name = value", :insert-string "local name = value", :doc "", :tab-triggers {:select ["name" "value"] :types [:name :expr]}} {:name "for", :display-string "for for i = 1, 10", :insert-string "for i = 1, 10 do\n\t-- do things\nend", :doc "", :tab-triggers {:select ["i" "1" "10" "-- do things"] :types [:name :expr :expr :expr] :exit "end" :start "for"}} {:name "fori", :display-string "fori for i, v in ipairs()", :insert-string "for i, v in ipairs(table_name) do\n\t-- do things\nend", :doc "", :tab-triggers {:select ["i" "v" "table_name" "-- do things"] :types [:name :name :expr :expr] :exit "end" :start "for"}} {:name "forp", :display-string "forp for k, v in pairs()", :insert-string "for k, v in pairs(table_name) do\n\t-- do things\nend", :doc "", :tab-triggers {:select ["k" "v" "table_name" "-- do things"] :types [:name :name :expr :expr] :exit "end" :start "for"}}]
edn
4
cmarincia/defold
editor/resources/lua-base-snippets.edn
[ "ECL-2.0", "Apache-2.0" ]
// Copyright 2017 The Ray Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #pragma once #include <atomic> #include <chrono> #include <memory> #include <string> #include <unordered_map> #include <unordered_set> #include <vector> #include "ray/common/asio/io_service_pool.h" #include "ray/common/id.h" #include "ray/common/test_util.h" #include "ray/gcs/store_client/store_client.h" #include "ray/util/logging.h" namespace ray { namespace gcs { class StoreClientTestBase : public ::testing::Test { public: StoreClientTestBase() = default; virtual ~StoreClientTestBase() {} void SetUp() override { io_service_pool_ = std::make_shared<IOServicePool>(io_service_num_); io_service_pool_->Run(); InitStoreClient(); GenTestData(); } void TearDown() override { DisconnectStoreClient(); io_service_pool_->Stop(); key_to_value_.clear(); key_to_index_.clear(); index_to_keys_.clear(); } virtual void InitStoreClient() = 0; virtual void DisconnectStoreClient() = 0; protected: void Put() { auto put_calllback = [this](const Status &status) { RAY_CHECK_OK(status); --pending_count_; }; for (const auto &elem : key_to_value_) { ++pending_count_; RAY_CHECK_OK(store_client_->AsyncPut(table_name_, elem.first.Binary(), elem.second.SerializeAsString(), put_calllback)); } WaitPendingDone(); } void Delete() { auto delete_calllback = [this](const Status &status) { RAY_CHECK_OK(status); --pending_count_; }; for (const auto &elem : key_to_value_) { ++pending_count_; RAY_CHECK_OK( store_client_->AsyncDelete(table_name_, elem.first.Binary(), delete_calllback)); } WaitPendingDone(); } void Get() { auto get_callback = [this](const Status &status, const boost::optional<std::string> &result) { RAY_CHECK_OK(status); RAY_CHECK(result); rpc::ActorTableData data; RAY_CHECK(data.ParseFromString(*result)); ActorID actor_id = ActorID::FromBinary(data.actor_id()); auto it = key_to_value_.find(actor_id); RAY_CHECK(it != key_to_value_.end()); --pending_count_; }; for (const auto &elem : key_to_value_) { ++pending_count_; RAY_CHECK_OK( store_client_->AsyncGet(table_name_, elem.first.Binary(), get_callback)); } WaitPendingDone(); } void GetEmpty() { for (const auto &elem : key_to_value_) { auto key = elem.first.Binary(); auto get_callback = [this, key](const Status &status, const boost::optional<std::string> &result) { RAY_CHECK_OK(status); RAY_CHECK(!result); --pending_count_; }; ++pending_count_; RAY_CHECK_OK(store_client_->AsyncGet(table_name_, key, get_callback)); } WaitPendingDone(); } void PutWithIndex() { auto put_calllback = [this](const Status &status) { --pending_count_; }; for (const auto &elem : key_to_value_) { ++pending_count_; RAY_CHECK_OK(store_client_->AsyncPutWithIndex( table_name_, elem.first.Binary(), key_to_index_[elem.first].Hex(), elem.second.SerializeAsString(), put_calllback)); } WaitPendingDone(); } void GetByIndex() { auto get_calllback = [this](const std::unordered_map<std::string, std::string> &result) { if (!result.empty()) { auto key = ActorID::FromBinary(result.begin()->first); auto it = key_to_index_.find(key); 
RAY_CHECK(it != key_to_index_.end()); RAY_CHECK(index_to_keys_[it->second].size() == result.size()); } pending_count_ -= result.size(); }; auto iter = index_to_keys_.begin(); pending_count_ += iter->second.size(); RAY_CHECK_OK( store_client_->AsyncGetByIndex(table_name_, iter->first.Hex(), get_calllback)); WaitPendingDone(); } void DeleteByIndex() { auto delete_calllback = [this](const Status &status) { RAY_CHECK_OK(status); --pending_count_; }; for (const auto &elem : index_to_keys_) { ++pending_count_; RAY_CHECK_OK(store_client_->AsyncDeleteByIndex(table_name_, elem.first.Hex(), delete_calllback)); } WaitPendingDone(); } void GetAll() { auto get_all_callback = [this](const std::unordered_map<std::string, std::string> &result) { static std::unordered_set<ActorID> received_keys; for (const auto &item : result) { const ActorID &actor_id = ActorID::FromBinary(item.first); auto it = received_keys.find(actor_id); RAY_CHECK(it == received_keys.end()); received_keys.emplace(actor_id); auto map_it = key_to_value_.find(actor_id); RAY_CHECK(map_it != key_to_value_.end()); } RAY_CHECK(received_keys.size() == key_to_value_.size()); pending_count_ -= result.size(); }; pending_count_ += key_to_value_.size(); RAY_CHECK_OK(store_client_->AsyncGetAll(table_name_, get_all_callback)); WaitPendingDone(); } void BatchDelete() { auto delete_calllback = [this](const Status &status) { RAY_CHECK_OK(status); --pending_count_; }; ++pending_count_; std::vector<std::string> keys; for (auto &elem : key_to_value_) { keys.push_back(elem.first.Binary()); } RAY_CHECK_OK(store_client_->AsyncBatchDelete(table_name_, keys, delete_calllback)); WaitPendingDone(); } void DeleteWithIndex() { auto delete_calllback = [this](const Status &status) { RAY_CHECK_OK(status); --pending_count_; }; for (const auto &elem : key_to_value_) { ++pending_count_; RAY_CHECK_OK(store_client_->AsyncDeleteWithIndex(table_name_, elem.first.Binary(), key_to_index_[elem.first].Hex(), delete_calllback)); } WaitPendingDone(); } void BatchDeleteWithIndex() { auto delete_calllback = [this](const Status &status) { RAY_CHECK_OK(status); --pending_count_; }; ++pending_count_; std::vector<std::string> keys; std::vector<std::string> index_keys; for (auto &elem : key_to_value_) { keys.push_back(elem.first.Binary()); index_keys.push_back(key_to_index_[elem.first].Hex()); } RAY_CHECK_OK(store_client_->AsyncBatchDeleteWithIndex(table_name_, keys, index_keys, delete_calllback)); WaitPendingDone(); } void TestAsyncPutAndAsyncGet() { // AsyncPut without index. 
Put(); // AsyncGet Get(); // AsyncDelete Delete(); GetEmpty(); } void TestAsyncPutAndDeleteWithIndex() { // AsyncPut with index PutWithIndex(); // AsyncGet with index GetByIndex(); // AsyncDelete by index DeleteByIndex(); // AsyncGet GetEmpty(); } void TestAsyncGetAllAndBatchDelete() { // AsyncPut Put(); // AsyncGetAll GetAll(); // AsyncBatchDelete BatchDelete(); // AsyncGet GetEmpty(); } void TestAsyncDeleteWithIndex() { // AsyncPut with index PutWithIndex(); // AsyncGet with index GetByIndex(); // AsyncDelete key-value and index-key DeleteWithIndex(); // AsyncGet GetEmpty(); } void TestAsyncBatchDeleteWithIndex() { // AsyncPut with index PutWithIndex(); // AsyncGetAll GetByIndex(); // AsyncBatchDeleteWithIndex BatchDeleteWithIndex(); // AsyncGet GetEmpty(); } void GenTestData() { for (size_t i = 0; i < key_count_; i++) { rpc::ActorTableData actor; actor.set_max_restarts(1); actor.set_num_restarts(0); JobID job_id = JobID::FromInt(i % index_count_); actor.set_job_id(job_id.Binary()); actor.set_state(rpc::ActorTableData::ALIVE); ActorID actor_id = ActorID::Of(job_id, RandomTaskId(), /*parent_task_counter=*/i); actor.set_actor_id(actor_id.Binary()); key_to_value_[actor_id] = actor; key_to_index_[actor_id] = job_id; auto it = index_to_keys_.find(job_id); if (it != index_to_keys_.end()) { it->second.emplace(actor_id); } else { std::unordered_set<ActorID> key_set; key_set.emplace(actor_id); index_to_keys_.emplace(job_id, std::move(key_set)); } } } void WaitPendingDone() { WaitPendingDone(pending_count_); } void WaitPendingDone(std::atomic<int> &pending_count) { auto condition = [&pending_count]() { return pending_count == 0; }; EXPECT_TRUE(WaitForCondition(condition, wait_pending_timeout_.count())); } protected: size_t io_service_num_{2}; std::shared_ptr<IOServicePool> io_service_pool_; std::shared_ptr<StoreClient> store_client_; std::string table_name_{"test_table"}; size_t key_count_{5000}; size_t index_count_{100}; std::unordered_map<ActorID, rpc::ActorTableData> key_to_value_; std::unordered_map<ActorID, JobID> key_to_index_; std::unordered_map<JobID, std::unordered_set<ActorID>> index_to_keys_; std::atomic<int> pending_count_{0}; std::chrono::milliseconds wait_pending_timeout_{5000}; }; } // namespace gcs } // namespace ray
C++
4
77loopin/ray
src/ray/gcs/store_client/test/store_client_test_base.h
[ "Apache-2.0" ]
:export {
  a: a;
}

:export {
  abc: a b c;
  comments: abc/****/ /* hello world *//****/ def
}

:export {
  white space : abc def
}

:export{default:default}
CSS
1
etc-tiago/webpack
test/configCases/css/exports/style.module.css
[ "MIT" ]
"""Support for RESTful API.""" import logging import httpx from homeassistant.helpers import template from homeassistant.helpers.httpx_client import get_async_client DEFAULT_TIMEOUT = 10 _LOGGER = logging.getLogger(__name__) class RestData: """Class for handling the data retrieval.""" def __init__( self, hass, method, resource, auth, headers, params, data, verify_ssl, timeout=DEFAULT_TIMEOUT, ): """Initialize the data object.""" self._hass = hass self._method = method self._resource = resource self._auth = auth self._headers = headers self._params = params self._request_data = data self._timeout = timeout self._verify_ssl = verify_ssl self._async_client = None self.data = None self.last_exception = None self.headers = None def set_url(self, url): """Set url.""" self._resource = url async def async_update(self, log_errors=True): """Get the latest data from REST service with provided method.""" if not self._async_client: self._async_client = get_async_client( self._hass, verify_ssl=self._verify_ssl ) rendered_headers = template.render_complex(self._headers, parse_result=False) rendered_params = template.render_complex(self._params) _LOGGER.debug("Updating from %s", self._resource) try: response = await self._async_client.request( self._method, self._resource, headers=rendered_headers, params=rendered_params, auth=self._auth, data=self._request_data, timeout=self._timeout, follow_redirects=True, ) self.data = response.text self.headers = response.headers except httpx.RequestError as ex: if log_errors: _LOGGER.error( "Error fetching data: %s failed with %s", self._resource, ex ) self.last_exception = ex self.data = None self.headers = None
Python
5
MrDelik/core
homeassistant/components/rest/data.py
[ "Apache-2.0" ]
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

#include "textflag.h"

// On FreeBSD argc/argv are passed in DI, not SP, so we can't use _rt0_amd64.
TEXT _rt0_amd64_freebsd(SB),NOSPLIT,$-8
	LEAQ	8(DI), SI // argv
	MOVQ	0(DI), DI // argc
	JMP	runtime·rt0_go(SB)

TEXT _rt0_amd64_freebsd_lib(SB),NOSPLIT,$0
	JMP	_rt0_amd64_lib(SB)
GAS
3
Havoc-OS/androidprebuilts_go_linux-x86
src/runtime/rt0_freebsd_amd64.s
[ "BSD-3-Clause" ]
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { registerAction2 } from 'vs/platform/actions/common/actions';
import { registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { WebLocaleService } from 'vs/workbench/contrib/localization/browser/localeService';
import { ClearDisplayLanguageAction, ConfigureDisplayLanguageAction } from 'vs/workbench/contrib/localization/browser/localizationsActions';
import { ILocaleService } from 'vs/workbench/contrib/localization/common/locale';

registerSingleton(ILocaleService, WebLocaleService, true);

// Register action to configure locale and related settings
registerAction2(ConfigureDisplayLanguageAction);
registerAction2(ClearDisplayLanguageAction);
TypeScript
4
EngineLessCC/vscode
src/vs/workbench/contrib/localization/browser/localization.contribution.ts
[ "MIT" ]
{#
 OPNsense® is Copyright © 2021 Frank Wall
 OPNsense® is Copyright © 2021 Jan Winkler
 OPNsense® is Copyright © 2014 – 2015 by Deciso B.V.
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:

 1. Redistributions of source code must retain the above copyright notice,
    this list of conditions and the following disclaimer.

 2. Redistributions in binary form must reproduce the above copyright notice,
    this list of conditions and the following disclaimer in the documentation
    and/or other materials provided with the distribution.

 THIS SOFTWARE IS PROVIDED “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES,
 INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
 AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
 OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 POSSIBILITY OF SUCH DAMAGE.
#}

<script>
    $( document ).ready(function() {
        var data_get_map = {'frm_GeneralSettings':"/api/puppetagent/settings/get"};
        mapDataToFormUI(data_get_map).done(function(data){
            // place actions to run after load, for example update form styles.
        });

        // link save button to API set action
        $("#saveAct").SimpleActionButton({
            onPreAction: function() {
                const dfObj = new $.Deferred();
                saveFormToEndpoint(url="/api/puppetagent/settings/set", formid='frm_GeneralSettings', function() {
                    dfObj.resolve();
                });
                return dfObj;
            }
        });

        updateServiceControlUI('puppetagent');
    });
</script>

<ul class="nav nav-tabs" role="tablist" id="maintabs">
    <li class="active"><a data-toggle="tab" id="settings-introduction" href="#subtab_settings-introduction"><b>{{ lang._('Introduction') }}</b></a></li>
    <li><a data-toggle="tab" id="settings-tab" href="#settings"><b>{{ lang._('Settings') }}</b></a></li>
</ul>

<div class="content-box tab-content">
    <div id="subtab_settings-introduction" class="tab-pane fade in active">
        <div class="col-md-12">
            <h1>{{ lang._('Quick Start Guide') }}</h1>
            <p>{{ lang._("Welcome to the Puppet Agent plugin! This plugin allows you to integrate OPNsense with your Puppet environment.") }}</p>
            <p>{{ lang._("Keep in mind that you should not treat OPNsense like any other operating system. Most notably you should not modify system files or packages. Instead use the OPNsense API to make configuration changes and to manage plugins. The following tools are a good starting point when trying to automate OPNsense with Puppet:") }}</p>
            <ul>
                <li>{{ lang._("%sopn-cli:%s A command line client to configure OPNsense core and plugin components through their respective APIs.") | format('<a href="https://github.com/andeman/opn-cli" target="_blank">', '</a>') }}</li>
                <li>{{ lang._("%spuppet/opnsense:%s A ready-to-use Puppet module for automating the OPNsense firewall.") | format('<a href="https://github.com/andeman/puppet-opnsense" target="_blank">', '</a>') }}</li>
            </ul>
            <p>{{ lang._("Note that these tools are not directly related to this plugin. Please report issues and missing features directly to the author.") }}</p>
        </div>
    </div>
    <div id="settings" class="tab-pane fade">
        {{ partial("layout_partials/base_form",['fields':generalForm,'id':'frm_GeneralSettings'])}}
        <div class="col-md-12">
            <hr/>
            <button class="btn btn-primary" id="saveAct"
                    data-endpoint='/api/puppetagent/service/reconfigure'
                    data-label="{{ lang._('Save') }}"
                    data-service-widget="puppetagent"
                    data-error-title="{{ lang._('Error reconfiguring puppetagent') }}"
                    type="button">
            </button>
            <br/>
        </div>
    </div>
</div>
Volt
3
marcquark/plugins
sysutils/puppet-agent/src/opnsense/mvc/app/views/OPNsense/PuppetAgent/index.volt
[ "BSD-2-Clause" ]
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#define EIGEN_USE_THREADS

#include <functional>
#include <memory>
#include <vector>

#include "tensorflow/cc/client/client_session.h"
#include "tensorflow/cc/ops/audio_ops.h"
#include "tensorflow/cc/ops/const_op.h"
#include "tensorflow/cc/ops/math_ops.h"
#include "tensorflow/core/framework/tensor_testutil.h"
#include "tensorflow/core/framework/types.h"
#include "tensorflow/core/framework/types.pb.h"
#include "tensorflow/core/kernels/ops_util.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/platform/test.h"

namespace tensorflow {
namespace ops {
namespace {

TEST(MfccOpTest, SimpleTest) {
  Scope root = Scope::DisabledShapeInferenceScope();

  Tensor spectrogram_tensor(DT_FLOAT, TensorShape({1, 1, 513}));
  test::FillIota<float>(&spectrogram_tensor, 1.0f);

  Output spectrogram_const_op = Const(root.WithOpName("spectrogram_const_op"),
                                      Input::Initializer(spectrogram_tensor));

  Output sample_rate_const_op =
      Const(root.WithOpName("sample_rate_const_op"), 22050);

  Mfcc mfcc_op = Mfcc(root.WithOpName("mfcc_op"), spectrogram_const_op,
                      sample_rate_const_op);

  TF_ASSERT_OK(root.status());

  ClientSession session(root);
  std::vector<Tensor> outputs;

  TF_EXPECT_OK(
      session.Run(ClientSession::FeedType(), {mfcc_op.output}, &outputs));

  const Tensor& mfcc_tensor = outputs[0];
  EXPECT_EQ(3, mfcc_tensor.dims());
  EXPECT_EQ(13, mfcc_tensor.dim_size(2));
  EXPECT_EQ(1, mfcc_tensor.dim_size(1));
  EXPECT_EQ(1, mfcc_tensor.dim_size(0));

  test::ExpectTensorNear<float>(
      mfcc_tensor,
      test::AsTensor<float>(
          {29.13970072, -6.41568601, -0.61903012, -0.96778652, -0.26819878,
           -0.40907028, -0.15614748, -0.23203119, -0.10481487, -0.1543029,
           -0.0769791, -0.10806114, -0.06047613},
          TensorShape({1, 1, 13})),
      1e-3);
}

}  // namespace
}  // namespace ops
}  // namespace tensorflow
C++
4
abhaikollara/tensorflow
tensorflow/core/kernels/mfcc_op_test.cc
[ "Apache-2.0" ]
#summary ChangeLog of all releases ===Release 1.7.7=== *Date: 25 Sep 2014* * Issue854 Jshint plugin should not try processing css resources * Issue885 Update less4j dependency to latest version * Issue891 Optimize hashing implementation * Issue893 Upgrade google closure compiler dependency * Issue894 Upgrade jruby dependency (performance optimization) * Issue895 ChainedProcessor uses WroTestUtils which uses junit, making it not work at runtime * Issue896 Confusing exception with incrementalBuildEnabled * Issue897 Optimize how often files are checksummed * Issue900 Prevent buildDirectory from being null * Issue902 Race condition in LazyProcessorDecorator when parallelProcessing is enabled * Issue903 Update coffee-script webjars dependency to latest version ===Release 1.7.6=== *Date: 18 Jun 2014* * Issue865 wro4j-maven-plugin : cssUrlRewriting act differently on Windows and Linux * Issue869 "MinimizeEnabled" not handled by wro.properties * Issue871 DisableCache clears the Model before the ResourceAuthorizationManager access it * Issue872 ResourceProxyRequestHandler throwing UnauthorizedRequestException when resource uri contains question mark and pound * Issue876 Update less4j dependency to latest version * Issue877 CssUrlRewritingProcessor does not handle properly empty url reference [url("")] * Issue878 Add provision to take context path into account - Build Time Solution using MavenPlugin * Issue879 Adding CssUrlAuthorizationProcessor post processor * Issue880 Update webjars-locator dependency to latest version * Issue882 Update google closure dependency * Issue883 Update dependency of emberjs webjar ===Release 1.7.5=== *Date: 9 Apr 2014* * Issue860 mvn wro4j:run fails due to optional configuration missing * Issue861 Less4jProcessor doesn't process properly imports on windows platform * Issue862 Update less4j dependency to latest version * Issue864 File descriptor leak * Issue866 Update handlebars webjar dependency version ===Release 1.7.4=== *Date: 21 Mar 2014* * Issue511 ResourceWatcher should check for changes in parallel * Issue715 java.lang.NullPointerException in AbstractNodeWithFallbackProcessor.java:62 * Issue830 Using 'parallelProcessing' in Maven plugin produces intermittent NullPointerException during build process * Issue834 The grails plugin sometimes returns 404 for first request * Issue835 Upgrade coffee-script webjar dependency * Issue836 Upgrade emberjs webjar dependency * Issue837 Upgrade handlebars webjar dependency * Issue838 Resource watcher doesn't invalidate cache when change detected for wro4j-grails-plugin * Issue840 ResourceProxyRequestHandler accept method failing to match in v1.7.3, results in 404s * Issue841 Update jruby dependency version * Issue842 Create Less4jProcessorFilter * Issue843 ro.isdc.wro.extensions.processor.js.NgMinProcessor is never supported * Issue844 Upgrade jshint webjar version * Issue845 Less4jProcessor should process import statements * Issue850 Update less4j dependency to latest version * Issue851 Improve google closure processor in multithreaded environment * Issue853 ServletContextUriLocator fails to use requestDispatcher * Issue855 cssImport tries to import absolute URLs (http://...) 
* Issue858 Fix RubySass compile error for non-ascii characters * Issue859 AbstractProcessorFilter doesn't handle 30x reponse status code ===Release 1.7.3=== *Date: 23 Jan 2014* * Issue789 java.lang.StringIndexOutOfBoundsException: at org.webjars.WebJarAssetLocator.aggregateFile() * Issue813 Combining a javascript file that ends with a comment (without a newline) results in unexpected output * Issue814 Update less4j dependency to latest version * Issue822 ResourceWatcher should allow check for changes asynchronously * Issue824 Context leaks when requests are nested * Issue826 UriLocator fails to retrieve valid resource when it contains question mark * Issue827 Update jshint dependency * Issue828 Update emberjs dependency to latest version. * Issue829 Update handlebars webjar dependency to latest version * Issue832 BuildContextHolder used for incremental build does not clean up properly ===Release 1.7.2=== *Date: 5 Nov 2013* * Issue660 JsMin fails when processing a regex * Issue785 Wro4jCommandLineRunner not governed by wro.properties * Issue786 Extending customization using SPI doesn't work with maven plugin * Issue787 wro4j-runner should allow custom location of wro.properties * Issue788 Multiple context folders for maven plugin * Issue790 ignoreMissingResources property from wro.properties ignored when using maven plugin * Issue792 Incremental build support for linters in maven plugin * Issue793 Update emberjs dependency version * Issue794 Update handlebars dependency version * Issue800 groupNameMappingFile doesn't support filepaths that don't yet exist * Issue802 Expose an alias for SingleLineCommentStripperProcessor * Issue803 Allow wro4j-maven-plugin callers to skip execution * Issue805 upgrade to latest version of jruby * Issue807 Listener for resourceWatcher ===Release 1.7.1=== *Date: 19 Sept 2013* * Issue536 parallelize maven plugin * Issue745 Pooling processors may leak timer threads * Issue746 Loading WebJar assets fails if path contains whitespace * Issue747 Aggregate Path Prefix for ImageUrlRewriter calculated incorrectly on Windows * Issue749 ResourceWatcher is not working since 1.7.0 * Issue751 No pre/postProcessors with extended ConfigurableWroManagerFactory * Issue752 Maven plugin won't run on ARM architecture - jruby failure * Issue753 jruby-complete pulls in unwanted classes * Issue757 Allow turning off of resource minification through JMX MBean * Issue760 failFast option is causing Exception in onAfterExecute * Issue764 Handling Request, forward and error dispatches with grails plugin * Issue773 Update less4j dependency to latest version * Issue774 Move wro4j-examples projects to a dedicated repository * Issue775 Move wro4j-grails project to a dedicated repository * Issue777 add SERVLET_CONTEXT_ONLY LocatorStrategy * Issue779 Incremental build support for maven plugin * Issue782 Parallel pre processing is not enabled with maven plugin. 
===Release 1.7.0=== *Date: 10 Jun 2013* * Issue619 Simplify jsHint upgrade without a wro4j release required * Issue631 Import of model with no groups or model not found crashes the model creation * Issue633 ResourceWatcherUpdatePeriod clear the cache also when there is no change * Issue642 Create webjar resource locator * Issue648 Update JsHintProcessor to latest dependency * Issue692 Upgrade less4j to latest version * Issue693 Update to latest version of google closure * Issue695 AbstractCssImportPreProcessor leaks ThreadLocal variable during Tomcat shutdown * Issue696 Error with v1.6.3 and CSS data URLs * Issue697 wro4j-runner missing required dependency * Issue701 Allow all errors to be found before the build fails. * Issue702 wro4j-maven-plugin doesn't persist fingerprints for imported css * Issue703 Rhino Less CSS processor breaks on // in a string, eg http://example.com * Issue704 Less4j warning log improvement * Issue705 Create filter responsible for gzipping resource contents * Issue709 Upgrade rhinoCoffeeScriptProcessor to version 1.6.2 * Issue711 import-once not working * Issue716 Update jslint to latest version * Issue720 JMX bean is not unregistered during destroy * Issue722 NodeCoffeeScriptProcessor does not support additional command-line options, such as --bare * Issue723 No exception thrown when using invalid resource uri is used * Issue724 DefaultUriLocator should use all locators found in classpath * Issue726 Upgrade handlebars processor to latest version * Issue728 Improve problem reporting for Less4jProcessor * Issue729 Upgrade ember.js processor to latest version * Issue730 Incremental change build should process the target group when the target folder does not exist anymore * Issue731 Reduce logging of the JSHint maven plugin * Issue732 Incremental build feature doesn't use {{{<targetGroups>}}} configuration * Issue733 consoleStripper regex changes. * Issue736 ConfigurableWroFilter ignores managerFactoryClassName configuration property * Issue739 Improve error message when jshint is provided with invalid options * Issue740 Non-synchronized WeakHashMap causes infinite loop in Injector * Issue742 CssUrlRewriting issue when dealing with classpath resource referring context relative resources * Issue743 Resources served through ResourceProxyRequestHandler do not use cache headers * Issue744 Empty content gzip response issue ===Release 1.6.3=== *Date: 11 Mar 2013* * Issue458 LifecycleCallback should be thread safe. * Issue544 Improved configuration of the preferred model DSL * Issue558 Support for abstract group in Groovy DSL * Issue628 WroManager should be immutable * Issue650 cssImport processor translates urls incorrectly * Issue654 Create RhinoTypeScriptProcessor. 
* Issue655 Less4jProcessor should log all errors when fails * Issue656 Upgrade cssLintProcessor * Issue658 Upgrade ember.js to latest version * Issue659 Maven-plugin: ReportFormat set in pom file seem to not be read * Issue661 Add a reportFormat option to the JSHint processor that works similar to the CSSLint processor's reportFormat option * Issue664 Running JSHint processor in an Execution step via the WRO4J 1.6.2 Maven Plugin is outputting {{{<lint>}}} rather than {{{<jslint>}}} element * Issue665 Deprecate InjectableWroManagerFactoryDecorator * Issue668 groupNameMappingFile and incremental eclipse builds * Issue670 Minimize dependencies of SmartWroModelFactory * Issue672 Last-modified and Expires headers use system-default Locale * Issue675 Get rid of largest dependencies for wro4j-runner * Issue679 Ember precompiler for 1.0.0 RC1 * Issue681 HoganJs processor compilation missing semicolon * Issue682 Allow Closure Compiler errors to fail the build * Issue684 cssImport should support less import-once * Issue685 Upgrade less4j to latest version * Issue686 Upgrade rhinoCoffeeScriptProcessor to version 1.6.1 * Issue687 ClasspathUriLocator fails to find wildcards resources when the application path has spaces in it * Issue688 Additional JSHint property: failThreshold * Issue690 JSHint execution summary ===Release 1.6.2=== *Date: 10 Jan 2013* * Issue480 Css url rewriting doesn't compute replace properly font rule with multiple url's * Issue622 Upgrade to latest version of less4j * Issue623 Invalid Import of xml model shows misleading stacktrace * Issue624 WildcardExpanderModelTransformer throws failure warnings if no assets are found * Issue625 setting up encoding in command line * Issue632 wro4j:jshint triggers slf4j warning * Issue635 change csslint.xml root xml element from csslint to lint * Issue636 Create emberjs processor * Issue637 Upgrade HandlebarsJsProcessor * Issue638 The groupName does not strip jsessionID * Issue643 wro4j-core doesn't compile with jdk 1.5 * Issue644 Providers loaded from ServiceLoader cannot override default providers * Issue645 Provide an alias for google closureProcessor using WHITESPACE_ONLY optimization level * Issue646 Update google closure dependency version * Issue647 Update rhinoLessCss processor to 1.3.3 * Issue649 Update JsLintProcessor with latest jslint version ===Release 1.6.1=== *Date: 25 Nov 2012* * Issue598 NPE with GoogleClosureCompressorProcessor in wro4j 1.6.0 * Issue599 Make DustJs compiler configurable with System property * Issue602 Resource watcher thinks files have been modified when they in fact have not, after upgrade to 1.6 * Issue603 JawrCssMinifier creates overly long lines * Issue604 Upgrade Less4j dependency to latest version * Issue606 jshint maven goal fails with an exception during report generation * Issue608 Included Hogan JS Processor Does Not Appear Useable * Issue613 Update JSHint & JsLint dependencies * Issue617 JsHint xml report does not respect format expected by Jenkins * Issue618 JsHint generated xml report is empty when there are errors ===Release 1.6.0=== *Date: 24 Oct 2012* * Issue465 The cache key should be configurable with custom attributes * Issue563 ResourceWatcher cannot detect change of resources referred by @import directive * Issue565 Close of FileOutputStream in Wro4jMojo.writeGroupNameMap() * Issue566 NodeLessCssProcessor support is not computed properly on Windows * Issue567 Create TypeScriptProcessor * Issue569 CssImportPreProcessor fails with stackOverflowException * Issue571 Incremental build should 
detect changes of resources referred by @import statements * Issue572 Create less4j processor * Issue574 Use different log level when ignoreMissingResource=true * Issue576 CssImport issue with LessCss processor * Issue579 NoClassDefFoundError for 1.5.0 * Issue580 wro4j-runner-1.5.0 doesn't support cssImport anymore * Issue581 resourceWatcherUpdatePeriod does not work for nested groups * Issue585 Remove YUIJSCompressorProcessor * Issue586 Add enable flag to WroFilter * Issue587 Update rhinoLessCss processor to 1.3.1 * Issue589 Meven plugin error after upgrading to 1.5.0 * Issue590 Alternative way of configuring processors * Issue591 ConfigurableWroManagerFactory fails when cacheUpdatePeriod is greater than 0. * Issue592 Replace existing DustJS compiler with LinkedIn's updated version * Issue596 Upgrade coffeeScriptProcessor to coffee-script-1.4.0 * Issue597 Create NodeCoffeeScriptProcessor ===Release 1.5.0=== *Date: 27 Sep 2012* * Issue257 XML reporting for cssLint & jsHint maven plugin * Issue423 Use {{{<group-ref>}}} defined in an {{{<import>}}} wro.xml * Issue435 Create PathPatternProcessorDecorator * Issue459 wro4j maven plugin should support incremental build * Issue523 Upgrade rubySassCss processor to 3.2.1 * Issue530 css image request causes http 403 * Issue537 Update evnjs in LessCssProcessor * Issue539 Background:url("" ) drops the closing " * Issue541 Improve lessCss error reporting * Issue542 Create NodeLessCssProcessor based on lessc binary * Issue543 Create Fallback aware LessCss processor * Issue549 Update cssLint to latest version * Issue550 Update rhino based processors to latest version * Issue551 CssMin semicolon bug * Issue552 Register RequestHandlers as service provider interface (spi) * Issue553 Update google closure dependency version * Issue554 reloading the cache fails * Issue555 CacheStrategy should be configurable with ConfigurableWroManagerFactory * Issue557 ServletContextAttributeHelper returns uninitialized managerFactory * Issue560 wro4jrunner missing log4j dependency * Issue561 ModelAsJsonRequestHandler should be enabled only in DEVELOPMENT mode * Issue562 A reload of model should not trigger cache reload * Issue564 Too verbose logging on missing resources ===Release 1.4.9=== *Date: 7 Sep 2012* * Issue499 Upgrade google closure to latest version * Issue513 ERROR ResourceWatcherRunnable:81 - Exception while checking for resource changes logged on tomcat shutdown * Issue514 Make ResourceWatcher run efficiently * Issue518 Maven plugin generated resource location enhancement * Issue519 Add support for abstract group concept * Issue524 maven plugin configuration issue * Issue526 Resource leak caused by CssImportPreProcessor * Issue528 Redundand CacheStrategy decoration * Issue529 Missing cache header attributes in css images * Issue534 Suppress logging of ClientAbortException in WroFilter. 
===Release 1.4.8.1=== *Date: 12 Aug 2012* * Issue507 Processing cssImport of custom jquery-ui.css StackOverflowError * Issue510 Log version when logging configuration * Issue512 Too verbose logging on missing resources when resourceWatcher is enabled * Issue515 Upgrade sass-gems to latest version ===Release 1.4.8=== *Date: 9 Aug 2012* * Issue185 Invalidate parts of the cache on resource change * Issue478 WroFilter#newWroConfigurationFactory() extendability is difficult * Issue479 Too verbose logging on missing resources * Issue482 Referencing not existing files in path in the same as wroFilter mapped to causes threads spawning and locking in when disableCache=true * Issue483 ServletContextPropertyWroConfigurationFactory#createProperties is not closing stream. * Issue484 HandlebarsJs & HoganJs processors always generate null template name * Issue485 Content length is not computed correctly * Issue495 Default expires headers should be configurable * Issue196 ConfigurableWroManagerFactory does not invoke methods responsible for contributing processors * Issue497 ModelAsJsonRequestHandler doesn't display the minimize attribute * Issue498 GoogleClosure processor is not threadSafe * Issue500 WroContextFilter causing the Context to create new WroConfiguration everytime * Issue502 Twitter Bootstrap 2.0 is not compiled well by less-processor * Issue505 CssImport processor recursion detection is not thread-safe * Issue506 CssImportPreProcessor: Remove imports in CSS comments ===Release 1.4.7=== *Date: 30 June 2012* * Issue225 Expose model resources in filter as JSON * Issue405 cssUrlRewriting does not take context path into account * Issue414 Create handlebars processor * Issue430 Add support for HoganJs * Issue431 WroManager cannot be created using ServletContextAttributeHelper outside the request cycle * Issue432 ConfigurableWroFilter cannot load extentions * Issue433 Bourbon Sass Mixins library support * Issue434 CSS being randomly mixed in with JS (on OSx) * Issue436 Change http-server used in demo-projects * Issue438 Support for RequestHandlers concept * Issue439 reloadCache & reloadModel api calls are broken * Issue440 WildcardExpanderModelTransformer problem with /** url * Issue442 Allow configuration of processor failing behavior * Issue443 Add options to the uglifyJs processor or an uglifyJsAdvanced processor * Issue445 Resources cannot be located when jRebel is enabled * Issue447 ConfigurableProcessorsFactory processor creation is not thread safe * Issue448 Create alternative cssDataUri processor implementation * Issue449 Create a ProcessorsFactory which uses ServiceRegistry for loading processors * Issue452 DefaultWroManagerFactory doesn't implement WroConfigurationChangeListener * Issue453 Headers set after content send * Issue454 Simplify NamingStrategy & HashStrategy configuration using ConfigurableWroManagerFactory * Issue455 WildcardExpanderModelTransformer is not thread safe * Issue456 SassCssSupport doesn't handle spaces instead of tabs * Issue460 Stack overflow compiling wro4j-bootstrap-sample using the default !WroManagerFactory * Issue462 CssDataUriPreProcessor hits error (unknown mime type) for linked fonts * Issue463 CSS Image URL rewriting not working for CSS hosted external servers. 
* Issue467 Provide access to individual wro-resources * Issue468 Allow UriLocators configuration using ServiceRegistry * Issue469 Given a resource URI, simplify a way to find out which group it belongs to * Issue472 UriLocators configuration are not picked up from wro.properties config file * Issue473 ServletContextUriLocator should use fallback strategy when dispatcher fails * Issue475 Update rhino based processors to latest version * Issue476 CSSMin.parseProperties doesn't check for empty property ===Release 1.4.6=== *Date: 10 May 2012* * Issue304 Use of !InheritableThreadLocal in ro.isdc.wro.config.Context questionable * Issue358 Migrate to rhino 1.7R3 version * Issue372 Create !SassCssProcessor using jRuby underlying implementation * Issue400 Lazy loading instantiation of processors and locators * Issue415 Maven plugin processor extension configuration * Issue416 !ServletContextUriLocator not working with Spring mvc resources * Issue417 Allow configuration using !ServletContextListener * Issue418 Suppress logging level for maven plugin * Issue419 Encoding value is not initialized properly * Issue420 Content length is not set * Issue424 Memory & Performance improvements * Issue425 !CssImportPreProcessor and !CssUrlRewritingProcessor produce invalid image URL when using nested imports * Issue427 Allow !ConsoleStripperProcessor be added through wro.properties * Issue428 Update rhino based processors to latest version * Issue429 Update google closure dependency version ===Release 1.4.5=== *Date: 7 April 2012* * Issue12 Create console.log & console.debug stripper JS processor * Issue382 Rhino based processors fails when slf4j-api is not included * Issue383 Backslashes in CSS duplicated * Issue384 Test failing on osx platform * Issue385 !ServletContextUriLocator does not work on !WebSphere 6.1 * Issue389 Configure connectionTimeout in milliseconds instead of seconds * Issue390 Create !DustJs processor * Issue391 Create !CoffeeScript & Less Filters * Issue393 Error while using the 'import' tag in the wro.xml file * Issue394 !CopyrightKeeperProcessorDecorator ignores Resource#isMinimize() * Issue395 Add postProcessors support to wro4j-runner * Issue397 Update less.js processor * Issue407 Update rhino based processors to latest version * Issue408 Update google closure dependency version * Issue409 Proceed with filter chain when requesting a group with no resources * Issue410 Reload Cache scheduler should process only requested groups * Issue411 Locator cannot find resources during scheduler update * Issue412 !ConnectionTimeout cannot be configured using wro.properties * Issue413 Group is not extracted correctly when resource url contains jsessionID ===Release 1.4.4=== *Date: 20 February 2012* * Issue88 Invalidate the cache when model is updated * Issue359 !PerformanceLoggerCallback doesn't work properly in some situations * Issue361 !LessCssProcessor fails silently * Issue362 Deep recursive wildcard pattern ** in classpath groups * Issue364 !ObjectPoolHelper should use WHEN_EXHAUSTED_GROW policy * Issue365 Problem in !LessCss extensibility * Issue366 csslint not working via maven since 1.4.2 * Issue369 Upgrade rhino based processors to latest versions * Issue370 reloadModel and reloadCache should be ignored if wro is not initialized yet * Issue373 !NullPointerException while creating a !ProcessorsFactory which has decorated processors * Issue378 CSSMin bug on Properties containing colon ":" * Issue381 Exception while rewriting css url's containing $ character. 
===Release 1.4.3=== *Date: 8 January 2012* * Issue253 New UTF-8 encoding issues * Issue316 Callbacks support * Issue339 Add a timestamp naming strategy. * Issue341 No content type header sent for large resources * Issue343 !WroManagerFactory should be injectable to the filter * Issue345 ETag must be set before the content is written. * Issue346 Reserved names support for !UglifyJsProcessor * Issue347 !WroConfiguration and Context should be Injectable * Issue350 Upgrade uglifyJs processor to latest version * Issue353 wro4j-core doesn't work with jdk 1.5 * Issue354 Parallel preProcessing flag for wro4j-runner * Issue355 Update less.js processor to latest version * Issue356 Update uglifyJs processor to latest version * Issue357 Update linters dependencies to latest version ===Release 1.4.2=== *Date: 7 December 2011* * Issue100 JS Lint integration * Issue305 Multiple concurrent calls into !WroFilter when Context.get().getConfig().getCacheUpdatePeriod() > 0 cause multiple watcher thread factories * Issue306 wro4j filter fails on concurrent requests * Issue308 Update google closure dependency version * Issue309 Incorrect WARN statements report * Issue310 Rhino is not exited, leaks memory * Issue312 Wildcard resources are not order properly * Issue321 FileNotFoundException and NullPointerException are raised when multiple applications on tomcat * Issue322 CSS url rewriting doesn't handle properly whitespace * Issue323 Allow gzipped content to be cached * Issue324 Changing cacheUpdatePeriod with JMX is broken * Issue325 Expose additional properties through JMX * Issue326 Google Closure externs configuration support * Issue327 Processors relying on rhino are not thread-safe * Issue328 Multiline comment stripper processor issue * Issue329 Update cssLint processor to latest version * Issue331 Parallel resource preProcessing support * Issue332 Update uglifyJs processor to latest version * Issue334 Runner: exceptions are caught & logged, but, should exit with System.exit(1) so Ant and other CLI interfaces know there was an error * Issue335 JsHint predef options bug * Issue336 Maven plugin configuration issue ===Release 1.4.1=== *Date: 18 September 2011* * Issue286 Upgrade dojoShrinksafe processor * Issue287 !ConfigurableWroFilter and no processors defined * Issue288 jshint maven plugin goal and customManagerFactory * Issue290 Update jsHint processor to latest version * Issue291 Update cssLint processor to latest version * Issue293 Exceptions are not always logged * Issue295 Create !ExtensionAwareProcessorDecorator * Issue296 Extend !JsHint option configuration * Issue297 Wrong report of the line number in jsHint * Issue298 Make alias processor configuration extension aware * Issue299 Problem with image background in maven plugin * Issue300 Update uglifyJs processor to latest version * Issue301 Upgrade !CoffeeScript to latest version ===Release 1.4.0=== [Release_Highlights_1_4_0 Release Higlights] *Date: 26 August 2011* * Issue23 Create [GrailsPlugin Grails plugin] * Issue196 Build wro model with [GroovyWroModel groovy script] * Issue221 Normalize css url path generated by !CssUrlRewritingProcessor * Issue245 Wildcard classpath resources and maven plugin * Issue246 JsonHPack packer should accept plain JSON object * Issue247 Wildcards expander support * Issue252 Create [SmartWroModelFactory] * Issue254 Simplify maven plugin processors configuration * Issue255 Add !ModelTransformer support * Issue256 Update uglifyJs processor to version 1.0.6 * Issue259 Problem with background url in css after aggregation 
* Issue260 Update less.js to latest version * Issue261 Remove BOM characters by default * Issue262 Google closure compiler extensibility * Issue263 Update cssLint processor to latest version * Issue264 Configuration of pre processors for wro4j-runner * Issue269 Processors configuration in config properties file * Issue270 Improve performance of processors depending on Rhino * Issue272 OOM in !LessCSS engine * Issue277 google closure version upgrade * Issue281 Reload model bug * Issue283 Create a properties file to hold the mapping between original & renamed resource for maven plugin * Issue284 Simplify processors configuration with !ConfigurableWroFilter ===New release (1.3.8)=== *Date: 22 June 2011* * Issue226 !CopyrightKeeperProcessorDecorator doesn't inherit @Minimize annotation * Issue228 !ServletContextLocator doesn't handle jsp files * Issue229 Create JSON pack/unpack processors * Issue230 Create !PlaceholderProcessor * Issue231 minimizing new CSS 3 "@media" features. * Issue232 Create !CssLint processor * Issue233 WroConfiguration always print a warning message "You cannot disable cache in DEPLOYMENT mode" * Issue234 Create !GoogleAdvancedStandaloneManagerFactory * Issue235 Allow configure !WroFilter from properties file * Issue236 Configure managerFactoryClassName from property configuration file * Issue237 Create !CssLint maven plugin * Issue238 Pre processors error reporting enhancement * Issue239 Change Wro4j maven plugin execution phase to compile * Issue240 !SemicolonAppenderPreProcessor and empty scripts * Issue241 !CssImportPreProcessor and ignoreMissingResources * Issue242 Add more processors to wro4j runner Details: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.8 ===New release (1.3.7)=== *Date: 31 May 2011* * Issue190 Add coffeScript support to wro4j-runner * Issue208 Ability to specify file.encoding in ConfigurableWroFilter * Issue209 wro4j maven plugin detailed exception message * Issue212 use a factory for WroConfiguration creation * Issue214 Create Copyright Information Processor * Issue215 Update google closure dependency version * Issue216 ConfigurableWroFilter configuration with Properties file * Issue222 Upgrade CoffeeScript to version 1.1.1 & processor extensibility support * Issue223 Update uglifyJs processor to version 1.0.2 * Issue224 upgrade LessCss processor to latest version Details: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.7 ===New release (1.3.6)=== *Date: 12 April 2011* * Issue33 Integrate LRU cache strategy * Issue92 Build wro model with JSON * Issue138 Externalize processor provider capability * Issue181 Allow multiple wro.xml * Issue182 WroModelFactory refactoring * Issue183 ClassPathUriLocator wildcard support doesn't work with resources inside JARs * Issue186 Less Css processor errors are not intuitive * Issue187 Create [http://jashkenas.github.com/coffee-script/ Coffee script] processor * Issue192 wro4j with jawrCssMinifier cannot handle css3 attribute selectors * Issue197 Update google closure dependency version * Issue200 Update uglifyJs compressor to latest version (1.0.1) * Issue203 CssDataUri doesn't work with absolute url images * Issue204 Add CssDataUriPreProcessor to ConfigurableWroManagerFactory Details: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.6 ===New release (1.3.5)=== *Date: 10 March 2011* * Issue141 CssDataUriPreProcessor should detect duplicate uri's * Issue171 Browser loads optimized script/css and then waits for 20 seconds... 
* Issue172 Create a jsHint processor * Issue174 Gzipping resources doesn't not work on server enforcing response.setContentLength * Issue176 Create JsHint maven plugin (@see http://web-resource-optimization.blogspot.com/2011/03/build-time-javascript-code-analysis.html) * Issue180 Upgrade uglifyJs Details: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.5 ===New release (1.3.4)=== *Date: 13 February 2011* * Issue46 Create command line tool (http://web-resource-optimization.blogspot.com/2011/02/simple-client-side-build-system-with.html) called wro4j-runner. * Issue162 Input stream has been finalized or forced closed without being explicitly closed * Issue163 lessCss parser shows INFO logging * Issue164 Wildcard resources and classpath locator are not working properly * Issue166 Add disableCacheInDevelopment flag to settings * Issue168 Change Caching Headers in DEVELOPMENT Mode * Issue169 Upgrade less.js version to 1.0.41 Details: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.4 ===New release (1.3.3)=== *Date: 11 January 2011* * Issue155 Use DefaultCodingConvention for GoogleClosure compiler processor. * Issue156 Improve Gzip compression support * Issue157 Enclose ETag value in quotes * Issue158 Prevent specific files from being compressed/minified * Issue159 SemicolonAppenderPreProcessor should append semicolon only if needed * Issue160 Update uglifyJs processor to latest version * Issue161 Supress spurious duplicate resource detection on reload Details: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.3 ===New release (1.3.2)=== *Date: 11 December 2010* * Issue146 Resource status code is always 200 * Issue147 Use official google closure dependency * Issue148 Update less.js processor * Issue149 CssImportPreProcessor uses a too restrictive PATTERN for finding imports * Issue150 Maven artifact for wro4j-core and wro4j-extensions pulls unnecessary dependencies * Issue151 ServletContext missing resources on Tomcat * Issue152 Wildcard Resources are not ordered alphabetically * Issue153 Update uglifyJs processor to latest version Details: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.2 ===New release (1.3.1)=== *Date: 7 November 2010* * Issue122 Create UglifyJs processor * Issue142 YuiJsMin compressor is broken in wro4j-1.3.0 * Issue143 Integrate DojoShrinksafe compressor * Issue144 Prevent caching of wro api requests * Issue145 Create beautifyJsProcessor based on UglifyJs beautifier Details: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.1 ===New release (1.3.0)=== *Date: 1 November 2010* * Issue13 Resource fingerprinting * Issue68 Find an alternative way to set configuration mode * Issue80 Maven plugin classpath resources support * Issue86 Integrate SASS css meta framework * Issue96 Demo web module * Issue97 Trigger cache & model update through http request * Issue99 Enable customized versioning of output resource for wro4j maven plugin * Issue101 Dynamic resource locator must support redirects * Issue102 Replace current LessCss processor implementation with a newer one * Issue103 Create a processor for Packer JS compressor * Issue104 Create preconfigured WroManagerFactories for maven using YUI & Google Closure * Issue105 WroConfiguration should not be the same for many applications * Issue106 Make targetGroups parameter optional for wro4j maven plugin * Issue107 Use daemon threads for schedulers * Issue108 Use scheduleWithFixedDelay when scheduling model update * Issue109 Create Conform Colors Css processor 
* Issue110 Create VariablizeColors css processor * Issue112 Create a css processor based on Andy Roberts CssCompressor * Issue113 Encoding issue * Issue114 Maven plugin doesn't handle correctly wildcards * Issue115 Detect duplicated resources * Issue116 Maven plugin shouldn't create empty files * Issue117 Maven plugin should allow configuration of naming strategy * Issue121 wro4j does not work behind a RequestDispatcher.include * Issue123 Reuse YUICompressor code & remove dependency * Issue124 newCacheStrategy method should be protected in BaseWroManagerFactory * Issue125 Processors execution order * Issue128 Upgrade LessCss to 1.0.36 version * Issue129 Upgrade google closure dependency to latest revision * Issue131 Classpath UriLocator doesn't accept empty spaces * Issue134 Resource comparison test - refactoring * Issue135 Get rid of wro4j-test-utils artifact See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.3.0 ===New release (1.2.8)=== *Date: 27 June 2010* * Issue91 CSS url rewriting creates incorrect urls for CSS rules that include quoted urls * Issue93 Use scheduleWithFixedDelay when scheduling cache update See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.8 ===New release (1.2.7)=== *Date: 10 May 2010* * Issue21 Wildcard syntax support in group definition * Issue38 Add support for base 64-encoded image in CSS * Issue48 interpoloation of the wro.xml * Issue81 Create NoProcessors ManagerFactory * Issue83 GroupExtractor should use HttpServletRequest to get the group name * Issue84 Slf4j dependency is not added by maven * Issue85 Create a google closure distribution for wro4j integration * Issue87 UriLocators implementations shouldn't be final * Issue89 Add ServletContext parameter to newModelFactory method * Issue90 BomStripper should support also CSS resources See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.7 ===New release (1.2.6)=== *Date: 22 apr 2010* * Issue15 Integrate CSS meta frameworks (See [LessCssSupport] wiki page) * Issue77 Compatibility with servlet-api-2.3 * Issue78 Add granular destinationFolder control for maven plugin * Issue79 HashCode implementation of Group and Resource classes See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.6 ===New release (1.2.5)=== *Date: 19 apr 2010* * Issue74 wro4j-maven-plugin-1.2.3 is broken * Issue76 Make wro4j compatible with java 1.5 See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.5 ===New release (1.2.4)=== *Date: 18 apr 2010* * Issue25 Integrate Google Closure compiler * Issue69 Configurable MBean object name * Issue70 ConfigurableWroManagerFactory JMX problem * Issue71 CssVariablesProcessor must be before CssUrlRewritingProcessor * Issue72 Add ignoreMissingResources to wro4j Maven Plugin * Issue73 Processors execution order in ConfigurableWroManagerFactory * Issue75 Create ExtensionsConfigurableWroManagerFactory See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.4 ===New release (1.2.3)=== *Date: 15 apr 2010* * Issue27 Configure expires headers using init-param * Issue61 ConfigurableWroManagerFactory related exception * Issue62 Create a fallback aware ModelFactory * Issue63 Reloading cache is not working properly * Issue64 CssUrlRewriting for css from WEB-INF folder * Issue65 Exception when dealing with dynamic resources * Issue66 Update caching headers when the resources cache is updated * Issue67 Allow custom handling of 
WroRuntimeException See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.3 ===New release (1.2.2)=== *Date: 9 apr 2010* * Issue58 Configurable GroupsProcessors for Wro4j maven plugin * Issue59 JMX Configurations * Issue60 Failure when attempting to set Content-Encoding See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.2 ===New release (1.2.1)=== * Issue51 wro4j maven plugin enhancements * Issue52 Invalid resource handling * Issue54 Make cacheUpdatePeriod and modelUpdatePeriod configurable * Issue55 Create semicolon Appender Javascript pre processor * Issue56 Switch minimization on/off in DEVELOPMENT mode See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.1 ===New release (1.2.0)=== * Issue2 Enable/disable Gzip using request parameter * Issue3 Remove logic from WroFilter * Issue5 Document "How to extend & integrate" * Issue6 Create maven 2 plugin * Issue16 Upload wro4j to maven central repository * Issue17 Support @import in css resources * Issue18 Variables cannot be externalized * Issue26 Create WroManagerFactory capable of being configured using init-params * Issue28 JMX support to change the behaviour at runtime * Issue30 Create security strategy for resource streaming * Issue35 Create MultipleGroup uriRequestProcessor * Issue36 Runtime Configuration Option * Issue40 XmlModelFactory improvements * Issue43 Move code base to GitHub * Issue47 BOM Characters at beginning of JS files breaks JS concatenation * Issue50 Core dependency to slf4j-log4j12 See details here: http://code.google.com/p/wro4j/issues/list?can=1&q=milestone:1.2.0
MediaWiki
2
supakiad/wro4j
docs/ReleaseNotes.wiki
[ "Apache-2.0" ]
#!/usr/bin/env bash
#
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#

sudo pip install --index-url https://test.pypi.org/simple/ fasttext
python runtests.py -u
Shell
3
KatyaKos/fastText
.circleci/pip_test.sh
[ "MIT" ]
//===--- TypeLayoutDumper.cpp ---------------------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// This file defines a tool for dumping layouts of fixed-size types in a simple
// YAML format.
//
//===----------------------------------------------------------------------===//

#include "TypeLayoutDumper.h"

#include "FixedTypeInfo.h"
#include "GenType.h"
#include "IRGen.h"
#include "IRGenModule.h"
#include "LegacyLayoutFormat.h"
#include "swift/AST/ASTContext.h"
#include "swift/AST/ASTMangler.h"
#include "swift/AST/ASTWalker.h"
#include "swift/AST/IRGenOptions.h"
#include "swift/AST/Types.h"
#include "swift/SIL/SILModule.h"
#include "swift/Subsystems.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/YAMLTraits.h"
#include "llvm/Support/raw_ostream.h"

#include <string>
#include <vector>

using namespace swift;
using namespace irgen;

namespace {

class NominalTypeWalker : public ASTWalker {
  std::vector<NominalTypeDecl *> &Results;

public:
  NominalTypeWalker(std::vector<NominalTypeDecl *> &Results)
      : Results(Results) {}

  bool walkToDeclPre(Decl *D) override {
    if (auto *NTD = dyn_cast<NominalTypeDecl>(D))
      Results.push_back(NTD);
    return true;
  }
};

} // end anonymous namespace

static std::string mangleTypeAsContext(const NominalTypeDecl *type) {
  Mangle::ASTMangler Mangler;
  return Mangler.mangleTypeAsContextUSR(type);
}

static YAMLTypeInfoNode createYAMLTypeInfoNode(NominalTypeDecl *NTD,
                                               IRGenModule &IGM,
                                               const FixedTypeInfo *fixedTI) {
  return {mangleTypeAsContext(NTD), fixedTI->getFixedSize().getValue(),
          fixedTI->getFixedAlignment().getValue(),
          fixedTI->getFixedExtraInhabitantCount(IGM)};
}

static void addYAMLTypeInfoNode(NominalTypeDecl *NTD, IRGenModule &IGM,
                                std::vector<YAMLTypeInfoNode> &Result) {
  // We only care about public and @usableFromInline declarations.
  if (NTD->getEffectiveAccess() < AccessLevel::Public)
    return;

  // We don't care about protocols or classes.
  if (isa<ProtocolDecl>(NTD) || isa<ClassDecl>(NTD))
    return;

  assert(isa<StructDecl>(NTD) || isa<EnumDecl>(NTD));

  auto &Opts = IGM.getOptions();
  switch (Opts.TypeInfoFilter) {
  case IRGenOptions::TypeInfoDumpFilter::All:
    break;
  case IRGenOptions::TypeInfoDumpFilter::Resilient:
    if (!NTD->isFormallyResilient())
      return;
    break;
  case IRGenOptions::TypeInfoDumpFilter::Fragile:
    if (NTD->isFormallyResilient())
      return;
    break;
  }

  auto *TI = &IGM.getTypeInfoForUnlowered(NTD->getDeclaredTypeInContext());

  auto *fixedTI = dyn_cast<FixedTypeInfo>(TI);
  if (!fixedTI)
    return;

  Result.push_back(createYAMLTypeInfoNode(NTD, IGM, fixedTI));
}

static Optional<YAMLModuleNode> createYAMLModuleNode(ModuleDecl *Mod,
                                                     IRGenModule &IGM) {
  std::vector<NominalTypeDecl *> Decls;
  NominalTypeWalker Walker(Decls);

  // Collect all nominal types, including nested types.
  SmallVector<Decl *, 16> TopLevelDecls;
  Mod->getTopLevelDecls(TopLevelDecls);
  for (auto *D : TopLevelDecls)
    D->walk(Walker);

  std::vector<YAMLTypeInfoNode> Nodes;

  // Convert each nominal type.
  for (auto *D : Decls) {
    if (auto *NTD = dyn_cast<NominalTypeDecl>(D)) {
      addYAMLTypeInfoNode(NTD, IGM, Nodes);
    }
  }

  if (Nodes.empty())
    return None;

  std::sort(Nodes.begin(), Nodes.end());

  return YAMLModuleNode{Mod->getName().str(), Nodes};
}

void TypeLayoutDumper::write(ArrayRef<ModuleDecl *> AllModules,
                             llvm::raw_ostream &os) const {
  llvm::yaml::Output yout(os);

  // Collect all nominal types, including nested types.
  for (auto *Mod : AllModules) {
    auto Node = createYAMLModuleNode(Mod, IGM);
    if (Node)
      yout << *Node;
  }
}

bool swift::performDumpTypeInfo(const IRGenOptions &Opts, SILModule &SILMod) {
  auto &Ctx = SILMod.getASTContext();
  assert(!Ctx.hadError());
  (void)Ctx;

  IRGenerator IRGen(Opts, SILMod);
  IRGenModule IGM(IRGen, IRGen.createTargetMachine());

  // We want to bypass resilience.
  LoweringModeScope scope(IGM, TypeConverter::Mode::CompletelyFragile);

  auto *Mod = SILMod.getSwiftModule();
  SmallVector<Decl *, 16> AllDecls;
  Mod->getTopLevelDecls(AllDecls);

  SmallVector<ModuleDecl *, 4> AllModules;
  for (auto *D : AllDecls) {
    if (auto *ID = dyn_cast<ImportDecl>(D)) {
      if (auto *M = ID->getModule())
        AllModules.push_back(M);
    }
  }

  TypeLayoutDumper dumper(IGM);
  dumper.write(AllModules, llvm::outs());
  return false;
}
C++
4
gandhi56/swift
lib/IRGen/TypeLayoutDumper.cpp
[ "Apache-2.0" ]
# x53 is capital S
<http://www.w3.org/2013/TurtleTests/\U00000053> <http://www.w3.org/2013/TurtleTests/p> <http://www.w3.org/2013/TurtleTests/o> .
Turtle
2
joshrose/audacity
lib-src/lv2/serd/tests/TurtleTests/turtle-syntax-uri-03.ttl
[ "CC-BY-3.0" ]
D:/gitee/open/tinyriscv/tests/riscv-compliance/build_generated/rv32im/MULHU.elf: file format elf32-littleriscv Disassembly of section .text.init: 00000000 <_start>: 0: 04c0006f j 4c <reset_vector> 00000004 <trap_vector>: 4: 34202f73 csrr t5,mcause 8: 00800f93 li t6,8 c: 03ff0a63 beq t5,t6,40 <write_tohost> 10: 00900f93 li t6,9 14: 03ff0663 beq t5,t6,40 <write_tohost> 18: 00b00f93 li t6,11 1c: 03ff0263 beq t5,t6,40 <write_tohost> 20: 00000f17 auipc t5,0x0 24: fe0f0f13 addi t5,t5,-32 # 0 <_start> 28: 000f0463 beqz t5,30 <trap_vector+0x2c> 2c: 000f0067 jr t5 30: 34202f73 csrr t5,mcause 34: 000f5463 bgez t5,3c <handle_exception> 38: 0040006f j 3c <handle_exception> 0000003c <handle_exception>: 3c: 5391e193 ori gp,gp,1337 00000040 <write_tohost>: 40: 00001f17 auipc t5,0x1 44: fc3f2023 sw gp,-64(t5) # 1000 <tohost> 48: ff9ff06f j 40 <write_tohost> 0000004c <reset_vector>: 4c: 00000193 li gp,0 50: 00000297 auipc t0,0x0 54: fb428293 addi t0,t0,-76 # 4 <trap_vector> 58: 30529073 csrw mtvec,t0 5c: 30005073 csrwi mstatus,0 60: 00000297 auipc t0,0x0 64: 02028293 addi t0,t0,32 # 80 <begin_testcode> 68: 34129073 csrw mepc,t0 6c: 00000293 li t0,0 70: 10000337 lui t1,0x10000 74: 01030313 addi t1,t1,16 # 10000010 <_end+0xfffde0c> 78: 00532023 sw t0,0(t1) 7c: 30200073 mret 00000080 <begin_testcode>: 80: 00002117 auipc sp,0x2 84: f8010113 addi sp,sp,-128 # 2000 <begin_signature> 88: 00000913 li s2,0 8c: 00000893 li a7,0 90: 031938b3 mulhu a7,s2,a7 94: 01112023 sw a7,0(sp) 98: 00000a13 li s4,0 9c: 00100993 li s3,1 a0: 033a39b3 mulhu s3,s4,s3 a4: 01312223 sw s3,4(sp) a8: 00000b13 li s6,0 ac: fff00a93 li s5,-1 b0: 035b3ab3 mulhu s5,s6,s5 b4: 01512423 sw s5,8(sp) b8: 00000c13 li s8,0 bc: 80000bb7 lui s7,0x80000 c0: fffb8b93 addi s7,s7,-1 # 7fffffff <_end+0x7fffddfb> c4: 037c3bb3 mulhu s7,s8,s7 c8: 01712623 sw s7,12(sp) cc: 00000d13 li s10,0 d0: 80000cb7 lui s9,0x80000 d4: 039d3cb3 mulhu s9,s10,s9 d8: 01912823 sw s9,16(sp) dc: 00002117 auipc sp,0x2 e0: f3810113 addi sp,sp,-200 # 2014 <test_2_res> e4: 00100e13 li t3,1 e8: 00000d93 li s11,0 ec: 03be3db3 mulhu s11,t3,s11 f0: 01b12023 sw s11,0(sp) f4: 00100f13 li t5,1 f8: 00100e93 li t4,1 fc: 03df3eb3 mulhu t4,t5,t4 100: 01d12223 sw t4,4(sp) 104: 00100193 li gp,1 108: fff00a93 li s5,-1 10c: 0351bab3 mulhu s5,gp,s5 110: 01512423 sw s5,8(sp) 114: 00100413 li s0,1 118: 80000237 lui tp,0x80000 11c: fff20213 addi tp,tp,-1 # 7fffffff <_end+0x7fffddfb> 120: 02443233 mulhu tp,s0,tp 124: 00412623 sw tp,12(sp) 128: 00100593 li a1,1 12c: 800004b7 lui s1,0x80000 130: 0295b4b3 mulhu s1,a1,s1 134: 00912823 sw s1,16(sp) 138: 00002117 auipc sp,0x2 13c: ef010113 addi sp,sp,-272 # 2028 <test_3_res> 140: fff00693 li a3,-1 144: 00000613 li a2,0 148: 02c6b633 mulhu a2,a3,a2 14c: 00c12023 sw a2,0(sp) 150: fff00793 li a5,-1 154: 00100713 li a4,1 158: 02e7b733 mulhu a4,a5,a4 15c: 00e12223 sw a4,4(sp) 160: fff00893 li a7,-1 164: fff00813 li a6,-1 168: 0308b833 mulhu a6,a7,a6 16c: 01012423 sw a6,8(sp) 170: fff00993 li s3,-1 174: 80000937 lui s2,0x80000 178: fff90913 addi s2,s2,-1 # 7fffffff <_end+0x7fffddfb> 17c: 0329b933 mulhu s2,s3,s2 180: 01212623 sw s2,12(sp) 184: fff00a93 li s5,-1 188: 80000a37 lui s4,0x80000 18c: 034aba33 mulhu s4,s5,s4 190: 01412823 sw s4,16(sp) 194: 00002117 auipc sp,0x2 198: ea810113 addi sp,sp,-344 # 203c <test_4_res> 19c: 80000bb7 lui s7,0x80000 1a0: fffb8b93 addi s7,s7,-1 # 7fffffff <_end+0x7fffddfb> 1a4: 00000b13 li s6,0 1a8: 036bbb33 mulhu s6,s7,s6 1ac: 01612023 sw s6,0(sp) 1b0: 80000cb7 lui s9,0x80000 1b4: fffc8c93 addi s9,s9,-1 # 7fffffff <_end+0x7fffddfb> 
1b8: 00100c13 li s8,1 1bc: 038cbc33 mulhu s8,s9,s8 1c0: 01812223 sw s8,4(sp) 1c4: 80000db7 lui s11,0x80000 1c8: fffd8d93 addi s11,s11,-1 # 7fffffff <_end+0x7fffddfb> 1cc: fff00d13 li s10,-1 1d0: 03adbd33 mulhu s10,s11,s10 1d4: 01a12423 sw s10,8(sp) 1d8: 80000eb7 lui t4,0x80000 1dc: fffe8e93 addi t4,t4,-1 # 7fffffff <_end+0x7fffddfb> 1e0: 80000e37 lui t3,0x80000 1e4: fffe0e13 addi t3,t3,-1 # 7fffffff <_end+0x7fffddfb> 1e8: 03cebe33 mulhu t3,t4,t3 1ec: 01c12623 sw t3,12(sp) 1f0: 80000ab7 lui s5,0x80000 1f4: fffa8a93 addi s5,s5,-1 # 7fffffff <_end+0x7fffddfb> 1f8: 80000f37 lui t5,0x80000 1fc: 03eabf33 mulhu t5,s5,t5 200: 01e12823 sw t5,16(sp) 204: 00002117 auipc sp,0x2 208: e4c10113 addi sp,sp,-436 # 2050 <test_5_res> 20c: 80000237 lui tp,0x80000 210: 00000193 li gp,0 214: 023231b3 mulhu gp,tp,gp 218: 00312023 sw gp,0(sp) 21c: 800004b7 lui s1,0x80000 220: 00100413 li s0,1 224: 0284b433 mulhu s0,s1,s0 228: 00812223 sw s0,4(sp) 22c: 80000637 lui a2,0x80000 230: fff00593 li a1,-1 234: 02b635b3 mulhu a1,a2,a1 238: 00b12423 sw a1,8(sp) 23c: 80000737 lui a4,0x80000 240: 800006b7 lui a3,0x80000 244: fff68693 addi a3,a3,-1 # 7fffffff <_end+0x7fffddfb> 248: 02d736b3 mulhu a3,a4,a3 24c: 00d12623 sw a3,12(sp) 250: 80000837 lui a6,0x80000 254: 800007b7 lui a5,0x80000 258: 02f837b3 mulhu a5,a6,a5 25c: 00f12823 sw a5,16(sp) 260: 00002297 auipc t0,0x2 264: da028293 addi t0,t0,-608 # 2000 <begin_signature> 268: 10000337 lui t1,0x10000 26c: 00830313 addi t1,t1,8 # 10000008 <_end+0xfffde04> 270: 00532023 sw t0,0(t1) 274: 00002297 auipc t0,0x2 278: e5c28293 addi t0,t0,-420 # 20d0 <end_signature> 27c: 10000337 lui t1,0x10000 280: 00c30313 addi t1,t1,12 # 1000000c <_end+0xfffde08> 284: 00532023 sw t0,0(t1) 288: 00100293 li t0,1 28c: 10000337 lui t1,0x10000 290: 01030313 addi t1,t1,16 # 10000010 <_end+0xfffde0c> 294: 00532023 sw t0,0(t1) 298: 00000013 nop 29c: 00100193 li gp,1 2a0: 00000073 ecall 000002a4 <end_testcode>: 2a4: c0001073 unimp ... Disassembly of section .tohost: 00001000 <tohost>: ... 00001100 <fromhost>: ... 
Disassembly of section .data: 00002000 <begin_signature>: 2000: ffff 0xffff 2002: ffff 0xffff 2004: ffff 0xffff 2006: ffff 0xffff 2008: ffff 0xffff 200a: ffff 0xffff 200c: ffff 0xffff 200e: ffff 0xffff 2010: ffff 0xffff 2012: ffff 0xffff 00002014 <test_2_res>: 2014: ffff 0xffff 2016: ffff 0xffff 2018: ffff 0xffff 201a: ffff 0xffff 201c: ffff 0xffff 201e: ffff 0xffff 2020: ffff 0xffff 2022: ffff 0xffff 2024: ffff 0xffff 2026: ffff 0xffff 00002028 <test_3_res>: 2028: ffff 0xffff 202a: ffff 0xffff 202c: ffff 0xffff 202e: ffff 0xffff 2030: ffff 0xffff 2032: ffff 0xffff 2034: ffff 0xffff 2036: ffff 0xffff 2038: ffff 0xffff 203a: ffff 0xffff 0000203c <test_4_res>: 203c: ffff 0xffff 203e: ffff 0xffff 2040: ffff 0xffff 2042: ffff 0xffff 2044: ffff 0xffff 2046: ffff 0xffff 2048: ffff 0xffff 204a: ffff 0xffff 204c: ffff 0xffff 204e: ffff 0xffff 00002050 <test_5_res>: 2050: ffff 0xffff 2052: ffff 0xffff 2054: ffff 0xffff 2056: ffff 0xffff 2058: ffff 0xffff 205a: ffff 0xffff 205c: ffff 0xffff 205e: ffff 0xffff 2060: ffff 0xffff 2062: ffff 0xffff 00002064 <test_6_res>: 2064: ffff 0xffff 2066: ffff 0xffff 2068: ffff 0xffff 206a: ffff 0xffff 206c: ffff 0xffff 206e: ffff 0xffff 2070: ffff 0xffff 2072: ffff 0xffff 2074: ffff 0xffff 2076: ffff 0xffff 00002078 <test_7_res>: 2078: ffff 0xffff 207a: ffff 0xffff 207c: ffff 0xffff 207e: ffff 0xffff 2080: ffff 0xffff 2082: ffff 0xffff 2084: ffff 0xffff 2086: ffff 0xffff 2088: ffff 0xffff 208a: ffff 0xffff 0000208c <test_8_res>: 208c: ffff 0xffff 208e: ffff 0xffff 2090: ffff 0xffff 2092: ffff 0xffff 2094: ffff 0xffff 2096: ffff 0xffff 2098: ffff 0xffff 209a: ffff 0xffff 209c: ffff 0xffff 209e: ffff 0xffff 000020a0 <test_9_res>: 20a0: ffff 0xffff 20a2: ffff 0xffff 20a4: ffff 0xffff 20a6: ffff 0xffff 20a8: ffff 0xffff 20aa: ffff 0xffff 20ac: ffff 0xffff 20ae: ffff 0xffff 20b0: ffff 0xffff 20b2: ffff 0xffff 000020b4 <test_10_res>: 20b4: ffff 0xffff 20b6: ffff 0xffff 20b8: ffff 0xffff 20ba: ffff 0xffff 20bc: ffff 0xffff 20be: ffff 0xffff 20c0: ffff 0xffff 20c2: ffff 0xffff 20c4: ffff 0xffff 20c6: ffff 0xffff ... 000020d0 <end_signature>: ... 00002100 <begin_regstate>: 2100: 0080 addi s0,sp,64 ... 00002200 <end_regstate>: 2200: 0004 0x4 ...
ObjDump
4
DuBirdFly/TinyRISCV_Learn
tests/riscv-compliance/build_generated/rv32im/MULHU.elf.objdump
[ "Apache-2.0" ]
#include <Wire.h>

#define DEBUG_MODE 0

// Address Pins
#define AD0 11
#define AD1 12

// I2C Configuration
#define I2C_DEFAULT_ADDRESS 0x0A
#define BUFFER_SIZE 8

// FUNCTION COMMANDS
#define NO_TONE 0x00
#define TONE 0x01

byte buffer[BUFFER_SIZE];
int addressPins[] = { AD0, AD1 };
int address = I2C_DEFAULT_ADDRESS;

void setup() {
  // Determine the I2C address
  // by reading the designated pins
  int offset = 0;

  for (int i = 0; i < 2; i++) {
    pinMode(addressPins[i], INPUT);
    if (digitalRead(addressPins[i])) {
      offset |= 1 << i;
    }
  }

  address += offset;

#if DEBUG_MODE
  Serial.begin(9600);
#endif

  Wire.begin(address);
  Wire.onReceive(onReceive);
}

void loop() {}

void onReceive(int howMany) {
  if (howMany > BUFFER_SIZE) {
    howMany = BUFFER_SIZE;
  }

  // Clear the whole buffer so a short message cannot pick up stale
  // bytes left over from a previous, longer message.
  memset(&buffer[0], 0, BUFFER_SIZE);

  uint8_t command;
  uint8_t pin;
  uint16_t hz;
  uint32_t ms = 0;  // default 0: tone(pin, hz) plays until a NO_TONE command

  for (int i = 0; i < howMany; i++) {
    buffer[i] = Wire.read();
  }

  command = buffer[0];
  pin = buffer[1];
  hz = (buffer[2] << 8) | buffer[3];

  if (howMany == 8) {
    // Cast to 32 bits before shifting: int is only 16 bits on AVR,
    // so shifting a byte left by 24 would otherwise lose the value.
    ms = ((uint32_t)buffer[4] << 24) | ((uint32_t)buffer[5] << 16) |
         ((uint32_t)buffer[6] << 8) | buffer[7];
  }

#if DEBUG_MODE
  Serial.print("Bytes Received: ");
  Serial.println(howMany);
  Serial.print("command: ");
  Serial.println(command);
  Serial.print("pin: ");
  Serial.println(pin);
  Serial.print("hz: ");
  Serial.println(hz);
  Serial.print("ms: ");
  Serial.println(ms);
  Serial.println("--------------------");
#endif

  pinMode(pin, OUTPUT);

  if (command == NO_TONE) {
    noTone(pin);
  }

  if (command == TONE) {
    if (ms == 0) {
      tone(pin, hz);
    } else {
      tone(pin, hz, ms);
    }
  }
}
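/*
  Editor's illustration (not part of the original firmware): a hypothetical
  I2C master sketch could drive this backpack as below. The pin, frequency
  and duration values are made up for the example; the 8-byte message layout
  (command, pin, hz hi/lo, ms as 4 big-endian bytes) follows the parsing in
  onReceive() above.

    #include <Wire.h>

    void setup() {
      Wire.begin();                      // act as I2C master
      Wire.beginTransmission(0x0A);      // I2C_DEFAULT_ADDRESS (+ AD0/AD1 offset)
      Wire.write(0x01);                  // command: TONE
      Wire.write(3);                     // pin 3
      Wire.write(440 >> 8);              // hz, high byte
      Wire.write(440 & 0xFF);            // hz, low byte
      Wire.write((byte)(500UL >> 24));   // ms, big-endian:
      Wire.write((byte)(500UL >> 16));
      Wire.write((byte)(500UL >> 8));
      Wire.write((byte)(500UL & 0xFF));  // 440 Hz for 500 ms
      Wire.endTransmission();
    }

    void loop() {}
*/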
Arduino
5
mattp94/johnny-five
firmwares/piezo_i2c_backpack.ino
[ "MIT" ]
{define}
{if true} {* error, unclosed macro *}
{/define}
Latte
1
timfel/netbeans
php/php.latte/test/unit/data/testfiles/parser/issue245728_02.latte
[ "Apache-2.0" ]
# Script to copy waveforms from realtime antelope machine to a large # archival disk partition so that the realtime database can be cleaned up. # Taimi Mulder, Geological Survey of Canada, Sidney, B.C., Dec 2004 # Modified Feb. 2005 by Taimi Mulder and Kent Lindquist, Lindquist Consulting Inc. use Datascope; use Fcntl ':flock'; use Getopt::Std; sub my_system { my( $command ) = @_; if( $opt_v || $opt_n ) { elog_notify( "Running command: $command\n" ); } if( ! $opt_n ) { $rc = system( "$command" ); } else { elog_notify( "\t(skipping)\n" ); } return $rc; } sub check_lock { my( $prog_name ) = @_ ; my( $lockfile ) = ".$prog_name" ; if( $opt_v ) { elog_notify( "Locking $lockfile\n" ); } open ( LOCK, ">$lockfile" ) ; if ( flock( LOCK, LOCK_EX | LOCK_NB ) != 1 ) { elog_die( "Can't lock file '$lockfile'. Quitting to avoid potential collision with other programs.\n" ); } print LOCK "$$\n" ; } sub release_lock { my( $prog_name ) = @_; my( $lockfile ) = ".$prog_name"; if( $opt_v ) { elog_notify( "Unlocking $lockfile\n" ); } flock( LOCK, LOCK_UN ); close( LOCK ); unlink( $lockfile ); } sub oldest_realtime { my( $oldest_realtime_timestamp ); if( $opt_v ) { elog_notify( "Checking real-time database..." ); } my( @jdates ) = `dbsubset $real_time_wf_dbname.wfdisc 'jdate != NULL && jdate != 1970001' | dbsort - jdate | dbselect - jdate | uniq`; chomp( @jdates ); if( @jdates < 1 ) { elog_die( "Can't find any jdates in $real_time_wf_dbname.wfdisc. Bye!\n" ); } my( $ndays_in_rt_db ) = scalar( @jdates ) - 1; my( $oldest_jdate ) = $jdates[0]; my( %rtcrontab ) = %{pfget( "rtexec", "crontab" )}; if( grep( /rtdbclean/, values( %rtcrontab ) ) ) { my( $rtdbclean_max_days_db ) = pfget( "rtdbclean", "max_days_db" ); my( $ndays_at_cleanup_risk ) = $ndays_in_rt_db - $rtdbclean_max_days_db; if( $ndays_at_cleanup_risk > 0 && $ndays_at_cleanup_risk <= $ndays_in_rt_db ) { $oldest_jdate = $jdates[$ndays_at_cleanup_risk-1]; if( $opt_v ) { elog_notify( "rtdbclean appears to be enabled, with max_days_db set to " . "$rtdbclean_max_days_db days. There are currently $ndays_in_rt_db complete days in the " . "real-time database. Consider oldest day in real-time db to be " . "$oldest_jdate instead of $jdates[0], adjusting for pending rtdbclean operation." ); } } } $oldest_realtime_timestamp = str2epoch( "$oldest_jdate" ); if( $opt_v ) { elog_notify( "Oldest data in real-time database (excluding data scheduled for cleanup) starts at " . strtime( $oldest_realtime_timestamp ) ); } return $oldest_realtime_timestamp; } sub latest_archived { my( $latest_jdate, $latest_archive_wfdisc ); my( $archive_wf_base_dbname ) = $archive_wf_dbname; # Assume everything after any percent-escapes is used # to label individual archive volumes: $archive_wf_base_dbname =~ s/%.*//; my( @archive_wfdiscs ) = glob( "$archive_wf_base_dbname*\.wfdisc" ); if( scalar( @archive_wfdiscs ) < 1 ) { elog_die( "No pre-existing archives; can't continue. Bye!\n" ); } # Rely on glob's default sort in ascending order # (as documented in File::Glob(3), on which the # perl glob() built-in command is based): $latest_archive_wfdisc = pop( @archive_wfdiscs ); if( $opt_v ) { elog_notify( "Examining $latest_archive_wfdisc for latest jdate..." 
); } chomp( $latest_jdate = `dbsort $latest_archive_wfdisc jdate | dbselect - jdate | tail -1` ); if( $opt_v ) { elog_notify( "...latest archived jdate is $latest_jdate\n" ); } return ( $latest_jdate, $latest_archive_wfdisc ); } sub increment_jdate { my( $start ) = @_; return yearday( str2epoch( "$start" ) + 86400 ); } elog_init( $0, @ARGV ); if( ! getopts( 'j:vn' ) || @ARGV != 0 ) { elog_die( "Usage: rtbackup_wfdisc [-j jdate] [-v] [-n]\n" ); } if( $opt_v ) { elog_notify( "Starting rtbackup_wfdisc.\n" ); } check_lock( "rtdbclean" ); $Pf = "rtbackup_wfdisc"; $real_time_wfdir = pfget( $Pf, "real_time_wfdir" ); $real_time_wf_dbname = pfget( $Pf, "real_time_wf_dbname" ); $wf_subdir_pattern = pfget( $Pf, "wf_subdir_pattern" ); $archive_wfdir = pfget( $Pf, "archive_wfdir" ); $archive_wf_dbname = pfget( $Pf, "archive_wf_dbname" ); $copy = pfget( $Pf, "copy" ); $cat = pfget( $Pf, "cat" ); $ignore_most_recent_ndays = pfget( $Pf, "ignore_most_recent_ndays" ); $minimum_database_overlap_ndays = pfget( $Pf, "minimum_database_overlap_ndays" ); $warning_email = pfget( $Pf, "warning_email" ); ( $latest_archived_jdate, $latest_archive_wfdisc ) = latest_archived(); if( $opt_j ) { $active_jdate = $opt_j; } else { $active_jdate = increment_jdate( $latest_archived_jdate ); } $active_timestamp = str2epoch( "$active_jdate" ); $subdir = epoch2str( $active_timestamp, $wf_subdir_pattern ); $movedir = "$real_time_wfdir/$subdir"; if( $active_timestamp > ( str2epoch( yearday( str2epoch( "now" ) ) ) - $ignore_most_recent_ndays * 86400 ) ) { release_lock( "rtdbclean" ); elog_die( "Ignoring data for jdate $active_jdate: more recent than $ignore_most_recent_ndays days. Bye!\n" ); } if( ! -d $movedir ) { release_lock( "rtdbclean" ); elog_die( "Intending to copy subdirectory '$movedir' but '$movedir' does not exist. Bye!\n" ); } $archive_wfdir = epoch2str( $active_timestamp, "$archive_wfdir" ); $archive_wf_dbname = epoch2str( $active_timestamp, "$archive_wf_dbname" ); $tempdb = "/tmp/rtbackup_wfdisc_$<_$$"; my_system( "dbsubset $real_time_wf_dbname.wfdisc 'jdate == $active_jdate' | dbselect - > $tempdb.wfdisc" ); my_system( "dbset $tempdb.wfdisc dir '*' \"$archive_wfdir/$subdir\"" ); if( ! $opt_n ) { if( ! -e "$tempdb.wfdisc" || -z "$tempdb.wfdisc" ) { release_lock( "rtdbclean" ); elog_die( "CRITICAL ERROR (RISK OF DATA-LOSS): Failed to extract rows for jdate " . "$active_jdate from $real_time_wf_dbname.wfdisc. No waveforms copied. Bye!\n" ); } else { chomp( $nrows = `wc -l $tempdb.wfdisc` ); $nrows =~ s/^\s*//; $nrows = (split( /\s+/, $nrows ))[0]; if( ! defined( $nrows ) || $nrows <= 0 ) { release_lock( "rtdbclean" ); unlink( "$tempdb.wfdisc" ); elog_die( "CRITICAL ERROR (RISK OF DATA-LOSS): No rows in extracted " . "wfdisc for $active_jdate. Bye!\n" ); } } } if( ( ! -d "$archive_wfdir" ) && ( ! 
$opt_n ) && ( mkdir( "$archive_wfdir" ) == 0 ) ) { release_lock( "rtdbclean" ); unlink( "$tempdb.wfdisc" ); elog_die( "Directory '$archive_wfdir' doesn't exist; Failed to create it (error: $!)\n" ); } if( $opt_v ) { elog_notify( "Copy waveform directory $movedir into $archive_wfdir:\n" ); } my_system( "$copy $movedir $archive_wfdir" ); if( $opt_v ) { elog_notify( "Concatenate wfdisc rows for jdate $active_jdate onto $archive_wf_dbname.wfdisc:\n" ); } my_system( "$cat $tempdb.wfdisc >> $archive_wf_dbname.wfdisc" ); unlink( "$tempdb.wfdisc" ); $oldest_realtime_timestamp = oldest_realtime(); $actual_overlap_ndays = ( $active_timestamp - $oldest_realtime_timestamp ) / 86400; if( $actual_overlap_ndays <= $minimum_database_overlap_ndays ) { if( $actual_overlap_ndays <= 0 ) { $warning_message = "No overlap between real-time and archive " . "databases (want $minimum_database_overlap_ndays days); " . "there's a " . -$actual_overlap_ndays . "-day gap. Is rtbackup_wfdisc failing?"; } else { $warning_message = "Only $actual_overlap_ndays days overlapping between real-time and archive " . "databases (want $minimum_database_overlap_ndays days). Is rtbackup_wfdisc failing?"; } $warning_subject = "rtbackup_wfdisc warning: suspect risk of upcoming data loss"; elog_complain( "$warning_message...sending email to $warning_email.\n" ); if( $opt_v ) { $v = "-v"; } else { $v = ""; } $message_tempfile = "/tmp/rtbackup_wfdisc_$<_$$.warning"; open( M, ">$message_tempfile" ); print M $warning_message; close M; my_system( "$cat $message_tempfile | rtmail $v -s '$warning_subject' $warning_email" ); unlink( $message_tempfile ); } elsif( $opt_v ) { elog_notify( "Overlap between real-time and archive databases is $actual_overlap_ndays days " . "(want $minimum_database_overlap_ndays days)...OK\n" ); } release_lock( "rtdbclean" ); if( $opt_v ) { elog_notify( "Done with rtbackup_wfdisc.\n" ); }
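# Editor's note (illustrative, not part of the original script): the
# parameters read above via pfget() are expected to come from an
# rtbackup_wfdisc.pf parameter file shaped roughly like the following;
# all paths and values here are hypothetical:
#
#   real_time_wfdir                 /rt/wf
#   real_time_wf_dbname             /rt/db/rtdb
#   wf_subdir_pattern               %Y/%j
#   archive_wfdir                   /archive/wf
#   archive_wf_dbname               /archive/db/archive_%Y
#   copy                            cp -r
#   cat                             cat
#   ignore_most_recent_ndays        2
#   minimum_database_overlap_ndays  3
#   warning_email                   operator@example.com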
XProc
4
jreyes1108/antelope_contrib
nobuild/bin/rt/rtbackup_wfdisc/rtbackup_wfdisc.xpl
[ "BSD-2-Clause", "MIT" ]
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "pybind11/pybind11.h"

#include "tensorflow/lite/python/analyzer_wrapper/model_analyzer.h"

PYBIND11_MODULE(_pywrap_analyzer_wrapper, m) {
  m.def(
      "ModelAnalyzer",
      [](const std::string& model_path, bool input_is_filepath,
         bool gpu_compatibility) {
        return ::tflite::model_analyzer(model_path, input_is_filepath,
                                        gpu_compatibility);
      },
      R"pbdoc(
    Returns txt dump of the given TFLite file.
  )pbdoc");
}
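// Editor's note (illustrative, not part of the original source): once this
// pybind11 extension is built, the Python side can call it roughly as below.
// The import path is an assumption based on the file's location and module
// name, and "model.tflite" is a placeholder:
//
//   from tensorflow.lite.python.analyzer_wrapper import _pywrap_analyzer_wrapper
//   dump = _pywrap_analyzer_wrapper.ModelAnalyzer(
//       "model.tflite",  # model_path
//       True,            # input_is_filepath
//       False)           # gpu_compatibility
//   print(dump)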
C++
4
EricRemmerswaal/tensorflow
tensorflow/lite/python/analyzer_wrapper/analyzer_wrapper.cc
[ "Apache-2.0" ]
// run-pass
// Regression test for issue #10682
// Nested `proc` usage can't use outer owned data

// pretty-expanded FIXME #23616

#![feature(box_syntax)]

fn work(_: Box<isize>) {}

fn foo<F:FnOnce()>(_: F) {}

pub fn main() {
    let a = box 1;
    foo(move|| {
        foo(move|| {
            work(a)
        })
    })
}
Rust
4
Eric-Arellano/rust
src/test/ui/issues/issue-10682.rs
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
io.stderr\write "WARNING: The module `lapis.nginx.postgres` has moved to `lapis.db.postgres`
Please update your require statements as the old path will no longer be available in future versions of lapis.\n\n"

require "lapis.db.postgres"
MoonScript
3
tommy-mor/lapis
lapis/nginx/postgres.moon
[ "MIT", "Unlicense" ]
#!/usr/bin/env bash
set -e

cd "$(dirname "$(readlink -f "$BASH_SOURCE")")/.."

# see also ".mailmap" for how email addresses and names are deduplicated

{
	cat <<- 'EOH'
	# This file lists all individuals having contributed content to the repository.
	# For how it is generated, see `hack/generate-authors.sh`.
	EOH
	echo
	git log --format='%aN <%aE>' | LC_ALL=C.UTF-8 sort -uf
} > AUTHORS
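# Editor's note (illustrative): because the script cd's to the repository
# root itself via $BASH_SOURCE above, it can be invoked from any working
# directory, e.g.:
#
#   bash hack/generate-authors.sh   # regenerates ./AUTHORS in place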
Shell
4
wuxin66/moby
hack/generate-authors.sh
[ "Apache-2.0" ]
/*
 Reconnecting MQTT example - non-blocking

 This sketch demonstrates how to keep the client connected
 using a non-blocking reconnect function. If the client loses
 its connection, it attempts to reconnect every 5 seconds
 without blocking the main loop.
*/

#include <SPI.h>
#include <Ethernet.h>
#include <PubSubClient.h>

// Update these with values suitable for your hardware/network.
byte mac[] = { 0xDE, 0xED, 0xBA, 0xFE, 0xFE, 0xED };
IPAddress ip(172, 16, 0, 100);
IPAddress server(172, 16, 0, 2);

void callback(char* topic, byte* payload, unsigned int length) {
  // handle message arrived
}

EthernetClient ethClient;
PubSubClient client(ethClient);

// millis() returns unsigned long; keeping the timestamp unsigned keeps the
// subtraction below well-defined across the ~49-day millis() rollover.
unsigned long lastReconnectAttempt = 0;

boolean reconnect() {
  if (client.connect("arduinoClient")) {
    // Once connected, publish an announcement...
    client.publish("outTopic", "hello world");
    // ... and resubscribe
    client.subscribe("inTopic");
  }
  return client.connected();
}

void setup() {
  client.setServer(server, 1883);
  client.setCallback(callback);

  Ethernet.begin(mac, ip);
  delay(1500);
  lastReconnectAttempt = 0;
}

void loop() {
  if (!client.connected()) {
    unsigned long now = millis();
    if (now - lastReconnectAttempt > 5000) {
      lastReconnectAttempt = now;
      // Attempt to reconnect
      if (reconnect()) {
        lastReconnectAttempt = 0;
      }
    }
  } else {
    // Client connected
    client.loop();
  }
}
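/*
  Editor's illustration (not part of the original example): a minimal body
  for the empty callback() above, assuming Serial.begin() has been added to
  setup(). PubSubClient hands the handler the raw payload bytes:

    void callback(char* topic, byte* payload, unsigned int length) {
      Serial.print(topic);
      Serial.print(" => ");
      for (unsigned int i = 0; i < length; i++) {
        Serial.write(payload[i]);  // payload is raw bytes, not NUL-terminated
      }
      Serial.println();
    }
*/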
Arduino
5
rovale/pubsubclient
examples/mqtt_reconnect_nonblocking/mqtt_reconnect_nonblocking.ino
[ "MIT" ]
source "${0:h}/z.sh"
Shell
0
chensanle/ohmyzsh
plugins/z/z.plugin.zsh
[ "MIT" ]
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast open Token open Parser_env open Flow_ast module SSet = Flow_set.Make (String) open Parser_common open Comment_attachment module type STATEMENT = sig val for_ : env -> (Loc.t, Loc.t) Statement.t val if_ : env -> (Loc.t, Loc.t) Statement.t val let_ : env -> (Loc.t, Loc.t) Statement.t val try_ : env -> (Loc.t, Loc.t) Statement.t val while_ : env -> (Loc.t, Loc.t) Statement.t val with_ : env -> (Loc.t, Loc.t) Statement.t val block : env -> (Loc.t, Loc.t) Statement.t val break : env -> (Loc.t, Loc.t) Statement.t val continue : env -> (Loc.t, Loc.t) Statement.t val debugger : env -> (Loc.t, Loc.t) Statement.t val declare : ?in_module:bool -> env -> (Loc.t, Loc.t) Statement.t val declare_export_declaration : ?allow_export_type:bool -> env -> (Loc.t, Loc.t) Statement.t val declare_opaque_type : env -> (Loc.t, Loc.t) Statement.t val do_while : env -> (Loc.t, Loc.t) Statement.t val empty : env -> (Loc.t, Loc.t) Statement.t val export_declaration : decorators:(Loc.t, Loc.t) Class.Decorator.t list -> env -> (Loc.t, Loc.t) Statement.t val expression : env -> (Loc.t, Loc.t) Statement.t val import_declaration : env -> (Loc.t, Loc.t) Statement.t val interface : env -> (Loc.t, Loc.t) Statement.t val maybe_labeled : env -> (Loc.t, Loc.t) Statement.t val opaque_type : env -> (Loc.t, Loc.t) Statement.t val return : env -> (Loc.t, Loc.t) Statement.t val switch : env -> (Loc.t, Loc.t) Statement.t val throw : env -> (Loc.t, Loc.t) Statement.t val type_alias : env -> (Loc.t, Loc.t) Statement.t val var : env -> (Loc.t, Loc.t) Statement.t val const : env -> (Loc.t, Loc.t) Statement.t end module Statement (Parse : PARSER) (Type : Type_parser.TYPE) (Declaration : Declaration_parser.DECLARATION) (Object : Object_parser.OBJECT) (Pattern_cover : Pattern_cover.COVER) : STATEMENT = struct type for_lhs = | For_expression of pattern_cover | For_declaration of (Loc.t * (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) type semicolon_type = | Explicit of Loc.t Comment.t list | Implicit of Comment_attachment.trailing_and_remover_result (* FunctionDeclaration is not a valid Statement, but Annex B sometimes allows it. However, AsyncFunctionDeclaration and GeneratorFunctionDeclaration are never allowed as statements. We still parse them as statements (and raise an error) to recover gracefully. *) let function_as_statement env = let func = Declaration._function env in ( if in_strict_mode env then function_as_statement_error_at env (fst func) else let open Ast.Statement in match func with | (loc, FunctionDeclaration { Ast.Function.async = true; _ }) -> error_at env (loc, Parse_error.AsyncFunctionAsStatement) | (loc, FunctionDeclaration { Ast.Function.generator = true; _ }) -> error_at env (loc, Parse_error.GeneratorFunctionAsStatement) | _ -> () ); func (* https://tc39.es/ecma262/#sec-exports-static-semantics-early-errors *) let assert_identifier_name_is_identifier ?restricted_error env (loc, { Ast.Identifier.name; comments = _ }) = match name with | "let" -> (* "let" is disallowed as an identifier in a few situations. 11.6.2.1 lists them out. 
It is always disallowed in strict mode *) if in_strict_mode env then strict_error_at env (loc, Parse_error.StrictReservedWord) else if no_let env then error_at env (loc, Parse_error.Unexpected (Token.quote_token_value name)) | "await" -> (* `allow_await` means that `await` is allowed to be a keyword, which makes it illegal to use as an identifier. https://tc39.github.io/ecma262/#sec-identifiers-static-semantics-early-errors *) if allow_await env then error_at env (loc, Parse_error.UnexpectedReserved) | "yield" -> (* `allow_yield` means that `yield` is allowed to be a keyword, which makes it illegal to use as an identifier. https://tc39.github.io/ecma262/#sec-identifiers-static-semantics-early-errors *) if allow_yield env then error_at env (loc, Parse_error.UnexpectedReserved) else strict_error_at env (loc, Parse_error.StrictReservedWord) | _ when is_strict_reserved name -> strict_error_at env (loc, Parse_error.StrictReservedWord) | _ when is_reserved name -> error_at env (loc, Parse_error.Unexpected (Token.quote_token_value name)) | _ -> begin match restricted_error with | Some err when is_restricted name -> strict_error_at env (loc, err) | _ -> () end let string_literal env (loc, value, raw, octal) = if octal then strict_error env Parse_error.StrictOctalLiteral; let leading = Peek.comments env in Expect.token env (T_STRING (loc, value, raw, octal)); let trailing = Eat.trailing_comments env in ( loc, { StringLiteral.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () } ) (* Semicolon insertion is handled here :(. There seem to be 2 cases where * semicolons are inserted. First, if we reach the EOF. Second, if the next * token is } or is separated by a LineTerminator. *) let semicolon ?(expected = "the token `;`") ?(required = true) env = match Peek.token env with | T_EOF | T_RCURLY -> Implicit { trailing = Eat.trailing_comments env; remove_trailing = (fun x _ -> x) } | T_SEMICOLON -> Eat.token env; (match Peek.token env with | T_EOF | T_RCURLY -> Explicit (Eat.trailing_comments env) | _ when Peek.is_line_terminator env -> Explicit (Eat.comments_until_next_line env) | _ -> Explicit []) | _ when Peek.is_line_terminator env -> Implicit (Comment_attachment.trailing_and_remover_after_last_line env) | _ -> if required then error_unexpected ~expected env; Explicit [] (* Consumes and returns the trailing comments after the end of a statement. Also returns a remover that can remove all comments that are not trailing the previous token. If a statement is the end of a block or file, all comments are trailing. Otherwise, if a statement is followed by a new line, only comments on the current line are trailing. If a statement is not followed by a new line, it does not have trailing comments as they are instead leading comments for the next statement. 
*) let statement_end_trailing_comments env = match Peek.token env with | T_EOF | T_RCURLY -> { trailing = Eat.trailing_comments env; remove_trailing = (fun x _ -> x) } | _ when Peek.is_line_terminator env -> Comment_attachment.trailing_and_remover_after_last_line env | _ -> Comment_attachment.trailing_and_remover_after_last_loc env let variable_declaration_end ~kind env declarations = match semicolon env with | Explicit comments -> (comments, declarations) | Implicit { remove_trailing; _ } -> (* Remove trailing comments from the last declarator *) let declarations = match List.rev declarations with | [] -> [] | decl :: decls -> let decl' = remove_trailing decl (fun remover decl -> remover#variable_declarator ~kind decl) in List.rev (decl' :: decls) in ([], declarations) let rec empty env = let loc = Peek.loc env in let leading = Peek.comments env in Expect.token env T_SEMICOLON; let { trailing; _ } = statement_end_trailing_comments env in ( loc, Statement.Empty { Statement.Empty.comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () } ) and break env = let leading = Peek.comments env in let (loc, (label, trailing)) = with_loc (fun env -> Expect.token env T_BREAK; let label = if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then None else let ((_, { Identifier.name; comments = _ }) as label) = Parse.identifier env in if not (SSet.mem name (labels env)) then error env (Parse_error.UnknownLabel name); Some label in let (trailing, label) = match (semicolon env, label) with | (Explicit trailing, _) | (Implicit { trailing; _ }, None) -> (trailing, label) | (Implicit { remove_trailing; _ }, Some label) -> ([], Some (remove_trailing label (fun remover label -> remover#identifier label))) in (label, trailing)) env in if label = None && not (in_loop env || in_switch env) then error_at env (loc, Parse_error.IllegalBreak); let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in (loc, Statement.Break { Statement.Break.label; comments }) and continue env = let leading = Peek.comments env in let (loc, (label, trailing)) = with_loc (fun env -> Expect.token env T_CONTINUE; let label = if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then None else let ((_, { Identifier.name; comments = _ }) as label) = Parse.identifier env in if not (SSet.mem name (labels env)) then error env (Parse_error.UnknownLabel name); Some label in let (trailing, label) = match (semicolon env, label) with | (Explicit trailing, _) | (Implicit { trailing; _ }, None) -> (trailing, label) | (Implicit { remove_trailing; _ }, Some label) -> ([], Some (remove_trailing label (fun remover label -> remover#identifier label))) in (label, trailing)) env in if not (in_loop env) then error_at env (loc, Parse_error.IllegalContinue); ( loc, Statement.Continue { Statement.Continue.label; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) and debugger = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_DEBUGGER; let pre_semicolon_trailing = if Peek.token env = T_SEMICOLON then Eat.trailing_comments env else [] in let trailing = match semicolon env with | Explicit trailing | Implicit { trailing; _ } -> pre_semicolon_trailing @ trailing in Statement.Debugger { Statement.Debugger.comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () } ) and do_while = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_DO; let body = Parse.statement (env |> with_in_loop true) in (* Annex B allows labelled FunctionDeclarations (see 
sec-labelled-function-declarations), but not in IterationStatement (see sec-semantics-static-semantics-early-errors). *) if (not (in_strict_mode env)) && is_labelled_function body then function_as_statement_error_at env (fst body); let pre_keyword_trailing = Eat.trailing_comments env in Expect.token env T_WHILE; let pre_cond_trailing = Eat.trailing_comments env in Expect.token env T_LPAREN; let test = Parse.expression env in Expect.token env T_RPAREN; let past_cond_trailing = if Peek.token env = T_SEMICOLON then Eat.trailing_comments env else [] in (* The rules of automatic semicolon insertion in ES5 don't mention this, * but the semicolon after a do-while loop is optional. This is properly * specified in ES6 *) let past_cond_trailing = match semicolon ~required:false env with | Explicit trailing -> past_cond_trailing @ trailing | Implicit { trailing; _ } -> trailing in let trailing = pre_keyword_trailing @ pre_cond_trailing @ past_cond_trailing in Statement.DoWhile { Statement.DoWhile.body; test; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) and for_ = let assert_can_be_forin_or_forof env err = function | (loc, { Statement.VariableDeclaration.declarations; _ }) -> (* Only a single declarator is allowed, without an init. So * something like * * for (var x in y) {} * * is allowed, but we disallow * * for (var x, y in z) {} * for (var x = 42 in y) {} *) (match declarations with | [(_, { Statement.VariableDeclaration.Declarator.init = None; _ })] -> () | _ -> error_at env (loc, err)) in (* Annex B allows labelled FunctionDeclarations (see sec-labelled-function-declarations), but not in IterationStatement (see sec-semantics-static-semantics-early-errors). *) let assert_not_labelled_function env body = if (not (in_strict_mode env)) && is_labelled_function body then function_as_statement_error_at env (fst body) else () in with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_FOR; let async = allow_await env && Eat.maybe env T_AWAIT in let leading = leading @ Peek.comments env in Expect.token env T_LPAREN; let comments = Flow_ast_utils.mk_comments_opt ~leading () in let (init, errs) = let env = env |> with_no_in true in match Peek.token env with | T_SEMICOLON -> (None, []) | T_LET -> let (loc, (declarations, leading, errs)) = with_loc Declaration.let_ env in ( Some (For_declaration ( loc, { Statement.VariableDeclaration.kind = Statement.VariableDeclaration.Let; declarations; comments = Flow_ast_utils.mk_comments_opt ~leading (); } ) ), errs ) | T_CONST -> let (loc, (declarations, leading, errs)) = with_loc Declaration.const env in ( Some (For_declaration ( loc, { Statement.VariableDeclaration.kind = Statement.VariableDeclaration.Const; declarations; comments = Flow_ast_utils.mk_comments_opt ~leading (); } ) ), errs ) | T_VAR -> let (loc, (declarations, leading, errs)) = with_loc Declaration.var env in ( Some (For_declaration ( loc, { Statement.VariableDeclaration.kind = Statement.VariableDeclaration.Var; declarations; comments = Flow_ast_utils.mk_comments_opt ~leading (); } ) ), errs ) | _ -> let expr = Parse.expression_or_pattern (env |> with_no_let true) in (Some (For_expression expr), []) in match Peek.token env with (* If `async` is true, this must be a for-await-of loop. 
*) | t when t = T_OF || async -> let left = let open Statement in match init with | Some (For_declaration decl) -> assert_can_be_forin_or_forof env Parse_error.InvalidLHSInForOf decl; ForOf.LeftDeclaration decl | Some (For_expression expr) -> (* #sec-for-in-and-for-of-statements-static-semantics-early-errors *) let patt = Pattern_cover.as_pattern ~err:Parse_error.InvalidLHSInForOf env expr in ForOf.LeftPattern patt | None -> assert false in (* This is a for of loop *) Expect.token env T_OF; let right = Parse.assignment env in Expect.token env T_RPAREN; let body = Parse.statement (env |> with_in_loop true) in assert_not_labelled_function env body; Statement.ForOf { Statement.ForOf.left; right; body; await = async; comments } | T_IN -> let left = match init with | Some (For_declaration decl) -> assert_can_be_forin_or_forof env Parse_error.InvalidLHSInForIn decl; Statement.ForIn.LeftDeclaration decl | Some (For_expression expr) -> (* #sec-for-in-and-for-of-statements-static-semantics-early-errors *) let patt = Pattern_cover.as_pattern ~err:Parse_error.InvalidLHSInForIn env expr in Statement.ForIn.LeftPattern patt | None -> assert false in (* This is a for in loop *) Expect.token env T_IN; let right = Parse.expression env in Expect.token env T_RPAREN; let body = Parse.statement (env |> with_in_loop true) in assert_not_labelled_function env body; Statement.ForIn { Statement.ForIn.left; right; body; each = false; comments } | _ -> (* This is a for loop *) errs |> List.iter (error_at env); Expect.token env T_SEMICOLON; let init = match init with | Some (For_declaration decl) -> Some (Statement.For.InitDeclaration decl) | Some (For_expression expr) -> Some (Statement.For.InitExpression (Pattern_cover.as_expression env expr)) | None -> None in let test = match Peek.token env with | T_SEMICOLON -> None | _ -> Some (Parse.expression env) in Expect.token env T_SEMICOLON; let update = match Peek.token env with | T_RPAREN -> None | _ -> Some (Parse.expression env) in Expect.token env T_RPAREN; let body = Parse.statement (env |> with_in_loop true) in assert_not_labelled_function env body; Statement.For { Statement.For.init; test; update; body; comments } ) and if_ = (* * Either the consequent or alternate of an if statement *) let if_branch env = (* Normally this would just be a Statement, but Annex B allows FunctionDeclarations in non-strict mode. See sec-functiondeclarations-in-ifstatement-statement-clauses *) let stmt = if Peek.is_function env then function_as_statement env else Parse.statement env in (* Annex B allows labelled FunctionDeclarations in non-strict mode (see sec-labelled-function-declarations), but not in IfStatement (see sec-if-statement-static-semantics-early-errors). 
*) if (not (in_strict_mode env)) && is_labelled_function stmt then function_as_statement_error_at env (fst stmt); stmt in let alternate env = let leading = Peek.comments env in Expect.token env T_ELSE; let body = if_branch env in { Statement.If.Alternate.body; comments = Flow_ast_utils.mk_comments_opt ~leading () } in with_loc (fun env -> let pre_if_leading = Peek.comments env in Expect.token env T_IF; let pre_cond_leading = Peek.comments env in let leading = pre_if_leading @ pre_cond_leading in Expect.token env T_LPAREN; let test = Parse.expression env in Expect.token env T_RPAREN; let consequent = if_branch env in let alternate = if Peek.token env = T_ELSE then Some (with_loc alternate env) else None in Statement.If { Statement.If.test; consequent; alternate; comments = Flow_ast_utils.mk_comments_opt ~leading (); } ) and return = with_loc (fun env -> if not (in_function env) then error env Parse_error.IllegalReturn; let leading = Peek.comments env in Expect.token env T_RETURN; let trailing = if Peek.token env = T_SEMICOLON then Eat.trailing_comments env else [] in let argument = if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then None else Some (Parse.expression env) in let (trailing, argument) = match (semicolon env, argument) with | (Explicit comments, _) | (Implicit { trailing = comments; _ }, None) -> (trailing @ comments, argument) | (Implicit { remove_trailing; _ }, Some arg) -> (trailing, Some (remove_trailing arg (fun remover arg -> remover#expression arg))) in Statement.Return { Statement.Return.argument; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) and switch = let rec case_list env (seen_default, acc) = match Peek.token env with | T_EOF | T_RCURLY -> List.rev acc | _ -> let start_loc = Peek.loc env in let leading = Peek.comments env in let (test, trailing) = match Peek.token env with | T_DEFAULT -> if seen_default then error env Parse_error.MultipleDefaultsInSwitch; Expect.token env T_DEFAULT; (None, Eat.trailing_comments env) | _ -> Expect.token env T_CASE; (Some (Parse.expression env), []) in let seen_default = seen_default || test = None in let end_loc = Peek.loc env in Expect.token env T_COLON; let { trailing = line_end_trailing; _ } = statement_end_trailing_comments env in let trailing = trailing @ line_end_trailing in let term_fn = function | T_RCURLY | T_DEFAULT | T_CASE -> true | _ -> false in let consequent = Parse.statement_list ~term_fn (env |> with_in_switch true) in let end_loc = match List.rev consequent with | last_stmt :: _ -> fst last_stmt | _ -> end_loc in let acc = ( Loc.btwn start_loc end_loc, Statement.Switch.Case. 
{ test; consequent; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () } ) :: acc in case_list env (seen_default, acc) in with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_SWITCH; Expect.token env T_LPAREN; let discriminant = Parse.expression env in Expect.token env T_RPAREN; Expect.token env T_LCURLY; let cases = case_list env (false, []) in Expect.token env T_RCURLY; let { trailing; _ } = statement_end_trailing_comments env in Statement.Switch { Statement.Switch.discriminant; cases; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) and throw = with_loc (fun env -> let leading = Peek.comments env in let start_loc = Peek.loc env in Expect.token env T_THROW; if Peek.is_line_terminator env then error_at env (start_loc, Parse_error.NewlineAfterThrow); let argument = Parse.expression env in let (trailing, argument) = match semicolon env with | Explicit trailing -> (trailing, argument) | Implicit { remove_trailing; _ } -> ([], remove_trailing argument (fun remover arg -> remover#expression arg)) in let open Statement in Throw { Throw.argument; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () } ) and try_ = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_TRY; let block = let block = Parse.block_body env in if Peek.token env = T_CATCH then block_remove_trailing env block else block in let handler = match Peek.token env with | T_CATCH -> let catch = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_CATCH; let trailing = Eat.trailing_comments env in let param = if Peek.token env = T_LPAREN then ( Expect.token env T_LPAREN; let p = Some (Parse.pattern env Parse_error.StrictCatchVariable) in Expect.token env T_RPAREN; p ) else None in let body = Parse.block_body env in (* Fix trailing comment attachment if catch block is end of statement *) let body = if Peek.token env <> T_FINALLY then let { remove_trailing; _ } = statement_end_trailing_comments env in remove_trailing body (fun remover (loc, body) -> (loc, remover#block loc body)) else body in { Ast.Statement.Try.CatchClause.param; body; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); }) env in Some catch | _ -> None in let finalizer = match Peek.token env with | T_FINALLY -> Expect.token env T_FINALLY; let (loc, body) = Parse.block_body env in let { remove_trailing; _ } = statement_end_trailing_comments env in let body = remove_trailing body (fun remover body -> remover#block loc body) in Some (loc, body) | _ -> None in (* No catch or finally? That's an error! 
*) if handler = None && finalizer = None then error_at env (fst block, Parse_error.NoCatchOrFinally); Statement.Try { Statement.Try.block; handler; finalizer; comments = Flow_ast_utils.mk_comments_opt ~leading (); } ) and var = with_loc (fun env -> let kind = Statement.VariableDeclaration.Var in let (declarations, leading, errs) = Declaration.var env in let (trailing, declarations) = variable_declaration_end ~kind env declarations in errs |> List.iter (error_at env); Statement.VariableDeclaration { Statement.VariableDeclaration.kind; declarations; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) and const = with_loc (fun env -> let kind = Statement.VariableDeclaration.Const in let (declarations, leading, errs) = Declaration.const env in let (trailing, declarations) = variable_declaration_end ~kind env declarations in errs |> List.iter (error_at env); Statement.VariableDeclaration { Statement.VariableDeclaration.kind; declarations; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) and let_ = with_loc (fun env -> let kind = Statement.VariableDeclaration.Let in let (declarations, leading, errs) = Declaration.let_ env in let (trailing, declarations) = variable_declaration_end ~kind env declarations in errs |> List.iter (error_at env); Statement.VariableDeclaration { Statement.VariableDeclaration.kind; declarations; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) and while_ = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_WHILE; let leading = leading @ Peek.comments env in Expect.token env T_LPAREN; let test = Parse.expression env in Expect.token env T_RPAREN; let body = Parse.statement (env |> with_in_loop true) in (* Annex B allows labelled FunctionDeclarations in non-strict mode (see sec-labelled-function-declarations), but not in IterationStatement (see sec-semantics-static-semantics-early-errors). *) if (not (in_strict_mode env)) && is_labelled_function body then function_as_statement_error_at env (fst body); Statement.While { Statement.While.test; body; comments = Flow_ast_utils.mk_comments_opt ~leading () } ) and with_ env = let (loc, stmt) = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_WITH; let leading = leading @ Peek.comments env in Expect.token env T_LPAREN; let _object = Parse.expression env in Expect.token env T_RPAREN; let body = Parse.statement env in (* Annex B allows labelled FunctionDeclarations in non-strict mode (see sec-labelled-function-declarations), but not in WithStatement (see sec-with-statement-static-semantics-early-errors). 
*) if (not (in_strict_mode env)) && is_labelled_function body then function_as_statement_error_at env (fst body); Statement.With { Statement.With._object; body; comments = Flow_ast_utils.mk_comments_opt ~leading () }) env in strict_error_at env (loc, Parse_error.StrictModeWith); (loc, stmt) and block env = let (loc, block) = Parse.block_body env in let { remove_trailing; _ } = statement_end_trailing_comments env in let block = remove_trailing block (fun remover block -> remover#block loc block) in (loc, Statement.Block block) and maybe_labeled = with_loc (fun env -> let leading = Peek.comments env in match (Parse.expression env, Peek.token env) with | ((loc, Ast.Expression.Identifier label), T_COLON) -> let (_, { Identifier.name; comments = _ }) = label in Expect.token env T_COLON; if SSet.mem name (labels env) then error_at env (loc, Parse_error.Redeclaration ("Label", name)); let env = add_label env name in let body = (* labelled FunctionDeclarations are allowed in non-strict mode (see #sec-labelled-function-declarations) *) if Peek.is_function env then function_as_statement env else Parse.statement env in Statement.Labeled { Statement.Labeled.label; body; comments = Flow_ast_utils.mk_comments_opt ~leading () } | (expression, _) -> let (trailing, expression) = match semicolon ~expected:"the end of an expression statement (`;`)" env with | Explicit comments -> (comments, expression) | Implicit { remove_trailing; _ } -> ([], remove_trailing expression (fun remover expr -> remover#expression expr)) in let open Statement in Expression { Expression.expression; directive = None; comments = Flow_ast_utils.mk_comments_opt ~trailing (); } ) and expression = with_loc (fun env -> let expression = Parse.expression env in let (trailing, expression) = match semicolon ~expected:"the end of an expression statement (`;`)" env with | Explicit comments -> (comments, expression) | Implicit { remove_trailing; _ } -> ([], remove_trailing expression (fun remover expr -> remover#expression expr)) in let directive = if allow_directive env then match expression with | (_, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.String _; raw; _ }) -> Some (String.sub raw 1 (String.length raw - 2)) | _ -> None else None in Statement.Expression { Statement.Expression.expression; directive; comments = Flow_ast_utils.mk_comments_opt ~trailing (); } ) and type_alias_helper ~leading env = if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAlias; let leading = leading @ Peek.comments env in Expect.token env T_TYPE; Eat.push_lex_mode env Lex_mode.TYPE; let id = let id = Type.type_identifier env in if Peek.token env = T_LESS_THAN then id_remove_trailing env id else id in let tparams = Type.type_params env in Expect.token env T_ASSIGN; let right = Type._type env in Eat.pop_lex_mode env; let (trailing, right) = match semicolon env with | Explicit comments -> (comments, right) | Implicit { remove_trailing; _ } -> ([], remove_trailing right (fun remover right -> remover#type_ right)) in Statement.TypeAlias. { id; tparams; right; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () } and declare_type_alias env = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_DECLARE; let type_alias = type_alias_helper ~leading env in Statement.DeclareTypeAlias type_alias) env (** Type aliases squeeze into an unambiguous unused portion of the grammar: `type` is not a reserved word, so `type T` is otherwise two identifiers in a row and that's never valid JS. 
However, if there's a line separator between the two, ASI makes it valid JS, so line separators are disallowed. *) and type_alias env = if Peek.ith_is_identifier ~i:1 env && not (Peek.ith_is_implicit_semicolon ~i:1 env) then let (loc, type_alias) = with_loc (type_alias_helper ~leading:[]) env in (loc, Statement.TypeAlias type_alias) else Parse.statement env and opaque_type_helper ?(declare = false) ~leading env = if not (should_parse_types env) then error env Parse_error.UnexpectedOpaqueTypeAlias; let leading_opaque = leading @ Peek.comments env in Expect.token env T_OPAQUE; let leading_type = Peek.comments env in Expect.token env T_TYPE; let leading = leading_opaque @ leading_type in Eat.push_lex_mode env Lex_mode.TYPE; let id = let id = Type.type_identifier env in if Peek.token env = T_LESS_THAN then id_remove_trailing env id else id in let tparams = Type.type_params env in let supertype = match Peek.token env with | T_COLON -> Expect.token env T_COLON; Some (Type._type env) | _ -> None in let impltype = if declare then match Peek.token env with | T_ASSIGN -> error env Parse_error.DeclareOpaqueTypeInitializer; Eat.token env; if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then None else Some (Type._type env) | _ -> None else ( Expect.token env T_ASSIGN; Some (Type._type env) ) in Eat.pop_lex_mode env; let (trailing, id, tparams, supertype, impltype) = match (semicolon env, tparams, supertype, impltype) with (* opaque type Foo = Bar; *) | (Explicit comments, _, _, _) -> (comments, id, tparams, supertype, impltype) (* opaque type Foo = Bar *) | (Implicit { remove_trailing; _ }, _, _, Some impl) -> ( [], id, tparams, supertype, Some (remove_trailing impl (fun remover impl -> remover#type_ impl)) ) (* opaque type Foo: Super *) | (Implicit { remove_trailing; _ }, _, Some super, None) -> ( [], id, tparams, Some (remove_trailing super (fun remover super -> remover#type_ super)), None ) (* opaque type Foo<T> *) | (Implicit { remove_trailing; _ }, Some tparams, None, None) -> ( [], id, Some (remove_trailing tparams (fun remover tparams -> remover#type_params tparams)), None, None ) (* declare opaque type Foo *) | (Implicit { remove_trailing; _ }, None, None, None) -> ([], remove_trailing id (fun remover id -> remover#identifier id), None, None, None) in Statement.OpaqueType. 
{ id; tparams; impltype; supertype; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } and declare_opaque_type env = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_DECLARE; let opaque_t = opaque_type_helper ~declare:true ~leading env in Statement.DeclareOpaqueType opaque_t) env and opaque_type env = match Peek.ith_token ~i:1 env with | T_TYPE -> let (loc, opaque_t) = with_loc (opaque_type_helper ~declare:false ~leading:[]) env in (loc, Statement.OpaqueType opaque_t) | _ -> Parse.statement env and interface_helper ~leading env = if not (should_parse_types env) then error env Parse_error.UnexpectedTypeInterface; let leading = leading @ Peek.comments env in Expect.token env T_INTERFACE; let id = let id = Type.type_identifier env in if Peek.token env = T_EXTENDS then id else id_remove_trailing env id in let tparams = let tparams = Type.type_params env in if Peek.token env = T_EXTENDS then tparams else type_params_remove_trailing env tparams in let (extends, body) = Type.interface_helper env in let { remove_trailing; _ } = statement_end_trailing_comments env in let body = remove_trailing body (fun remover (loc, body) -> (loc, remover#object_type loc body)) in Statement.Interface. { id; tparams; body; extends; comments = Flow_ast_utils.mk_comments_opt ~leading () } and declare_interface env = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_DECLARE; let iface = interface_helper ~leading env in Statement.DeclareInterface iface) env and interface env = (* disambiguate between a value named `interface`, like `var interface = 1; interface++`, and an interface declaration like `interface Foo {}`.` *) if Peek.ith_is_identifier_name ~i:1 env then let (loc, iface) = with_loc (interface_helper ~leading:[]) env in (loc, Statement.InterfaceDeclaration iface) else expression env and declare_class = let rec mixins env acc = let super = Type.generic env in let acc = super :: acc in match Peek.token env with | T_COMMA -> Expect.token env T_COMMA; mixins env acc | _ -> List.rev acc (* This is identical to `interface`, except that mixins are allowed *) in fun ~leading env -> let env = env |> with_strict true in let leading = leading @ Peek.comments env in Expect.token env T_CLASS; let id = let id = Parse.identifier env in match Peek.token env with | T_LESS_THAN | T_LCURLY -> id_remove_trailing env id | _ -> id in let tparams = let tparams = Type.type_params env in match Peek.token env with | T_LCURLY -> type_params_remove_trailing env tparams | _ -> tparams in let extends = if Eat.maybe env T_EXTENDS then let extends = Type.generic env in match Peek.token env with | T_LCURLY -> Some (generic_type_remove_trailing env extends) | _ -> Some extends else None in let mixins = match Peek.token env with | T_IDENTIFIER { raw = "mixins"; _ } -> Eat.token env; let mixins = mixins env [] in (match Peek.token env with | T_LCURLY -> generic_type_list_remove_trailing env mixins | _ -> mixins) | _ -> [] in let implements = match Peek.token env with | T_IMPLEMENTS -> let implements = Object.class_implements env ~attach_leading:false in (match Peek.token env with | T_LCURLY -> Some (class_implements_remove_trailing env implements) | _ -> Some implements) | _ -> None in let body = Type._object ~is_class:true env in let { remove_trailing; _ } = statement_end_trailing_comments env in let body = remove_trailing body (fun remover (loc, body) -> (loc, remover#object_type loc body)) in let comments = Flow_ast_utils.mk_comments_opt ~leading () in 
Statement.DeclareClass.{ id; tparams; body; extends; mixins; implements; comments } and declare_class_statement env = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_DECLARE; let fn = declare_class ~leading env in Statement.DeclareClass fn) env and declare_function ?(leading = []) env = let leading = leading @ Peek.comments env in Expect.token env T_FUNCTION; let id = id_remove_trailing env (Parse.identifier env) in let start_sig_loc = Peek.loc env in let tparams = type_params_remove_trailing env (Type.type_params env) in let params = Type.function_param_list env in Expect.token env T_COLON; let return = let return = Type._type env in let has_predicate = Eat.push_lex_mode env Lex_mode.TYPE; let type_token = Peek.token env in Eat.pop_lex_mode env; type_token = T_CHECKS in if has_predicate then type_remove_trailing env return else return in let end_loc = fst return in let loc = Loc.btwn start_sig_loc end_loc in let annot = (loc, Ast.Type.(Function { Function.params; return; tparams; comments = None })) in let predicate = Type.predicate_opt env in let (trailing, annot, predicate) = match (semicolon env, predicate) with | (Explicit comments, _) -> (comments, annot, predicate) | (Implicit { remove_trailing; _ }, None) -> ([], remove_trailing annot (fun remover annot -> remover#type_ annot), None) | (Implicit { remove_trailing; _ }, Some pred) -> ([], annot, Some (remove_trailing pred (fun remover pred -> remover#predicate pred))) in let annot = (loc, annot) in Statement.DeclareFunction. { id; annot; predicate; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () } and declare_function_statement env = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_DECLARE; begin match Peek.token env with | T_ASYNC -> error env Parse_error.DeclareAsync; Expect.token env T_ASYNC | _ -> () end; let fn = declare_function ~leading env in Statement.DeclareFunction fn) env and declare_var env leading = let leading = leading @ Peek.comments env in Expect.token env T_VAR; let name = Parse.identifier ~restricted_error:Parse_error.StrictVarName env in let annot = Type.annotation env in let (trailing, name, annot) = match semicolon env with (* declare var x; *) | Explicit trailing -> (trailing, name, annot) (* declare var x *) | Implicit { remove_trailing; _ } -> ([], name, remove_trailing annot (fun remover annot -> remover#type_annotation annot)) in Statement.DeclareVariable. { id = name; annot; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () } and declare_var_statement env = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_DECLARE; let var = declare_var env leading in Statement.DeclareVariable var) env and declare_module = let rec module_items env ~module_kind acc = match Peek.token env with | T_EOF | T_RCURLY -> (module_kind, List.rev acc) | _ -> let stmt = declare ~in_module:true env in (* TODO: This is a semantic analysis and shouldn't be in the parser *) let module_kind = let open Statement in let (loc, stmt) = stmt in match (module_kind, stmt) with (* * The first time we see either a `declare export` or a * `declare module.exports`, we lock in the kind of the module. * * `declare export type` and `declare export interface` are the two * exceptions to this rule because they are valid in both CommonJS * and ES modules (and thus do not indicate an intent for either). 
*) | (None, DeclareModuleExports _) -> Some (DeclareModule.CommonJS loc) | (None, DeclareExportDeclaration { DeclareExportDeclaration.declaration; _ }) -> (match declaration with | Some (DeclareExportDeclaration.NamedType _) | Some (DeclareExportDeclaration.Interface _) -> module_kind | _ -> Some (DeclareModule.ES loc)) (* * There should never be more than one `declare module.exports` * statement *) | (Some (DeclareModule.CommonJS _), DeclareModuleExports _) -> error env Parse_error.DuplicateDeclareModuleExports; module_kind (* * It's never ok to mix and match `declare export` and * `declare module.exports` in the same module because it leaves the * kind of the module (CommonJS vs ES) ambiguous. * * The 1 exception to this rule is that `export type/interface` are * both ok in CommonJS modules. *) | (Some (DeclareModule.ES _), DeclareModuleExports _) -> error env Parse_error.AmbiguousDeclareModuleKind; module_kind | ( Some (DeclareModule.CommonJS _), DeclareExportDeclaration { DeclareExportDeclaration.declaration; _ } ) -> (match declaration with | Some (DeclareExportDeclaration.NamedType _) | Some (DeclareExportDeclaration.Interface _) -> () | _ -> error env Parse_error.AmbiguousDeclareModuleKind); module_kind | _ -> module_kind in module_items env ~module_kind (stmt :: acc) in let declare_module_ env start_loc leading = let id = match Peek.token env with | T_STRING str -> Statement.DeclareModule.Literal (string_literal_remove_trailing env (string_literal env str)) | _ -> Statement.DeclareModule.Identifier (id_remove_trailing env (Parse.identifier env)) in let (body_loc, ((module_kind, body), comments)) = with_loc (fun env -> let leading = Peek.comments env in Expect.token env T_LCURLY; let (module_kind, body) = module_items env ~module_kind:None [] in let internal = if body = [] then Peek.comments env else [] in Expect.token env T_RCURLY; let { trailing; _ } = statement_end_trailing_comments env in ( (module_kind, body), Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal () )) env in let body = (body_loc, { Statement.Block.body; comments }) in let loc = Loc.btwn start_loc body_loc in let kind = match module_kind with | Some k -> k | None -> Statement.DeclareModule.CommonJS loc in let comments = Flow_ast_utils.mk_comments_opt ~leading () in (loc, Statement.(DeclareModule DeclareModule.{ id; body; kind; comments })) in fun ?(in_module = false) env -> let start_loc = Peek.loc env in let leading = Peek.comments env in Expect.token env T_DECLARE; let leading = leading @ Peek.comments env in Expect.identifier env "module"; if in_module || Peek.token env = T_PERIOD then let (loc, exports) = with_loc (declare_module_exports ~leading) env in (Loc.btwn start_loc loc, exports) else declare_module_ env start_loc leading and declare_module_exports ~leading env = let leading_period = Peek.comments env in Expect.token env T_PERIOD; let leading_exports = Peek.comments env in Expect.identifier env "exports"; let leading_annot = Peek.comments env in let leading = List.concat [leading; leading_period; leading_exports; leading_annot] in let annot = Type.annotation env in let (annot, trailing) = match semicolon env with | Explicit trailing -> (annot, trailing) | Implicit { remove_trailing; _ } -> (remove_trailing annot (fun remover annot -> remover#type_annotation annot), []) in let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in Statement.DeclareModuleExports { Statement.DeclareModuleExports.annot; comments } and declare ?(in_module = false) env = if not 
(should_parse_types env) then error env Parse_error.UnexpectedTypeDeclaration; (* eventually, just emit a wrapper AST node *) match Peek.ith_token ~i:1 env with | T_CLASS -> declare_class_statement env | T_INTERFACE -> declare_interface env | T_TYPE -> (match Peek.token env with | T_IMPORT when in_module -> import_declaration env | _ -> declare_type_alias env) | T_OPAQUE -> declare_opaque_type env | T_TYPEOF when Peek.token env = T_IMPORT -> import_declaration env | T_FUNCTION | T_ASYNC -> declare_function_statement env | T_VAR -> declare_var_statement env | T_EXPORT when in_module -> declare_export_declaration ~allow_export_type:in_module env | T_IDENTIFIER { raw = "module"; _ } -> declare_module ~in_module env | _ when in_module -> (match Peek.token env with | T_IMPORT -> error env Parse_error.InvalidNonTypeImportInDeclareModule; Parse.statement env | _ -> (* Oh boy, found some bad stuff in a declare module. Let's just * pretend it's a declare var (arbitrary choice) *) declare_var_statement env) | _ -> Parse.statement env and export_source env = Expect.identifier env "from"; match Peek.token env with | T_STRING str -> string_literal env str | _ -> (* Just make up a string for the error case *) let ret = (Peek.loc env, { StringLiteral.value = ""; raw = ""; comments = None }) in error_unexpected ~expected:"a string" env; ret and export_source_and_semicolon env = let (source_loc, source) = export_source env in match semicolon env with | Explicit trailing -> ((source_loc, source), trailing) | Implicit { remove_trailing; _ } -> ( ( source_loc, remove_trailing source (fun remover source -> remover#string_literal_type source_loc source ) ), [] ) and extract_pattern_binding_names = let rec fold acc = let open Pattern in function | (_, Object { Object.properties; _ }) -> List.fold_left (fun acc prop -> match prop with | Object.Property (_, { Object.Property.pattern; _ }) | Object.RestElement (_, { RestElement.argument = pattern; comments = _ }) -> fold acc pattern) acc properties | (_, Array { Array.elements; _ }) -> List.fold_left (fun acc elem -> match elem with | Array.Element (_, { Array.Element.argument = pattern; default = _ }) | Array.RestElement (_, { RestElement.argument = pattern; comments = _ }) -> fold acc pattern | Array.Hole _ -> acc) acc elements | (_, Identifier { Pattern.Identifier.name; _ }) -> name :: acc | (_, Expression _) -> failwith "Parser error: No such thing as an expression pattern!" 
in List.fold_left fold and extract_ident_name (_, { Identifier.name; comments = _ }) = name and export_specifiers ?(preceding_comma = true) env specifiers = match Peek.token env with | T_EOF | T_RCURLY -> List.rev specifiers | _ -> if not preceding_comma then error env Parse_error.ExportSpecifierMissingComma; let specifier = with_loc (fun env -> let local = identifier_name env in let exported = match Peek.token env with | T_IDENTIFIER { raw = "as"; _ } -> Eat.token env; let exported = identifier_name env in record_export env exported; Some exported | _ -> record_export env local; None in { Statement.ExportNamedDeclaration.ExportSpecifier.local; exported }) env in let preceding_comma = Eat.maybe env T_COMMA in export_specifiers ~preceding_comma env (specifier :: specifiers) and assert_export_specifier_identifiers env specifiers = Statement.ExportNamedDeclaration.ExportSpecifier.( List.iter (function | (_, { local = id; exported = None }) -> assert_identifier_name_is_identifier ~restricted_error:Parse_error.StrictVarName env id | _ -> ()) specifiers ) and export_declaration ~decorators = with_loc (fun env -> let env = env |> with_strict true |> with_in_export true in let start_loc = Peek.loc env in let leading = Peek.comments env in Expect.token env T_EXPORT; match Peek.token env with | T_DEFAULT -> (* export default ... *) Statement.ExportDefaultDeclaration.( let leading = leading @ Peek.comments env in let (default, ()) = with_loc (fun env -> Expect.token env T_DEFAULT) env in record_export env (Flow_ast_utils.ident_of_source (Loc.btwn start_loc (Peek.loc env), "default")); let (declaration, trailing) = if Peek.is_function env then (* export default [async] function [foo] (...) { ... } *) let fn = Declaration._function env in (Declaration fn, []) else if Peek.is_class env then (* export default class foo { ... } *) let _class = Object.class_declaration env decorators in (Declaration _class, []) else if Peek.token env = T_ENUM then (* export default enum foo { ... } *) (Declaration (Declaration.enum_declaration env), []) else (* export default [assignment expression]; *) let expr = Parse.assignment env in let (expr, trailing) = match semicolon env with | Explicit trailing -> (expr, trailing) | Implicit { remove_trailing; _ } -> (remove_trailing expr (fun remover expr -> remover#expression expr), []) in (Expression expr, trailing) in Statement.ExportDefaultDeclaration { default; declaration; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) | T_TYPE when Peek.ith_token ~i:1 env <> T_LCURLY -> (* export type ... 
*) Statement.ExportNamedDeclaration.( if not (should_parse_types env) then error env Parse_error.UnexpectedTypeExport; (match Peek.ith_token ~i:1 env with | T_MULT -> Expect.token env T_TYPE; let specifier_loc = Peek.loc env in Expect.token env T_MULT; let (source, trailing) = export_source_and_semicolon env in Statement.ExportNamedDeclaration { declaration = None; specifiers = Some (ExportBatchSpecifier (specifier_loc, None)); source = Some source; export_kind = Statement.ExportType; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } | T_ENUM -> error env Parse_error.EnumInvalidExport; Expect.token env T_TYPE; Statement.ExportNamedDeclaration { declaration = None; specifiers = None; source = None; export_kind = Statement.ExportType; comments = Flow_ast_utils.mk_comments_opt ~leading (); } | _ -> let (loc, type_alias) = with_loc (type_alias_helper ~leading:[]) env in record_export env (Flow_ast_utils.ident_of_source (loc, extract_ident_name type_alias.Statement.TypeAlias.id) ); let type_alias = (loc, Statement.TypeAlias type_alias) in Statement.ExportNamedDeclaration { declaration = Some type_alias; specifiers = None; source = None; export_kind = Statement.ExportType; comments = Flow_ast_utils.mk_comments_opt ~leading (); }) ) | T_OPAQUE -> (* export opaque type ... *) Statement.ExportNamedDeclaration.( let (loc, opaque_t) = with_loc (opaque_type_helper ~leading:[]) env in record_export env (Flow_ast_utils.ident_of_source (loc, extract_ident_name opaque_t.Statement.OpaqueType.id) ); let opaque_t = (loc, Statement.OpaqueType opaque_t) in Statement.ExportNamedDeclaration { declaration = Some opaque_t; specifiers = None; source = None; export_kind = Statement.ExportType; comments = Flow_ast_utils.mk_comments_opt ~leading (); } ) | T_INTERFACE -> (* export interface I { ... } *) Statement.ExportNamedDeclaration.( if not (should_parse_types env) then error env Parse_error.UnexpectedTypeExport; let interface = interface env in (match interface with | (loc, Statement.InterfaceDeclaration { Statement.Interface.id; _ }) -> record_export env (Flow_ast_utils.ident_of_source (loc, extract_ident_name id)) | _ -> failwith ("Internal Flow Error! Parsed `export interface` into something " ^ "other than an interface declaration!" 
)); Statement.ExportNamedDeclaration { declaration = Some interface; specifiers = None; source = None; export_kind = Statement.ExportType; comments = Flow_ast_utils.mk_comments_opt ~leading (); } ) | T_LET | T_CONST | T_VAR (* not using Peek.is_class here because it would guard all of the * cases *) | T_AT | T_CLASS (* not using Peek.is_function here because it would guard all of the * cases *) | T_ASYNC | T_FUNCTION | T_ENUM -> Statement.ExportNamedDeclaration.( let stmt = Parse.statement_list_item env ~decorators in let names = let open Statement in match stmt with | (_, VariableDeclaration { VariableDeclaration.declarations; _ }) -> List.fold_left (fun names (_, declaration) -> let id = declaration.VariableDeclaration.Declarator.id in extract_pattern_binding_names names [id]) [] declarations | (loc, ClassDeclaration { Class.id = Some id; _ }) | (loc, FunctionDeclaration { Function.id = Some id; _ }) | (loc, EnumDeclaration { EnumDeclaration.id; _ }) -> [Flow_ast_utils.ident_of_source (loc, extract_ident_name id)] | (loc, ClassDeclaration { Class.id = None; _ }) -> error_at env (loc, Parse_error.ExportNamelessClass); [] | (loc, FunctionDeclaration { Function.id = None; _ }) -> error_at env (loc, Parse_error.ExportNamelessFunction); [] | _ -> failwith "Internal Flow Error! Unexpected export statement declaration!" in List.iter (record_export env) names; Statement.ExportNamedDeclaration { declaration = Some stmt; specifiers = None; source = None; export_kind = Statement.ExportValue; comments = Flow_ast_utils.mk_comments_opt ~leading (); } ) | T_MULT -> Statement.ExportNamedDeclaration.( let loc = Peek.loc env in Expect.token env T_MULT; let local_name = let parse_export_star_as = (parse_options env).esproposal_export_star_as in match Peek.token env with | T_IDENTIFIER { raw = "as"; _ } -> Eat.token env; if parse_export_star_as then Some (Parse.identifier env) else ( error env Parse_error.UnexpectedTypeDeclaration; None ) | _ -> None in let specifiers = Some (ExportBatchSpecifier (loc, local_name)) in let (source, trailing) = export_source_and_semicolon env in Statement.ExportNamedDeclaration { declaration = None; specifiers; source = Some source; export_kind = Statement.ExportValue; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) | _ -> Statement.ExportNamedDeclaration.( let export_kind = match Peek.token env with | T_TYPE -> Eat.token env; Statement.ExportType | _ -> Statement.ExportValue in Expect.token env T_LCURLY; let specifiers = export_specifiers env [] in Expect.token env T_RCURLY; let (source, trailing) = match Peek.token env with | T_IDENTIFIER { raw = "from"; _ } -> let (source, trailing) = export_source_and_semicolon env in (Some source, trailing) | _ -> assert_export_specifier_identifiers env specifiers; let trailing = match semicolon env with | Explicit trailing -> trailing | Implicit { trailing; _ } -> trailing in (None, trailing) in Statement.ExportNamedDeclaration { declaration = None; specifiers = Some (ExportSpecifiers specifiers); source; export_kind; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } ) ) and declare_export_declaration ?(allow_export_type = false) = with_loc (fun env -> if not (should_parse_types env) then error env Parse_error.UnexpectedTypeDeclaration; let leading = Peek.comments env in Expect.token env T_DECLARE; let env = env |> with_strict true |> with_in_export true in let leading = leading @ Peek.comments env in Expect.token env T_EXPORT; Statement.DeclareExportDeclaration.( match Peek.token env with | 
T_DEFAULT -> (* declare export default ... *) let leading = leading @ Peek.comments env in let (default, ()) = with_loc (fun env -> Expect.token env T_DEFAULT) env in let (declaration, trailing) = match Peek.token env with | T_FUNCTION -> (* declare export default function foo (...): ... *) let fn = with_loc declare_function env in (Some (Function fn), []) | T_CLASS -> (* declare export default class foo { ... } *) let class_ = with_loc (declare_class ~leading:[]) env in (Some (Class class_), []) | _ -> (* declare export default [type]; *) let type_ = Type._type env in let (type_, trailing) = match semicolon env with | Explicit trailing -> (type_, trailing) | Implicit { remove_trailing; _ } -> (remove_trailing type_ (fun remover type_ -> remover#type_ type_), []) in (Some (DefaultType type_), trailing) in let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in Statement.DeclareExportDeclaration { default = Some default; declaration; specifiers = None; source = None; comments } | T_LET | T_CONST | T_VAR | T_CLASS | T_FUNCTION -> let declaration = match Peek.token env with | T_FUNCTION -> (* declare export function foo (...): ... *) let fn = with_loc declare_function env in Some (Function fn) | T_CLASS -> (* declare export class foo { ... } *) let class_ = with_loc (declare_class ~leading:[]) env in Some (Class class_) | (T_LET | T_CONST | T_VAR) as token -> (match token with | T_LET -> error env Parse_error.DeclareExportLet | T_CONST -> error env Parse_error.DeclareExportConst | _ -> ()); (* declare export var foo: ... *) let var = with_loc (fun env -> declare_var env []) env in Some (Variable var) | _ -> assert false in let comments = Flow_ast_utils.mk_comments_opt ~leading () in Statement.DeclareExportDeclaration { default = None; declaration; specifiers = None; source = None; comments } | T_MULT -> (* declare export * from 'foo' *) let loc = Peek.loc env in Expect.token env T_MULT; let parse_export_star_as = (parse_options env).esproposal_export_star_as in let local_name = match Peek.token env with | T_IDENTIFIER { raw = "as"; _ } -> Eat.token env; if parse_export_star_as then Some (Parse.identifier env) else ( error env Parse_error.UnexpectedTypeDeclaration; None ) | _ -> None in let specifiers = Statement.ExportNamedDeclaration.(Some (ExportBatchSpecifier (loc, local_name))) in let (source, trailing) = export_source_and_semicolon env in let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in Statement.DeclareExportDeclaration { default = None; declaration = None; specifiers; source = Some source; comments } | T_TYPE when allow_export_type -> (* declare export type = ... *) let alias = with_loc (type_alias_helper ~leading:[]) env in let comments = Flow_ast_utils.mk_comments_opt ~leading () in Statement.DeclareExportDeclaration { default = None; declaration = Some (NamedType alias); specifiers = None; source = None; comments; } | T_OPAQUE -> (* declare export opaque type = ... *) let opaque = with_loc (opaque_type_helper ~declare:true ~leading:[]) env in let comments = Flow_ast_utils.mk_comments_opt ~leading () in Statement.DeclareExportDeclaration { default = None; declaration = Some (NamedOpaqueType opaque); specifiers = None; source = None; comments; } | T_INTERFACE when allow_export_type -> (* declare export interface ... 
*) let iface = with_loc (interface_helper ~leading:[]) env in let comments = Flow_ast_utils.mk_comments_opt ~leading () in Statement.DeclareExportDeclaration { default = None; declaration = Some (Interface iface); specifiers = None; source = None; comments; } | _ -> (match Peek.token env with | T_TYPE -> error env Parse_error.DeclareExportType | T_INTERFACE -> error env Parse_error.DeclareExportInterface | _ -> ()); Expect.token env T_LCURLY; let specifiers = export_specifiers env [] in Expect.token env T_RCURLY; let (source, trailing) = match Peek.token env with | T_IDENTIFIER { raw = "from"; _ } -> let (source, trailing) = export_source_and_semicolon env in (Some source, trailing) | _ -> assert_export_specifier_identifiers env specifiers; let trailing = match semicolon env with | Explicit trailing -> trailing | Implicit { trailing; _ } -> trailing in (None, trailing) in let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in Statement.DeclareExportDeclaration { default = None; declaration = None; specifiers = Some (Statement.ExportNamedDeclaration.ExportSpecifiers specifiers); source; comments; } ) ) and import_declaration = Statement.ImportDeclaration.( let missing_source env = (* Just make up a string for the error case *) let loc = Peek.loc_skip_lookahead env in (loc, { StringLiteral.value = ""; raw = ""; comments = None }) in let source env = match Peek.token env with | T_IDENTIFIER { raw = "from"; _ } -> Eat.token env; (match Peek.token env with | T_STRING str -> string_literal env str | _ -> error_unexpected ~expected:"a string" env; missing_source env) | _ -> error_unexpected ~expected:"the keyword `from`" env; missing_source env in let is_type_import = function | T_TYPE | T_TYPEOF -> true | _ -> false (* `x` or `x as y` in a specifier *) in let with_maybe_as ~for_type ?error_if_type env = let identifier env = if for_type then Type.type_identifier env else Parse.identifier env in match Peek.ith_token ~i:1 env with | T_IDENTIFIER { raw = "as"; _ } -> let remote = identifier_name env in Eat.token env; (* as *) let local = Some (identifier env) in (remote, local) | T_EOF | T_COMMA | T_RCURLY -> (identifier env, None) | _ -> begin match (error_if_type, Peek.token env) with | (Some error_if_type, T_TYPE) | (Some error_if_type, T_TYPEOF) -> error env error_if_type; Eat.token env; (* consume `type` or `typeof` *) (Type.type_identifier env, None) | _ -> (identifier env, None) end (* ImportSpecifier[Type]: [~Type] ImportedBinding [~Type] IdentifierName ImportedTypeBinding [~Type] IdentifierName IdentifierName ImportedBinding [~Type] IdentifierName IdentifierName IdentifierName ImportedTypeBinding [+Type] ImportedTypeBinding [+Type] IdentifierName IdentifierName ImportedTypeBinding Static Semantics: `IdentifierName ImportedTypeBinding`: - It is a Syntax Error if IdentifierName's StringValue is not "type" or "typeof" `IdentifierName IdentifierName ImportedBinding`: - It is a Syntax Error if the second IdentifierName's StringValue is not "as" `IdentifierName IdentifierName IdentifierName ImportedTypeBinding`: - It is a Syntax Error if the first IdentifierName's StringValue is not "type" or "typeof", and the third IdentifierName's StringValue is not "as" *) in let specifier env = let kind = match Peek.token env with | T_TYPE -> Some ImportType | T_TYPEOF -> Some ImportTypeof | _ -> None in if is_type_import (Peek.token env) then (* consume `type`, but we don't know yet whether this is `type foo` or `type as foo`. 
*) let type_keyword_or_remote = identifier_name env in match Peek.token env with (* `type` (a value) *) | T_EOF | T_RCURLY | T_COMMA -> let remote = type_keyword_or_remote in (* `type` becomes a value *) assert_identifier_name_is_identifier env remote; { remote; local = None; kind = None } (* `type as foo` (value named `type`) or `type as,` (type named `as`) *) | T_IDENTIFIER { raw = "as"; _ } -> begin match Peek.ith_token ~i:1 env with | T_EOF | T_RCURLY | T_COMMA -> (* `type as` *) { remote = Type.type_identifier env; local = None; kind } | T_IDENTIFIER { raw = "as"; _ } -> (* `type as as foo` *) let remote = identifier_name env in (* first `as` *) Eat.token env; (* second `as` *) let local = Some (Type.type_identifier env) in (* `foo` *) { remote; local; kind } | _ -> (* `type as foo` *) let remote = type_keyword_or_remote in (* `type` becomes a value *) assert_identifier_name_is_identifier env remote; Eat.token env; (* `as` *) let local = Some (Parse.identifier env) in { remote; local; kind = None } end (* `type x`, or `type x as y` *) | _ -> let (remote, local) = with_maybe_as ~for_type:true env in { remote; local; kind } else (* standard `x` or `x as y` *) let (remote, local) = with_maybe_as ~for_type:false env in { remote; local; kind = None } (* specifier in an `import type { ... }` *) in let type_specifier env = let (remote, local) = with_maybe_as env ~for_type:true ~error_if_type:Parse_error.ImportTypeShorthandOnlyInPureImport in { remote; local; kind = None } (* specifier in an `import typeof { ... }` *) in let typeof_specifier env = let (remote, local) = with_maybe_as env ~for_type:true ~error_if_type:Parse_error.ImportTypeShorthandOnlyInPureImport in { remote; local; kind = None } in let rec specifier_list ?(preceding_comma = true) env statement_kind acc = match Peek.token env with | T_EOF | T_RCURLY -> List.rev acc | _ -> if not preceding_comma then error env Parse_error.ImportSpecifierMissingComma; let specifier = match statement_kind with | ImportType -> type_specifier env | ImportTypeof -> typeof_specifier env | ImportValue -> specifier env in let preceding_comma = Eat.maybe env T_COMMA in specifier_list ~preceding_comma env statement_kind (specifier :: acc) in let named_or_namespace_specifier env import_kind = match Peek.token env with | T_MULT -> let id = with_loc_opt (fun env -> (* consume T_MULT *) Eat.token env; match Peek.token env with | T_IDENTIFIER { raw = "as"; _ } -> (* consume "as" *) Eat.token env; (match import_kind with | ImportType | ImportTypeof -> Some (Type.type_identifier env) | ImportValue -> Some (Parse.identifier env)) | _ -> error_unexpected ~expected:"the keyword `as`" env; None) env in (match id with | Some id -> Some (ImportNamespaceSpecifier id) | None -> None) | _ -> Expect.token env T_LCURLY; let specifiers = specifier_list env import_kind [] in Expect.token env T_RCURLY; Some (ImportNamedSpecifiers specifiers) in let semicolon_and_trailing env source = match semicolon env with | Explicit trailing -> (trailing, source) | Implicit { remove_trailing; _ } -> ( [], remove_trailing source (fun remover (loc, source) -> (loc, remover#string_literal_type loc source) ) ) in let with_specifiers import_kind env leading = let specifiers = named_or_namespace_specifier env import_kind in let source = source env in let (trailing, source) = semicolon_and_trailing env source in Statement.ImportDeclaration { import_kind; source; specifiers; default = None; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } in let with_default import_kind env 
leading = let default_specifier = match import_kind with | ImportType | ImportTypeof -> Type.type_identifier env | ImportValue -> Parse.identifier env in let additional_specifiers = match Peek.token env with | T_COMMA -> (* `import Foo, ...` *) Expect.token env T_COMMA; named_or_namespace_specifier env import_kind | _ -> None in let source = source env in let (trailing, source) = semicolon_and_trailing env source in Statement.ImportDeclaration { import_kind; source; specifiers = additional_specifiers; default = Some default_specifier; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } in with_loc (fun env -> let env = env |> with_strict true in let leading = Peek.comments env in Expect.token env T_IMPORT; match Peek.token env with (* `import * as ns from "ModuleName";` *) | T_MULT -> with_specifiers ImportValue env leading (* `import { ... } from "ModuleName";` *) | T_LCURLY -> with_specifiers ImportValue env leading (* `import "ModuleName";` *) | T_STRING str -> let source = string_literal env str in let (trailing, source) = semicolon_and_trailing env source in Statement.ImportDeclaration { import_kind = ImportValue; source; specifiers = None; default = None; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); } (* `import type [...] from "ModuleName";` note that if [...] is missing, we're importing a value named `type`! *) | T_TYPE when should_parse_types env -> begin match Peek.ith_token ~i:1 env with (* `import type, { other, names } from "ModuleName";` *) | T_COMMA (* `import type from "ModuleName";` *) | T_IDENTIFIER { raw = "from"; _ } -> (* Importing the exported value named "type". This is not a type-import.*) with_default ImportValue env leading (* `import type *` is invalid, since the namespace can't be a type *) | T_MULT -> (* consume `type` *) Eat.token env; (* unexpected `*` *) error_unexpected env; with_specifiers ImportType env leading | T_LCURLY -> (* consume `type` *) Eat.token env; with_specifiers ImportType env leading | _ -> (* consume `type` *) Eat.token env; with_default ImportType env leading end (* `import typeof ... from "ModuleName";` *) | T_TYPEOF when should_parse_types env -> Expect.token env T_TYPEOF; begin match Peek.token env with | T_MULT | T_LCURLY -> with_specifiers ImportTypeof env leading | _ -> with_default ImportTypeof env leading end (* import Foo from "ModuleName"; *) | _ -> with_default ImportValue env leading ) ) end
OCaml
5
zhangmaijun/flow
src/parser/statement_parser.ml
[ "MIT" ]
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_LITE_EXAMPLES_LABEL_IMAGE_LOG_H_
#define TENSORFLOW_LITE_EXAMPLES_LABEL_IMAGE_LOG_H_

#include <iostream>
#include <sstream>

namespace tflite {
namespace label_image {

class Log {
  std::stringstream stream_;

 public:
  explicit Log(const char* severity) { stream_ << severity << ": "; }
  std::stringstream& Stream() { return stream_; }
  ~Log() { std::cerr << stream_.str() << std::endl; }
};

#define LOG(severity) tflite::label_image::Log(#severity).Stream()

}  // namespace label_image
}  // namespace tflite

#endif  // TENSORFLOW_LITE_EXAMPLES_LABEL_IMAGE_LOG_H_
C
4
EricRemmerswaal/tensorflow
tensorflow/lite/examples/label_image/log.h
[ "Apache-2.0" ]
{% extends "base.ahk" %}
{% block body %}
SetRegView, {{ reg_view }}
{% endblock body %}
AutoHotkey
1
scslmd/ahk
ahk/templates/registery/reg_set_view.ahk
[ "MIT" ]
@article{UNDERWORLD,
  title={{3DUNDERWORLD-SLS}: {A}n {O}pen-{S}ource {S}tructured-{L}ight {S}canning {S}ystem for {R}apid {G}eometry {A}cquisition},
  author={Herakleous, Kyriakos and Poullis, Charalambos},
  journal={arXiv preprint arXiv:1406.6595},
  year={2014}
}

@Article{pattern,
  author  = {Salvi, Joaquim and Pag\'es, Jordi and Batlle, Joan},
  title   = {Pattern codification strategies in structured light systems},
  journal = {Pattern Recognition},
  volume  = {37},
  number  = {4},
  pages   = {827-849},
  year    = {April 2004},
}

@article{faps,
  title={Accurate dynamic 3D sensing with Fourier-assisted phase shifting},
  author={Cong, Pengyu and Xiong, Zhiwei and Zhang, Yueyi and Zhao, Shenghui and Wu, Feng},
  journal={IEEE Journal of Selected Topics in Signal Processing},
  volume={9},
  number={3},
  pages={396--408},
  year={2015},
}
TeX
1
Nondzu/opencv_contrib
modules/structured_light/doc/structured_light.bib
[ "BSD-3-Clause" ]
/*****************************************************************************
 *
 *		CREATE FUNCTION stmt
 *
 *****************************************************************************/
CreateFunctionStmt:
			CREATE_P macro_alias qualified_name param_list AS a_expr
				{
					PGCreateFunctionStmt *n = makeNode(PGCreateFunctionStmt);
					n->name = $3;
					n->params = $4;
					n->function = $6;
					$$ = (PGNode *)n;
				}
		;

macro_alias:
			FUNCTION
			| MACRO
		;

param_list:
			'(' ')'
				{
					$$ = NIL;
				}
			| '(' func_arg_list ')'
				{
					$$ = $2;
				}
		;
Yacc
4
AldoMyrtaj/duckdb
third_party/libpg_query/grammar/statements/create_function.y
[ "MIT" ]
template(name='inlinedForm')
  if isOpen.get
    form.inlined-form.js-inlined-form(id=id class=classNames)
      +Template.contentBlock
  else
    +Template.elseBlock
Jade
3
moqmar/wekan
client/components/forms/inlinedform.jade
[ "MIT" ]
#N canvas 594 23 533 668 12;
#X msg 116 46 hello world;
#X obj 225 82 print;
#X floatatom 225 46 0 0 0 0 - - -;
#X text 115 25 message;
#X text 225 24 atom;
#X text 274 83 object;
#X text 44 112 When you first open a Pd document like this one \, your cursor will be an arrow. Select "edit mode" in the Edit menu and the cursor will change to the image of a hand. The patch is now in edit mode. You can move any object by dragging it.;
#X text 44 185 Select "Edit mode" again in the Edit menu and you're back to the arrow cursor which acts on objects without moving them.;
#X text 44 227 In Edit mode \, if you click on a message \, object \, or comment \, you can then retype the text. For objects this will create a new object and delete the old one. Pd will try to reconnect the newly created object in the same way as the old one.;
#X text 44 305 When you're done changing the contents of the box \, click outside the box to deselect it. This tells Pd to incorporate the new text.;
#X text 44 363 You can create new objects by duplicating existing ones using the "duplicate" menu item. You can also "cut" and "paste" them. If you duplicate several connected objects the connections will be replicated too.;
#X text 44 433 Edit mode also lets you make and break connections between objects. Put the "hand" cursor over a line connecting two objects: it turns into an X. Clicking will select the connection \, which you can delete with the delete key \, or "Cut" from the "Edit" menu. Hold the cursor over an outlet and it becomes a circle (a patch point). Drag to any box and release--you will be connected to the nearest inlet.;
#X text 45 545 The "put" menu creates new text items of any of the four types. You can also put a "symbol" box \, analogous to a number box but for showing and entering text strings., f 60;
#X text 257 617 updated for Pd version 0.46.7;
#X connect 0 0 1 0;
#X connect 2 0 1 0;
Pure Data
4
mcclure/pure-data
doc/2.control.examples/02.editing.pd
[ "TCL" ]
(include-file "include/flavors.lfe")

;; Define the circle flavor.
(defflavor circle (radius)
  (shape)
  ;; Settables are also gettable and inittable.
  (settable-instance-variables radius))

(defmethod (draw) ()
  (lfe_io:format "Drawing circle at (~p ~p), radius ~p~n"
                 (list (get 'x) (get 'y) (get 'radius))))

(endflavor circle)
LFE
4
rvirding/flavors
examples/shapes/circle.lfe
[ "Apache-2.0" ]
{% assign value = include.value | default: '2020-06-20' %}
{% assign placeholder = include.placeholder | default: 'Select a date' %}
{% assign id = include.id %}

{% if id %}
{% capture input %}
<input class="form-control{% if include.class %} {{ include.class }}{% endif %}"{% if placeholder %} placeholder="{{ placeholder }}"{% endif %} id="datepicker-{{ id }}" value="{{ value }}"/>
{% endcapture %}

{% if include.inline %}
<div class="datepicker-inline" id="datepicker-{{ id }}"></div>
{% elsif include.layout == 'icon' %}
<div class="input-icon{% if include.class %} {{ include.class }}{% endif %}">
  {{ input | replace: include.class, '' }}
  <span class="input-icon-addon">{% include ui/icon.html icon="calendar" %}</span>
</div>
{% elsif include.layout == 'icon-prepend' %}
<div class="input-icon{% if include.class %} {{ include.class }}{% endif %}">
  <span class="input-icon-addon">{% include ui/icon.html icon="calendar" %}</span>
  {{ input | replace: include.class, '' }}
</div>
{% else %}
{{ input }}
{% endif %}

{% capture script %}
<script>
  // @formatter:off
  document.addEventListener("DOMContentLoaded", function () {
    {% if jekyll.environment == 'development' %}
    window.tabler_datepicker = window.tabler_datepicker || {};
    {% endif %}
    window.Litepicker && ({% if jekyll.environment == 'development' %}window.tabler_datepicker["datepicker-{{ id }}"] = {% endif %}new Litepicker({
      element: document.getElementById('datepicker-{{ id }}'),
      buttonText: {
        previousMonth: `{% capture icon %}{% include ui/icon.html icon="chevron-left" %}{% endcapture %}{{ icon | strip }}`,
        nextMonth: `{% capture icon %}{% include ui/icon.html icon="chevron-right" %}{% endcapture %}{{ icon | strip }}`,
      },
      {% if include.inline %}inlineMode: true,{% endif %}
    }));
  });
  // @formatter:on
</script>
{% endcapture %}

{% if include.show-scripts %}
{{ script }}
{% else %}
{% capture_global scripts %}
{{ script }}
{% endcapture_global %}
{% endif %}
{% endif %}
HTML
4
muhginanjar/tabler
src/pages/_includes/ui/datepicker.html
[ "MIT" ]
(ns rabbitmq.tutorials.rpc-server
  (:require [langohr.core :as lc]
            [langohr.channel :as lch]
            [langohr.queue :as lq]
            [langohr.basic :as lb]
            [langohr.consumers :as lcons]))

(def ^{:const true} q "rpc_queue")

(defn fib
  [n]
  (if (zero? n)
    0
    (if (= n 1)
      1
      (+ (fib (- n 1)) (fib (- n 2))))))

(defn handle-delivery
  "Handles message delivery"
  [ch {:keys [delivery-tag reply-to correlation-id]} payload]
  (let [n (read-string (String. payload "UTF-8"))]
    (println (format " [.] fib(%s)" n))
    (let [response (fib n)]
      (lb/publish ch "" reply-to (str response) {:correlation-id correlation-id})
      (lb/ack ch delivery-tag))))

(defn -main
  [& args]
  (with-open [conn (lc/connect)]
    (let [ch (lch/open conn)]
      (lq/declare ch q {:auto-delete false})
      (lb/qos ch 1)
      (println " [x] Awaiting RPC requests")
      (lcons/blocking-subscribe ch q handle-delivery))))
Clojure
4
Diffblue-benchmarks/Rabbitmq-rabbitmq-tutorials
clojure/src/rabbitmq/tutorials/rpc_server.clj
[ "Apache-2.0" ]
# Copyright Project Harbor Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License

*** Settings ***
Documentation    This resource provides any keywords related to the Harbor private registry appliance

*** Variables ***
${vulnerability_edit_btn}    xpath=//vulnerability-config//cron-selection//button[contains(.,'EDIT')]
${vulnerability_dropdown_list}    xpath=//vulnerability-config//cron-selection//select[@id='selectPolicy']
${vulnerability_dropdown_list_item_none}    xpath=//select[@id='selectPolicy']//option[contains(.,'None')]
${vulnerability_dropdown_list_item_custom}    xpath=//select[@id='selectPolicy']//option[contains(.,'Custom')]
${vulnerability_save_btn}    xpath=//cron-selection//button[contains(.,'SAVE')]
${scan_now_button}    //vulnerability-config//button[contains(.,'NOW')]
${vulnerability_page}    //clr-vertical-nav-group-children/a[contains(.,'Vulnerability')]
${set_default_scanner}    //button[@id='set-default']
${scanner_action_xpath}    //span[@id='action-scanner']
${delete_scanner_action_xpath}    //span[@id='delete-scanner-action']
${immutable_trivy_msg_xpath}    //span[contains(.,'registration Trivy is not allowed to delete as it is immutable: scanner API: delete')]
${delete_scanner_confirm_btn}    xpath=//clr-modal//button[contains(.,'DELETE')]
${scan_now_result}    xpath=//div[@id="scan-result-container"]
${scanner_set_default}    xpath=//button[@id="set-default"]
${scanner_set_default_success_xpath}    //span[contains(.,'Successfully updated')]
${not_scanned_icon}    xpath=//span[@class[contains(.,'not-scan')]]
RobotFramework
2
yxxhero/harbor
tests/resources/Harbor-Pages/Vulnerability_Elements.robot
[ "Apache-2.0" ]
using System; using System.Collections; using System.Text; using System.IO; using System.Threading.Tasks; using System.Threading; using Beefy.utils; using System.Diagnostics; namespace RandoCode { class Config { public int32 mTypeCount = 800; //20; //70 //300 public float mPrimitiveTypeChance = 0.0f; //public float mStructPct = 0.2f; // Otherwise class public float mStructPct = 0.0f; public float mTypeComplexityPower = 3.0f; public float mCreateGenericTypePct = 0.15f; public float mSpecializedTypePower = 3.0f; public float mUnspecializedTypeScalar = 10.0f; public float mTypeDefPoolPower = 1.1f; public float mTypeDefPoolOffset = 3.0f; public float mTypeDefPoolScalar = 15.0f; public float mFieldCountPower = 1.2f; public float mFieldCountScalar = 12.0f; public float mFieldStaticPct = 0.2f; public float mVoidReturnPct = 0.6f; public float mMethodCodeComplexityPower = 2.0f; public float mMethodLengthScalar = 100.0f; public float mMethodCountScalar = 300.0f; //2000.0f;//300.0f; public float mAssignMemberPct = 0.3f; public float mCreateLocalPct = 0.3f; public float mParamCountPower = 3.0f; public float mParamCountScalar = 5.0f; public float mNewSpacespaceChance = 0.2f; // Chance a class will generate a new namespace vs using an existing one public float mRootNamespaceChance = 0.1f; // Chance we will create root namespace vs adding to an existing one } class WordGroup { public List<String>[] mWords ~ { for (var list in mWords) DeleteContainerAndItems!(list); delete _; }; public HashSet<String> mUsedNames = new HashSet<String>() ~ DeleteContainerAndItems!(_); public void GetName(float complexity, int parts, bool firstUpper, String outName) { int listNum = (int)(complexity * 3.999f); var parts; for (int tryCount = 0; true; tryCount++) { if (tryCount > 4) parts++; for (int namePartIdx = 0; namePartIdx < parts; namePartIdx++) { int idx = Program.sRand.Next(mWords[listNum].Count); String namePart = scope String(mWords[listNum][idx]); if ((firstUpper) || (namePartIdx > 0)) namePart[0] = namePart[0].ToUpper; outName.Append(namePart); } if (mUsedNames.Contains(outName)) continue; mUsedNames.Add(new String(outName)); return; } } } class LocalDef { public String mName ~ delete _; public TypeDef mTypeDef; } class MethodDef { public String mName ~ delete _; public int mParamCount; public TypeDef mReturnType; public List<LocalDef> mLocals = new List<LocalDef>() ~ DeleteContainerAndItems!(_); public float mComplexity; public int mStatementCount; public bool mIsStatic; } class FieldDef { public String mName ~ delete _; public TypeDef mTypeDef; public bool mIsStatic; } class TypeDef { public NamespaceDef mNamespace; public String mName ~ delete _; public float mComplexity; public bool mIsPrimitive; public int mGenericParamIdx = -1; public bool mIsStruct; public int mUseCount; public bool mIsSpecializedGeneric; public bool mIsUnspecializedGeneric; public List<TypeDef> mGenericParams ~ delete _; public HashSet<NamespaceDef> mUsingNamespaces = new HashSet<NamespaceDef>() ~ delete _; public List<FieldDef> mFields = new List<FieldDef>() ~ DeleteContainerAndItems!(_); public List<MethodDef> mMethods = new List<MethodDef>() ~ DeleteContainerAndItems!(_); public List<TypeDef> mTypeDefPool = new List<TypeDef>() ~ delete _; // We only refer to types in this pool public void GetRootName(String outStr) { int ltPos = mName.IndexOf('<'); if (ltPos == -1) { outStr.Append(mName); return; } outStr.Append(mName, 0, ltPos); } public void GetFullName(String outStr) { if (mNamespace == null) { outStr.Append(mName); return; } 
mNamespace.GetFullName(outStr); outStr.Append(".", mName); } } class NamespaceDef { public NamespaceDef mParent; public String mName ~ delete _; public List<NamespaceDef> mChildren = new List<NamespaceDef>() ~ delete _; public void GetFullName(String outStr) { if (mParent != null) { mParent.GetFullName(outStr); outStr.Append("."); } outStr.Append(mName); } } class Program { public static Random sRand ~ delete _; int mSeed; Config mConfig = new Config() ~ delete _; String mBaseDir = new String("src") ~ delete _; WordGroup mAdjList ~ delete _; WordGroup mAdvList ~ delete _; WordGroup mNounList ~ delete _; WordGroup mVerbList ~ delete _; bool mIsCompat = true; TypeDef mCurTypeDef; MethodDef mCurMethodDef; List<NamespaceDef> mNamespaces = new List<NamespaceDef>() ~ DeleteContainerAndItems!(_); TypeDef mVoidType ~ delete _; List<TypeDef> mPrimitives = new List<TypeDef>() ~ DeleteContainerAndItems!(_); List<TypeDef> mUserTypes = new List<TypeDef>() ~ DeleteContainerAndItems!(_); List<TypeDef> mOtherTypes = new List<TypeDef>() ~ DeleteContainerAndItems!(_); bool mVerbose; int mLineCount; bool mWroteLine; String mQueuedText = new String() ~ delete _; int mIndentCount; int mStartIndentCount; this() { //mSeed = (scope Random()).Next() % 100000; mSeed = 92968; Console.WriteLine("Random seed: {0}", mSeed); sRand = new Random(mSeed); } WordGroup CreateWordGroup(String name) { WordGroup wordGroup = new WordGroup(); wordGroup.mWords = new List<String>[4]; for (int i = 0; i < 4; i++) { wordGroup.mWords[i] = new List<String>(); StreamReader file = scope StreamReader(); var filePath = scope String(); var exePath = scope String(); Environment.GetExecutableFilePath(exePath); Path.GetDirectoryPath(exePath, filePath); filePath.AppendF("/data/{0}{1}.txt", i + 1, name); if (file.Open(filePath) case .Err) continue; while (!file.EndOfStream) { String line = scope String(); file.ReadLine(line); bool isOnlyLetters = true; for (char8 c in line.RawChars) if (!c.IsLetter) isOnlyLetters = false; if (isOnlyLetters) wordGroup.mWords[i].Add(new String(line)); } } return wordGroup; } void CreatePrimitives() { mVoidType = new TypeDef(); mVoidType.mName = new String("void"); mVoidType.mIsPrimitive = true; String[] typeNames; if (mIsCompat) typeNames = scope:: .[] ( "int", "uint", "int", "float", "double" ); else typeNames = scope:: .[] ( "int", "int16", "int32", "int64", "float", "double" ); for (var typeName in typeNames) { TypeDef typeDef = new TypeDef(); typeDef.mName = new String(typeName); typeDef.mIsPrimitive = true; mPrimitives.Add(typeDef); } } float GetComplexity(float power) { return (float)Math.Pow(sRand.NextDouble(), power); } // Must return either a primitive type or a typedef whose name occurs alphabetically before the current type TypeDef GetRandomTypeDef() { bool wantPrimitive = sRand.NextDouble() < mConfig.mPrimitiveTypeChance; if ((!wantPrimitive) && (mUserTypes.Count > 0)) { for (int tryCount = 0; tryCount < 4; tryCount++) { TypeDef checkTypeDef = mUserTypes[sRand.Next(mUserTypes.Count)]; if (checkTypeDef.mIsUnspecializedGeneric) continue; if (mCurTypeDef == null) return checkTypeDef; if (checkTypeDef.mName.CompareTo(mCurTypeDef.mName) < 0) return checkTypeDef; } } return mPrimitives[sRand.Next(mPrimitives.Count)]; } TypeDef GetRandomPooledTypeDef() { return mCurTypeDef.mTypeDefPool[sRand.Next(mCurTypeDef.mTypeDefPool.Count)]; } void GenerateType() { float typeComplexity = GetComplexity(mConfig.mTypeComplexityPower); String className = new String(); mNounList.GetName(typeComplexity, 2, true, className); 
TypeDef typeDef = new TypeDef(); typeDef.mName = className; typeDef.mComplexity = typeComplexity; mCurTypeDef = typeDef; typeDef.mIsUnspecializedGeneric = sRand.NextDouble() < mConfig.mCreateGenericTypePct; if (typeDef.mIsUnspecializedGeneric) { typeDef.mGenericParams = new List<TypeDef>(); int genericCount = (int)(1.0f + GetComplexity(3.0f) * 3.5f); typeDef.mName.Append("<"); for (int genericIdx = 0; genericIdx < genericCount; genericIdx++) { TypeDef genericType = new TypeDef(); genericType.mGenericParamIdx = genericIdx; genericType.mName = new String(); genericType.mName.Append("T"); mOtherTypes.Add(genericType); mAdvList.GetName(0, 1, true, genericType.mName); if (genericIdx > 0) typeDef.mName.Append(", "); typeDef.mName.Append(genericType.mName); typeDef.mGenericParams.Add(genericType); typeDef.mTypeDefPool.Add(genericType); } typeDef.mName.Append(">"); } typeDef.mIsStruct = sRand.NextDouble() < mConfig.mStructPct; if ((mNamespaces.Count == 0) || (sRand.NextDouble() < mConfig.mNewSpacespaceChance)) { NamespaceDef newNamespace = new NamespaceDef(); newNamespace.mName = new String(); mAdjList.GetName((float)sRand.NextDouble(), 1, true, newNamespace.mName); if ((mNamespaces.Count > 0) && (sRand.NextDouble() >= mConfig.mRootNamespaceChance)) { NamespaceDef parentNamepace = mNamespaces[sRand.Next(mNamespaces.Count)]; parentNamepace.mChildren.Add(newNamespace); newNamespace.mParent = parentNamepace; } mNamespaces.Add(newNamespace); typeDef.mNamespace = newNamespace; } else { typeDef.mNamespace = mNamespaces[sRand.Next(mNamespaces.Count)]; } mUserTypes.Add(typeDef); } void CreateTypePool(TypeDef typeDef, int poolSize) { for (int poolIdx = 0; poolIdx < poolSize; poolIdx++) { TypeDef poolTypeDef = GetRandomTypeDef(); ReferenceType(poolTypeDef); poolTypeDef.mUseCount++; typeDef.mTypeDefPool.Add(poolTypeDef); } } void PopulateType(TypeDef typeDef) { mCurTypeDef = typeDef; int poolSize = (int)(GetComplexity(mConfig.mTypeDefPoolPower) * mConfig.mTypeDefPoolScalar + mConfig.mTypeDefPoolOffset); CreateTypePool(typeDef, poolSize); int fieldCount = (int)(GetComplexity(mConfig.mFieldCountPower) * mConfig.mFieldCountScalar); if (typeDef.mIsStruct) fieldCount++; for (int fieldIdx = 0; fieldIdx < fieldCount; fieldIdx++) { FieldDef fieldDef = new FieldDef(); fieldDef.mIsStatic = sRand.NextDouble() < mConfig.mFieldStaticPct; // Just to make sure structs have at least one non-static member if (fieldIdx == 0) fieldDef.mIsStatic = false; // Generic statics don't currently work if (typeDef.mGenericParams != null) fieldDef.mIsStatic = false; if (fieldDef.mIsStatic) { fieldDef.mName = new String("s"); mNounList.GetName((float)sRand.NextDouble(), 1, true, fieldDef.mName); } else { fieldDef.mName = new String("m"); mNounList.GetName((float)sRand.NextDouble(), 1, true, fieldDef.mName); } fieldDef.mTypeDef = GetRandomPooledTypeDef(); typeDef.mFields.Add(fieldDef); } for (int methodIdx = 0; methodIdx < (int)(typeDef.mComplexity * mConfig.mMethodCountScalar); methodIdx++) { MethodDef methodDef = new MethodDef(); mCurMethodDef = methodDef; float methodComplexity = GetComplexity(mConfig.mMethodCodeComplexityPower); methodDef.mName = new String(); mVerbList.GetName(methodComplexity, 2, true, methodDef.mName); methodDef.mComplexity = methodComplexity; if (sRand.NextDouble() < mConfig.mVoidReturnPct) methodDef.mReturnType = mVoidType; else methodDef.mReturnType = GetRandomPooledTypeDef(); int paramCount = (int)(GetComplexity(mConfig.mParamCountPower) * mConfig.mParamCountScalar); for (int paramIdx = 0; paramIdx < paramCount; 
paramIdx++) { LocalDef localDef = new LocalDef(); localDef.mName = new String(); mNounList.GetName((float)sRand.NextDouble(), 1, false, localDef.mName); localDef.mTypeDef = GetRandomPooledTypeDef(); methodDef.mLocals.Add(localDef); methodDef.mParamCount++; } typeDef.mMethods.Add(methodDef); } MethodDef methodDef = new MethodDef(); methodDef.mIsStatic = true; methodDef.mName = new String("Use"); methodDef.mReturnType = mVoidType; typeDef.mMethods.Add(methodDef); } void DelTree(StringView dirName) { Debug.Assert(!dirName.IsEmpty); if (!Directory.Exists(dirName)) return; for (var subDir in Directory.EnumerateDirectories(dirName)) { var filePath = scope String(); subDir.GetFilePath(filePath); DelTree(filePath); } for (var file in Directory.EnumerateFiles(dirName)) { var filePath = scope String(); file.GetFilePath(filePath); if (filePath.EndsWith(".bf")) { File.Delete(filePath); } } Directory.Delete(dirName); } FieldDef FindField(TypeDef inside, TypeDef wantTypeDef) { for (var field in inside.mFields) { if (field.mTypeDef == wantTypeDef) { if (field.mName == "sTar") { NOP!(); } return field; } } return null; } void FindData(TypeDef typeDef, String outStr) { for (int pass = 0; pass < 10; pass++) { int targetCategory = (int)(GetComplexity(1.0f) * 3); if (targetCategory == 0) // 'This' { if (typeDef == mCurTypeDef) { outStr.Append("this"); return; } } if ((targetCategory == 1) && (mCurTypeDef.mFields.Count > 0)) // Field { var fieldDef = mCurTypeDef.mFields[(int)(sRand.NextDouble() * mCurTypeDef.mFields.Count)]; if (fieldDef.mTypeDef == typeDef) { outStr.Append(fieldDef.mName); return; } FieldDef subFieldDef = FindField(fieldDef.mTypeDef, typeDef); if (subFieldDef != null) { if (subFieldDef.mName == "sTar") { } if (subFieldDef.mIsStatic) { outStr.Append(fieldDef.mTypeDef.mName, ".", subFieldDef.mName); return; } else { outStr.Append(fieldDef.mName, ".", subFieldDef.mName); return; } } } if ((targetCategory == 2) && (mCurMethodDef.mLocals.Count > 0)) // Param / Local { var localDef = mCurMethodDef.mLocals[(int)(sRand.NextDouble() * mCurMethodDef.mLocals.Count)]; if (localDef.mTypeDef == typeDef) { outStr.Append(localDef.mName); return; } FieldDef subFieldDef = FindField(localDef.mTypeDef, typeDef); if (subFieldDef != null) { if (subFieldDef.mIsStatic) { outStr.Append(localDef.mTypeDef.mName, ".", subFieldDef.mName); return; } else { outStr.Append(localDef.mName, ".", subFieldDef.mName); return; } } } } if (typeDef.mIsPrimitive) { outStr.Append("0"); return; } if ((!typeDef.mIsStruct) && (typeDef.mGenericParamIdx == -1)) { outStr.Append("null"); return; } } void WriteLine() { WriteLine(""); } void WriteLine(String str) { mLineCount++; mWroteLine = true; for (int i = 0; i < mIndentCount; i++) mQueuedText.Append("\t"); mQueuedText.Append(str, "\n"); } void WriteLine(String str, params Object[] args) { WriteLine(scope String()..AppendF(str, params args)); } void ReferenceType(TypeDef typeDef) { if (typeDef.mNamespace != null) mCurTypeDef.mUsingNamespaces.Add(typeDef.mNamespace); if (typeDef.mGenericParams != null) { for (var genericParam in typeDef.mGenericParams) ReferenceType(genericParam); } } String GetRandomTarget(out TypeDef targetType) { int targetCategory = (int)(GetComplexity(1.0f) * 3); if (targetCategory == 0) // 'This' { targetType = mCurTypeDef; return ""; } if ((targetCategory == 1) && (mCurTypeDef.mFields.Count > 0)) // Field { var fieldDef = mCurTypeDef.mFields[(int)(sRand.NextDouble() * mCurTypeDef.mFields.Count)]; targetType = fieldDef.mTypeDef; return fieldDef.mName; } if 
((targetCategory == 2) && (mCurMethodDef.mLocals.Count > 0)) // Param / Local { var localDef = mCurMethodDef.mLocals[(int)(sRand.NextDouble() * mCurMethodDef.mLocals.Count)]; targetType = localDef.mTypeDef; return localDef.mName; } targetType = null; return null; } MethodDef GenerateMethodCall(String methodTarget, TypeDef targetType) { if (targetType != null) { if (targetType.mMethods.Count > 0) { var methodDef = targetType.mMethods[sRand.Next(targetType.mMethods.Count)]; if (methodDef.mIsStatic) return null; String[] targets = scope String[methodDef.mParamCount]; bool paramsMatch = true; for (int paramIdx = 0; paramIdx < methodDef.mParamCount; paramIdx++) { var paramDef = methodDef.mLocals[paramIdx]; targets[paramIdx] = scope:: String(); FindData(paramDef.mTypeDef, targets[paramIdx]); if (targets[paramIdx].IsEmpty) paramsMatch = false; } if (paramsMatch) { String str = scope String(); bool didAssign = false; if (sRand.NextDouble() < mConfig.mAssignMemberPct) { for (var fieldDef in mCurTypeDef.mFields) { if (fieldDef.mTypeDef == methodDef.mReturnType) { str.Append(fieldDef.mName, " = "); didAssign = true; break; } } } if ((!didAssign) && (sRand.NextDouble() < mConfig.mCreateLocalPct) && (methodDef.mReturnType != mVoidType)) { ReferenceType(methodDef.mReturnType); LocalDef localDef = new LocalDef(); localDef.mName = new String(); mNounList.GetName((float)sRand.NextDouble(), 1, false, localDef.mName); localDef.mTypeDef = methodDef.mReturnType; mCurMethodDef.mLocals.Add(localDef); str.Append(localDef.mTypeDef.mName); str.Append(" "); str.Append(localDef.mName); str.Append(" = "); } str.Append(methodTarget); if (methodTarget != "") str.Append("."); str.Append(methodDef.mName); str.Append("("); for (int paramIdx = 0; paramIdx < methodDef.mParamCount; paramIdx++) { if (paramIdx > 0) str.Append(", "); str.Append(targets[paramIdx]); } str.Append(");"); WriteLine(str); return methodDef; } } } return null; } MethodDef GenerateMethodCall() { TypeDef targetType = null; String methodTarget = GetRandomTarget(out targetType); return GenerateMethodCall(methodTarget, targetType); } void PopLocal() { var localDef = mCurMethodDef.mLocals.PopBack(); delete localDef; } void GetBoolExpression(String outStr) { TypeDef targetType; String checkTarget = GetRandomTarget(out targetType); if (checkTarget == null) return; if (checkTarget == "") checkTarget = "this"; if (targetType.mIsStruct) return; if (targetType.mGenericParamIdx != -1) return; String rhs = scope String(); FindData(targetType, rhs); if ((rhs != null) && (checkTarget != rhs)) { outStr.Append(checkTarget, " != ", rhs); return; } if (targetType.mIsPrimitive) { outStr.Append(checkTarget, " != 0"); return; } outStr.Append(checkTarget, " != null"); } void GenerateMainBlock() { WriteLine("{"); mIndentCount++; for (var pooledType in mUserTypes) { if (pooledType.mIsPrimitive) continue; if (pooledType.mIsUnspecializedGeneric) continue; if (pooledType.mName == "Program") continue; var line = scope String(); pooledType.GetFullName(line); line.Append(".Use();"); WriteLine(line); } WriteLine("return 0;"); mIndentCount--; WriteLine("}"); } void GenerateUseBlock() { WriteLine("{"); mIndentCount++; var line = scope String(); mCurTypeDef.GetFullName(line); line.Append(" val = "); if (!mCurTypeDef.mIsStruct) line.Append("new "); mCurTypeDef.GetFullName(line); line.Append("();"); WriteLine(line); for (var method in mCurTypeDef.mMethods) { if (method.mIsStatic) continue; line.Clear(); line.Append("val.", method.mName); line.Append("("); for (int argIdx < 
method.mParamCount)
                {
                    if (argIdx > 0)
                        line.Append(", ");
                    line.Append("default");
                }
                line.Append(");");
                WriteLine(line);
            }

            mIndentCount--;
            WriteLine("}");
        }

        void GenerateBlock()
        {
            int prevLocalIdx = mCurMethodDef.mLocals.Count;

            WriteLine("{");
            mIndentCount++;

            int methodLength = (int)(mCurMethodDef.mComplexity * mConfig.mMethodLengthScalar);

            mWroteLine = false;
            //while (true)
            for (int stmtIdx < methodLength)
            {
                /*if (mCurMethodDef.mStatementCount > 0)
                    mCurMethodDef.mStatementCount--;
                else if ((mWroteLine) && ((0.93 + mCurMethodDef.mComplexity * 0.03) < sRand.NextDouble()))
                    break;*/

                if (sRand.NextDouble() < 0.5f)
                {
                    GenerateMethodCall();
                }

                if (sRand.NextDouble() < 0.05f / mIndentCount)
                {
                    String localVarName = new String()..AppendF("{0}", (char8)('i' + mIndentCount - 3));

                    String toVal = scope String();
                    FindData(mPrimitives[0], toVal);
                    if (toVal.IsEmpty)
                    {
                        toVal = scope:: String();
                        sRand.Next(1000).ToString(toVal);
                    }

                    LocalDef localDef = new LocalDef();
                    localDef.mName = localVarName;
                    localDef.mTypeDef = mPrimitives[0];
                    mCurMethodDef.mLocals.Add(localDef);

                    WriteLine("for (int {0} = 0; {0} < {1}; {0}++)", localVarName, toVal);
                    GenerateBlock();
                    PopLocal();
                }

                if (sRand.NextDouble() < 0.05f / mIndentCount)
                {
                    String boolExpr = scope String();
                    GetBoolExpression(boolExpr);
                    if (!boolExpr.IsEmpty)
                    {
                        WriteLine("if ({0})", boolExpr);
                        GenerateBlock();
                        if (sRand.NextDouble() < 0.35f)
                        {
                            WriteLine("else");
                            GenerateBlock();
                        }
                    }
                }
            }

            if (mIndentCount == mStartIndentCount + 1)
            {
                if (mCurMethodDef.mReturnType.mName != "void")
                {
                    String retValName = scope String();
                    FindData(mCurMethodDef.mReturnType, retValName);
                    if (retValName.IsEmpty)
                        retValName.Append("default(", mCurMethodDef.mReturnType.mName, ")");
                    WriteLine("return {0};", retValName);
                }
            }

            mIndentCount--;
            WriteLine("}");

            while (mCurMethodDef.mLocals.Count > prevLocalIdx)
                PopLocal();
        }

        TypeDef FixType(TypeDef typeDef)
        {
            if (typeDef.mGenericParamIdx != -1)
                return mCurTypeDef.mGenericParams[typeDef.mGenericParamIdx];
            return typeDef;
        }

        void SpecializeType(TypeDef unspecializedType)
        {
            TypeDef specializedType = new TypeDef();
            mCurTypeDef = specializedType;
            specializedType.mName = new String();
            unspecializedType.GetRootName(specializedType.mName);
            specializedType.mIsSpecializedGeneric = true;
            specializedType.mGenericParams = new List<TypeDef>();
            specializedType.mIsStruct = unspecializedType.mIsStruct;
            specializedType.mNamespace = unspecializedType.mNamespace;

            specializedType.mName.Append("<");
            for (int genericIdx = 0; genericIdx < unspecializedType.mGenericParams.Count; genericIdx++)
            {
                if (genericIdx > 0)
                    specializedType.mName.Append(", ");
                var genericArg = GetRandomTypeDef();
                //specializedType.mName.Append(genericArg.mName);
                genericArg.GetFullName(specializedType.mName);
                specializedType.mGenericParams.Add(genericArg);
            }
            specializedType.mName.Append(">");

            for (var srcFieldDef in unspecializedType.mFields)
            {
                FieldDef destFieldDef = new FieldDef();
                destFieldDef.mName = new String(srcFieldDef.mName);
                destFieldDef.mTypeDef = FixType(srcFieldDef.mTypeDef);
                destFieldDef.mIsStatic = srcFieldDef.mIsStatic;
                specializedType.mFields.Add(destFieldDef);
            }

            for (var srcMethodDef in specializedType.mMethods)
            {
                MethodDef destMethodDef = new MethodDef();
                destMethodDef.mName = new String(srcMethodDef.mName);
                destMethodDef.mReturnType = FixType(srcMethodDef.mReturnType);
                destMethodDef.mParamCount = srcMethodDef.mParamCount;
                for (var localDef in srcMethodDef.mLocals)
                {
                    LocalDef destLocalDef = new LocalDef();
                    destLocalDef.mName = new String(localDef.mName);
                    destLocalDef.mTypeDef = FixType(localDef.mTypeDef);
                    destMethodDef.mLocals.Add(destLocalDef);
                }
                specializedType.mMethods.Add(destMethodDef);
            }

            mUserTypes.Add(specializedType);
        }

        void ProgressStart()
        {
            if (cProgressSize > 0)
            {
                String str = scope String();
                str.Append("[");
                str.Append(' ', cProgressSize);
                str.Append("]");
                str.Append('\b', cProgressSize + 1);
                Console.Write(str);
            }
        }

        int mProgressIdx = 0;

        void WriteProgress(float pct)
        {
            int progressIdx = (int)Math.Round(pct * cProgressSize);
            while (progressIdx > mProgressIdx)
            {
                mProgressIdx++;
                Console.Write("*");
            }
        }

        const int cProgressSize = 30;

        void Run()
        {
            CreatePrimitives();

            DelTree(mBaseDir);

            mAdjList = CreateWordGroup("adj");
            mAdvList = CreateWordGroup("adv");
            mNounList = CreateWordGroup("noun");
            mVerbList = CreateWordGroup("verb");

            for (int typeIdx = 0; typeIdx < mConfig.mTypeCount; typeIdx++)
                GenerateType();

            for (int typeIdx = 0; typeIdx < mUserTypes.Count; typeIdx++)
            {
                TypeDef typeDef = mUserTypes[typeIdx];
                if (typeDef.mIsUnspecializedGeneric)
                {
                    PopulateType(typeDef);
                    int specializeCount = (int)(GetComplexity(mConfig.mSpecializedTypePower) * mConfig.mUnspecializedTypeScalar);
                    for (int specializedIdx = 0; specializedIdx < specializeCount; specializedIdx++)
                        SpecializeType(typeDef);
                }
            }

            for (int typeIdx = 0; typeIdx < mUserTypes.Count; typeIdx++)
            {
                TypeDef typeDef = mUserTypes[typeIdx];
                if ((!typeDef.mIsUnspecializedGeneric) && (!typeDef.mIsSpecializedGeneric))
                    PopulateType(typeDef);
            }

            {
                TypeDef typeDef = new TypeDef();
                typeDef.mName = new String("Program");

                MethodDef methodDef = new MethodDef();
                methodDef.mName = new String("Main");
                methodDef.mParamCount = 1;

                var arrTypeDef = new TypeDef();
                arrTypeDef.mName = new String("System.String[]");
                mOtherTypes.Add(arrTypeDef);

                LocalDef localDef = new LocalDef();
                localDef.mName = new String("args");
                localDef.mTypeDef = arrTypeDef;
                methodDef.mLocals.Add(localDef);

                methodDef.mStatementCount = mUserTypes.Count + mPrimitives.Count;
                methodDef.mReturnType = mPrimitives[2];
                methodDef.mIsStatic = true;
                typeDef.mMethods.Add(methodDef);

                mCurTypeDef = typeDef;
                CreateTypePool(typeDef, mUserTypes.Count + mPrimitives.Count);
                mUserTypes.Add(typeDef);
            }

            if (!mVerbose)
                ProgressStart();

            int specializedTypes = 0;
            int unspecializedTypes = 0;

            for (var typeDef in mUserTypes) UserTypeBlock:
            {
                var typeFullName = scope String();
                typeDef.GetFullName(typeFullName);

                if (typeDef.mIsSpecializedGeneric)
                {
                    specializedTypes++;
                    if (mVerbose)
                        Console.WriteLine("Skipping type: {0} uses: {1}", typeFullName, typeDef.mUseCount);
                    continue;
                }
                if (typeDef.mIsUnspecializedGeneric)
                    unspecializedTypes++;

                if (mVerbose)
                    Console.WriteLine("Writing type: {0} uses: {1}", typeFullName, typeDef.mUseCount);

                String directory = scope String();
                String namespaceStr = scope String();
                if (typeDef.mNamespace != null)
                {
                    typeDef.mNamespace.GetFullName(namespaceStr);
                    directory.Append(mBaseDir, "/");
                    directory.Replace('.', '/');
                }
                else
                {
                    directory.Append(mBaseDir);
                }

                //StringWriter StringWriter = new StringWriter();
                //mFile = StringWriter;

                String fullPath = scope String()..Append(directory, "/");
                typeDef.GetRootName(fullPath);
                fullPath.Append(".bf");

                FileStream file = scope FileStream();
                bool isOpen = false;
                for (int i = 0; i < 500; i++)
                {
                    Directory.CreateDirectory(directory).IgnoreError();
                    if (file.Create(fullPath) case .Ok)
                    {
                        isOpen = true;
                        break;
                    }
                    Thread.Sleep(10);
                }
                if (!isOpen)
                    Runtime.FatalError("Unable to create file");

                StreamWriter streamWrite = scope .(file, Encoding.UTF8, 4096);

                mCurTypeDef = typeDef;
                //mFile = file;
                mIndentCount = 0;

                WriteLine("// RandoCode seed: {0}", mSeed);
                WriteLine("#pragma warning disable 0168");
                WriteLine();

                if (!namespaceStr.IsEmpty)
                {
                    WriteLine("namespace {0}", namespaceStr);
                    WriteLine("{");
                    mIndentCount++;
                }

                String typeStr = scope String();
                if (typeDef.mIsStruct)
                    typeStr.Append("struct ");
                else
                    typeStr.Append("class ");
                typeStr.Append(typeDef.mName);
                WriteLine(typeStr);
                WriteLine("{");
                mIndentCount++;

                for (var fieldDef in typeDef.mFields)
                {
                    String str = scope String("public ");
                    if (fieldDef.mIsStatic)
                        str.Append("static ");
                    str.Append(fieldDef.mTypeDef.mName);
                    str.Append(" ");
                    str.Append(fieldDef.mName);
                    str.Append(";");
                    WriteLine(str);
                }

                for (int methodIdx = 0; methodIdx < typeDef.mMethods.Count; methodIdx++)
                {
                    WriteLine();

                    var methodDef = typeDef.mMethods[methodIdx];
                    mCurMethodDef = methodDef;

                    String str = scope String("public ");
                    if (methodDef.mIsStatic)
                        str.Append("static ");
                    str.Append(methodDef.mReturnType.mName);
                    str.Append(" ");
                    str.Append(methodDef.mName);
                    str.Append("(");
                    for (int paramIdx = 0; paramIdx < methodDef.mParamCount; paramIdx++)
                    {
                        if (paramIdx > 0)
                            str.Append(", ");
                        var paramDef = methodDef.mLocals[paramIdx];
                        str.Append(paramDef.mTypeDef.mName);
                        str.Append(" ");
                        str.Append(paramDef.mName);
                    }
                    str.Append(")");
                    if (typeDef.mIsStruct)
                        str.Append("mut ");
                    WriteLine(str);

                    mStartIndentCount = mIndentCount;

                    if (methodDef.mName == "Main")
                        GenerateMainBlock();
                    else if (methodDef.mName == "Use")
                        GenerateUseBlock();
                    else
                        GenerateBlock();
                }

                mIndentCount--;
                WriteLine("}");

                if (!namespaceStr.IsEmpty)
                {
                    mIndentCount--;
                    WriteLine("}");
                }

                for (var namespaceDef in typeDef.mUsingNamespaces)
                {
                    var namespaceName = scope String();
                    namespaceDef.GetFullName(namespaceName);
                    streamWrite.WriteLine("using {0};", namespaceName);
                }

                streamWrite.Write(mQueuedText);
                mQueuedText.Clear();
                file.Close();

                if (!mVerbose)
                    WriteProgress(@typeDef.Index / (float)mUserTypes.Count);
            }

            if (!mVerbose)
                Console.WriteLine("");

            Console.WriteLine("Types: {0} UnspecializedGenerics: {1} SpecializedGenerics: {2} Lines: {3}", mUserTypes.Count, unspecializedTypes, specializedTypes, mLineCount);
        }

        void HandleConfig(String configPath)
        {
            StructuredData sd = scope StructuredData();
            sd.Load(configPath);
            sd.Get("TypeCount", ref mConfig.mTypeCount);
            sd.Get("PrimitiveTypeChance", ref mConfig.mPrimitiveTypeChance);
            sd.Get("StructPct", ref mConfig.mStructPct);
            sd.Get("TypeComplexityPower", ref mConfig.mTypeComplexityPower);
            sd.Get("CreateGenericTypePct", ref mConfig.mCreateGenericTypePct);
            sd.Get("SpecializedTypePower", ref mConfig.mSpecializedTypePower);
            sd.Get("UnspecializedTypeScalar", ref mConfig.mUnspecializedTypeScalar);
            sd.Get("TypeDefPoolPower", ref mConfig.mTypeDefPoolPower);
            sd.Get("TypeDefPoolOffset", ref mConfig.mTypeDefPoolOffset);
            sd.Get("TypeDefPoolScalar", ref mConfig.mTypeDefPoolScalar);
            sd.Get("FieldCountPower", ref mConfig.mFieldCountPower);
            sd.Get("FieldCountScalar", ref mConfig.mFieldCountScalar);
            sd.Get("FieldStaticPct", ref mConfig.mFieldStaticPct);
            sd.Get("VoidReturnPct", ref mConfig.mVoidReturnPct);
            sd.Get("MethodCodeComplexityPower", ref mConfig.mMethodCodeComplexityPower);
            sd.Get("MethodLengthScalar", ref mConfig.mMethodLengthScalar);
            sd.Get("MethodCountScalar", ref mConfig.mMethodCountScalar);
            sd.Get("AssignMemberPct", ref mConfig.mAssignMemberPct);
            sd.Get("CreateLocalPct", ref mConfig.mCreateLocalPct);
            sd.Get("ParamCountPower", ref mConfig.mParamCountPower);
            sd.Get("ParamCountScalar", ref mConfig.mParamCountScalar);
            sd.Get("NewSpacespaceChance", ref mConfig.mNewSpacespaceChance);
            sd.Get("RootNamespaceChance", ref mConfig.mRootNamespaceChance);
        }

        static void Main(String[] args)
        {
            if (args.Count == 0)
            {
                Console.WriteLine("Usage: RandoCode <configFile>");
                return;
            }

            String cwd = scope String();
            Directory.GetCurrentDirectory(cwd);

            Program pg = new Program();
            pg.HandleConfig(args[0]);
            pg.Run();
            delete pg;
        }
    }
}
Beef
3
gammy55/linguist
samples/Beef/RandoCode.bf
[ "MIT" ]
a { value: @\,x }
CSS
0
mengxy/swc
crates/swc_css_parser/tests/fixture/esbuild/misc/xVqT8wNU4CEhaDlbLxGyaw/input.css
[ "Apache-2.0" ]
--TEST--
Bug #60611 (Segmentation fault with Cls::{expr}() syntax)
--FILE--
<?php
class Cls {
    function __call($name, $arg) {
    }
    static function __callStatic($name, $arg) {
    }
}

$cls = new Cls;
$cls->{0}();
$cls->{1.0}();
$cls->{true}();
$cls->{false}();
$cls->{null}();

Cls::{0}();
Cls::{1.0}();
Cls::{true}();
Cls::{false}();
Cls::{null}();
?>
--EXPECTF--
Fatal error: Method name must be a string in %sbug60611.php on line %d
PHP
4
thiagooak/php-src
Zend/tests/bug60611.phpt
[ "PHP-3.01" ]
$1>0 {print $1, sqrt($1)}
Awk
1
Crestwave/goawk
testdata/t.f.x
[ "MIT" ]
[
define_tag:'lp_string_zap',
    -description='Returns a string with all non-plain-ascii characters removed.',
    -priority='replace',
    -required='text_to_zap',
    -optional='replacement_text';

    if: !(local_defined:'replacement_text');
        local:'replacement_text' = '';
    /if;

    return: (string_replaceregexp: #text_to_zap, -find='[^\\x20-\\x7E\\x09\\x0A\\x0D]', -replace=#replacement_text);

/define_tag;

/* Example

[var:'teststring' = 'test test 123\t123' + (decode_url:'%80') + (decode_url:'%81')]
<pre>
Test String: (<b>[$teststring]</b>)<br>
Zap (remove bad chars): (<b>[lp_string_zap: $teststring]</b>)<br>
Zap (replace bad chars with *): (<b>[lp_string_zap: $teststring, '*']</b>)<br>
</pre>

*/
]
Lasso
5
subethaedit/SubEthaEd
Documentation/ModeDevelopment/Reference Files/LassoScript-HTML/itpage/LassoStartup/lp_string_zap.lasso
[ "MIT" ]
#!/bin/csh
pool_build_object_relational_mapping -f mapping-template-DTReadOutMapping-default.xml -d CondFormatsDTObjectsCapabilities -c sqlite_file:testfile.db -p pass -u user
pool_build_object_relational_mapping -f mapping-template-DTT0-default.xml -d CondFormatsDTObjectsCapabilities -c sqlite_file:testfile.db -p pass -u user
pool_build_object_relational_mapping -f mapping-template-DTTtrig-default.xml -d CondFormatsDTObjectsCapabilities -c sqlite_file:testfile.db -p pass -u user
pool_build_object_relational_mapping -f mapping-template-DTMtime-default.xml -d CondFormatsDTObjectsCapabilities -c sqlite_file:testfile.db -p pass -u user
Tcsh
1
ckamtsikis/cmssw
CondTools/DT/src/poolbuildmapping.csh
[ "Apache-2.0" ]
--TEST--
Test extract() for overwrite of GLOBALS
--FILE--
<?php

$str = "John";
var_dump($GLOBALS["str"]);

/* Extracting Global Variables */
$splat = array("foo" => "bar");
var_dump(extract(array("GLOBALS" => $splat, EXTR_OVERWRITE)));
unset ($splat);

var_dump($GLOBALS["str"]);

echo "\nDone";
?>
--EXPECT--
string(4) "John"
int(1)
string(4) "John"

Done
PHP
3
NathanFreeman/php-src
ext/standard/tests/array/extract_safety.phpt
[ "PHP-3.01" ]
class Cursor
    var pos = 5

    fun forbidden_r: Array[Int] do return once [1, 4, 9, 12, 13]
    fun forbidden_l: Array[Int] do return once [1, 2, 5, 10, 13]
    fun forbidden_u: Array[Int] do return once [5, 2, 1, 4, 9]
    fun forbidden_d: Array[Int] do return once [5, 10, 13, 12, 9]

    fun move(dir: Char) do
        if dir == 'R' then
            if forbidden_r.has(pos) then return
            pos += 1
        else if dir == 'L' then
            if forbidden_l.has(pos) then return
            pos -= 1
        else if dir == 'U' then
            if forbidden_u.has(pos) then return
            if pos == 13 or pos == 3 then
                pos -= 2
            else
                pos -= 4
            end
        else if dir == 'D' then
            if forbidden_d.has(pos) then return
            if pos == 1 or pos == 11 then
                pos += 2
            else
                pos += 4
            end
        else
            print "Unknown command `{dir}`"
        end
    end
end

var input = args[0].to_path.read_all
var il = input.split('\n')
var curse = new Cursor
for i in il do
    if i == "" then continue
    for j in i do curse.move(j)
    printn curse.pos.to_base(16)
end
print ""
Nit
3
ajnavarro/language-dataset
data/github.com/R4PaSs/advent_of_code_2016/a210a64c14374d942eaf53ff6f4d92224d63f807/day2/day2_p2.nit
[ "MIT" ]